diff --git a/src/HOL/ROOT b/src/HOL/ROOT --- a/src/HOL/ROOT +++ b/src/HOL/ROOT @@ -1,1165 +1,1165 @@ chapter HOL session HOL (main) = Pure + description " Classical Higher-order Logic. " options [strict_facts] sessions Tools theories Main (global) Complex_Main (global) document_files "root.bib" "root.tex" session "HOL-Examples" in Examples = HOL + description " Notable Examples in Isabelle/HOL. " sessions "HOL-Library" theories Adhoc_Overloading_Examples Ackermann Cantor Coherent Commands Drinker Groebner_Examples Iff_Oracle Induction_Schema Knaster_Tarski "ML" Peirce Records Seq document_files "root.bib" "root.tex" session "HOL-Proofs" (timing) in Proofs = Pure + description " HOL-Main with explicit proof terms. " options [quick_and_dirty = false, record_proofs = 2, parallel_limit = 500] sessions "HOL-Library" theories "HOL-Library.Realizers" session "HOL-Library" (main timing) in Library = HOL + description " Classical Higher-order Logic -- batteries included. " theories Library (*conflicting type class instantiations and dependent applications*) Finite_Lattice List_Lexorder List_Lenlexorder Prefix_Order Product_Lexorder Product_Order Subseq_Order (*conflicting syntax*) Datatype_Records (*data refinements and dependent applications*) AList_Mapping Code_Binary_Nat Code_Prolog Code_Real_Approx_By_Float Code_Target_Numeral DAList DAList_Multiset RBT_Mapping RBT_Set (*printing modifications*) OptionalSugar (*prototypic tools*) Predicate_Compile_Quickcheck (*legacy tools*) Old_Datatype Old_Recdef Realizers Refute document_files "root.bib" "root.tex" session "HOL-Analysis" (main timing) in Analysis = HOL + options [document_tags = "theorem%important,corollary%important,proposition%important,class%important,instantiation%important,subsubsection%unimportant,%unimportant", document_variants = "document:manual=-proof,-ML,-unimportant"] sessions "HOL-Library" "HOL-Computational_Algebra" theories Analysis document_files "root.tex" "root.bib" session "HOL-Complex_Analysis" (main timing) in Complex_Analysis = "HOL-Analysis" + options [document_tags = "theorem%important,corollary%important,proposition%important,class%important,instantiation%important,subsubsection%unimportant,%unimportant", document_variants = "document:manual=-proof,-ML,-unimportant"] theories Complex_Analysis document_files "root.tex" "root.bib" session "HOL-Analysis-ex" in "Analysis/ex" = "HOL-Analysis" + theories Approximations Metric_Arith_Examples session "HOL-Homology" (timing) in Homology = "HOL-Analysis" + options [document_tags = "theorem%important,corollary%important,proposition%important,class%important,instantiation%important,subsubsection%unimportant,%unimportant", document_variants = "document:manual=-proof,-ML,-unimportant"] sessions "HOL-Algebra" theories Homology document_files "root.tex" session "HOL-Computational_Algebra" (main timing) in "Computational_Algebra" = "HOL-Library" + theories Computational_Algebra (*conflicting type class instantiations and dependent applications*) Field_as_Ring session "HOL-Real_Asymp" in Real_Asymp = HOL + sessions "HOL-Decision_Procs" theories Real_Asymp Real_Asymp_Approx Real_Asymp_Examples session "HOL-Real_Asymp-Manual" in "Real_Asymp/Manual" = "HOL-Real_Asymp" + theories Real_Asymp_Doc document_files (in "~~/src/Doc") "iman.sty" "extra.sty" "isar.sty" document_files "root.tex" "style.sty" session "HOL-Hahn_Banach" in Hahn_Banach = HOL + description " Author: Gertrud Bauer, TU Munich The Hahn-Banach theorem for real vector spaces. 
This is the proof of the Hahn-Banach theorem for real vector spaces, following H. Heuser, Funktionalanalysis, pp. 228-232. The Hahn-Banach theorem is one of the fundamental theorems of functional analysis. It is a consequence of Zorn's lemma. Two different formulations of the theorem are presented: one for general real vector spaces and its application to normed vector spaces. The theorem says that every continuous linear form, defined on arbitrary subspaces (not only one-dimensional subspaces), can be extended to a continuous linear form on the whole vector space. " sessions "HOL-Analysis" theories Hahn_Banach document_files "root.bib" "root.tex" session "HOL-Induct" in Induct = HOL + description " Examples of (Co)Inductive Definitions. Comb proves the Church-Rosser theorem for combinators (see http://www.cl.cam.ac.uk/ftp/papers/reports/TR396-lcp-generic-automatic-proof-tools.ps.gz). Mutil is the famous Mutilated Chess Board problem (see http://www.cl.cam.ac.uk/ftp/papers/reports/TR394-lcp-mutilated-chess-board.dvi.gz). PropLog proves the completeness of a formalization of propositional logic (see http://www.cl.cam.ac.uk/Research/Reports/TR312-lcp-set-II.ps.gz). Exp demonstrates the use of iterated inductive definitions to reason about mutually recursive relations. " sessions "HOL-Library" theories [quick_and_dirty] Common_Patterns theories Nested_Datatype QuoDataType QuoNestedDataType Term SList ABexp Infinitely_Branching_Tree Ordinals Sigma_Algebra Comb PropLog Com document_files "root.tex" session "HOL-IMP" (timing) in IMP = "HOL-Library" + options [document_variants = document] theories BExp ASM Finite_Reachable Denotational Compiler2 Poly_Types Sec_Typing Sec_TypingT Def_Init_Big Def_Init_Small Fold Live Live_True Hoare_Examples Hoare_Sound_Complete VCG Hoare_Total VCG_Total_EX VCG_Total_EX2 Collecting1 Collecting_Examples Abs_Int_Tests Abs_Int1_parity Abs_Int1_const Abs_Int3 Procs_Dyn_Vars_Dyn Procs_Stat_Vars_Dyn Procs_Stat_Vars_Stat C_like OO document_files "root.bib" "root.tex" session "HOL-IMPP" in IMPP = HOL + description \ Author: David von Oheimb Copyright 1999 TUM IMPP -- An imperative language with procedures. This is an extension of IMP with local variables and mutually recursive procedures. For documentation see "Hoare Logic for Mutual Recursion and Local Variables" (https://isabelle.in.tum.de/Bali/papers/FSTTCS99.html). \ theories EvenOdd session "HOL-Data_Structures" (timing) in Data_Structures = HOL + options [document_variants = document] sessions "HOL-Number_Theory" theories [document = false] Less_False theories Sorting Balance Tree_Map Interval_Tree AVL_Map AVL_Bal_Set AVL_Bal2_Set Height_Balanced_Tree RBT_Set2 RBT_Map Tree23_Map Tree234_Map Brother12_Map AA_Map Set2_Join_RBT Array_Braun Trie_Fun Trie_Map Tries_Binary Leftist_Heap Binomial_Heap document_files "root.tex" "root.bib" session "HOL-Import" in Import = HOL + theories HOL_Light_Maps theories [condition = HOL_LIGHT_BUNDLE] HOL_Light_Import session "HOL-Number_Theory" (main timing) in Number_Theory = "HOL-Computational_Algebra" + description " Fundamental Theorem of Arithmetic, Chinese Remainder Theorem, Fermat/Euler Theorem, Wilson's Theorem, some lemmas for Quadratic Reciprocity. " sessions "HOL-Algebra" theories Number_Theory document_files "root.tex" session "HOL-Hoare" in Hoare = HOL + description " Verification of imperative programs (verification conditions are generated automatically from pre/post conditions and loop invariants).
" theories Hoare document_files "root.bib" "root.tex" session "HOL-Hoare_Parallel" (timing) in Hoare_Parallel = HOL + description " Verification of shared-variable imperative programs a la Owicki-Gries. (verification conditions are generated automatically). " theories Hoare_Parallel document_files "root.bib" "root.tex" session "HOL-Codegenerator_Test" in Codegenerator_Test = "HOL-Number_Theory" + sessions "HOL-Data_Structures" "HOL-ex" theories Generate Generate_Binary_Nat Generate_Target_Nat Generate_Efficient_Datastructures Code_Lazy_Test Code_Test_PolyML Code_Test_Scala theories [condition = ISABELLE_GHC] Code_Test_GHC theories [condition = ISABELLE_MLTON] Code_Test_MLton theories [condition = ISABELLE_OCAMLFIND] Code_Test_OCaml theories [condition = ISABELLE_SMLNJ] Code_Test_SMLNJ session "HOL-Metis_Examples" (timing) in Metis_Examples = "HOL-Library" + description " Author: Lawrence C Paulson, Cambridge University Computer Laboratory Author: Jasmin Blanchette, TU Muenchen Testing Metis and Sledgehammer. " sessions "HOL-Decision_Procs" theories Abstraction Big_O Binary_Tree Clausification Message Proxies Tarski Trans_Closure Sets session "HOL-Nitpick_Examples" in Nitpick_Examples = "HOL-Library" + description " Author: Jasmin Blanchette, TU Muenchen Copyright 2009 " theories [quick_and_dirty] Nitpick_Examples session "HOL-Algebra" (main timing) in Algebra = "HOL-Computational_Algebra" + description " Author: Clemens Ballarin, started 24 September 1999, and many others The Isabelle Algebraic Library. " sessions "HOL-Cardinals" theories (* Orders and Lattices *) Galois_Connection (* Knaster-Tarski theorem and Galois connections *) (* Groups *) FiniteProduct (* Product operator for commutative groups *) Sylow (* Sylow's theorem *) Bij (* Automorphism Groups *) Multiplicative_Group Zassenhaus (* The Zassenhaus lemma *) (* Rings *) Divisibility (* Rings *) IntRing (* Ideals and residue classes *) UnivPoly (* Polynomials *) (* Main theory *) Algebra document_files "root.bib" "root.tex" session "HOL-Auth" (timing) in Auth = HOL + description " A new approach to verifying authentication protocols. " sessions "HOL-Library" directories "Smartcard" "Guard" theories Auth_Shared Auth_Public "Smartcard/Auth_Smartcard" "Guard/Auth_Guard_Shared" "Guard/Auth_Guard_Public" document_files "root.tex" session "HOL-UNITY" (timing) in UNITY = "HOL-Auth" + description " Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1998 University of Cambridge Verifying security protocols using Chandy and Misra's UNITY formalism. 
" directories "Simple" "Comp" theories (*Basic meta-theory*) UNITY_Main (*Simple examples: no composition*) "Simple/Deadlock" "Simple/Common" "Simple/Network" "Simple/Token" "Simple/Channel" "Simple/Lift" "Simple/Mutex" "Simple/Reach" "Simple/Reachability" (*Verifying security protocols using UNITY*) "Simple/NSP_Bad" (*Example of composition*) "Comp/Handshake" (*Universal properties examples*) "Comp/Counter" "Comp/Counterc" "Comp/Priority" "Comp/TimerArray" "Comp/Progress" "Comp/Alloc" "Comp/AllocImpl" "Comp/Client" (*obsolete*) ELT document_files "root.tex" session "HOL-Unix" in Unix = HOL + options [print_mode = "no_brackets,no_type_brackets"] sessions "HOL-Library" theories Unix document_files "root.bib" "root.tex" session "HOL-ZF" in ZF = HOL + sessions "HOL-Library" theories MainZF Games document_files "root.tex" session "HOL-Imperative_HOL" (timing) in Imperative_HOL = HOL + options [print_mode = "iff,no_brackets"] sessions "HOL-Library" directories "ex" theories Imperative_HOL_ex document_files "root.bib" "root.tex" session "HOL-Decision_Procs" (timing) in Decision_Procs = "HOL-Algebra" + description " Various decision procedures, typically involving reflection. " directories "ex" theories Decision_Procs session "HOL-Proofs-ex" in "Proofs/ex" = "HOL-Proofs" + sessions - "HOL-Isar_Examples" + "HOL-Examples" theories Hilbert_Classical Proof_Terms XML_Data session "HOL-Proofs-Extraction" (timing) in "Proofs/Extraction" = "HOL-Proofs" + description " Examples for program extraction in Higher-Order Logic. " options [quick_and_dirty = false] sessions "HOL-Computational_Algebra" theories Greatest_Common_Divisor Warshall Higman_Extraction Pigeonhole Euclid document_files "root.bib" "root.tex" session "HOL-Proofs-Lambda" (timing) in "Proofs/Lambda" = "HOL-Proofs" + description \ Lambda Calculus in de Bruijn's Notation. This session defines lambda-calculus terms with de Bruijn indixes and proves confluence of beta, eta and beta+eta. The paper "More Church-Rosser Proofs (in Isabelle/HOL)" describes the whole theory (see http://www.in.tum.de/~nipkow/pubs/jar2001.html). \ options [print_mode = "no_brackets", quick_and_dirty = false] sessions "HOL-Library" theories Eta StrongNorm Standardization WeakNorm document_files "root.bib" "root.tex" session "HOL-Prolog" in Prolog = HOL + description " Author: David von Oheimb (based on a lecture on Lambda Prolog by Nadathur) A bare-bones implementation of Lambda-Prolog. This is a simple exploratory implementation of Lambda-Prolog in HOL, including some minimal examples (in Test.thy) and a more typical example of a little functional language and its type system. " theories Test Type session "HOL-MicroJava" (timing) in MicroJava = HOL + description " Formalization of a fragment of Java, together with a corresponding virtual machine and a specification of its bytecode verifier and a lightweight bytecode verifier, including proofs of type-safety. " sessions "HOL-Library" "HOL-Eisbach" directories "BV" "Comp" "DFA" "J" "JVM" theories MicroJava document_files "introduction.tex" "root.bib" "root.tex" session "HOL-NanoJava" in NanoJava = HOL + description " Hoare Logic for a tiny fragment of Java. 
" theories Example document_files "root.bib" "root.tex" session "HOL-Bali" (timing) in Bali = HOL + sessions "HOL-Library" theories AxExample AxSound AxCompl Trans TypeSafe document_files "root.tex" session "HOL-IOA" in IOA = HOL + description \ Author: Tobias Nipkow and Konrad Slind and Olaf Müller Copyright 1994--1996 TU Muenchen The meta-theory of I/O-Automata in HOL. This formalization has been significantly changed and extended, see HOLCF/IOA. There are also the proofs of two communication protocols which formerly have been here. @inproceedings{Nipkow-Slind-IOA, author={Tobias Nipkow and Konrad Slind}, title={{I/O} Automata in {Isabelle/HOL}}, booktitle={Proc.\ TYPES Workshop 1994}, publisher=Springer, series=LNCS, note={To appear}} ftp://ftp.informatik.tu-muenchen.de/local/lehrstuhl/nipkow/ioa.ps.gz and @inproceedings{Mueller-Nipkow, author={Olaf M\"uller and Tobias Nipkow}, title={Combining Model Checking and Deduction for {I/O}-Automata}, booktitle={Proc.\ TACAS Workshop}, organization={Aarhus University, BRICS report}, year=1995} ftp://ftp.informatik.tu-muenchen.de/local/lehrstuhl/nipkow/tacas.dvi.gz \ theories Solve session "HOL-Lattice" in Lattice = HOL + description " Author: Markus Wenzel, TU Muenchen Basic theory of lattices and orders. " theories CompleteLattice document_files "root.tex" session "HOL-ex" (timing) in ex = "HOL-Number_Theory" + description " Miscellaneous examples for Higher-Order Logic. " theories Antiquote Argo_Examples Arith_Examples Ballot BinEx Birthday_Paradox Bit_Lists Bubblesort CTL Cartouche_Examples Case_Product Chinese Classical Code_Binary_Nat_examples Code_Lazy_Demo Code_Timing Coercion_Examples Computations Conditional_Parametricity_Examples Cubic_Quartic Datatype_Record_Examples Dedekind_Real Erdoes_Szekeres Eval_Examples Executable_Relation Execute_Choice Functions Function_Growth Gauge_Integration Guess HarmonicSeries Hebrew Hex_Bin_Examples IArray_Examples Intuitionistic Join_Theory Lagrange List_to_Set_Comprehension_Examples LocaleTest2 MergeSort MonoidGroup Multiquote NatSum Normalization_by_Evaluation PER Parallel_Example Peano_Axioms Perm_Fragments PresburgerEx Primrec Pythagoras Quicksort Radix_Sort Reflection_Examples Refute_Examples Residue_Ring Rewrite_Examples SOS SOS_Cert Serbian Set_Comprehension_Pointfree_Examples Set_Theory Simproc_Tests Simps_Case_Conv_Examples Sketch_and_Explore Sorting_Algorithms_Examples Sqrt Sqrt_Script Sudoku Sum_of_Powers Tarski Termination ThreeDivides Transfer_Debug Transfer_Int_Nat Transitive_Closure_Table_Ex Tree23 Triangular_Numbers Unification While_Combinator_Example Word veriT_Preprocessing theories [skip_proofs = false] SAT_Examples Meson_Test session "HOL-Isar_Examples" in Isar_Examples = "HOL-Computational_Algebra" + description " Miscellaneous Isabelle/Isar examples. " options [quick_and_dirty] theories Structured_Statements Basic_Logic Expr_Compiler Fibonacci Group Group_Context Group_Notepad Hoare_Ex Mutilated_Checkerboard Puzzle Summation document_files "root.bib" "root.tex" session "HOL-Eisbach" in Eisbach = HOL + description \ The Eisbach proof method language and "match" method. \ sessions FOL "HOL-Analysis" theories Eisbach Tests Examples Examples_FOL Example_Metric session "HOL-SET_Protocol" (timing) in SET_Protocol = HOL + description " Verification of the SET Protocol. " sessions "HOL-Library" theories SET_Protocol document_files "root.tex" session "HOL-Matrix_LP" in Matrix_LP = HOL + description " Two-dimensional matrices and linear programming. 
" sessions "HOL-Library" directories "Compute_Oracle" theories Cplex document_files "root.tex" session "HOL-TLA" in TLA = HOL + description " Lamport's Temporal Logic of Actions. " theories TLA session "HOL-TLA-Inc" in "TLA/Inc" = "HOL-TLA" + theories Inc session "HOL-TLA-Buffer" in "TLA/Buffer" = "HOL-TLA" + theories DBuffer session "HOL-TLA-Memory" in "TLA/Memory" = "HOL-TLA" + theories MemoryImplementation session "HOL-TPTP" in TPTP = HOL + description " Author: Jasmin Blanchette, TU Muenchen Author: Nik Sultana, University of Cambridge Copyright 2011 TPTP-related extensions. " sessions "HOL-Library" theories ATP_Theory_Export MaSh_Eval TPTP_Interpret THF_Arith TPTP_Proof_Reconstruction theories ATP_Problem_Import session "HOL-Probability" (main timing) in "Probability" = "HOL-Analysis" + theories Probability document_files "root.tex" session "HOL-Probability-ex" (timing) in "Probability/ex" = "HOL-Probability" + theories Dining_Cryptographers Koepf_Duermuth_Countermeasure Measure_Not_CCC session "HOL-Nominal" in Nominal = HOL + sessions "HOL-Library" theories Nominal session "HOL-Nominal-Examples" (timing) in "Nominal/Examples" = "HOL-Nominal" + theories Class3 CK_Machine Compile Contexts Crary CR_Takahashi CR Fsub Height Lambda_mu Lam_Funs LocalWeakening Pattern SN SOS Standardization Support Type_Preservation Weakening W theories [quick_and_dirty] VC_Condition session "HOL-Cardinals" (timing) in Cardinals = HOL + description " Ordinals and Cardinals, Full Theories. " theories Cardinals Bounded_Set document_files "intro.tex" "root.tex" "root.bib" session "HOL-Datatype_Examples" (timing) in Datatype_Examples = "HOL-Library" + description " (Co)datatype Examples. " directories "Derivation_Trees" theories Compat Lambda_Term Process TreeFsetI "Derivation_Trees/Gram_Lang" "Derivation_Trees/Parallel_Composition" Koenig Lift_BNF Milner_Tofte Stream_Processor Cyclic_List Free_Idempotent_Monoid LDL TLList Misc_Codatatype Misc_Datatype Misc_Primcorec Misc_Primrec Datatype_Simproc_Tests session "HOL-Corec_Examples" (timing) in Corec_Examples = "HOL-Library" + description " Corecursion Examples. " directories "Tests" theories LFilter Paper_Examples Stream_Processor "Tests/Simple_Nesting" "Tests/Iterate_GPV" theories [quick_and_dirty] "Tests/GPV_Bare_Bones" "Tests/Merge_D" "Tests/Merge_Poly" "Tests/Misc_Mono" "Tests/Misc_Poly" "Tests/Small_Concrete" "Tests/Stream_Friends" "Tests/TLList_Friends" "Tests/Type_Class" session "HOL-Word" (main timing) in Word = HOL + sessions "HOL-Library" theories Word More_Word Word_Examples document_files "root.bib" "root.tex" session "HOL-Statespace" in Statespace = HOL + theories [skip_proofs = false] StateSpaceEx document_files "root.tex" session "HOL-Nonstandard_Analysis" (timing) in Nonstandard_Analysis = "HOL-Computational_Algebra" + description " Nonstandard analysis. 
" theories Nonstandard_Analysis document_files "root.tex" session "HOL-Nonstandard_Analysis-Examples" (timing) in "Nonstandard_Analysis/Examples" = "HOL-Nonstandard_Analysis" + theories NSPrimes session "HOL-Mirabelle" in Mirabelle = HOL + theories Mirabelle_Test session "HOL-Mirabelle-ex" in "Mirabelle/ex" = "HOL-Mirabelle" + options [timeout = 60] theories Ex session "HOL-Word-SMT_Examples" (timing) in SMT_Examples = "HOL-Word" + options [quick_and_dirty] theories Boogie SMT_Examples SMT_Word_Examples SMT_Tests session "HOL-SPARK" in "SPARK" = "HOL-Word" + theories SPARK session "HOL-SPARK-Examples" in "SPARK/Examples" = "HOL-SPARK" + directories "Gcd" "Liseq" "RIPEMD-160" "Sqrt" theories "Gcd/Greatest_Common_Divisor" "Liseq/Longest_Increasing_Subsequence" "RIPEMD-160/F" "RIPEMD-160/Hash" "RIPEMD-160/K_L" "RIPEMD-160/K_R" "RIPEMD-160/R_L" "RIPEMD-160/Round" "RIPEMD-160/R_R" "RIPEMD-160/S_L" "RIPEMD-160/S_R" "Sqrt/Sqrt" export_files (in ".") "*:**.prv" session "HOL-SPARK-Manual" in "SPARK/Manual" = "HOL-SPARK" + options [show_question_marks = false] sessions "HOL-SPARK-Examples" theories Example_Verification VC_Principles Reference Complex_Types document_files "complex_types.ads" "complex_types_app.adb" "complex_types_app.ads" "Gcd.adb" "Gcd.ads" "intro.tex" "loop_invariant.adb" "loop_invariant.ads" "root.bib" "root.tex" "Simple_Gcd.adb" "Simple_Gcd.ads" session "HOL-Mutabelle" in Mutabelle = HOL + sessions "HOL-Library" theories MutabelleExtra session "HOL-Quickcheck_Examples" (timing) in Quickcheck_Examples = HOL + sessions "HOL-Library" theories Quickcheck_Examples Quickcheck_Lattice_Examples Completeness Quickcheck_Interfaces Quickcheck_Nesting_Example theories [condition = ISABELLE_GHC] Hotel_Example Quickcheck_Narrowing_Examples session "HOL-Quotient_Examples" (timing) in Quotient_Examples = "HOL-Algebra" + description " Author: Cezary Kaliszyk and Christian Urban " theories DList Quotient_FSet Quotient_Int Quotient_Message Lift_FSet Lift_Set Lift_Fun Quotient_Rat Lift_DList Int_Pow Lifting_Code_Dt_Test session "HOL-Predicate_Compile_Examples" (timing) in Predicate_Compile_Examples = HOL + sessions "HOL-Library" theories Examples Predicate_Compile_Tests Predicate_Compile_Quickcheck_Examples Specialisation_Examples IMP_1 IMP_2 (* FIXME since 21-Jul-2011 Hotel_Example_Small_Generator *) IMP_3 IMP_4 theories [condition = ISABELLE_SWIPL] Code_Prolog_Examples Context_Free_Grammar_Example Hotel_Example_Prolog Lambda_Example List_Examples theories [condition = ISABELLE_SWIPL, quick_and_dirty] Reg_Exp_Example session "HOL-Types_To_Sets" in Types_To_Sets = HOL + description " Experimental extension of Higher-Order Logic to allow translation of types to sets. " directories "Examples" theories Types_To_Sets "Examples/Prerequisites" "Examples/Finite" "Examples/T2_Spaces" "Examples/Unoverload_Def" "Examples/Linear_Algebra_On" session HOLCF (main timing) in HOLCF = HOL + description " Author: Franz Regensburger Author: Brian Huffman HOLCF -- a semantic extension of HOL by the LCF logic. " sessions "HOL-Library" theories HOLCF (global) document_files "root.tex" session "HOLCF-Tutorial" in "HOLCF/Tutorial" = HOLCF + theories Domain_ex Fixrec_ex New_Domain document_files "root.tex" session "HOLCF-Library" in "HOLCF/Library" = HOLCF + theories HOLCF_Library HOL_Cpo session "HOLCF-IMP" in "HOLCF/IMP" = HOLCF + description " IMP -- A WHILE-language and its Semantics. This is the HOLCF-based denotational semantics of a simple WHILE-language. 
" sessions "HOL-IMP" theories HoareEx document_files "isaverbatimwrite.sty" "root.tex" "root.bib" session "HOLCF-ex" in "HOLCF/ex" = "HOLCF-Library" + description " Miscellaneous examples for HOLCF. " theories Dnat Dagstuhl Focus_ex Fix2 Hoare Concurrency_Monad Loop Powerdomain_ex Domain_Proofs Letrec Pattern_Match session "HOLCF-FOCUS" in "HOLCF/FOCUS" = "HOLCF-Library" + description \ FOCUS: a theory of stream-processing functions Isabelle/HOLCF. For introductions to FOCUS, see "The Design of Distributed Systems - An Introduction to FOCUS" http://www4.in.tum.de/publ/html.php?e=2 "Specification and Refinement of a Buffer of Length One" http://www4.in.tum.de/publ/html.php?e=15 "Specification and Development of Interactive Systems: Focus on Streams, Interfaces, and Refinement" http://www4.in.tum.de/publ/html.php?e=321 \ theories Fstreams FOCUS Buffer_adm session IOA (timing) in "HOLCF/IOA" = HOLCF + description " Author: Olaf Mueller Copyright 1997 TU München A formalization of I/O automata in HOLCF. The distribution contains simulation relations, temporal logic, and an abstraction theory. Everything is based upon a domain-theoretic model of finite and infinite sequences. " theories Abstraction session "IOA-ABP" in "HOLCF/IOA/ABP" = IOA + description " Author: Olaf Mueller The Alternating Bit Protocol performed in I/O-Automata. " theories Correctness Spec session "IOA-NTP" in "HOLCF/IOA/NTP" = IOA + description " Author: Tobias Nipkow & Konrad Slind A network transmission protocol, performed in the I/O automata formalization by Olaf Mueller. " theories Correctness session "IOA-Storage" in "HOLCF/IOA/Storage" = IOA + description " Author: Olaf Mueller Memory storage case study. " theories Correctness session "IOA-ex" in "HOLCF/IOA/ex" = IOA + description " Author: Olaf Mueller " theories TrivEx TrivEx2 diff --git a/src/Pure/ML/ml_statistics.scala b/src/Pure/ML/ml_statistics.scala --- a/src/Pure/ML/ml_statistics.scala +++ b/src/Pure/ML/ml_statistics.scala @@ -1,245 +1,245 @@ /* Title: Pure/ML/ml_statistics.scala Author: Makarius ML runtime statistics. 
*/ package isabelle import scala.annotation.tailrec import scala.collection.mutable import scala.collection.immutable.{SortedSet, SortedMap} import scala.swing.{Frame, Component} import org.jfree.data.xy.{XYSeries, XYSeriesCollection} import org.jfree.chart.{JFreeChart, ChartPanel, ChartFactory} import org.jfree.chart.plot.PlotOrientation object ML_Statistics { /* properties */ val Now = new Properties.Double("now") def now(props: Properties.T): Double = Now.unapply(props).get /* monitor process */ def monitor(pid: Long, delay: Time = Time.seconds(0.5), consume: Properties.T => Unit = Console.println) { def progress_stdout(line: String) { val props = Library.space_explode(',', line).flatMap((entry: String) => Library.space_explode('=', entry) match { case List(a, b) => Some((a, b)) case _ => None }) if (props.nonEmpty) consume(props) } - Bash.process("exec \"$POLYML_EXE\" -q --use " + - File.bash_platform_path(Path.explode("~~/src/Pure/ML/ml_statistics.ML")) + - " --eval " + Bash.string("ML_Statistics.monitor " + ML_Syntax.print_long(pid) + " " + - ML_Syntax.print_double(delay.seconds))) + Bash.process("exec \"$POLYML_EXE\" -q --use src/Pure/ML/ml_statistics.ML --eval " + + Bash.string("ML_Statistics.monitor " + ML_Syntax.print_long(pid) + " " + + ML_Syntax.print_double(delay.seconds)), + cwd = Path.explode("~~").file) .result(progress_stdout = progress_stdout, strict = false).check } /* memory fields (mega bytes) */ def mem_print(x: Long): Option[String] = if (x == 0L) None else Some(x.toString + " M") def mem_scale(x: Long): Long = x / 1024 / 1024 def mem_field_scale(name: String, x: Double): Double = if (heap_fields._2.contains(name) || program_fields._2.contains(name)) mem_scale(x.toLong).toDouble else x val CODE_SIZE = "size_code" val STACK_SIZE = "size_stacks" val HEAP_SIZE = "size_heap" /* standard fields */ type Fields = (String, List[String]) val tasks_fields: Fields = ("Future tasks", List("tasks_ready", "tasks_pending", "tasks_running", "tasks_passive", "tasks_urgent", "tasks_total")) val workers_fields: Fields = ("Worker threads", List("workers_total", "workers_active", "workers_waiting")) val GC_fields: Fields = ("GCs", List("partial_GCs", "full_GCs", "share_passes")) val heap_fields: Fields = ("Heap", List(HEAP_SIZE, "size_allocation", "size_allocation_free", "size_heap_free_last_full_GC", "size_heap_free_last_GC")) val program_fields: Fields = ("Program", List("size_code", "size_stacks")) val threads_fields: Fields = ("Threads", List("threads_total", "threads_in_ML", "threads_wait_condvar", "threads_wait_IO", "threads_wait_mutex", "threads_wait_signal")) val time_fields: Fields = ("Time", List("time_elapsed", "time_elapsed_GC", "time_CPU", "time_GC")) val speed_fields: Fields = ("Speed", List("speed_CPU", "speed_GC")) private val time_speed = Map("time_CPU" -> "speed_CPU", "time_GC" -> "speed_GC") val all_fields: List[Fields] = List(tasks_fields, workers_fields, GC_fields, heap_fields, program_fields, threads_fields, time_fields, speed_fields) val main_fields: List[Fields] = List(tasks_fields, workers_fields, heap_fields) /* content interpretation */ final case class Entry(time: Double, data: Map[String, Double]) { def get(field: String): Double = data.getOrElse(field, 0.0) } val empty: ML_Statistics = apply(Nil) def apply(ml_statistics: List[Properties.T], heading: String = "", domain: String => Boolean = (key: String) => true): ML_Statistics = { require(ml_statistics.forall(props => Now.unapply(props).isDefined)) val time_start = if (ml_statistics.isEmpty) 0.0 else 
now(ml_statistics.head) val duration = if (ml_statistics.isEmpty) 0.0 else now(ml_statistics.last) - time_start val fields = SortedSet.empty[String] ++ (for { props <- ml_statistics.iterator (x, _) <- props.iterator if x != Now.name && domain(x) } yield x) val content = { var last_edge = Map.empty[String, (Double, Double, Double)] val result = new mutable.ListBuffer[ML_Statistics.Entry] for (props <- ml_statistics) { val time = now(props) - time_start require(time >= 0.0) // rising edges -- relative speed val speeds = (for { (key, value) <- props.iterator key1 <- time_speed.get(key) if domain(key1) } yield { val (x0, y0, s0) = last_edge.getOrElse(key, (0.0, 0.0, 0.0)) val x1 = time val y1 = java.lang.Double.parseDouble(value) val s1 = if (x1 == x0) 0.0 else (y1 - y0) / (x1 - x0) if (y1 > y0) { last_edge += (key -> (x1, y1, s1)) (key1, s1.toString) } else (key1, s0.toString) }).toList val data = SortedMap.empty[String, Double] ++ (for { (x, y) <- props.iterator ++ speeds.iterator if x != Now.name && domain(x) z = java.lang.Double.parseDouble(y) if z != 0.0 } yield { (x.intern, mem_field_scale(x, z)) }) result += ML_Statistics.Entry(time, data) } result.toList } new ML_Statistics(heading, fields, content, time_start, duration) } } final class ML_Statistics private( val heading: String, val fields: Set[String], val content: List[ML_Statistics.Entry], val time_start: Double, val duration: Double) { /* content */ def maximum(field: String): Double = (0.0 /: content)({ case (m, e) => m max e.get(field) }) def average(field: String): Double = { @tailrec def sum(t0: Double, list: List[ML_Statistics.Entry], acc: Double): Double = list match { case Nil => acc case e :: es => val t = e.time sum(t, es, (t - t0) * e.get(field) + acc) } content match { case Nil => 0.0 case List(e) => e.get(field) case e :: es => sum(e.time, es, 0.0) / duration } } /* charts */ def update_data(data: XYSeriesCollection, selected_fields: List[String]) { data.removeAllSeries for (field <- selected_fields) { val series = new XYSeries(field) content.foreach(entry => series.add(entry.time, entry.get(field))) data.addSeries(series) } } def chart(title: String, selected_fields: List[String]): JFreeChart = { val data = new XYSeriesCollection update_data(data, selected_fields) ChartFactory.createXYLineChart(title, "time", "value", data, PlotOrientation.VERTICAL, true, true, true) } def chart(fields: ML_Statistics.Fields): JFreeChart = chart(fields._1, fields._2) def show_frames(fields: List[ML_Statistics.Fields] = ML_Statistics.main_fields): Unit = fields.map(chart).foreach(c => GUI_Thread.later { new Frame { iconImage = GUI.isabelle_image() title = heading contents = Component.wrap(new ChartPanel(c)) visible = true } }) } diff --git a/src/Pure/PIDE/resources.ML b/src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML +++ b/src/Pure/PIDE/resources.ML @@ -1,312 +1,314 @@ (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. 
*) signature RESOURCES = sig val default_qualifier: string val init_session: {pide: bool, session_positions: (string * Properties.T) list, session_directories: (string * string) list, docs: string list, global_theories: (string * string) list, loaded_theories: string list} -> unit val finish_session_base: unit -> unit val is_pide: unit -> bool val global_theory: string -> string option val loaded_theory: string -> bool val check_session: Proof.context -> string * Position.T -> string val check_doc: Proof.context -> string * Position.T -> string val master_directory: theory -> Path.T val imports_of: theory -> (string * Position.T) list val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory val thy_path: Path.T -> Path.T val theory_qualifier: string -> string val find_theory_file: string -> Path.T option val import_name: string -> Path.T -> string -> {node_name: Path.T, master_dir: Path.T, theory_name: string} val check_thy: Path.T -> string -> {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, imports: (string * Position.T) list, keywords: Thy_Header.keywords} val parse_files: string -> (theory -> Token.file list) parser val provide: Path.T * SHA1.digest -> theory -> theory val provide_file: Token.file -> theory -> theory val provide_parse_files: string -> (theory -> Token.file list * theory) parser val loaded_files_current: theory -> bool val check_path: Proof.context -> Path.T option -> string * Position.T -> Path.T val check_file: Proof.context -> Path.T option -> string * Position.T -> Path.T val check_dir: Proof.context -> Path.T option -> string * Position.T -> Path.T end; structure Resources: RESOURCES = struct (* session base *) val default_qualifier = "Draft"; type entry = {pos: Position.T, serial: serial}; fun make_entry props : entry = {pos = Position.of_properties props, serial = serial ()}; val empty_session_base = {pide = false, session_positions = []: (string * entry) list, session_directories = Symtab.empty: Path.T list Symtab.table, docs = []: (string * entry) list, global_theories = Symtab.empty: string Symtab.table, loaded_theories = Symtab.empty: unit Symtab.table}; val global_session_base = Synchronized.var "Sessions.base" empty_session_base; fun init_session {pide, session_positions, session_directories, docs, global_theories, loaded_theories} = Synchronized.change global_session_base (fn _ => {pide = pide, session_positions = sort_by #1 (map (apsnd make_entry) session_positions), session_directories = fold_rev (fn (dir, name) => Symtab.cons_list (name, Path.explode dir)) session_directories Symtab.empty, docs = sort_by #1 (map (apsnd make_entry o rpair []) docs), global_theories = Symtab.make global_theories, loaded_theories = Symtab.make_set loaded_theories}); fun finish_session_base () = Synchronized.change global_session_base (fn {global_theories, loaded_theories, ...} => {pide = false, session_positions = [], session_directories = Symtab.empty, docs = [], global_theories = global_theories, loaded_theories = loaded_theories}); fun get_session_base f = f (Synchronized.value global_session_base); fun is_pide () = get_session_base #pide; fun global_theory a = Symtab.lookup (get_session_base #global_theories) a; fun loaded_theory a = Symtab.defined (get_session_base #loaded_theories) a; fun check_name which kind markup ctxt arg = Completion.check_item kind markup (get_session_base which |> sort_by #1) ctxt arg; val check_session = check_name #session_positions "session" (fn (name, {pos, serial}) => Markup.entity Markup.sessionN name |> 
Markup.properties (Position.entity_properties_of false serial pos)); val check_doc = check_name #docs "documentation" (Markup.doc o #1); (* manage source files *) type files = {master_dir: Path.T, (*master directory of theory source*) imports: (string * Position.T) list, (*source specification of imports*) provided: (Path.T * SHA1.digest) list}; (*source path, digest*) fun make_files (master_dir, imports, provided): files = {master_dir = master_dir, imports = imports, provided = provided}; structure Files = Theory_Data ( type T = files; val empty = make_files (Path.current, [], []); - fun extend _ = empty; - fun merge _ = empty; + val extend = I; + fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) = + let val provided' = Library.merge (op =) (provided1, provided2) + in make_files (master_dir, imports, provided') end ); fun map_files f = Files.map (fn {master_dir, imports, provided} => make_files (f (master_dir, imports, provided))); val master_directory = #master_dir o Files.get; val imports_of = #imports o Files.get; fun begin_theory master_dir {name, imports, keywords} parents = Theory.begin_theory name parents |> map_files (fn _ => (Path.explode (Path.smart_implode master_dir), imports, [])) |> Thy_Header.add_keywords keywords; (* theory files *) val thy_path = Path.ext "thy"; fun theory_qualifier theory = (case global_theory theory of SOME qualifier => qualifier | NONE => Long_Name.qualifier theory); fun theory_name qualifier theory = if Long_Name.is_qualified theory orelse is_some (global_theory theory) then theory else Long_Name.qualify qualifier theory; fun find_theory_file thy_name = let val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name)); val session = theory_qualifier thy_name; val dirs = Symtab.lookup_list (get_session_base #session_directories) session; in dirs |> get_first (fn dir => let val path = Path.append dir thy_file in if File.is_file path then SOME path else NONE end) end; fun make_theory_node node_name theory = {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory}; fun loaded_theory_node theory = {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory}; fun import_name qualifier dir s = let val theory = theory_name qualifier (Thy_Header.import_name s); fun theory_node () = make_theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))) theory; in if not (Thy_Header.is_base_name s) then theory_node () else if loaded_theory theory then loaded_theory_node theory else (case find_theory_file theory of SOME node_name => make_theory_node node_name theory | NONE => if Long_Name.is_qualified s then loaded_theory_node theory else theory_node ()) end; fun check_file dir file = File.check_file (File.full_path dir file); fun check_thy dir thy_name = let val thy_base_name = Long_Name.base_name thy_name; val master_file = (case find_theory_file thy_name of SOME path => check_file Path.current path | NONE => check_file dir (thy_path (Path.basic thy_base_name))); val text = File.read master_file; val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_base_name <> name andalso error ("Bad theory name " ^ quote name ^ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos); in {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} end; (* load files *) fun parse_files cmd = Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy => 
(case Token.get_files tok of [] => let val keywords = Thy_Header.get_keywords thy; val master_dir = master_directory thy; val pos = Token.pos_of tok; val src_paths = Keyword.command_files keywords cmd (Path.explode name); in map (Command.read_file master_dir pos) src_paths end | files => map Exn.release files)); fun provide (src_path, id) = map_files (fn (master_dir, imports, provided) => if AList.defined (op =) provided src_path then error ("Duplicate use of source file: " ^ Path.print src_path) else (master_dir, imports, (src_path, id) :: provided)); fun provide_file (file: Token.file) = provide (#src_path file, #digest file); fun provide_parse_files cmd = parse_files cmd >> (fn files => fn thy => let val fs = files thy; val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; in (fs, thy') end); fun load_file thy src_path = let val full_path = check_file (master_directory thy) src_path; val text = File.read full_path; val id = SHA1.digest text; in ((full_path, id), text) end; fun loaded_files_current thy = #provided (Files.get thy) |> forall (fn (src_path, id) => (case try (load_file thy) src_path of NONE => false | SOME ((_, id'), _) => id = id')); (* formal check *) fun formal_check check_file ctxt opt_dir (name, pos) = let fun err msg = error (msg ^ Position.here pos); val _ = Context_Position.report ctxt pos Markup.language_path; val dir = (case opt_dir of SOME dir => dir | NONE => master_directory (Proof_Context.theory_of ctxt)); val path = Path.append dir (Path.explode name) handle ERROR msg => err msg; val _ = Path.expand path handle ERROR msg => err msg; val _ = Context_Position.report ctxt pos (Markup.path (Path.smart_implode path)); val _ = check_file path handle ERROR msg => err msg; in path end; val check_path = formal_check I; val check_file = formal_check File.check_file; val check_dir = formal_check File.check_dir; (* antiquotations *) local fun document_antiq (check: Proof.context -> Path.T option -> string * Position.T -> Path.T) = Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => let val _ = check ctxt NONE (name, pos); val latex = Latex.string (Latex.output_ascii_breakable "/" name); in Latex.enclose_block "\\isatt{" "}" [latex] end); fun ML_antiq check = Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => check ctxt (SOME Path.current) (name, pos) |> ML_Syntax.print_path); in val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\session\ (Scan.lift Parse.embedded_position) check_session #> Thy_Output.antiquotation_verbatim_embedded \<^binding>\doc\ (Scan.lift Parse.embedded_position) check_doc #> Thy_Output.antiquotation_raw_embedded \<^binding>\path\ (document_antiq check_path) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\file\ (document_antiq check_file) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\dir\ (document_antiq check_dir) (K I) #> ML_Antiquotation.value_embedded \<^binding>\path\ (ML_antiq check_path) #> ML_Antiquotation.value_embedded \<^binding>\file\ (ML_antiq check_file) #> ML_Antiquotation.value_embedded \<^binding>\dir\ (ML_antiq check_dir) #> ML_Antiquotation.value_embedded \<^binding>\path_binding\ (Scan.lift (Parse.position Parse.path) >> (ML_Syntax.print_path_binding o Path.explode_binding)) #> ML_Antiquotation.value \<^binding>\master_dir\ (Args.theory >> (ML_Syntax.print_path o master_directory))); end; end; diff --git a/src/Pure/Thy/present.ML b/src/Pure/Thy/present.ML --- a/src/Pure/Thy/present.ML +++ 
b/src/Pure/Thy/present.ML @@ -1,322 +1,332 @@ (* Title: Pure/Thy/present.ML Author: Markus Wenzel and Stefan Berghofer, TU Muenchen Theory presentation: HTML and PDF-LaTeX documents. *) signature PRESENT = sig val get_bibtex_entries: theory -> string list val theory_qualifier: theory -> string val document_option: Options.T -> {enabled: bool, disabled: bool} val document_variants: Options.T -> (string * string) list val init: HTML.symbols -> bool -> Path.T -> string -> string -> (string * string) list -> (Path.T * Path.T) list -> Path.T -> string * string -> bool -> unit val finish: unit -> unit val theory_output: theory -> string list -> unit val begin_theory: string list -> int -> (unit -> HTML.text) -> theory -> theory end; structure Present: PRESENT = struct (** paths **) val tex_ext = Path.ext "tex"; val tex_path = tex_ext o Path.basic; val html_ext = Path.ext "html"; val html_path = html_ext o Path.basic; val index_path = Path.basic "index.html"; val readme_html_path = Path.basic "README.html"; val doc_indexN = "session"; val session_graph_path = Path.basic "session_graph.pdf"; fun show_path path = Path.implode (Path.expand (File.full_path Path.current path)); -(** additional theory data **) +(** theory data **) + +type browser_info = {chapter: string, name: string, bibtex_entries: string list}; +val empty_browser_info: browser_info = {chapter = "Unsorted", name = "Unknown", bibtex_entries = []}; structure Browser_Info = Theory_Data ( - type T = {chapter: string, name: string, bibtex_entries: string list}; - val empty = {chapter = "Unsorted", name = "Unknown", bibtex_entries = []}: T; - fun extend _ = empty; - fun merge _ = empty; + type T = browser_info + val empty = empty_browser_info; + val extend = I; + val merge = #1; ); -val _ = Theory.setup - (Browser_Info.put {chapter = Context.PureN, name = Context.PureN, bibtex_entries = []}); +fun reset_browser_info thy = + if Browser_Info.get thy = empty_browser_info then NONE + else SOME (Browser_Info.put empty_browser_info thy); + +val _ = + Theory.setup + (Theory.at_begin reset_browser_info #> + Browser_Info.put {chapter = Context.PureN, name = Context.PureN, bibtex_entries = []}); val get_bibtex_entries = #bibtex_entries o Browser_Info.get; + (** global browser info state **) (* type theory_info *) type theory_info = {tex_source: string list, html_source: string}; fun make_theory_info (tex_source, html_source) = {tex_source = tex_source, html_source = html_source}: theory_info; fun map_theory_info f {tex_source, html_source} = make_theory_info (f (tex_source, html_source)); (* type browser_info *) type browser_info = {theories: theory_info Symtab.table, tex_index: (int * string) list, html_index: (int * string) list}; fun make_browser_info (theories, tex_index, html_index) : browser_info = {theories = theories, tex_index = tex_index, html_index = html_index}; val empty_browser_info = make_browser_info (Symtab.empty, [], []); fun map_browser_info f {theories, tex_index, html_index} = make_browser_info (f (theories, tex_index, html_index)); (* state *) val browser_info = Synchronized.var "browser_info" empty_browser_info; fun change_browser_info f = Synchronized.change browser_info (map_browser_info f); fun init_theory_info name info = change_browser_info (fn (theories, tex_index, html_index) => (Symtab.update (name, info) theories, tex_index, html_index)); fun change_theory_info name f = change_browser_info (fn (theories, tex_index, html_index) => (case Symtab.lookup theories name of NONE => error ("Browser info: cannot access theory 
document " ^ quote name) | SOME info => (Symtab.update (name, map_theory_info f info) theories, tex_index, html_index))); fun add_tex_index txt = change_browser_info (fn (theories, tex_index, html_index) => (theories, txt :: tex_index, html_index)); fun add_html_index txt = change_browser_info (fn (theories, tex_index, html_index) => (theories, tex_index, txt :: html_index)); (** global session state **) (* session_info *) type session_info = {symbols: HTML.symbols, name: string, chapter: string, info_path: Path.T, info: bool, doc_format: string, doc_output: Path.T option, doc_files: (Path.T * Path.T) list, graph_file: Path.T, documents: (string * string) list, verbose: bool, readme: Path.T option}; fun make_session_info (symbols, name, chapter, info_path, info, doc_format, doc_output, doc_files, graph_file, documents, verbose, readme) = {symbols = symbols, name = name, chapter = chapter, info_path = info_path, info = info, doc_format = doc_format, doc_output = doc_output, doc_files = doc_files, graph_file = graph_file, documents = documents, verbose = verbose, readme = readme}: session_info; (* state *) val session_info = Unsynchronized.ref (NONE: session_info option); fun with_session_info x f = (case ! session_info of NONE => x | SOME info => f info); val theory_qualifier = Resources.theory_qualifier o Context.theory_long_name; fun is_session_theory thy = (case ! session_info of NONE => false | SOME {name, ...} => name = theory_qualifier thy); (** document preparation **) (* options *) fun document_option options = (case Options.string options "document" of "" => {enabled = false, disabled = false} | "false" => {enabled = false, disabled = true} | _ => {enabled = true, disabled = false}); fun document_variants options = let val variants = space_explode ":" (Options.string options "document_variants") |> map (fn s => (case space_explode "=" s of [name] => (name, "") | [name, tags] => (name, tags) | _ => error ("Malformed document variant specification: " ^ quote s))); val _ = (case duplicates (op =) (map #1 variants) of [] => () | dups => error ("Duplicate document variants: " ^ commas_quote dups)); in variants end; (* init session *) fun init symbols info info_path doc document_output doc_variants doc_files graph_file (chapter, name) verbose = let val doc_output = if document_output = "" then NONE else SOME (Path.explode document_output); val documents = if doc = "" orelse null doc_files then [] else doc_variants; val readme = if File.exists readme_html_path then SOME readme_html_path else NONE; val docs = (case readme of NONE => [] | SOME p => [(Url.File p, "README")]) @ map (fn (name, _) => (Url.File (Path.ext doc (Path.basic name)), name)) documents; in session_info := SOME (make_session_info (symbols, name, chapter, info_path, info, doc, doc_output, doc_files, graph_file, documents, verbose, readme)); Synchronized.change browser_info (K empty_browser_info); add_html_index (0, HTML.begin_session_index symbols name (Url.File session_graph_path) docs) end; (* isabelle tool wrappers *) fun isabelle_document {verbose} format name tags dir = let val script = "isabelle document -d " ^ File.bash_path dir ^ " -o " ^ Bash.string format ^ " -n " ^ Bash.string name ^ (if tags = "" then "" else " -t " ^ Bash.string tags); val doc_path = Path.appends [dir, Path.parent, Path.basic name |> Path.ext format]; val _ = if verbose then writeln script else (); val {out, err, rc, ...} = Bash.process script; val _ = if verbose then writeln (trim_line (normalize_lines out)) else (); val _ = if not 
(File.exists doc_path) orelse rc <> 0 then error (trim_line err) else (); in doc_path end; (* finish session -- output all generated text *) fun sorted_index index = map snd (sort (int_ord o apply2 fst) (rev index)); fun index_buffer index = Buffer.add (implode (sorted_index index)) Buffer.empty; fun write_tex src name path = File.write_buffer (Path.append path (tex_path name)) src; fun write_tex_index tex_index path = write_tex (index_buffer tex_index) doc_indexN path; fun finish () = with_session_info () (fn {name, chapter, info, info_path, doc_format, doc_output, doc_files, graph_file, documents, verbose, readme, ...} => let val {theories, tex_index, html_index} = Synchronized.value browser_info; val thys = Symtab.dest theories; val session_prefix = Path.append (Path.append info_path (Path.basic chapter)) (Path.basic name); fun finish_html (a, {html_source, ...}: theory_info) = File.write (Path.append session_prefix (html_path a)) html_source; val _ = if info then (Isabelle_System.mkdirs session_prefix; File.write_buffer (Path.append session_prefix index_path) (index_buffer html_index |> Buffer.add HTML.end_document); (case readme of NONE => () | SOME path => Isabelle_System.copy_file path session_prefix); List.app finish_html thys; if verbose then Output.physical_stderr ("Browser info at " ^ show_path session_prefix ^ "\n") else ()) else (); fun document_job doc_prefix backdrop (doc_name, tags) = let val doc_dir = Path.append doc_prefix (Path.basic doc_name); fun purge () = if backdrop then Isabelle_System.rm_tree doc_dir else (); val _ = purge (); val _ = Isabelle_System.mkdirs doc_dir; val _ = Isabelle_System.bash ("isabelle latex -o sty " ^ File.bash_path (Path.append doc_dir (Path.basic "root.tex"))); val _ = List.app (fn file => Isabelle_System.copy_file_base file doc_dir) doc_files; val _ = Isabelle_System.copy_file graph_file (Path.append doc_dir session_graph_path); val _ = write_tex_index tex_index doc_dir; val _ = List.app (fn (a, {tex_source, ...}) => write_tex (fold Buffer.add tex_source Buffer.empty) a doc_dir) thys; in fn () => (isabelle_document {verbose = true} doc_format doc_name tags doc_dir before purge (), fn doc => if verbose orelse not backdrop then Output.physical_stderr ("Document at " ^ show_path doc ^ "\n") else ()) end; val jobs = (if info orelse is_none doc_output then map (document_job session_prefix true) documents else []) @ (case doc_output of NONE => [] | SOME path => map (document_job path false) documents); val _ = jobs |> Par_List.map (fn job => job ()) |> List.app (op |>); in Synchronized.change browser_info (K empty_browser_info); session_info := NONE end); (* theory elements *) fun theory_output thy output = with_session_info () (fn _ => if is_session_theory thy then (change_theory_info (Context.theory_name thy) o apfst) (K output) else ()); fun theory_link (curr_chapter, curr_session) thy = let val {chapter, name = session, ...} = Browser_Info.get thy; val link = html_path (Context.theory_name thy); in if curr_session = session then SOME link else if curr_chapter = chapter then SOME (Path.appends [Path.parent, Path.basic session, link]) else if chapter = Context.PureN then NONE else SOME (Path.appends [Path.parent, Path.parent, Path.basic chapter, Path.basic session, link]) end; fun begin_theory bibtex_entries update_time mk_text thy = with_session_info thy (fn {symbols, name = session_name, chapter, ...} => let val name = Context.theory_name thy; val parent_specs = Theory.parents_of thy |> map (fn parent => (Option.map Url.File (theory_link 
(chapter, session_name) parent), (Context.theory_name parent))); val html_source = HTML.theory symbols name parent_specs (mk_text ()); val _ = init_theory_info name (make_theory_info ([], html_source)); val bibtex_entries' = if is_session_theory thy then (add_html_index (update_time, HTML.theory_entry symbols (Url.File (html_path name), name)); add_tex_index (update_time, Latex.theory_entry name); bibtex_entries) else []; in thy |> Browser_Info.put {chapter = chapter, name = session_name, bibtex_entries = bibtex_entries'} end); end; diff --git a/src/Pure/Tools/generated_files.ML b/src/Pure/Tools/generated_files.ML --- a/src/Pure/Tools/generated_files.ML +++ b/src/Pure/Tools/generated_files.ML @@ -1,377 +1,393 @@ (* Title: Pure/Tools/generated_files.ML Author: Makarius Generated source files for other languages: with antiquotations, without Isabelle symbols. *) signature GENERATED_FILES = sig val add_files: Path.binding * string -> theory -> theory type file = {path: Path.T, pos: Position.T, content: string} val file_binding: file -> Path.binding val file_markup: file -> Markup.T val print_file: file -> string val report_file: Proof.context -> Position.T -> file -> unit val get_files: theory -> file list val get_file: theory -> Path.binding -> file val get_files_in: Path.binding list * theory -> (file * Position.T) list val check_files_in: Proof.context -> (string * Position.T) list * (string * Position.T) option -> Path.binding list * theory val write_file: Path.T -> file -> unit val export_file: theory -> file -> unit type file_type = {name: string, ext: string, make_comment: string -> string, make_string: string -> string} val file_type: binding -> {ext: string, make_comment: string -> string, make_string: string -> string} -> theory -> theory val antiquotation: binding -> 'a Token.context_parser -> ({context: Proof.context, source: Token.src, file_type: file_type, argument: 'a} -> string) -> theory -> theory val set_string: string -> Proof.context -> Proof.context val generate_file: Path.binding * Input.source -> Proof.context -> local_theory val generate_file_cmd: (string * Position.T) * Input.source -> local_theory -> local_theory val export_generated_files: Proof.context -> (Path.binding list * theory) list -> unit val export_generated_files_cmd: Proof.context -> ((string * Position.T) list * (string * Position.T) option) list -> unit val with_compile_dir: (Path.T -> unit) -> unit val compile_generated_files: Proof.context -> (Path.binding list * theory) list -> (Path.T list * Path.T) list -> (Path.binding list * bool option) list -> Path.binding -> Input.source -> unit val compile_generated_files_cmd: Proof.context -> ((string * Position.T) list * (string * Position.T) option) list -> ((string * Position.T) list * (string * Position.T)) list -> ((string * Position.T) list * bool option) list -> string * Position.T -> Input.source -> unit val execute: Path.T -> Input.source -> string -> string end; structure Generated_Files: GENERATED_FILES = struct (** context data **) +type file = Path.T * (Position.T * string); + type file_type = {name: string, ext: string, make_comment: string -> string, make_string: string -> string}; type antiquotation = Token.src -> Proof.context -> file_type -> string; +fun err_dup_files path pos pos' = + error ("Duplicate generated file: " ^ Path.print path ^ Position.here_list [pos, pos']); + structure Data = Theory_Data ( type T = - (Path.T * (Position.T * string)) list * (*files for current theory*) + file list Symtab.table * (*files for named theory*) 
file_type Name_Space.table * (*file types*) antiquotation Name_Space.table; (*antiquotations*) val empty = - ([], + (Symtab.empty, Name_Space.empty_table Markup.file_typeN, Name_Space.empty_table Markup.antiquotationN); - val extend = @{apply 3(1)} (K []); - fun merge ((_, types1, antiqs1), (_, types2, antiqs2)) = - ([], - Name_Space.merge_tables (types1, types2), - Name_Space.merge_tables (antiqs1, antiqs2)); + val extend = I; + fun merge ((files1, types1, antiqs1), (files2, types2, antiqs2)) = + let + val files' = + (files1, files2) |> Symtab.join (K (Library.merge (fn ((path1, file1), (path2, file2)) => + if path1 <> path2 then false + else if file1 = file2 then true + else err_dup_files path1 (#1 file1) (#1 file2)))); + val types' = Name_Space.merge_tables (types1, types2); + val antiqs' = Name_Space.merge_tables (antiqs1, antiqs2); + in (files', types', antiqs') end; ); -fun add_files (binding, content) = +fun lookup_files thy = + Symtab.lookup_list (#1 (Data.get thy)) (Context.theory_long_name thy); + +fun update_files thy_files' thy = + (Data.map o @{apply 3(1)}) (Symtab.update (Context.theory_long_name thy, thy_files')) thy; + +fun add_files (binding, content) thy = let val _ = Path.proper_binding binding; val (path, pos) = Path.dest_binding binding; - in - (Data.map o @{apply 3(1)}) (fn files => - (case AList.lookup (op =) files path of - SOME (pos', _) => - error ("Duplicate generated file: " ^ Path.print path ^ Position.here_list [pos, pos']) - | NONE => (path, (pos, content)) :: files)) - end; + val thy_files = lookup_files thy; + val thy_files' = + (case AList.lookup (op =) thy_files path of + SOME (pos', _) => err_dup_files path pos pos' + | NONE => (path, (pos, content)) :: thy_files); + in update_files thy_files' thy end; (* get files *) type file = {path: Path.T, pos: Position.T, content: string}; fun file_binding (file: file) = Path.binding (#path file, #pos file); fun file_markup (file: file) = (Markup.entityN, Position.def_properties_of (#pos file)); fun print_file (file: file) = Markup.markup (file_markup file) (quote (Path.implode (#path file))); fun report_file ctxt pos file = Context_Position.report ctxt pos (file_markup file); fun get_files thy = - Data.get thy |> #1 |> rev |> map (fn (path, (pos, content)) => + lookup_files thy |> rev |> map (fn (path, (pos, content)) => {path = path, pos = pos, content = content}: file); fun get_file thy binding = let val (path, pos) = Path.dest_binding binding in (case find_first (fn file => #path file = path) (get_files thy) of SOME file => file | NONE => error ("Missing generated file " ^ Path.print path ^ " in theory " ^ quote (Context.theory_long_name thy) ^ Position.here pos)) end; fun get_files_in ([], thy) = map (rpair Position.none) (get_files thy) | get_files_in (files, thy) = map (fn binding => (get_file thy binding, Path.pos_of_binding binding)) files; (* check and output files *) fun check_files_in ctxt (files, opt_thy) = let val thy = (case opt_thy of SOME name => Theory.check {long = false} ctxt name | NONE => Proof_Context.theory_of ctxt); in (map Path.explode_binding files, thy) end; fun write_file dir (file: file) = let val path = Path.expand (Path.append dir (#path file)); val _ = Isabelle_System.mkdirs (Path.dir path); val _ = File.generate path (#content file); in () end; fun export_file thy (file: file) = Export.export thy (file_binding file) [XML.Text (#content file)]; (* file types *) fun get_file_type thy ext = if ext = "" then error "Bad file extension" else (#2 (Data.get thy), NONE) |-> 
Name_Space.fold_table (fn (_, file_type) => fn res => if #ext file_type = ext then SOME file_type else res) |> (fn SOME file_type => file_type | NONE => error ("Unknown file type for extension " ^ quote ext)); fun file_type binding {ext, make_comment, make_string} thy = let val name = Binding.name_of binding; val pos = Binding.pos_of binding; val file_type = {name = name, ext = ext, make_comment = make_comment, make_string = make_string}; val table = #2 (Data.get thy); val space = Name_Space.space_of_table table; val context = Context.Theory thy |> Name_Space.map_naming (K Name_Space.global_naming); val (_, data') = table |> Name_Space.define context true (Binding.make (name, pos), file_type); val _ = (case try (#name o get_file_type thy) ext of NONE => () | SOME name' => error ("Extension " ^ quote ext ^ " already registered for file type " ^ quote (Markup.markup (Name_Space.markup space name') name') ^ Position.here pos)); in thy |> (Data.map o @{apply 3(2)}) (K data') end; (* antiquotations *) val get_antiquotations = #3 o Data.get o Proof_Context.theory_of; fun antiquotation name scan body thy = let fun ant src ctxt file_type : string = let val (x, ctxt') = Token.syntax scan src ctxt in body {context = ctxt', source = src, file_type = file_type, argument = x} end; in thy |> (Data.map o @{apply 3(3)}) (Name_Space.define (Context.Theory thy) true (name, ant) #> #2) end; val scan_antiq = Antiquote.scan_control >> Antiquote.Control || Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> (Antiquote.Text o Symbol_Pos.symbol); fun read_source ctxt (file_type: file_type) source = let val _ = Context_Position.report ctxt (Input.pos_of source) (Markup.language {name = #name file_type, symbols = false, antiquotes = true, delimited = Input.is_delimited source}); val (input, _) = Input.source_explode source |> Scan.error (Scan.finite Symbol_Pos.stopper (Scan.repeat scan_antiq)); val _ = Context_Position.reports ctxt (Antiquote.antiq_reports input); fun expand antiq = (case antiq of Antiquote.Text s => s | Antiquote.Control {name, body, ...} => let val src = Token.make_src name (if null body then [] else [Token.read_cartouche body]); val (src', ant) = Token.check_src ctxt get_antiquotations src; in ant src' ctxt file_type end | Antiquote.Antiq antiq => error ("Bad antiquotation " ^ Position.here (#1 (#range antiq)))); in implode (map expand input) end; (** Isar commands **) (* generate_file *) fun generate_file (binding, source) lthy = let val thy = Proof_Context.theory_of lthy; val (path, pos) = Path.dest_binding binding; val file_type = get_file_type thy (#2 (Path.split_ext path)) handle ERROR msg => error (msg ^ Position.here pos); val header = #make_comment file_type " generated by Isabelle "; val content = header ^ "\n" ^ read_source lthy file_type source; in lthy |> (Local_Theory.background_theory o add_files) (binding, content) end; fun generate_file_cmd (file, source) lthy = generate_file (Path.explode_binding file, source) lthy; (* export_generated_files *) fun export_generated_files ctxt args = let val thy = Proof_Context.theory_of ctxt in (case map #1 (maps get_files_in args) of [] => () | files => (List.app (export_file thy) files; writeln (Export.message thy Path.current); writeln (cat_lines (map (prefix " " o print_file) files)))) end; fun export_generated_files_cmd ctxt args = export_generated_files ctxt (map (check_files_in ctxt) args); (* compile_generated_files *) val compile_dir = Config.declare_string ("compile_dir", \<^here>) (K ""); fun with_compile_dir body : unit = body 
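(*A hedged usage sketch at the Isar level, with a hypothetical file name: the header comment is supplied by the registered "hs" file type, and antiquotations inside the cartouche are expanded by read_source above.*)
generate_file "Hello.hs" = \<open>
module Hello where

hello :: String
hello = "generated"
\<close>

export_generated_files "Hello.hs"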
(Path.explode (Config.get (Context.the_local_context ()) compile_dir)); fun compile_generated_files ctxt args external export export_prefix source = Isabelle_System.with_tmp_dir "compile" (fn dir => let val thy = Proof_Context.theory_of ctxt; val files = maps get_files_in args; val _ = List.app (fn (file, pos) => report_file ctxt pos file) files; val _ = List.app (write_file dir o #1) files; val _ = external |> List.app (fn (files, base_dir) => files |> List.app (fn file => Isabelle_System.copy_file_base (base_dir, file) dir)); val _ = ML_Context.eval_in (SOME (Config.put compile_dir (Path.implode dir) ctxt)) ML_Compiler.flags (Input.pos_of source) (ML_Lex.read "Generated_Files.with_compile_dir (" @ ML_Lex.read_source source @ ML_Lex.read ")"); val _ = export |> List.app (fn (bindings, executable) => bindings |> List.app (fn binding0 => let val binding = binding0 |> Path.map_binding (executable = SOME true ? Path.exe); val (path, pos) = Path.dest_binding binding; val content = (case try File.read (Path.append dir path) of SOME context => context | NONE => error ("Missing result file " ^ Path.print path ^ Position.here pos)); val _ = Export.report_export thy export_prefix; val binding' = Path.map_binding (Path.append (Path.path_of_binding export_prefix)) binding; in (if is_some executable then Export.export_executable else Export.export) thy binding' [XML.Text content] end)); val _ = if null export then () else writeln (Export.message thy (Path.path_of_binding export_prefix)); in () end); fun compile_generated_files_cmd ctxt args external export export_prefix source = compile_generated_files ctxt (map (check_files_in ctxt) args) (external |> map (fn (raw_files, raw_base_dir) => let val base_dir = Resources.check_dir ctxt NONE raw_base_dir; fun check (s, pos) = (Resources.check_file ctxt (SOME base_dir) (s, pos); Path.explode s); val files = map check raw_files; in (files, base_dir) end)) ((map o apfst o map) Path.explode_binding export) (Path.explode_binding export_prefix) source; (* execute compiler -- auxiliary *) fun execute dir title compile = Isabelle_System.bash_output_check ("cd " ^ File.bash_path dir ^ " && " ^ compile) handle ERROR msg => let val (s, pos) = Input.source_content title in error (s ^ " failed" ^ Position.here pos ^ ":\n" ^ msg) end; (** concrete file types **) val _ = Theory.setup (file_type \<^binding>\Haskell\ {ext = "hs", make_comment = enclose "{-" "-}", make_string = GHC.print_string}); (** concrete antiquotations **) (* ML expression as string literal *) structure Local_Data = Proof_Data ( type T = string option; fun init _ = NONE; ); val set_string = Local_Data.put o SOME; fun the_string ctxt = (case Local_Data.get ctxt of SOME s => s | NONE => raise Fail "No result string"); val _ = Theory.setup (antiquotation \<^binding>\cartouche\ (Scan.lift Args.cartouche_input) (fn {context = ctxt, file_type, argument, ...} => ctxt |> Context.proof_map (ML_Context.expression (Input.pos_of argument) (ML_Lex.read "Theory.local_setup (Generated_Files.set_string (" @ ML_Lex.read_source argument @ ML_Lex.read "))")) |> the_string |> #make_string file_type)); (* file-system paths *) fun path_antiquotation binding check = antiquotation binding (Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => (check ctxt (SOME Path.current) (name, pos) |> Path.implode))) (fn {file_type, argument, ...} => #make_string file_type argument); val _ = Theory.setup (path_antiquotation \<^binding>\path\ Resources.check_path #> path_antiquotation \<^binding>\file\ 
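(*A hedged sketch of the compile step (hypothetical file names; keyword syntax assumed from the command parser, and ghc assumed on the shell PATH): sources are written to a temporary directory, the ML block runs there via with_compile_dir, and export_files publishes results as session exports.*)
compile_generated_files "Hello.hs"
  export_files "Hello.hi"
  where \<open>fn dir =>
    writeln (Generated_Files.execute dir \<open>GHC\<close> "ghc -c Hello.hs")\<close>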
Resources.check_file #> path_antiquotation \<^binding>\dir\ Resources.check_dir); end; diff --git a/src/Pure/more_thm.ML b/src/Pure/more_thm.ML --- a/src/Pure/more_thm.ML +++ b/src/Pure/more_thm.ML @@ -1,736 +1,744 @@ (* Title: Pure/more_thm.ML Author: Makarius Further operations on type ctyp/cterm/thm, outside the inference kernel. *) infix aconvc; signature BASIC_THM = sig include BASIC_THM val show_consts: bool Config.T val show_hyps: bool Config.T val show_tags: bool Config.T structure Ctermtab: TABLE structure Thmtab: TABLE val aconvc: cterm * cterm -> bool type attribute = Context.generic * thm -> Context.generic option * thm option end; signature THM = sig include THM structure Ctermtab: TABLE structure Thmtab: TABLE val eq_ctyp: ctyp * ctyp -> bool val aconvc: cterm * cterm -> bool val add_tvars: thm -> ctyp list -> ctyp list val add_frees: thm -> cterm list -> cterm list val add_vars: thm -> cterm list -> cterm list val dest_funT: ctyp -> ctyp * ctyp val strip_type: ctyp -> ctyp list * ctyp val all_name: Proof.context -> string * cterm -> cterm -> cterm val all: Proof.context -> cterm -> cterm -> cterm val mk_binop: cterm -> cterm -> cterm -> cterm val dest_binop: cterm -> cterm * cterm val dest_implies: cterm -> cterm * cterm val dest_equals: cterm -> cterm * cterm val dest_equals_lhs: cterm -> cterm val dest_equals_rhs: cterm -> cterm val lhs_of: thm -> cterm val rhs_of: thm -> cterm val fast_term_ord: cterm ord val term_ord: cterm ord val thm_ord: thm ord val cterm_cache: (cterm -> 'a) -> cterm -> 'a val thm_cache: (thm -> 'a) -> thm -> 'a val is_reflexive: thm -> bool val eq_thm: thm * thm -> bool val eq_thm_prop: thm * thm -> bool val eq_thm_strict: thm * thm -> bool val equiv_thm: theory -> thm * thm -> bool val class_triv: theory -> class -> thm val of_sort: ctyp * sort -> thm list val is_dummy: thm -> bool val add_thm: thm -> thm list -> thm list val del_thm: thm -> thm list -> thm list val merge_thms: thm list * thm list -> thm list val full_rules: thm Item_Net.T val intro_rules: thm Item_Net.T val elim_rules: thm Item_Net.T val declare_hyps: cterm -> Proof.context -> Proof.context val assume_hyps: cterm -> Proof.context -> thm * Proof.context val unchecked_hyps: Proof.context -> Proof.context val restore_hyps: Proof.context -> Proof.context -> Proof.context val undeclared_hyps: Context.generic -> thm -> term list val check_hyps: Context.generic -> thm -> thm val declare_term_sorts: term -> Proof.context -> Proof.context val extra_shyps': Proof.context -> thm -> sort list val check_shyps: Proof.context -> thm -> thm val weaken_sorts': Proof.context -> cterm -> cterm val elim_implies: thm -> thm -> thm val forall_intr_name: string * cterm -> thm -> thm val forall_elim_var: int -> thm -> thm val forall_elim_vars: int -> thm -> thm val instantiate_frees: ((string * sort) * ctyp) list * ((string * typ) * cterm) list -> thm -> thm val instantiate': ctyp option list -> cterm option list -> thm -> thm val forall_intr_frees: thm -> thm val unvarify_global: theory -> thm -> thm val unvarify_axiom: theory -> string -> thm val rename_params_rule: string list * int -> thm -> thm val rename_boundvars: term -> term -> thm -> thm val add_axiom: Proof.context -> binding * term -> theory -> (string * thm) * theory val add_axiom_global: binding * term -> theory -> (string * thm) * theory val add_def: Defs.context -> bool -> bool -> binding * term -> theory -> (string * thm) * theory val add_def_global: bool -> bool -> binding * term -> theory -> (string * thm) * theory type attribute = 
Context.generic * thm -> Context.generic option * thm option type binding = binding * attribute list val tag_rule: string * string -> thm -> thm val untag_rule: string -> thm -> thm val is_free_dummy: thm -> bool val tag_free_dummy: thm -> thm val def_name: string -> string val def_name_optional: string -> string -> string val def_binding: Binding.binding -> Binding.binding val def_binding_optional: Binding.binding -> Binding.binding -> Binding.binding val make_def_binding: bool -> Binding.binding -> Binding.binding val has_name_hint: thm -> bool val get_name_hint: thm -> string val put_name_hint: string -> thm -> thm val theoremK: string val legacy_get_kind: thm -> string val kind_rule: string -> thm -> thm val rule_attribute: thm list -> (Context.generic -> thm -> thm) -> attribute val declaration_attribute: (thm -> Context.generic -> Context.generic) -> attribute val mixed_attribute: (Context.generic * thm -> Context.generic * thm) -> attribute val apply_attribute: attribute -> thm -> Context.generic -> thm * Context.generic val attribute_declaration: attribute -> thm -> Context.generic -> Context.generic val theory_attributes: attribute list -> thm -> theory -> thm * theory val proof_attributes: attribute list -> thm -> Proof.context -> thm * Proof.context val no_attributes: 'a -> 'a * 'b list val simple_fact: 'a -> ('a * 'b list) list val tag: string * string -> attribute val untag: string -> attribute val kind: string -> attribute val register_proofs: thm list lazy -> theory -> theory val consolidate_theory: theory -> unit val expose_theory: theory -> unit val show_consts: bool Config.T val show_hyps: bool Config.T val show_tags: bool Config.T val pretty_thm_raw: Proof.context -> {quote: bool, show_hyps: bool} -> thm -> Pretty.T val pretty_thm: Proof.context -> thm -> Pretty.T val pretty_thm_item: Proof.context -> thm -> Pretty.T val pretty_thm_global: theory -> thm -> Pretty.T val string_of_thm: Proof.context -> thm -> string val string_of_thm_global: theory -> thm -> string end; structure Thm: THM = struct (** basic operations **) (* collecting ctyps and cterms *) val eq_ctyp = op = o apply2 Thm.typ_of; val op aconvc = op aconv o apply2 Thm.term_of; val add_tvars = Thm.fold_atomic_ctyps (fn a => is_TVar (Thm.typ_of a) ? insert eq_ctyp a); val add_frees = Thm.fold_atomic_cterms (fn a => is_Free (Thm.term_of a) ? insert (op aconvc) a); val add_vars = Thm.fold_atomic_cterms (fn a => is_Var (Thm.term_of a) ? 
insert (op aconvc) a); (* ctyp operations *) fun dest_funT cT = (case Thm.typ_of cT of Type ("fun", _) => let val [A, B] = Thm.dest_ctyp cT in (A, B) end | T => raise TYPE ("dest_funT", [T], [])); (* ctyp version of strip_type: maps [T1,...,Tn]--->T to ([T1,T2,...,Tn], T) *) fun strip_type cT = (case Thm.typ_of cT of Type ("fun", _) => let val (cT1, cT2) = dest_funT cT; val (cTs, cT') = strip_type cT2 in (cT1 :: cTs, cT') end | _ => ([], cT)); (* cterm operations *) fun all_name ctxt (x, t) A = let val T = Thm.typ_of_cterm t; val all_const = Thm.cterm_of ctxt (Const ("Pure.all", (T --> propT) --> propT)); in Thm.apply all_const (Thm.lambda_name (x, t) A) end; fun all ctxt t A = all_name ctxt ("", t) A; fun mk_binop c a b = Thm.apply (Thm.apply c a) b; fun dest_binop ct = (Thm.dest_arg1 ct, Thm.dest_arg ct); fun dest_implies ct = (case Thm.term_of ct of Const ("Pure.imp", _) $ _ $ _ => dest_binop ct | _ => raise TERM ("dest_implies", [Thm.term_of ct])); fun dest_equals ct = (case Thm.term_of ct of Const ("Pure.eq", _) $ _ $ _ => dest_binop ct | _ => raise TERM ("dest_equals", [Thm.term_of ct])); fun dest_equals_lhs ct = (case Thm.term_of ct of Const ("Pure.eq", _) $ _ $ _ => Thm.dest_arg1 ct | _ => raise TERM ("dest_equals_lhs", [Thm.term_of ct])); fun dest_equals_rhs ct = (case Thm.term_of ct of Const ("Pure.eq", _) $ _ $ _ => Thm.dest_arg ct | _ => raise TERM ("dest_equals_rhs", [Thm.term_of ct])); val lhs_of = dest_equals_lhs o Thm.cprop_of; val rhs_of = dest_equals_rhs o Thm.cprop_of; (* certified term order *) val fast_term_ord = Term_Ord.fast_term_ord o apply2 Thm.term_of; val term_ord = Term_Ord.term_ord o apply2 Thm.term_of; (* thm order: ignores theory context! *) val thm_ord = Term_Ord.fast_term_ord o apply2 Thm.prop_of <<< list_ord (prod_ord Term_Ord.fast_term_ord Term_Ord.fast_term_ord) o apply2 Thm.tpairs_of <<< list_ord Term_Ord.fast_term_ord o apply2 Thm.hyps_of <<< list_ord Term_Ord.sort_ord o apply2 Thm.shyps_of; (* tables and caches *) structure Ctermtab = Table(type key = cterm val ord = fast_term_ord); structure Thmtab = Table(type key = thm val ord = thm_ord); fun cterm_cache f = Cache.create Ctermtab.empty Ctermtab.lookup Ctermtab.update f; fun thm_cache f = Cache.create Thmtab.empty Thmtab.lookup Thmtab.update f; (* equality *) fun is_reflexive th = op aconv (Logic.dest_equals (Thm.prop_of th)) handle TERM _ => false; val eq_thm = is_equal o thm_ord; val eq_thm_prop = op aconv o apply2 Thm.full_prop_of; fun eq_thm_strict ths = eq_thm ths andalso Context.eq_thy_id (apply2 Thm.theory_id ths) andalso op = (apply2 Thm.maxidx_of ths) andalso op = (apply2 Thm.get_tags ths); (* pattern equivalence *) fun equiv_thm thy ths = Pattern.equiv thy (apply2 (Thm.full_prop_of o Thm.transfer thy) ths); (* type classes and sorts *) fun class_triv thy c = Thm.of_class (Thm.global_ctyp_of thy (TVar ((Name.aT, 0), [c])), c); fun of_sort (T, S) = map (fn c => Thm.of_class (T, c)) S; (* misc operations *) fun is_dummy thm = (case try Logic.dest_term (Thm.concl_of thm) of NONE => false | SOME t => Term.is_dummy_pattern (Term.head_of t)); (* collections of theorems in canonical order *) val add_thm = update eq_thm_prop; val del_thm = remove eq_thm_prop; val merge_thms = merge eq_thm_prop; val full_rules = Item_Net.init eq_thm_prop (single o Thm.full_prop_of); val intro_rules = Item_Net.init eq_thm_prop (single o Thm.concl_of); val elim_rules = Item_Net.init eq_thm_prop (single o Thm.major_prem_of); (** declared hyps and sort hyps **) structure Hyps = Proof_Data ( type T = {checked_hyps: bool, 
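(*A hedged sketch of the certified-type operations above, kept Pure-only so it needs no object logic:*)
val cT = \<^ctyp>\<open>prop \<Rightarrow> prop \<Rightarrow> prop\<close>;
val (argTs, resT) = Thm.strip_type cT;   (*([prop, prop], prop)*)
val (domT, rngT) = Thm.dest_funT cT;     (*(prop, prop \<Rightarrow> prop)*)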
hyps: Termtab.set, shyps: sort Ord_List.T}; fun init _ : T = {checked_hyps = true, hyps = Termtab.empty, shyps = []}; ); fun map_hyps f = Hyps.map (fn {checked_hyps, hyps, shyps} => let val (checked_hyps', hyps', shyps') = f (checked_hyps, hyps, shyps) in {checked_hyps = checked_hyps', hyps = hyps', shyps = shyps'} end); (* hyps *) fun declare_hyps raw_ct ctxt = ctxt |> map_hyps (fn (checked_hyps, hyps, shyps) => let val ct = Thm.transfer_cterm (Proof_Context.theory_of ctxt) raw_ct; val hyps' = Termtab.update (Thm.term_of ct, ()) hyps; in (checked_hyps, hyps', shyps) end); fun assume_hyps ct ctxt = (Thm.assume ct, declare_hyps ct ctxt); val unchecked_hyps = map_hyps (fn (_, hyps, shyps) => (false, hyps, shyps)); fun restore_hyps ctxt = map_hyps (fn (_, hyps, shyps) => (#checked_hyps (Hyps.get ctxt), hyps, shyps)); fun undeclared_hyps context th = Thm.hyps_of th |> filter_out (case context of Context.Theory _ => K false | Context.Proof ctxt => (case Hyps.get ctxt of {checked_hyps = false, ...} => K true | {hyps, ...} => Termtab.defined hyps)); fun check_hyps context th = (case undeclared_hyps context th of [] => th | undeclared => error (Pretty.string_of (Pretty.big_list "Undeclared hyps:" (map (Pretty.item o single o Syntax.pretty_term (Syntax.init_pretty context)) undeclared)))); (* shyps *) fun declare_term_sorts t = map_hyps (fn (checked_hyps, hyps, shyps) => (checked_hyps, hyps, Sorts.insert_term t shyps)); fun extra_shyps' ctxt th = Sorts.subtract (#shyps (Hyps.get ctxt)) (Thm.extra_shyps th); fun check_shyps ctxt raw_th = let val th = Thm.strip_shyps raw_th; val extra_shyps = extra_shyps' ctxt th; in if null extra_shyps then th else error (Pretty.string_of (Pretty.block (Pretty.str "Pending sort hypotheses:" :: Pretty.brk 1 :: Pretty.commas (map (Syntax.pretty_sort ctxt) extra_shyps)))) end; val weaken_sorts' = Thm.weaken_sorts o #shyps o Hyps.get; (** basic derived rules **) (*Elimination of implication A A \ B ------------ B *) fun elim_implies thA thAB = Thm.implies_elim thAB thA; (* forall_intr_name *) fun forall_intr_name (a, x) th = let val th' = Thm.forall_intr x th; val prop' = (case Thm.prop_of th' of all $ Abs (_, T, b) => all $ Abs (a, T, b)); in Thm.renamed_prop prop' th' end; (* forall_elim_var(s) *) local fun dest_all ct = (case Thm.term_of ct of Const ("Pure.all", _) $ Abs (a, _, _) => let val (x, ct') = Thm.dest_abs NONE (Thm.dest_arg ct) in SOME ((a, Thm.ctyp_of_cterm x), ct') end | _ => NONE); fun dest_all_list ct = (case dest_all ct of NONE => [] | SOME (v, ct') => v :: dest_all_list ct'); fun forall_elim_vars_list vars i th = let val used = (Thm.fold_terms o Term.fold_aterms) (fn Var ((x, j), _) => if i = j then insert (op =) x else I | _ => I) th []; val vars' = (Name.variant_list used (map #1 vars), vars) |> ListPair.map (fn (x, (_, T)) => Thm.var ((x, i), T)); in fold Thm.forall_elim vars' th end; in fun forall_elim_vars i th = forall_elim_vars_list (dest_all_list (Thm.cprop_of th)) i th; fun forall_elim_var i th = let val vars = (case dest_all (Thm.cprop_of th) of SOME (v, _) => [v] | NONE => raise THM ("forall_elim_var", i, [th])); in forall_elim_vars_list vars i th end; end; (* instantiate frees *) fun instantiate_frees ([], []) th = th | instantiate_frees (instT, inst) th = let val idx = Thm.maxidx_of th + 1; fun index ((a, A), b) = (((a, idx), A), b); val insts = (map index instT, map index inst); val frees = (map (#1 o #1) instT, map (#1 o #1) inst); val hyps = Thm.chyps_of th; val inst_cterm = Thm.generalize_cterm frees idx #> Thm.instantiate_cterm 
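(*A hedged sketch of the hyps discipline above, with a hypothetical proposition A: assume_hyps records the hypothesis in the context, so check_hyps accepts the resulting theorem; a hyp introduced by plain Thm.assume would instead be reported as undeclared.*)
val (th, ctxt') = Thm.assume_hyps \<^cterm>\<open>PROP A\<close> \<^context>;
val _ = Thm.check_hyps (Context.Proof ctxt') th;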
insts; in th |> fold_rev Thm.implies_intr hyps |> Thm.generalize frees idx |> Thm.instantiate insts |> fold (elim_implies o Thm.assume o inst_cterm) hyps end; (* instantiate by left-to-right occurrence of variables *) fun instantiate' cTs cts thm = let fun err msg = raise TYPE ("instantiate': " ^ msg, map_filter (Option.map Thm.typ_of) cTs, map_filter (Option.map Thm.term_of) cts); fun zip_vars xs ys = zip_options xs ys handle ListPair.UnequalLengths => err "more instantiations than variables in thm"; val thm' = Thm.instantiate ((zip_vars (rev (Thm.fold_terms Term.add_tvars thm [])) cTs), []) thm; val thm'' = Thm.instantiate ([], zip_vars (rev (Thm.fold_terms Term.add_vars thm' [])) cts) thm'; in thm'' end; (* forall_intr_frees: generalization over all suitable Free variables *) fun forall_intr_frees th = let val fixed = fold Term.add_frees (Thm.terms_of_tpairs (Thm.tpairs_of th) @ Thm.hyps_of th) []; val frees = Thm.fold_atomic_cterms (fn a => (case Thm.term_of a of Free v => not (member (op =) fixed v) ? insert (op aconvc) a | _ => I)) th []; in fold Thm.forall_intr frees th end; (* unvarify_global: global schematic variables *) fun unvarify_global thy th = let val prop = Thm.full_prop_of th; val _ = map Logic.unvarify_global (prop :: Thm.hyps_of th) handle TERM (msg, _) => raise THM (msg, 0, [th]); val instT = rev (Term.add_tvars prop []) |> map (fn v as ((a, _), S) => (v, TFree (a, S))); val inst = rev (Term.add_vars prop []) |> map (fn ((a, i), T) => let val T' = Term_Subst.instantiateT instT T in (((a, i), T'), Thm.global_cterm_of thy (Free ((a, T')))) end); in Thm.instantiate (map (apsnd (Thm.global_ctyp_of thy)) instT, inst) th end; fun unvarify_axiom thy = unvarify_global thy o Thm.axiom thy; (* user renaming of parameters in a subgoal *) (*The names, if distinct, are used for the innermost parameters of subgoal i; preceding parameters may be renamed to make all parameters distinct.*) fun rename_params_rule (names, i) st = let val (_, Bs, Bi, C) = Thm.dest_state (st, i); val params = map #1 (Logic.strip_params Bi); val short = length params - length names; val names' = if short < 0 then error "More names than parameters in subgoal!" else Name.variant_list names (take short params) @ names; val free_names = Term.fold_aterms (fn Free (x, _) => insert (op =) x | _ => I) Bi []; val Bi' = Logic.list_rename_params names' Bi; in (case duplicates (op =) names of a :: _ => (warning ("Can't rename. Bound variables not distinct: " ^ a); st) | [] => (case inter (op =) names free_names of a :: _ => (warning ("Can't rename. 
Bound/Free variable clash: " ^ a); st) | [] => Thm.renamed_prop (Logic.list_implies (Bs @ [Bi'], C)) st)) end; (* preservation of bound variable names *) fun rename_boundvars pat obj th = (case Term.rename_abs pat obj (Thm.prop_of th) of NONE => th | SOME prop' => Thm.renamed_prop prop' th); (** specification primitives **) (* rules *) fun stripped_sorts thy t = let val tfrees = rev (Term.add_tfrees t []); val tfrees' = map (fn a => (a, [])) (Name.variant_list [] (map #1 tfrees)); val recover = map2 (fn (a', S') => fn (a, S) => (((a', 0), S'), Thm.global_ctyp_of thy (TVar ((a, 0), S)))) tfrees' tfrees; val strip = map (apply2 TFree) (tfrees ~~ tfrees'); val t' = Term.map_types (Term.map_atyps (perhaps (AList.lookup (op =) strip))) t; in (strip, recover, t') end; fun add_axiom ctxt (b, prop) thy = let val _ = Sign.no_vars ctxt prop; val (strip, recover, prop') = stripped_sorts thy prop; val constraints = map (fn (TFree (_, S), T) => (T, S)) strip; val of_sorts = maps (fn (T as TFree (_, S), _) => of_sort (Thm.ctyp_of ctxt T, S)) strip; val thy' = thy |> Theory.add_axiom ctxt (b, Logic.list_implies (maps Logic.mk_of_sort constraints, prop')); val axm_name = Sign.full_name thy' b; val axm' = Thm.axiom thy' axm_name; val thm = Thm.instantiate (recover, []) axm' |> unvarify_global thy' |> fold elim_implies of_sorts; in ((axm_name, thm), thy') end; fun add_axiom_global arg thy = add_axiom (Syntax.init_pretty_global thy) arg thy; fun add_def (context as (ctxt, _)) unchecked overloaded (b, prop) thy = let val _ = Sign.no_vars ctxt prop; val prems = map (Thm.cterm_of ctxt) (Logic.strip_imp_prems prop); val (_, recover, concl') = stripped_sorts thy (Logic.strip_imp_concl prop); val thy' = Theory.add_def context unchecked overloaded (b, concl') thy; val axm_name = Sign.full_name thy' b; val axm' = Thm.axiom thy' axm_name; val thm = Thm.instantiate (recover, []) axm' |> unvarify_global thy' |> fold_rev Thm.implies_intr prems; in ((axm_name, thm), thy') end; fun add_def_global unchecked overloaded arg thy = add_def (Defs.global_context thy) unchecked overloaded arg thy; (** theorem tags **) (* add / delete tags *) fun tag_rule tg = Thm.map_tags (insert (op =) tg); fun untag_rule s = Thm.map_tags (filter_out (fn (s', _) => s = s')); (* free dummy thm -- for abstract closure *) val free_dummyN = "free_dummy"; fun is_free_dummy thm = Properties.defined (Thm.get_tags thm) free_dummyN; val tag_free_dummy = tag_rule (free_dummyN, ""); (* def_name *) fun def_name c = c ^ "_def"; fun def_name_optional c "" = def_name c | def_name_optional _ name = name; val def_binding = Binding.map_name def_name #> Binding.reset_pos; fun def_binding_optional b name = if Binding.is_empty name then def_binding b else name; fun make_def_binding cond b = if cond then def_binding b else Binding.empty; (* unofficial theorem names *) fun has_name_hint thm = AList.defined (op =) (Thm.get_tags thm) Markup.nameN; fun the_name_hint thm = the (AList.lookup (op =) (Thm.get_tags thm) Markup.nameN); fun get_name_hint thm = if has_name_hint thm then the_name_hint thm else "??.unknown"; fun put_name_hint name = untag_rule Markup.nameN #> tag_rule (Markup.nameN, name); (* theorem kinds *) val theoremK = "theorem"; fun legacy_get_kind thm = the_default "" (Properties.get (Thm.get_tags thm) Markup.kindN); fun kind_rule k = tag_rule (Markup.kindN, k) o untag_rule Markup.kindN; (** attributes **) (*attributes subsume any kind of rules or context modifiers*) type attribute = Context.generic * thm -> Context.generic option * thm option; type binding 
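(*A hedged, Pure-only sketch of instantiate' above: the positional NONE/SOME arguments follow the left-to-right occurrence of schematic type and term variables.*)
val th = @{thm reflexive};   (*?x \<equiv> ?x*)
val th' = Thm.instantiate' [SOME \<^ctyp>\<open>prop\<close>] [SOME \<^cterm>\<open>PROP P\<close>] th;
(*result: PROP P \<equiv> PROP P*)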
= binding * attribute list; fun rule_attribute ths f (x, th) = (NONE, (case find_first is_free_dummy (th :: ths) of SOME th' => SOME th' | NONE => SOME (f x th))); fun declaration_attribute f (x, th) = (if is_free_dummy th then NONE else SOME (f th x), NONE); fun mixed_attribute f (x, th) = let val (x', th') = f (x, th) in (SOME x', SOME th') end; fun apply_attribute (att: attribute) th x = let val (x', th') = att (x, check_hyps x (Thm.transfer'' x th)) in (the_default th th', the_default x x') end; fun attribute_declaration att th x = #2 (apply_attribute att th x); fun apply_attributes mk dest = let fun app [] th x = (th, x) | app (att :: atts) th x = apply_attribute att th (mk x) ||> dest |-> app atts; in app end; val theory_attributes = apply_attributes Context.Theory Context.the_theory; val proof_attributes = apply_attributes Context.Proof Context.the_proof; fun no_attributes x = (x, []); fun simple_fact x = [(x, [])]; fun tag tg = rule_attribute [] (K (tag_rule tg)); fun untag s = rule_attribute [] (K (untag_rule s)); fun kind k = rule_attribute [] (K (k <> "" ? kind_rule k)); (** forked proofs **) structure Proofs = Theory_Data ( - type T = thm list lazy list; - val empty = []; - fun extend _ = empty; - fun merge _ = empty; + type T = thm list lazy Inttab.table; + val empty = Inttab.empty; + val extend = I; + val merge = Inttab.merge (K true); ); -fun register_proofs ths = - (Proofs.map o cons) (Lazy.map_finished (map Thm.trim_context) ths); +fun reset_proofs thy = + if Inttab.is_empty (Proofs.get thy) then NONE + else SOME (Proofs.put Inttab.empty thy); -fun force_proofs thy = rev (Proofs.get thy) |> maps (map (Thm.transfer thy) o Lazy.force); +val _ = Theory.setup (Theory.at_begin reset_proofs); + +fun register_proofs ths thy = + let val entry = (serial (), Lazy.map_finished (map Thm.trim_context) ths) + in (Proofs.map o Inttab.update) entry thy end; + +fun force_proofs thy = + Proofs.get thy |> Inttab.dest |> maps (map (Thm.transfer thy) o Lazy.force o #2); val consolidate_theory = Thm.consolidate o force_proofs; fun expose_theory thy = if Proofterm.export_enabled () then Thm.expose_proofs thy (force_proofs thy) else (); (** print theorems **) (* options *) val show_consts = Config.declare_option_bool ("show_consts", \<^here>); val show_hyps = Config.declare_bool ("show_hyps", \<^here>) (K false); val show_tags = Config.declare_bool ("show_tags", \<^here>) (K false); (* pretty_thm etc. *) fun pretty_tag (name, arg) = Pretty.strs [name, quote arg]; val pretty_tags = Pretty.list "[" "]" o map pretty_tag; fun pretty_thm_raw ctxt {quote, show_hyps = show_hyps'} raw_th = let val show_tags = Config.get ctxt show_tags; val show_hyps = Config.get ctxt show_hyps; val th = raw_th |> perhaps (try (Thm.transfer' ctxt)) |> perhaps (try Thm.strip_shyps); val hyps = if show_hyps then Thm.hyps_of th else undeclared_hyps (Context.Proof ctxt) th; val extra_shyps = extra_shyps' ctxt th; val tags = Thm.get_tags th; val tpairs = Thm.tpairs_of th; val q = if quote then Pretty.quote else I; val prt_term = q o Syntax.pretty_term ctxt; val hlen = length extra_shyps + length hyps + length tpairs; val hsymbs = if hlen = 0 then [] else if show_hyps orelse show_hyps' then [Pretty.brk 2, Pretty.list "[" "]" (map (q o Syntax.pretty_flexpair ctxt) tpairs @ map prt_term hyps @ map (Syntax.pretty_sort ctxt) extra_shyps)] else [Pretty.brk 2, Pretty.str ("[" ^ replicate_string hlen "." 
^ "]")]; val tsymbs = if null tags orelse not show_tags then [] else [Pretty.brk 1, pretty_tags tags]; in Pretty.block (prt_term (Thm.prop_of th) :: (hsymbs @ tsymbs)) end; fun pretty_thm ctxt = pretty_thm_raw ctxt {quote = false, show_hyps = true}; fun pretty_thm_item ctxt th = Pretty.item [pretty_thm ctxt th]; fun pretty_thm_global thy = pretty_thm_raw (Syntax.init_pretty_global thy) {quote = false, show_hyps = false}; val string_of_thm = Pretty.string_of oo pretty_thm; val string_of_thm_global = Pretty.string_of oo pretty_thm_global; open Thm; end; structure Basic_Thm: BASIC_THM = Thm; open Basic_Thm;