diff --git a/src/Doc/Demo_Easychair/ROOT b/src/Doc/Demo_Easychair/ROOT new file mode 100644 --- /dev/null +++ b/src/Doc/Demo_Easychair/ROOT @@ -0,0 +1,9 @@ +session Demo_Easychair (doc) = HOL + + options [document_variants = "demo_easychair"] + theories + Document + document_files (in "$ISABELLE_EASYCHAIR_HOME") + "easychair.cls" + document_files + "root.bib" + "root.tex" diff --git a/src/Doc/Demo_FoilTeX/ROOT b/src/Doc/Demo_FoilTeX/ROOT new file mode 100644 --- /dev/null +++ b/src/Doc/Demo_FoilTeX/ROOT @@ -0,0 +1,11 @@ +session Demo_FoilTeX (doc) = HOL + + options [document_variants = "demo_foiltex", + document_build = "pdflatex", document_logo = "FoilTeX"] + theories + Document + document_files (in "$ISABELLE_FOILTEX_HOME") + "fltfonts.def" + "foil20.clo" + "foils.cls" + document_files + "root.tex" diff --git a/src/Doc/Demo_LIPIcs/ROOT b/src/Doc/Demo_LIPIcs/ROOT new file mode 100644 --- /dev/null +++ b/src/Doc/Demo_LIPIcs/ROOT @@ -0,0 +1,12 @@ +session Demo_LIPIcs (doc) = HOL + + options [document_variants = "demo_lipics", + document_build = "pdflatex", document_heading_prefix = "", document_comment_latex] + theories + Document + document_files (in "$ISABELLE_LIPICS_HOME") + "cc-by.pdf" + "lipics-logo-bw.pdf" + "lipics-v2021.cls" + document_files + "root.bib" + "root.tex" diff --git a/src/Doc/ROOT b/src/Doc/ROOT --- a/src/Doc/ROOT +++ b/src/Doc/ROOT @@ -1,525 +1,490 @@ chapter Doc session Classes (doc) in "Classes" = HOL + options [document_logo = "Isar", document_bibliography, document_variants = "classes", quick_and_dirty] theories [document = false] Setup theories Classes document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "manual.bib" document_files "root.tex" "style.sty" session Codegen (doc) in "Codegen" = HOL + options [document_logo = "Isar", document_bibliography, document_variants = "codegen", print_mode = "no_brackets,iff"] sessions "HOL-Library" theories [document = false] Setup theories Introduction Foundations Refinement Inductive_Predicate Evaluation Computations Adaptation Further document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "manual.bib" document_files "root.tex" "style.sty" session Corec (doc) in "Corec" = Datatypes + options [document_bibliography, document_variants = "corec"] theories Corec document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "manual.bib" document_files "root.tex" "style.sty" session Datatypes (doc) in "Datatypes" = HOL + options [document_bibliography, document_variants = "datatypes"] sessions "HOL-Library" theories [document = false] Setup theories Datatypes document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "manual.bib" document_files "root.tex" "style.sty" session Eisbach (doc) in "Eisbach" = HOL + options [document_logo = "Eisbach", document_bibliography, document_variants = "eisbach", quick_and_dirty, print_mode = "no_brackets,iff", show_question_marks = false] sessions "HOL-Eisbach" theories [document = false] Base theories Preface Manual document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "ttbox.sty" "underscore.sty" "manual.bib" document_files "root.tex" "style.sty" session Functions (doc) in "Functions" = HOL + options [document_bibliography, document_variants = "functions", skip_proofs = false, quick_and_dirty] theories Functions document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "manual.bib" document_files "conclusion.tex" "intro.tex" "root.tex" "style.sty" session How_to_Prove_it (no_doc) in 
"How_to_Prove_it" = HOL + options [document_variants = "how_to_prove_it", show_question_marks = false] theories How_to_Prove_it document_files "root.tex" "root.bib" "prelude.tex" session Intro (doc) in "Intro" = Pure + options [document_logo, document_bibliography, document_build = "build", document_variants = "intro"] document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "ttbox.sty" "manual.bib" document_files "advanced.tex" "build" "foundations.tex" "getting.tex" "root.tex" session Implementation (doc) in "Implementation" = HOL + options [document_logo = "Isar", document_bibliography, document_variants = "implementation", quick_and_dirty] theories Eq Integration Isar Local_Theory "ML" Prelim Proof Syntax Tactic theories [parallel_proofs = 0] Logic document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "ttbox.sty" "underscore.sty" "manual.bib" document_files "root.tex" "style.sty" session Isar_Ref (doc) in "Isar_Ref" = HOL + options [document_logo = "Isar", document_bibliography, document_variants = "isar-ref", quick_and_dirty, thy_output_source] sessions "HOL-Library" theories Preface Synopsis Framework First_Order_Logic Outer_Syntax Document_Preparation Spec Proof Proof_Script Inner_Syntax Generic HOL_Specific Quick_Reference Symbols document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "ttbox.sty" "underscore.sty" "manual.bib" document_files "isar-vm.pdf" "isar-vm.svg" "root.tex" "style.sty" session JEdit (doc) in "JEdit" = HOL + options [document_logo = "jEdit", document_bibliography, document_variants = "jedit", thy_output_source] theories JEdit document_files (in "..") "extra.sty" "iman.sty" "isar.sty" "manual.bib" "pdfsetup.sty" "ttbox.sty" "underscore.sty" document_files (in "../Isar_Ref/document") "style.sty" document_files "auto-tools.png" "bibtex-mode.png" "cite-completion.png" "isabelle-jedit.png" "markdown-document.png" "ml-debugger.png" "output-and-state.png" "output-including-state.png" "output.png" "popup1.png" "popup2.png" "query.png" "root.tex" "scope1.png" "scope2.png" "sidekick-document.png" "sidekick.png" "sledgehammer.png" "theories.png" session Sugar (doc) in "Sugar" = HOL + options [document_bibliography, document_variants = "sugar"] sessions "HOL-Library" theories Sugar document_files (in "..") "pdfsetup.sty" document_files "root.bib" "root.tex" session Locales (doc) in "Locales" = HOL + options [document_bibliography, document_variants = "locales", thy_output_margin = 65, skip_proofs = false] theories Examples1 Examples2 Examples3 document_files (in "..") "pdfsetup.sty" document_files "root.bib" "root.tex" session Logics (doc) in "Logics" = Pure + options [document_logo, document_bibliography, document_build = "build", document_variants = "logics"] document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "ttbox.sty" "manual.bib" document_files (in "../Intro/document") "build" document_files "CTT.tex" "HOL.tex" "LK.tex" "Sequents.tex" "preface.tex" "root.tex" "syntax.tex" session Logics_ZF (doc) in "Logics_ZF" = ZF + options [document_logo = "ZF", document_bibliography, document_build = "build", document_variants = "logics-ZF", print_mode = "brackets", thy_output_source] sessions FOL theories IFOL_examples FOL_examples ZF_examples If ZF_Isar document_files (in "..") "pdfsetup.sty" "isar.sty" "ttbox.sty" "manual.bib" document_files (in "../Intro/document") "build" document_files (in "../Logics/document") "syntax.tex" document_files "FOL.tex" "ZF.tex" "logics.sty" "root.tex" session Main (doc) in "Main" = HOL + 
options [document_variants = "main"] theories Main_Doc document_files (in "..") "pdfsetup.sty" document_files "root.tex" session Nitpick (doc) in "Nitpick" = Pure + options [document_logo = "Nitpick", document_bibliography, document_variants = "nitpick"] document_files (in "..") "pdfsetup.sty" "iman.sty" "manual.bib" document_files "root.tex" session Prog_Prove (doc) in "Prog_Prove" = HOL + options [document_logo = "HOL", document_bibliography, document_variants = "prog-prove", show_question_marks = false] theories Basics Bool_nat_list MyList Types_and_funs Logic Isar document_files (in ".") "MyList.thy" document_files (in "..") "pdfsetup.sty" document_files "bang.pdf" "intro-isabelle.tex" "prelude.tex" "root.bib" "root.tex" "svmono.cls" session Sledgehammer (doc) in "Sledgehammer" = Pure + options [document_logo = "S/H", document_bibliography, document_variants = "sledgehammer"] document_files (in "..") "pdfsetup.sty" "iman.sty" "manual.bib" document_files "root.tex" session System (doc) in "System" = Pure + options [document_logo, document_bibliography, document_variants = "system", thy_output_source] sessions "HOL-Library" theories Environment Sessions Presentation Server Scala Phabricator Misc document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" "isar.sty" "ttbox.sty" "underscore.sty" "manual.bib" document_files (in "../Isar_Ref/document") "style.sty" document_files "root.tex" session Tutorial (doc) in "Tutorial" = HOL + options [document_logo = "HOL", document_bibliography, document_build = "build", document_variants = "tutorial", print_mode = "brackets", skip_proofs = false] directories "Advanced" "CTL" "CodeGen" "Datatype" "Documents" "Fun" "Ifexpr" "Inductive" "Misc" "Protocol" "Rules" "Sets" "ToyList" "Trie" "Types" theories [document = false] Base theories [threads = 1] "ToyList/ToyList_Test" theories [thy_output_indent = 5] "ToyList/ToyList" "Ifexpr/Ifexpr" "CodeGen/CodeGen" "Trie/Trie" "Datatype/ABexpr" "Datatype/unfoldnested" "Datatype/Nested" "Datatype/Fundata" "Fun/fun0" "Advanced/simp2" "CTL/PDL" "CTL/CTL" "CTL/CTLind" "Inductive/Even" "Inductive/Mutual" "Inductive/Star" "Inductive/AB" "Inductive/Advanced" "Misc/Tree" "Misc/Tree2" "Misc/Plus" "Misc/case_exprs" "Misc/fakenat" "Misc/natsum" "Misc/pairs2" "Misc/Option2" "Misc/types" "Misc/prime_def" "Misc/simp" "Misc/Itrev" "Misc/AdvancedInd" "Misc/appendix" theories "Protocol/NS_Public" "Documents/Documents" theories [thy_output_margin = 64, thy_output_indent = 0] "Types/Numbers" "Types/Pairs" "Types/Records" "Types/Typedefs" "Types/Overloading" "Types/Axioms" "Rules/Basic" "Rules/Blast" "Rules/Force" theories [thy_output_margin = 64, thy_output_indent = 5] "Rules/TPrimes" "Rules/Forward" "Rules/Tacticals" "Rules/find2" "Sets/Examples" "Sets/Functions" "Sets/Relations" "Sets/Recur" document_files (in "ToyList") "ToyList1.txt" "ToyList2.txt" document_files (in "..") "pdfsetup.sty" "ttbox.sty" "manual.bib" document_files "advanced0.tex" "appendix0.tex" "basics.tex" "build" "cl2emono-modified.sty" "ctl0.tex" "documents0.tex" "fp.tex" "inductive0.tex" "isa-index" "Isa-logics.pdf" "numerics.tex" "pghead.pdf" "preface.tex" "protocol.tex" "root.tex" "rules.tex" "sets.tex" "tutorial.sty" "typedef.pdf" "types0.tex" session Typeclass_Hierarchy (doc) in "Typeclass_Hierarchy" = HOL + options [document_logo = "Isar", document_bibliography, document_variants = "typeclass_hierarchy"] sessions "HOL-Library" theories [document = false] Setup theories Typeclass_Hierarchy document_files (in "..") "pdfsetup.sty" "iman.sty" "extra.sty" 
"isar.sty" "manual.bib" document_files "root.tex" "style.sty" - -session Demo_Easychair (doc) in "Demo_Easychair" = HOL + - options [document_variants = "demo_easychair"] - theories - Document - document_files (in "$ISABELLE_EASYCHAIR_HOME") - "easychair.cls" - document_files - "root.bib" - "root.tex" - -session Demo_FoilTeX (doc) in "Demo_FoilTeX" = HOL + - options [document_variants = "demo_foiltex", - document_build = "pdflatex", document_logo = "FoilTeX"] - theories - Document - document_files (in "$ISABELLE_FOILTEX_HOME") - "fltfonts.def" - "foil20.clo" - "foils.cls" - document_files - "root.tex" - -session Demo_LIPIcs (doc) in "Demo_LIPIcs" = HOL + - options [document_variants = "demo_lipics", - document_build = "pdflatex", document_heading_prefix = "", document_comment_latex] - theories - Document - document_files (in "$ISABELLE_LIPICS_HOME") - "cc-by.pdf" - "lipics-logo-bw.pdf" - "lipics-v2021.cls" - document_files - "root.bib" - "root.tex" diff --git a/src/Doc/ROOTS b/src/Doc/ROOTS new file mode 100644 --- /dev/null +++ b/src/Doc/ROOTS @@ -0,0 +1,3 @@ +Demo_Easychair +Demo_FoilTeX +Demo_LIPIcs diff --git a/src/Pure/Isar/isar_cmd.ML b/src/Pure/Isar/isar_cmd.ML --- a/src/Pure/Isar/isar_cmd.ML +++ b/src/Pure/Isar/isar_cmd.ML @@ -1,301 +1,301 @@ (* Title: Pure/Isar/isar_cmd.ML Author: Markus Wenzel, TU Muenchen Miscellaneous Isar commands. *) signature ISAR_CMD = sig val setup: Input.source -> theory -> theory val local_setup: Input.source -> Proof.context -> Proof.context val parse_ast_translation: Input.source -> theory -> theory val parse_translation: Input.source -> theory -> theory val print_translation: Input.source -> theory -> theory val typed_print_translation: Input.source -> theory -> theory val print_ast_translation: Input.source -> theory -> theory val translations: (xstring * string) Syntax.trrule list -> theory -> theory val no_translations: (xstring * string) Syntax.trrule list -> theory -> theory val oracle: bstring * Position.range -> Input.source -> theory -> theory val declaration: {syntax: bool, pervasive: bool} -> Input.source -> local_theory -> local_theory val simproc_setup: string * Position.T -> string list -> Input.source -> local_theory -> local_theory val qed: Method.text_range option -> Toplevel.transition -> Toplevel.transition val terminal_proof: Method.text_range * Method.text_range option -> Toplevel.transition -> Toplevel.transition val default_proof: Toplevel.transition -> Toplevel.transition val immediate_proof: Toplevel.transition -> Toplevel.transition val done_proof: Toplevel.transition -> Toplevel.transition val skip_proof: Toplevel.transition -> Toplevel.transition val ml_diag: bool -> Input.source -> Toplevel.transition -> Toplevel.transition val diag_state: Proof.context -> Toplevel.state val diag_goal: Proof.context -> {context: Proof.context, facts: thm list, goal: thm} val pretty_theorems: bool -> Toplevel.state -> Pretty.T list val print_stmts: string list * (Facts.ref * Token.src list) list -> Toplevel.transition -> Toplevel.transition val print_thms: string list * (Facts.ref * Token.src list) list -> Toplevel.transition -> Toplevel.transition val print_prfs: bool -> string list * (Facts.ref * Token.src list) list option -> Toplevel.transition -> Toplevel.transition val print_prop: (string list * string) -> Toplevel.transition -> Toplevel.transition val print_term: (string list * string) -> Toplevel.transition -> Toplevel.transition val print_type: (string list * (string * string option)) -> Toplevel.transition -> Toplevel.transition 
end; structure Isar_Cmd: ISAR_CMD = struct (** theory declarations **) (* generic setup *) fun setup source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.setup (" @ ML_Lex.read_source source @ ML_Lex.read ")") |> Context.theory_map; fun local_setup source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.local_setup (" @ ML_Lex.read_source source @ ML_Lex.read ")") |> Context.proof_map; (* translation functions *) fun parse_ast_translation source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.setup (Sign.parse_ast_translation (" @ ML_Lex.read_source source @ ML_Lex.read "))") |> Context.theory_map; fun parse_translation source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.setup (Sign.parse_translation (" @ ML_Lex.read_source source @ ML_Lex.read "))") |> Context.theory_map; fun print_translation source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.setup (Sign.print_translation (" @ ML_Lex.read_source source @ ML_Lex.read "))") |> Context.theory_map; fun typed_print_translation source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.setup (Sign.typed_print_translation (" @ ML_Lex.read_source source @ ML_Lex.read "))") |> Context.theory_map; fun print_ast_translation source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "Theory.setup (Sign.print_ast_translation (" @ ML_Lex.read_source source @ ML_Lex.read "))") |> Context.theory_map; (* translation rules *) fun read_trrules thy raw_rules = let val ctxt = Proof_Context.init_global thy; val read_root = #1 o dest_Type o Proof_Context.read_type_name {proper = true, strict = false} ctxt; in raw_rules |> map (Syntax.map_trrule (fn (r, s) => Syntax_Phases.parse_ast_pattern ctxt (read_root r, s))) end; fun translations args thy = Sign.add_trrules (read_trrules thy args) thy; fun no_translations args thy = Sign.del_trrules (read_trrules thy args) thy; (* oracles *) fun oracle (name, range) source = ML_Context.expression (Input.pos_of source) (ML_Lex.read "val " @ ML_Lex.read_range range name @ ML_Lex.read (" = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (" ^ ML_Syntax.make_binding (name, #1 range) ^ ", ") @ ML_Lex.read_source source @ ML_Lex.read "))))") |> Context.theory_map; (* declarations *) fun declaration {syntax, pervasive} source = ML_Context.expression (Input.pos_of source) (ML_Lex.read ("Theory.local_setup (Local_Theory.declaration {syntax = " ^ Bool.toString syntax ^ ", pervasive = " ^ Bool.toString pervasive ^ "} (") @ ML_Lex.read_source source @ ML_Lex.read "))") |> Context.proof_map; (* simprocs *) fun simproc_setup name lhss source = ML_Context.expression (Input.pos_of source) (ML_Lex.read ("Theory.local_setup (Simplifier.define_simproc_cmd (" ^ ML_Syntax.make_binding name ^ ") {lhss = " ^ ML_Syntax.print_strings lhss ^ ", proc = (") @ ML_Lex.read_source source @ ML_Lex.read ")})") |> Context.proof_map; (* local endings *) fun local_qed m = Toplevel.proof (Proof.local_qed (m, true)); val local_terminal_proof = Toplevel.proof o Proof.local_future_terminal_proof; val local_default_proof = Toplevel.proof Proof.local_default_proof; val local_immediate_proof = Toplevel.proof Proof.local_immediate_proof; val local_done_proof = Toplevel.proof Proof.local_done_proof; val local_skip_proof = Toplevel.proof' Proof.local_skip_proof; (* global endings *) fun global_qed m = Toplevel.end_proof (K (Proof.global_qed (m, true))); val global_terminal_proof = Toplevel.end_proof o K o 
Proof.global_future_terminal_proof; val global_default_proof = Toplevel.end_proof (K Proof.global_default_proof); val global_immediate_proof = Toplevel.end_proof (K Proof.global_immediate_proof); val global_skip_proof = Toplevel.end_proof Proof.global_skip_proof; val global_done_proof = Toplevel.end_proof (K Proof.global_done_proof); (* common endings *) fun qed m = local_qed m o global_qed m; fun terminal_proof m = local_terminal_proof m o global_terminal_proof m; val default_proof = local_default_proof o global_default_proof; val immediate_proof = local_immediate_proof o global_immediate_proof; val done_proof = local_done_proof o global_done_proof; val skip_proof = local_skip_proof o global_skip_proof; (* diagnostic ML evaluation *) structure Diag_State = Proof_Data ( type T = Toplevel.state option; fun init _ = NONE; ); fun ml_diag verbose source = Toplevel.keep (fn state => let val opt_ctxt = try Toplevel.generic_theory_of state |> Option.map (Context.proof_of #> Diag_State.put (SOME state)); val flags = ML_Compiler.verbose verbose ML_Compiler.flags; in ML_Context.eval_source_in opt_ctxt flags source end); fun diag_state ctxt = (case Diag_State.get ctxt of SOME st => st - | NONE => Toplevel.init_toplevel ()); + | NONE => Toplevel.make_state NONE); val diag_goal = Proof.goal o Toplevel.proof_of o diag_state; val _ = Theory.setup (ML_Antiquotation.value (Binding.qualify true "Isar" \<^binding>\state\) (Scan.succeed "Isar_Cmd.diag_state ML_context") #> ML_Antiquotation.value (Binding.qualify true "Isar" \<^binding>\goal\) (Scan.succeed "Isar_Cmd.diag_goal ML_context")); (* theorems of theory or proof context *) fun pretty_theorems verbose st = if Toplevel.is_proof st then Proof_Context.pretty_local_facts verbose (Toplevel.context_of st) else let val ctxt = Toplevel.context_of st; val prev_thys = (case Toplevel.previous_theory_of st of SOME thy => [thy] | NONE => Theory.parents_of (Proof_Context.theory_of ctxt)); in Proof_Display.pretty_theorems_diff verbose prev_thys ctxt end; (* print theorems, terms, types etc. 
*) local fun string_of_stmts ctxt args = Attrib.eval_thms ctxt args |> map (Element.pretty_statement ctxt Thm.theoremK) |> Pretty.chunks2 |> Pretty.string_of; fun string_of_thms ctxt args = Pretty.string_of (Proof_Context.pretty_fact ctxt ("", Attrib.eval_thms ctxt args)); fun string_of_prfs full state arg = Pretty.string_of (case arg of NONE => let val {context = ctxt, goal = thm} = Proof.simple_goal (Toplevel.proof_of state); val thy = Proof_Context.theory_of ctxt; val prf = Thm.proof_of thm; val prop = Thm.full_prop_of thm; val prf' = Proofterm.rewrite_proof_notypes ([], []) prf; in Proof_Syntax.pretty_proof ctxt (if full then Proofterm.reconstruct_proof thy prop prf' else prf') end | SOME srcs => let val ctxt = Toplevel.context_of state; val pretty_proof = Proof_Syntax.pretty_standard_proof_of ctxt full; in Pretty.chunks (map pretty_proof (Attrib.eval_thms ctxt srcs)) end); fun string_of_prop ctxt s = let val prop = Syntax.read_prop ctxt s; val ctxt' = Proof_Context.augment prop ctxt; in Pretty.string_of (Pretty.quote (Syntax.pretty_term ctxt' prop)) end; fun string_of_term ctxt s = let val t = Syntax.read_term ctxt s; val T = Term.type_of t; val ctxt' = Proof_Context.augment t ctxt; in Pretty.string_of (Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t), Pretty.fbrk, Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' T)]) end; fun string_of_type ctxt (s, NONE) = let val T = Syntax.read_typ ctxt s in Pretty.string_of (Pretty.quote (Syntax.pretty_typ ctxt T)) end | string_of_type ctxt (s1, SOME s2) = let val ctxt' = Config.put show_sorts true ctxt; val raw_T = Syntax.parse_typ ctxt' s1; val S = Syntax.read_sort ctxt' s2; val T = Syntax.check_term ctxt' (Logic.mk_type raw_T |> Type.constraint (Term.itselfT (Type_Infer.anyT S))) |> Logic.dest_type; in Pretty.string_of (Pretty.quote (Syntax.pretty_typ ctxt' T)) end; fun print_item string_of (modes, arg) = Toplevel.keep (fn state => Print_Mode.with_modes modes (fn () => writeln (string_of state arg)) ()); in val print_stmts = print_item (string_of_stmts o Toplevel.context_of); val print_thms = print_item (string_of_thms o Toplevel.context_of); val print_prfs = print_item o string_of_prfs; val print_prop = print_item (string_of_prop o Toplevel.context_of); val print_term = print_item (string_of_term o Toplevel.context_of); val print_type = print_item (string_of_type o Toplevel.context_of); end; end; diff --git a/src/Pure/Isar/toplevel.ML b/src/Pure/Isar/toplevel.ML --- a/src/Pure/Isar/toplevel.ML +++ b/src/Pure/Isar/toplevel.ML @@ -1,821 +1,821 @@ (* Title: Pure/Isar/toplevel.ML Author: Markus Wenzel, TU Muenchen Isabelle/Isar toplevel transactions. 
*) signature TOPLEVEL = sig exception UNDEF type state - val init_toplevel: unit -> state - val theory_toplevel: theory -> state + val make_state: theory option -> state val is_toplevel: state -> bool val is_theory: state -> bool val is_proof: state -> bool val is_skipped_proof: state -> bool val level: state -> int val previous_theory_of: state -> theory option val output_of: state -> Latex.text option val context_of: state -> Proof.context val generic_theory_of: state -> generic_theory val theory_of: state -> theory val proof_of: state -> Proof.state val proof_position_of: state -> int val is_end_theory: state -> bool val end_theory: Position.T -> state -> theory val presentation_context: state -> Proof.context val presentation_state: Proof.context -> state val pretty_context: state -> Pretty.T list val pretty_state: state -> Pretty.T list val string_of_state: state -> string val pretty_abstract: state -> Pretty.T type presentation = state -> Latex.text option val presentation: (state -> Latex.text) -> presentation val no_presentation: presentation type transition val empty: transition val name_of: transition -> string val pos_of: transition -> Position.T val timing_of: transition -> Time.time val type_error: transition -> string val name: string -> transition -> transition val position: Position.T -> transition -> transition val markers: Input.source list -> transition -> transition val timing: Time.time -> transition -> transition val init_theory: (unit -> theory) -> transition -> transition val is_init: transition -> bool val modify_init: (unit -> theory) -> transition -> transition val exit: transition -> transition val present: (state -> Latex.text) -> transition -> transition val keep: (state -> unit) -> transition -> transition val keep': (bool -> state -> unit) -> transition -> transition val keep_proof: (state -> unit) -> transition -> transition val is_ignored: transition -> bool val is_malformed: transition -> bool val ignored: Position.T -> transition val malformed: Position.T -> string -> transition val generic_theory: (generic_theory -> generic_theory) -> transition -> transition val theory': (bool -> theory -> theory) -> presentation -> transition -> transition val theory: (theory -> theory) -> transition -> transition val begin_main_target: bool -> (theory -> local_theory) -> transition -> transition val end_main_target: transition -> transition val begin_nested_target: (Context.generic -> local_theory) -> transition -> transition val end_nested_target: transition -> transition val local_theory': (bool * Position.T) option -> (xstring * Position.T) option -> (bool -> local_theory -> local_theory) -> presentation -> transition -> transition val local_theory: (bool * Position.T) option -> (xstring * Position.T) option -> (local_theory -> local_theory) -> transition -> transition val present_local_theory: (xstring * Position.T) option -> (state -> Latex.text) -> transition -> transition val local_theory_to_proof': (bool * Position.T) option -> (xstring * Position.T) option -> (bool -> local_theory -> Proof.state) -> transition -> transition val local_theory_to_proof: (bool * Position.T) option -> (xstring * Position.T) option -> (local_theory -> Proof.state) -> transition -> transition val theory_to_proof: (theory -> Proof.state) -> transition -> transition val end_proof: (bool -> Proof.state -> Proof.context) -> transition -> transition val forget_proof: transition -> transition val proofs': (bool -> Proof.state -> Proof.state Seq.result Seq.seq) -> transition -> transition 
val proof': (bool -> Proof.state -> Proof.state) -> transition -> transition val proofs: (Proof.state -> Proof.state Seq.result Seq.seq) -> transition -> transition val proof: (Proof.state -> Proof.state) -> transition -> transition val actual_proof: (Proof_Node.T -> Proof_Node.T) -> transition -> transition val skip_proof: (unit -> unit) -> transition -> transition val skip_proof_open: transition -> transition val skip_proof_close: transition -> transition val exec_id: Document_ID.exec -> transition -> transition val setmp_thread_position: transition -> ('a -> 'b) -> 'a -> 'b val transition: bool -> transition -> state -> state * (exn * string) option val command_errors: bool -> transition -> state -> Runtime.error list * state option val command_exception: bool -> transition -> state -> state val reset_theory: state -> state option val reset_proof: state -> state option val reset_notepad: state -> state option val fork_presentation: transition -> transition * transition type result val join_results: result -> (state * transition * state) list val element_result: Keyword.keywords -> transition Thy_Element.element -> state -> result * state end; structure Toplevel: TOPLEVEL = struct (** toplevel state **) exception UNDEF = Runtime.UNDEF; (* datatype node *) datatype node = Toplevel (*toplevel outside of theory body*) | Theory of generic_theory (*global or local theory*) | Proof of Proof_Node.T * ((Proof.context -> generic_theory) * generic_theory) (*proof node, finish, original theory*) | Skipped_Proof of int * (generic_theory * generic_theory); (*proof depth, resulting theory, original theory*) val theory_node = fn Theory gthy => SOME gthy | _ => NONE; val proof_node = fn Proof (prf, _) => SOME prf | _ => NONE; val skipped_proof_node = fn Skipped_Proof _ => true | _ => false; fun cases_node f _ _ Toplevel = f () | cases_node _ g _ (Theory gthy) = g gthy | cases_node _ _ h (Proof (prf, _)) = h (Proof_Node.current prf) | cases_node _ g _ (Skipped_Proof (_, (gthy, _))) = g gthy; fun cases_proper_node g h = cases_node (fn () => raise UNDEF) g h; val get_theory = cases_node (K NONE) (SOME o Context.theory_of) (SOME o Proof.theory_of); (* datatype state *) type node_presentation = node * Proof.context; fun init_presentation () = Proof_Context.init_global (Theory.get_pure_bootstrap ()); fun node_presentation node = (node, cases_node init_presentation Context.proof_of Proof.context_of node); datatype state = State of node_presentation * (theory option * Latex.text option); (*current node with presentation context, previous theory, document output*) fun node_of (State ((node, _), _)) = node; fun previous_theory_of (State (_, (prev_thy, _))) = prev_thy; fun output_of (State (_, (_, output))) = output; -fun init_toplevel () = State (node_presentation Toplevel, (NONE, NONE)); -fun theory_toplevel thy = State (node_presentation (Theory (Context.Theory thy)), (NONE, NONE)); +fun make_state opt_thy = + let val node = (case opt_thy of NONE => Toplevel | SOME thy => Theory (Context.Theory thy)) + in State (node_presentation node, (NONE, NONE)) end; fun level state = (case node_of state of Toplevel => 0 | Theory _ => 0 | Proof (prf, _) => Proof.level (Proof_Node.current prf) | Skipped_Proof (d, _) => d + 1); (*different notion of proof depth!*) fun str_of_state state = (case node_of state of Toplevel => (case previous_theory_of state of NONE => "at top level" | SOME thy => "at top level, result theory " ^ quote (Context.theory_name thy)) | Theory (Context.Theory _) => "in theory mode" | Theory 
(Context.Proof _) => "in local theory mode" | Proof _ => "in proof mode" | Skipped_Proof _ => "in skipped proof mode"); (* current node *) fun is_toplevel state = (case node_of state of Toplevel => true | _ => false); fun is_theory state = not (is_toplevel state) andalso is_some (theory_node (node_of state)); fun is_proof state = not (is_toplevel state) andalso is_some (proof_node (node_of state)); fun is_skipped_proof state = not (is_toplevel state) andalso skipped_proof_node (node_of state); fun proper_node_of state = if is_toplevel state then raise UNDEF else node_of state; fun proper_node_case f g state = cases_proper_node f g (proper_node_of state); val context_of = proper_node_case Context.proof_of Proof.context_of; val generic_theory_of = proper_node_case I (Context.Proof o Proof.context_of); val theory_of = proper_node_case Context.theory_of Proof.theory_of; val proof_of = proper_node_case (fn _ => error "No proof state") I; fun proof_position_of state = (case proper_node_of state of Proof (prf, _) => Proof_Node.position prf | _ => ~1); fun is_end_theory (State ((Toplevel, _), (SOME _, _))) = true | is_end_theory _ = false; fun end_theory _ (State ((Toplevel, _), (SOME thy, _))) = thy | end_theory pos _ = error ("Malformed theory" ^ Position.here pos); (* presentation context *) structure Presentation_State = Proof_Data ( type T = state option; fun init _ = NONE; ); fun presentation_context0 (State ((_, pr_ctxt), _)) = pr_ctxt; fun presentation_context (state as State (current, _)) = presentation_context0 state |> Presentation_State.put (SOME (State (current, (NONE, NONE)))); fun presentation_state ctxt = (case Presentation_State.get ctxt of NONE => State (node_presentation (Theory (Context.Proof ctxt)), (NONE, NONE)) | SOME state => state); (* print state *) fun pretty_context state = if is_toplevel state then [] else let val gthy = (case node_of state of Toplevel => raise Match | Theory gthy => gthy | Proof (_, (_, gthy)) => gthy | Skipped_Proof (_, (_, gthy)) => gthy); val lthy = Context.cases Named_Target.theory_init I gthy; in Local_Theory.pretty lthy end; fun pretty_state state = (case node_of state of Toplevel => [] | Theory _ => [] | Proof (prf, _) => Proof.pretty_state (Proof_Node.current prf) | Skipped_Proof (d, _) => [Pretty.str ("skipped proof: depth " ^ string_of_int d)]); val string_of_state = pretty_state #> Pretty.chunks #> Pretty.string_of; fun pretty_abstract state = Pretty.str (""); val _ = ML_system_pp (fn _ => fn _ => Pretty.to_polyml o pretty_abstract); (** toplevel transitions **) (* presentation *) type presentation = state -> Latex.text option; fun presentation g : presentation = SOME o g; val no_presentation: presentation = K NONE; (* primitive transitions *) datatype trans = (*init theory*) Init of unit -> theory | (*formal exit of theory*) Exit | - (*peek at state*) + (*keep state unchanged*) Keep of bool -> presentation | (*node transaction and presentation*) Transaction of (bool -> node -> node_presentation) * presentation; local exception FAILURE of state * exn; fun apply_presentation g (st as State (node, (prev_thy, _))) = State (node, (prev_thy, g st)); fun apply f g node = let val node_pr = node_presentation node; val context = cases_proper_node I (Context.Proof o Proof.context_of) node; fun make_state node_pr' = State (node_pr', (get_theory node, NONE)); val (st', err) = (Runtime.controlled_execution (SOME context) (f #> make_state #> apply_presentation g) node, NONE) handle exn => (make_state node_pr, SOME exn); in (case err of NONE => st' | SOME 
exn => raise FAILURE (st', exn)) end; fun apply_tr int trans state = (case (trans, node_of state) of (Init f, Toplevel) => Runtime.controlled_execution NONE (fn () => State (node_presentation (Theory (Context.Theory (f ()))), (NONE, NONE))) () | (Exit, node as Theory (Context.Theory thy)) => let val State ((node', pr_ctxt), _) = node |> apply (fn _ => node_presentation (Theory (Context.Theory (tap Thm.expose_theory (Theory.end_theory thy))))) no_presentation; in State ((Toplevel, pr_ctxt), (get_theory node', NONE)) end | (Keep f, node) => Runtime.controlled_execution (try generic_theory_of state) (fn () => let val prev_thy = previous_theory_of state; val state' = State (node_presentation node, (prev_thy, NONE)); in apply_presentation (fn st => f int st) state' end) () | (Transaction _, Toplevel) => raise UNDEF | (Transaction (f, g), node) => apply (fn x => f int x) g node | _ => raise UNDEF); fun apply_union _ [] state = raise FAILURE (state, UNDEF) | apply_union int (tr :: trs) state = apply_union int trs state handle Runtime.UNDEF => apply_tr int tr state | FAILURE (alt_state, UNDEF) => apply_tr int tr alt_state | exn as FAILURE _ => raise exn | exn => raise FAILURE (state, exn); fun apply_markers name markers (state as State ((node, pr_ctxt), prev_thy)) = let val state' = Runtime.controlled_execution (try generic_theory_of state) (fn () => State ((node, fold (Document_Marker.evaluate name) markers pr_ctxt), prev_thy)) (); in (state', NONE) end handle exn => (state, SOME exn); in fun apply_trans int name markers trans state = (apply_union int trans state |> apply_markers name markers) handle FAILURE (alt_state, exn) => (alt_state, SOME exn) | exn => (state, SOME exn); end; (* datatype transition *) datatype transition = Transition of {name: string, (*command name*) pos: Position.T, (*source position*) markers: Input.source list, (*semantic document markers*) timing: Time.time, (*prescient timing information*) trans: trans list}; (*primitive transitions (union)*) fun make_transition (name, pos, markers, timing, trans) = Transition {name = name, pos = pos, markers = markers, timing = timing, trans = trans}; fun map_transition f (Transition {name, pos, markers, timing, trans}) = make_transition (f (name, pos, markers, timing, trans)); val empty = make_transition ("", Position.none, [], Time.zeroTime, []); (* diagnostics *) fun name_of (Transition {name, ...}) = name; fun pos_of (Transition {pos, ...}) = pos; fun timing_of (Transition {timing, ...}) = timing; fun command_msg msg tr = msg ^ "command " ^ quote (Markup.markup Markup.keyword1 (name_of tr)) ^ Position.here (pos_of tr); fun at_command tr = command_msg "At " tr; fun type_error tr = command_msg "Bad context for " tr; (* modify transitions *) fun name name = map_transition (fn (_, pos, markers, timing, trans) => (name, pos, markers, timing, trans)); fun position pos = map_transition (fn (name, _, markers, timing, trans) => (name, pos, markers, timing, trans)); fun markers markers = map_transition (fn (name, pos, _, timing, trans) => (name, pos, markers, timing, trans)); fun timing timing = map_transition (fn (name, pos, markers, _, trans) => (name, pos, markers, timing, trans)); fun add_trans tr = map_transition (fn (name, pos, markers, timing, trans) => (name, pos, markers, timing, tr :: trans)); val reset_trans = map_transition (fn (name, pos, markers, timing, _) => (name, pos, markers, timing, [])); (* basic transitions *) fun init_theory f = add_trans (Init f); fun is_init (Transition {trans = [Init _], ...}) = true | is_init _ = 
false; fun modify_init f tr = if is_init tr then init_theory f (reset_trans tr) else tr; val exit = add_trans Exit; fun present_transaction f g = add_trans (Transaction (f, g)); fun transaction f = present_transaction f no_presentation; fun transaction0 f = present_transaction (node_presentation oo f) no_presentation; fun present f = add_trans (Keep (K (presentation f))); fun keep f = add_trans (Keep (fn _ => fn st => let val () = f st in NONE end)); fun keep' f = add_trans (Keep (fn int => fn st => let val () = f int st in NONE end)); fun keep_proof f = keep (fn st => if is_proof st then f st else if is_skipped_proof st then () else warning "No proof state"); val ignoredN = ""; val malformedN = ""; fun is_ignored tr = name_of tr = ignoredN; fun is_malformed tr = name_of tr = malformedN; fun ignored pos = empty |> name ignoredN |> position pos |> keep (fn _ => ()); fun malformed pos msg = empty |> name malformedN |> position pos |> keep (fn _ => error msg); (* theory transitions *) fun generic_theory f = transaction (fn _ => (fn Theory gthy => node_presentation (Theory (f gthy)) | _ => raise UNDEF)); fun theory' f = present_transaction (fn int => (fn Theory (Context.Theory thy) => let val thy' = thy |> Sign.new_group |> f int |> Sign.reset_group; in node_presentation (Theory (Context.Theory thy')) end | _ => raise UNDEF)); fun theory f = theory' (K f) no_presentation; fun begin_main_target begin f = transaction (fn _ => (fn Theory (Context.Theory thy) => let val lthy = f thy; val gthy = if begin then Context.Proof lthy else Target_Context.end_named_cmd lthy; val _ = (case Local_Theory.pretty lthy of [] => () | prts => Output.state (Pretty.string_of (Pretty.chunks prts))); in (Theory gthy, lthy) end | _ => raise UNDEF)); val end_main_target = transaction (fn _ => (fn Theory (Context.Proof lthy) => (Theory (Target_Context.end_named_cmd lthy), lthy) | _ => raise UNDEF)); fun begin_nested_target f = transaction0 (fn _ => (fn Theory gthy => let val lthy' = f gthy; in Theory (Context.Proof lthy') end | _ => raise UNDEF)); val end_nested_target = transaction (fn _ => (fn Theory (Context.Proof lthy) => (case try Target_Context.end_nested_cmd lthy of SOME gthy' => (Theory gthy', lthy) | NONE => raise UNDEF) | _ => raise UNDEF)); fun restricted_context (SOME (strict, scope)) = Proof_Context.map_naming (Name_Space.restricted strict scope) | restricted_context NONE = I; fun local_theory' restricted target f = present_transaction (fn int => (fn Theory gthy => let val (finish, lthy) = Target_Context.switch_named_cmd target gthy; val lthy' = lthy |> restricted_context restricted |> Local_Theory.new_group |> f int |> Local_Theory.reset_group; in (Theory (finish lthy'), lthy') end | _ => raise UNDEF)); fun local_theory restricted target f = local_theory' restricted target (K f) no_presentation; fun present_local_theory target g = present_transaction (fn _ => (fn Theory gthy => let val (finish, lthy) = Target_Context.switch_named_cmd target gthy; in (Theory (finish lthy), lthy) end | _ => raise UNDEF)) (presentation g); (* proof transitions *) fun end_proof f = transaction (fn int => (fn Proof (prf, (finish, _)) => let val state = Proof_Node.current prf in if can (Proof.assert_bottom true) state then let val ctxt' = f int state; val gthy' = finish ctxt'; in (Theory gthy', ctxt') end else raise UNDEF end | Skipped_Proof (0, (gthy, _)) => node_presentation (Theory gthy) | _ => raise UNDEF)); local fun begin_proof init_proof = transaction0 (fn int => (fn Theory gthy => let val (finish, prf) = init_proof int 
gthy; val document = Options.default_string "document"; val skip = (document = "" orelse document = "false") andalso Goal.skip_proofs_enabled (); val schematic_goal = try Proof.schematic_goal prf; val _ = if skip andalso schematic_goal = SOME true then warning "Cannot skip proof of schematic goal statement" else (); in if skip andalso schematic_goal = SOME false then Skipped_Proof (0, (finish (Proof.global_skip_proof true prf), gthy)) else Proof (Proof_Node.init prf, (finish, gthy)) end | _ => raise UNDEF)); in fun local_theory_to_proof' restricted target f = begin_proof (fn int => fn gthy => let val (finish, lthy) = Target_Context.switch_named_cmd target gthy; val prf = lthy |> restricted_context restricted |> Local_Theory.new_group |> f int; in (finish o Local_Theory.reset_group, prf) end); fun local_theory_to_proof restricted target f = local_theory_to_proof' restricted target (K f); fun theory_to_proof f = begin_proof (fn _ => fn gthy => (Context.Theory o Sign.reset_group o Sign.change_check o Proof_Context.theory_of, (case gthy of Context.Theory thy => f (Sign.new_group thy) | _ => raise UNDEF))); end; val forget_proof = transaction0 (fn _ => (fn Proof (prf, (_, orig_gthy)) => if Proof.is_notepad (Proof_Node.current prf) then raise UNDEF else Theory orig_gthy | Skipped_Proof (_, (_, orig_gthy)) => Theory orig_gthy | _ => raise UNDEF)); fun proofs' f = transaction0 (fn int => (fn Proof (prf, x) => Proof (Proof_Node.applys (f int) prf, x) | skip as Skipped_Proof _ => skip | _ => raise UNDEF)); fun proof' f = proofs' ((Seq.single o Seq.Result) oo f); val proofs = proofs' o K; val proof = proof' o K; (* skipped proofs *) fun actual_proof f = transaction0 (fn _ => (fn Proof (prf, x) => Proof (f prf, x) | _ => raise UNDEF)); fun skip_proof f = transaction0 (fn _ => (fn skip as Skipped_Proof _ => (f (); skip) | _ => raise UNDEF)); val skip_proof_open = transaction0 (fn _ => (fn Skipped_Proof (d, x) => Skipped_Proof (d + 1, x) | _ => raise UNDEF)); val skip_proof_close = transaction0 (fn _ => (fn Skipped_Proof (0, (gthy, _)) => Theory gthy | Skipped_Proof (d, x) => Skipped_Proof (d - 1, x) | _ => raise UNDEF)); (** toplevel transactions **) (* runtime position *) fun exec_id id (tr as Transition {pos, ...}) = let val put_id = Position.put_id (Document_ID.print id) in position (put_id pos) tr end; fun setmp_thread_position (Transition {pos, ...}) f x = Position.setmp_thread_data pos f x; (* apply transitions *) local fun show_state (st', opt_err) = let val enabled = is_none opt_err andalso Options.default_bool \<^system_option>\show_states\; val opt_err' = if enabled then (case Exn.capture (Output.state o string_of_state) st' of Exn.Exn exn => SOME exn | Exn.Res _ => opt_err) else opt_err; in (st', opt_err') end; fun app int (tr as Transition {name, markers, trans, ...}) = setmp_thread_position tr (Timing.protocol (name_of tr) (pos_of tr) (apply_trans int name markers trans) ##> Option.map (fn UNDEF => ERROR (type_error tr) | exn => exn) #> show_state); in fun transition int tr st = let val (st', opt_err) = Context.setmp_generic_context (try (Context.Proof o presentation_context0) st) (fn () => app int tr st) (); val opt_err' = opt_err |> Option.map (fn Runtime.EXCURSION_FAIL exn_info => exn_info | exn => (Runtime.exn_context (try context_of st) exn, at_command tr)); in (st', opt_err') end; end; (* managed commands *) fun command_errors int tr st = (case transition int tr st of (st', NONE) => ([], SOME st') | (_, SOME (exn, _)) => (Runtime.exn_messages exn, NONE)); fun command_exception int tr 
st = (case transition int tr st of (st', NONE) => st' | (_, SOME (exn, info)) => if Exn.is_interrupt exn then Exn.reraise exn else raise Runtime.EXCURSION_FAIL (exn, info)); val command = command_exception false; (* reset state *) local fun reset_state check trans st = if check st then NONE else #2 (command_errors false (trans empty) st); in val reset_theory = reset_state is_theory forget_proof; val reset_proof = reset_state is_proof (transaction0 (fn _ => (fn Theory gthy => Skipped_Proof (0, (gthy, gthy)) | _ => raise UNDEF))); val reset_notepad = reset_state (fn st => (case try proof_of st of SOME state => not (Proof.is_notepad state) orelse can Proof.end_notepad state | NONE => true)) (proof Proof.reset_notepad); end; (* scheduled proof result *) datatype result = Result of transition * state | Result_List of result list | Result_Future of result future; fun join_results result = let fun add (tr, st') res = (case res of - [] => [(init_toplevel (), tr, st')] + [] => [(make_state NONE, tr, st')] | (_, _, st) :: _ => (st, tr, st') :: res); fun acc (Result r) = add r | acc (Result_List rs) = fold acc rs | acc (Result_Future x) = acc (Future.join x); in rev (acc result []) end; local structure Result = Proof_Data ( type T = result; fun init _ = Result_List []; ); val get_result = Result.get o Proof.context_of; val put_result = Proof.map_context o Result.put; fun timing_estimate elem = let val trs = tl (Thy_Element.flat_element elem) in fold (fn tr => fn t => timing_of tr + t) trs Time.zeroTime end; fun future_proofs_enabled estimate st = (case try proof_of st of NONE => false | SOME state => not (Proofterm.proofs_enabled ()) andalso not (Proof.is_relevant state) andalso (if can (Proof.assert_bottom true) state then Future.proofs_enabled 1 else Future.proofs_enabled 2 orelse Future.proofs_enabled_timing estimate)); val empty_markers = markers []; val empty_trans = reset_trans #> keep (K ()); in fun fork_presentation tr = (tr |> empty_markers, tr |> empty_trans); fun atom_result keywords tr st = let val st' = if Future.proofs_enabled 1 andalso Keyword.is_diag keywords (name_of tr) then let val (tr1, tr2) = fork_presentation tr; val _ = Execution.fork {name = "Toplevel.diag", pos = pos_of tr, pri = ~1} (fn () => command tr1 st); in command tr2 st end else command tr st; in (Result (tr, st'), st') end; fun element_result keywords (Thy_Element.Element (tr, NONE)) st = atom_result keywords tr st | element_result keywords (elem as Thy_Element.Element (head_tr, SOME element_rest)) st = let val (head_result, st') = atom_result keywords head_tr st; val (body_elems, end_tr) = element_rest; val estimate = timing_estimate elem; in if not (future_proofs_enabled estimate st') then let val proof_trs = maps Thy_Element.flat_element body_elems @ [end_tr]; val (proof_results, st'') = fold_map (atom_result keywords) proof_trs st'; in (Result_List (head_result :: proof_results), st'') end else let val (end_tr1, end_tr2) = fork_presentation end_tr; val finish = Context.Theory o Proof_Context.theory_of; val future_proof = Proof.future_proof (fn state => Execution.fork {name = "Toplevel.future_proof", pos = pos_of head_tr, pri = ~1} (fn () => let val State ((Proof (prf, (_, orig_gthy)), _), prev_thy) = st'; val node' = Proof (Proof_Node.apply (K state) prf, (finish, orig_gthy)); val (results, result_state) = State (node_presentation node', prev_thy) |> fold_map (element_result keywords) body_elems ||> command end_tr1; in (Result_List results, presentation_context0 result_state) end)) #> (fn (res, state') => state' 
|> put_result (Result_Future res)); val forked_proof = proof (future_proof #> (fn state => state |> Proof.local_done_proof |> put_result (get_result state))) o end_proof (fn _ => future_proof #> (fn state => state |> Proof.global_done_proof |> Result.put (get_result state))); val st'' = st' |> command (head_tr |> reset_trans |> forked_proof); val end_st = st'' |> command end_tr2; val end_result = Result (end_tr, end_st); val result = Result_List [head_result, Result.get (presentation_context0 st''), end_result]; in (result, end_st) end end; end; end; diff --git a/src/Pure/PIDE/command.ML b/src/Pure/PIDE/command.ML --- a/src/Pure/PIDE/command.ML +++ b/src/Pure/PIDE/command.ML @@ -1,504 +1,506 @@ (* Title: Pure/PIDE/command.ML Author: Makarius Prover command execution: read -- eval -- print. *) signature COMMAND = sig type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option} val read_file: Path.T -> Position.T -> bool -> Path.T -> Token.file val read_thy: Toplevel.state -> theory val read: Keyword.keywords -> theory -> Path.T-> (unit -> theory) -> blob Exn.result list * int -> Token.T list -> Toplevel.transition val read_span: Keyword.keywords -> Toplevel.state -> Path.T -> (unit -> theory) -> Command_Span.span -> Toplevel.transition type eval val eval_command_id: eval -> Document_ID.command val eval_exec_id: eval -> Document_ID.exec val eval_eq: eval * eval -> bool val eval_running: eval -> bool val eval_finished: eval -> bool val eval_result_command: eval -> Toplevel.transition val eval_result_state: eval -> Toplevel.state val eval: Keyword.keywords -> Path.T -> (unit -> theory) -> blob Exn.result list * int -> Document_ID.command -> Token.T list -> eval -> eval type print type print_fn = Toplevel.transition -> Toplevel.state -> unit val print0: {pri: int, print_fn: print_fn} -> eval -> print val print: bool -> (string * string list) list -> Keyword.keywords -> string -> eval -> print list -> print list option val parallel_print: print -> bool type print_function = {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option val print_function: string -> print_function -> unit val no_print_function: string -> unit type exec = eval * print list val init_exec: theory option -> exec val no_exec: exec val exec_ids: exec option -> Document_ID.exec list val exec: Document_ID.execution -> exec -> unit val exec_parallel_prints: Document_ID.execution -> Future.task list -> exec -> exec option end; structure Command: COMMAND = struct (** main phases of execution **) fun task_context group f = f |> Future.interruptible_task |> Future.task_context "Command.run_process" group; (* read *) type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option}; fun read_file_node file_node master_dir pos delimited src_path = let val _ = if Context_Position.pide_reports () then Position.report pos (Markup.language_path delimited) else (); fun read_local () = let val path = File.check_file (File.full_path master_dir src_path); val text = File.read path; val file_pos = Path.position path; in (text, file_pos) end; fun read_remote () = let val text = Bytes.content (Isabelle_System.download file_node); val file_pos = Position.file file_node; in (text, file_pos) end; val (text, file_pos) = (case try Url.explode file_node of NONE => read_local () | SOME (Url.File _) => read_local () | _ => read_remote ()); val lines = 
split_lines text; val digest = SHA1.digest text; in {src_path = src_path, lines = lines, digest = digest, pos = Position.copy_id pos file_pos} end handle ERROR msg => error (msg ^ Position.here pos); val read_file = read_file_node ""; local fun blob_file src_path lines digest file_node = let val file_pos = Position.file file_node |> (case Position.id_of (Position.thread_data ()) of NONE => I | SOME exec_id => Position.put_id exec_id); in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end fun resolve_files master_dir (blobs, blobs_index) toks = (case Outer_Syntax.parse_spans toks of [Command_Span.Span (Command_Span.Command_Span _, _)] => (case try (nth toks) blobs_index of SOME tok => let val source = Token.input_of tok; val pos = Input.pos_of source; val delimited = Input.is_delimited source; fun make_file (Exn.Res {file_node, src_path, content = NONE}) = Exn.interruptible_capture (fn () => read_file_node file_node master_dir pos delimited src_path) () | make_file (Exn.Res {file_node, src_path, content = SOME (digest, lines)}) = (Position.report pos (Markup.language_path delimited); Exn.Res (blob_file src_path lines digest file_node)) | make_file (Exn.Exn e) = Exn.Exn e; val files = map make_file blobs; in toks |> map_index (fn (i, tok) => if i = blobs_index then Token.put_files files tok else tok) end | NONE => toks) | _ => toks); fun reports_of_token keywords tok = let val malformed_symbols = Input.source_explode (Token.input_of tok) |> map_filter (fn (sym, pos) => if Symbol.is_malformed sym then SOME ((pos, Markup.bad ()), "Malformed symbolic character") else NONE); val is_malformed = Token.is_error tok orelse not (null malformed_symbols); val reports = Token.reports keywords tok @ Token.completion_report tok @ malformed_symbols; in (is_malformed, reports) end; in fun read_thy st = Toplevel.theory_of st handle Toplevel.UNDEF => Pure_Syn.bootstrap_thy; fun read keywords thy master_dir init blobs_info span = let val command_reports = Outer_Syntax.command_reports thy; val token_reports = map (reports_of_token keywords) span; val _ = Position.reports_text (maps #2 token_reports @ maps command_reports span); val core_range = Token.core_range_of span; val tr = if exists #1 token_reports then Toplevel.malformed (#1 core_range) "Malformed command syntax" else Outer_Syntax.parse_span thy init (resolve_files master_dir blobs_info span); val _ = if Toplevel.is_ignored tr orelse Toplevel.is_malformed tr then () else Position.report (#1 core_range) (Markup.command_span (Toplevel.name_of tr)); in tr end; end; fun read_span keywords st master_dir init = Command_Span.content #> read keywords (read_thy st) master_dir init ([], ~1); (* eval *) type eval_state = {failed: bool, command: Toplevel.transition, state: Toplevel.state}; fun init_eval_state opt_thy = {failed = false, command = Toplevel.empty, - state = - (case opt_thy of - NONE => Toplevel.init_toplevel () - | SOME thy => Toplevel.theory_toplevel thy)}; + state = Toplevel.make_state opt_thy}; datatype eval = Eval of {command_id: Document_ID.command, exec_id: Document_ID.exec, eval_process: eval_state lazy}; fun eval_command_id (Eval {command_id, ...}) = command_id; fun eval_exec_id (Eval {exec_id, ...}) = exec_id; val eval_eq = op = o apply2 eval_exec_id; val eval_running = Execution.is_running_exec o eval_exec_id; fun eval_finished (Eval {eval_process, ...}) = Lazy.is_finished eval_process; fun eval_result (Eval {eval_process, ...}) = Exn.release (Lazy.finished_result eval_process); val eval_result_command = #command o 
eval_result; val eval_result_state = #state o eval_result; local fun reset_state keywords tr st0 = Toplevel.setmp_thread_position tr (fn () => let val name = Toplevel.name_of tr; val res = if Keyword.is_theory_body keywords name then Toplevel.reset_theory st0 else if Keyword.is_proof keywords name then Toplevel.reset_proof st0 else if Keyword.is_theory_end keywords name then (case Toplevel.reset_notepad st0 of NONE => Toplevel.reset_theory st0 | some => some) else NONE; in (case res of NONE => st0 | SOME st => (Output.error_message (Toplevel.type_error tr ^ " -- using reset state"); st)) end) (); fun run keywords int tr st = if Future.proofs_enabled 1 andalso Keyword.is_diag keywords (Toplevel.name_of tr) then let val (tr1, tr2) = Toplevel.fork_presentation tr; val _ = Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1} (fn () => Toplevel.command_exception int tr1 st); in Toplevel.command_errors int tr2 st end else Toplevel.command_errors int tr st; fun check_token_comments ctxt tok = (Document_Output.check_comments ctxt (Input.source_explode (Token.input_of tok)); []) handle exn => if Exn.is_interrupt exn then Exn.reraise exn else Runtime.exn_messages exn; fun check_span_comments ctxt span tr = Toplevel.setmp_thread_position tr (fn () => maps (check_token_comments ctxt) span) (); fun report_indent tr st = (case try Toplevel.proof_of st of SOME prf => let val keywords = Thy_Header.get_keywords (Proof.theory_of prf) in if Keyword.command_kind keywords (Toplevel.name_of tr) = SOME Keyword.prf_script then (case try (Thm.nprems_of o #goal o Proof.goal) prf of NONE => () | SOME 0 => () | SOME n => let val report = Markup.markup_only (Markup.command_indent (n - 1)); in Toplevel.setmp_thread_position tr (fn () => Output.report [report]) () end) else () end | NONE => ()); fun status tr m = Toplevel.setmp_thread_position tr (fn () => Output.status [Markup.markup_only m]) (); fun eval_state keywords span tr ({state, ...}: eval_state) = let val _ = Thread_Attributes.expose_interrupt (); val st = reset_state keywords tr state; val _ = report_indent tr st; val _ = status tr Markup.running; val (errs1, result) = run keywords true tr st; val errs2 = (case result of NONE => [] | SOME st' => check_span_comments (Toplevel.presentation_context st') span tr); val errs = errs1 @ errs2; val _ = List.app (Future.error_message (Toplevel.pos_of tr)) errs; in (case result of NONE => let val _ = status tr Markup.failed; val _ = status tr Markup.finished; val _ = if null errs then (status tr Markup.canceled; Exn.interrupt ()) else (); in {failed = true, command = tr, state = st} end | SOME st' => let val _ = if Keyword.is_theory_end keywords (Toplevel.name_of tr) andalso can (Toplevel.end_theory Position.none) st' then status tr Markup.finalized else (); val _ = status tr Markup.finished; in {failed = false, command = tr, state = st'} end) end; in fun eval keywords master_dir init blobs_info command_id span eval0 = let val exec_id = Document_ID.make (); fun process () = let val eval_state0 = eval_result eval0; val thy = read_thy (#state eval_state0); val tr = Position.setmp_thread_data (Position.id_only (Document_ID.print exec_id)) (fn () => read keywords thy master_dir init blobs_info span |> Toplevel.exec_id exec_id) (); in eval_state keywords span tr eval_state0 end; in Eval {command_id = command_id, exec_id = exec_id, eval_process = Lazy.lazy_name "Command.eval" process} end; end; (* print *) datatype print = Print of {name: string, args: string list, delay: Time.time option, pri: int, 
persistent: bool, exec_id: Document_ID.exec, print_process: unit lazy}; fun print_exec_id (Print {exec_id, ...}) = exec_id; val print_eq = op = o apply2 print_exec_id; +fun print_equiv (name', args') (Print {name, args, ...}) = name' = name andalso args' = args; type print_fn = Toplevel.transition -> Toplevel.state -> unit; type print_function = {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option; local val print_functions = Synchronized.var "Command.print_functions" ([]: (string * print_function) list); fun print_error tr opt_context e = (Toplevel.setmp_thread_position tr o Runtime.controlled_execution opt_context) e () handle exn => if Exn.is_interrupt exn then Exn.reraise exn else List.app (Future.error_message (Toplevel.pos_of tr)) (Runtime.exn_messages exn); fun print_finished (Print {print_process, ...}) = Lazy.is_finished print_process; fun print_persistent (Print {persistent, ...}) = persistent; val overlay_ord = prod_ord string_ord (list_ord string_ord); fun make_print (name, args) {delay, pri, persistent, strict, print_fn} eval = let val exec_id = Document_ID.make (); fun process () = let val {failed, command, state = st', ...} = eval_result eval; val tr = Toplevel.exec_id exec_id command; val opt_context = try Toplevel.generic_theory_of st'; in if failed andalso strict then () else print_error tr opt_context (fn () => print_fn tr st') end; in Print { name = name, args = args, delay = delay, pri = pri, persistent = persistent, exec_id = exec_id, print_process = Lazy.lazy_name "Command.print" process} end; fun bad_print name_args exn = make_print name_args {delay = NONE, pri = 0, persistent = false, strict = false, print_fn = fn _ => fn _ => Exn.reraise exn}; in fun print0 {pri, print_fn} = make_print ("", [serial_string ()]) {delay = NONE, pri = pri, persistent = true, strict = true, print_fn = print_fn}; fun print command_visible command_overlays keywords command_name eval old_prints = let val print_functions = Synchronized.value print_functions; fun new_print (name, args) get_pr = let val params = {keywords = keywords, command_name = command_name, args = args, exec_id = eval_exec_id eval}; in (case Exn.capture (Runtime.controlled_execution NONE get_pr) params of Exn.Res NONE => NONE | Exn.Res (SOME pr) => SOME (make_print (name, args) pr eval) | Exn.Exn exn => SOME (bad_print (name, args) exn eval)) end; fun get_print (name, args) = - (case find_first (fn Print print => (#name print, #args print) = (name, args)) old_prints of + (case find_first (print_equiv (name, args)) old_prints of NONE => (case AList.lookup (op =) print_functions name of NONE => SOME (bad_print (name, args) (ERROR ("Missing print function " ^ quote name)) eval) | SOME get_pr => new_print (name, args) get_pr) | some => some); val retained_prints = filter (fn print => print_finished print andalso print_persistent print) old_prints; val visible_prints = if command_visible then fold (fn (name, _) => cons (name, [])) print_functions command_overlays |> sort_distinct overlay_ord |> map_filter get_print else []; val new_prints = visible_prints @ retained_prints; in if eq_list print_eq (old_prints, new_prints) then NONE else SOME new_prints end; fun parallel_print (Print {pri, ...}) = pri <= 0 orelse (Future.enabled () andalso Options.default_bool "parallel_print"); fun print_function name f = Synchronized.change print_functions (fn funs => - (if name = "" then error 
"Unnamed print function" else (); - if not (AList.defined (op =) funs name) then () - else warning ("Redefining command print function: " ^ quote name); + (if name = "" then error "Unnamed print function" + else if AList.defined (op =) funs name then + warning ("Redefining command print function: " ^ quote name) + else (); AList.update (op =) (name, f) funs)); fun no_print_function name = Synchronized.change print_functions (filter_out (equal name o #1)); end; -val _ = - print_function "Execution.print" - (fn {args, exec_id, ...} => - if null args then - SOME {delay = NONE, pri = Task_Queue.urgent_pri + 2, persistent = false, strict = false, - print_fn = fn _ => fn _ => Execution.fork_prints exec_id} - else NONE); - -val _ = - print_function "print_state" - (fn {keywords, command_name, ...} => - if Options.default_bool "editor_output_state" andalso Keyword.is_printed keywords command_name - then - SOME {delay = NONE, pri = Task_Queue.urgent_pri + 1, persistent = false, strict = false, - print_fn = fn _ => fn st => - if Toplevel.is_proof st then Output.state (Toplevel.string_of_state st) - else ()} - else NONE); - (* combined execution *) type exec = eval * print list; fun init_exec opt_thy : exec = (Eval {command_id = Document_ID.none, exec_id = Document_ID.none, eval_process = Lazy.value (init_eval_state opt_thy)}, []); val no_exec = init_exec NONE; fun exec_ids NONE = [] | exec_ids (SOME (eval, prints)) = eval_exec_id eval :: map print_exec_id prints; local fun run_process execution_id exec_id process = let val group = Future.worker_subgroup () in if Execution.running execution_id exec_id [group] then ignore (task_context group (fn () => Lazy.force_result {strict = true} process) ()) else () end; fun ignore_process process = Lazy.is_running process orelse Lazy.is_finished process; fun run_eval execution_id (Eval {exec_id, eval_process, ...}) = if Lazy.is_finished eval_process then () else run_process execution_id exec_id eval_process; fun fork_print execution_id deps (Print {name, delay, pri, exec_id, print_process, ...}) = let val group = Future.worker_subgroup (); fun fork () = ignore ((singleton o Future.forks) {name = name, group = SOME group, deps = deps, pri = pri, interrupts = true} (fn () => if ignore_process print_process then () else run_process execution_id exec_id print_process)); in (case delay of NONE => fork () | SOME d => ignore (Event_Timer.request {physical = true} (Time.now () + d) fork)) end; fun run_print execution_id (print as Print {exec_id, print_process, ...}) = if ignore_process print_process then () else if parallel_print print then fork_print execution_id [] print else run_process execution_id exec_id print_process; in fun exec execution_id (eval, prints) = (run_eval execution_id eval; List.app (run_print execution_id) prints); fun exec_parallel_prints execution_id deps (exec as (Eval {eval_process, ...}, prints)) = if Lazy.is_finished eval_process then (List.app (fork_print execution_id deps) prints; NONE) else SOME exec; end; end; + + +(* common print functions *) + +val _ = + Command.print_function "Execution.print" + (fn {args, exec_id, ...} => + if null args then + SOME {delay = NONE, pri = Task_Queue.urgent_pri + 2, persistent = false, strict = false, + print_fn = fn _ => fn _ => Execution.fork_prints exec_id} + else NONE); + +val _ = + Command.print_function "print_state" + (fn {keywords, command_name, ...} => + if Options.default_bool "editor_output_state" andalso Keyword.is_printed keywords command_name + then + SOME {delay = NONE, pri = 
Task_Queue.urgent_pri + 1, persistent = false, strict = false, + print_fn = fn _ => fn st => + if Toplevel.is_proof st then Output.state (Toplevel.string_of_state st) + else ()} + else NONE); diff --git a/src/Pure/PIDE/document.ML b/src/Pure/PIDE/document.ML --- a/src/Pure/PIDE/document.ML +++ b/src/Pure/PIDE/document.ML @@ -1,920 +1,934 @@ (* Title: Pure/PIDE/document.ML Author: Makarius Document as collection of named nodes, each consisting of an editable list of commands, associated with asynchronous execution process. *) signature DOCUMENT = sig val timing: bool Unsynchronized.ref type node_header = {master: string, header: Thy_Header.header, errors: string list} type overlay = Document_ID.command * (string * string list) datatype node_edit = Edits of (Document_ID.command option * Document_ID.command option) list | Deps of node_header | Perspective of bool * Document_ID.command list * overlay list type edit = string * node_edit type state val init_state: state val define_blob: string -> string -> state -> state type blob_digest = {file_node: string, src_path: Path.T, digest: string option} Exn.result type command = {command_id: Document_ID.command, name: string, parents: string list, blobs_digests: blob_digest list, blobs_index: int, tokens: ((int * int) * string) list} val define_command: command -> state -> state val command_exec: state -> string -> Document_ID.command -> Command.exec option val remove_versions: Document_ID.version list -> state -> state val start_execution: state -> state val update: Document_ID.version -> Document_ID.version -> edit list -> string list -> state -> string list * Document_ID.exec list * (Document_ID.command * Document_ID.exec list) list * state val state: unit -> state val change_state: (state -> state) -> unit end; structure Document: DOCUMENT = struct val timing = Unsynchronized.ref false; fun timeit msg e = cond_timeit (! timing) msg e; (** document structure **) fun err_dup kind id = error ("Duplicate " ^ kind ^ ": " ^ Document_ID.print id); fun err_undef kind id = error ("Undefined " ^ kind ^ ": " ^ Document_ID.print id); type node_header = {master: string, header: Thy_Header.header, errors: string list}; type perspective = {required: bool, (*required node*) visible: Inttab.set, (*visible commands*) visible_last: Document_ID.command option, (*last visible command*) overlays: (string * string list) list Inttab.table}; (*command id -> print functions with args*) structure Entries = Linear_Set(type key = Document_ID.command val ord = int_ord); abstype node = Node of {header: node_header, (*master directory, theory header, errors*) keywords: Keyword.keywords option, (*outer syntax keywords*) perspective: perspective, (*command perspective*) entries: Command.exec option Entries.T, (*command entries with executions*) result: (Document_ID.command * Command.eval) option, (*result of last execution*) consolidated: unit lazy} (*consolidated status of eval forks*) and version = Version of node String_Graph.T (*development graph wrt. 
static imports*) with fun make_node (header, keywords, perspective, entries, result, consolidated) = Node {header = header, keywords = keywords, perspective = perspective, entries = entries, result = result, consolidated = consolidated}; fun map_node f (Node {header, keywords, perspective, entries, result, consolidated}) = make_node (f (header, keywords, perspective, entries, result, consolidated)); fun make_perspective (required, command_ids, overlays) : perspective = {required = required, visible = Inttab.make_set command_ids, visible_last = try List.last command_ids, overlays = Inttab.make_list overlays}; val no_header: node_header = {master = "", header = Thy_Header.make ("", Position.none) [] [], errors = []}; val no_perspective = make_perspective (false, [], []); val empty_node = make_node (no_header, NONE, no_perspective, Entries.empty, NONE, Lazy.value ()); fun is_no_perspective ({required, visible, visible_last, overlays}: perspective) = not required andalso Inttab.is_empty visible andalso is_none visible_last andalso Inttab.is_empty overlays; fun is_empty_node (Node {header, keywords, perspective, entries, result, consolidated}) = header = no_header andalso is_none keywords andalso is_no_perspective perspective andalso Entries.is_empty entries andalso is_none result andalso Lazy.is_finished consolidated; (* basic components *) fun master_directory (Node {header = {master, ...}, ...}) = (case try Url.explode master of SOME (Url.File path) => path | _ => Path.current); fun set_header master header errors = map_node (fn (_, keywords, perspective, entries, result, consolidated) => ({master = master, header = header, errors = errors}, keywords, perspective, entries, result, consolidated)); fun get_header (Node {header, ...}) = header; fun set_keywords keywords = map_node (fn (header, _, perspective, entries, result, consolidated) => (header, keywords, perspective, entries, result, consolidated)); -fun get_keywords (Node {keywords, ...}) = keywords; - fun read_header node span = let val {header, errors, ...} = get_header node; val _ = if null errors then () else cat_lines errors |> (case Position.id_of (Position.thread_data ()) of NONE => I | SOME id => Protocol_Message.command_positions_yxml id) |> error; val {name = (name, _), imports, ...} = header; val {name = (_, pos), imports = imports', keywords} = Thy_Header.read_tokens Position.none span; val imports'' = (map #1 imports ~~ map #2 imports') handle ListPair.UnequalLengths => imports; in Thy_Header.make (name, pos) imports'' keywords end; fun get_perspective (Node {perspective, ...}) = perspective; fun set_perspective args = map_node (fn (header, keywords, _, entries, result, consolidated) => (header, keywords, make_perspective args, entries, result, consolidated)); val required_node = #required o get_perspective; val visible_command = Inttab.defined o #visible o get_perspective; val visible_last = #visible_last o get_perspective; val visible_node = is_some o visible_last val overlays = Inttab.lookup_list o #overlays o get_perspective; fun map_entries f = map_node (fn (header, keywords, perspective, entries, result, consolidated) => (header, keywords, perspective, f entries, result, consolidated)); fun get_entries (Node {entries, ...}) = entries; fun iterate_entries f = Entries.iterate NONE f o get_entries; fun iterate_entries_after start f (Node {entries, ...}) = (case Entries.get_after entries start of NONE => I | SOME id => Entries.iterate (SOME id) f entries); fun get_result (Node {result, ...}) = result; fun set_result 
result = map_node (fn (header, keywords, perspective, entries, _, consolidated) => (header, keywords, perspective, entries, result, consolidated)); fun pending_result node = (case get_result node of SOME (_, eval) => not (Command.eval_finished eval) | NONE => false); fun finished_result node = (case get_result node of SOME (_, eval) => Command.eval_finished eval | NONE => false); fun finished_result_theory node = if finished_result node then let val (result_id, eval) = the (get_result node); val st = Command.eval_result_state eval; in SOME (result_id, Toplevel.end_theory Position.none st) handle ERROR _ => NONE end else NONE; +fun get_consolidated (Node {consolidated, ...}) = consolidated; + val reset_consolidated = map_node (fn (header, keywords, perspective, entries, result, _) => (header, keywords, perspective, entries, result, Lazy.lazy I)); -fun commit_consolidated (Node {consolidated, ...}) = - (Lazy.force consolidated; Output.status [Markup.markup_only Markup.consolidated]); - -fun consolidated_node (Node {consolidated, ...}) = Lazy.is_finished consolidated; - fun could_consolidate node = - not (consolidated_node node) andalso is_some (finished_result_theory node); + not (Lazy.is_finished (get_consolidated node)) andalso + is_some (finished_result_theory node); fun get_node nodes name = String_Graph.get_node nodes name handle String_Graph.UNDEF _ => empty_node; fun default_node name = String_Graph.default_node (name, empty_node); fun update_node name f = default_node name #> String_Graph.map_node name f; +(* outer syntax keywords *) + +val pure_keywords = Thy_Header.get_keywords o Theory.get_pure; + +fun theory_keywords name = + (case Thy_Info.lookup_theory name of + SOME thy => Thy_Header.get_keywords thy + | NONE => Keyword.empty_keywords); + +fun node_keywords name node = + (case node of + Node {keywords = SOME keywords, ...} => keywords + | _ => theory_keywords name); + + (* node edits and associated executions *) type overlay = Document_ID.command * (string * string list); datatype node_edit = Edits of (Document_ID.command option * Document_ID.command option) list | Deps of node_header | Perspective of bool * Document_ID.command list * overlay list; type edit = string * node_edit; val after_entry = Entries.get_after o get_entries; fun lookup_entry node id = (case Entries.lookup (get_entries node) id of NONE => NONE | SOME (exec, _) => exec); fun the_entry node id = (case Entries.lookup (get_entries node) id of NONE => err_undef "command entry" id | SOME (exec, _) => exec); fun assign_entry (command_id, exec) node = if is_none (Entries.lookup (get_entries node) command_id) then node else map_entries (Entries.update (command_id, exec)) node; fun reset_after id entries = (case Entries.get_after entries id of NONE => entries | SOME next => Entries.update (next, NONE) entries); val edit_node = map_entries o fold (fn (id, SOME id2) => Entries.insert_after id (id2, NONE) | (id, NONE) => Entries.delete_after id #> reset_after id); (* version operations *) val empty_version = Version String_Graph.empty; fun nodes_of (Version nodes) = nodes; val node_of = get_node o nodes_of; fun cycle_msg names = "Cyclic dependency of " ^ space_implode " via " (map quote names); fun edit_nodes (name, node_edit) (Version nodes) = Version (case node_edit of Edits edits => update_node name (edit_node edits) nodes | Deps {master, header, errors} => let val imports = map fst (#imports header); val nodes1 = nodes |> default_node name |> fold default_node imports; val nodes2 = nodes1 |> String_Graph.Keys.fold 
(fn dep => String_Graph.del_edge (dep, name)) (String_Graph.imm_preds nodes1 name); val (nodes3, errors1) = (String_Graph.add_deps_acyclic (name, imports) nodes2, errors) handle String_Graph.CYCLES cs => (nodes2, errors @ map cycle_msg cs); in String_Graph.map_node name (set_header master header errors1) nodes3 end | Perspective perspective => update_node name (set_perspective perspective) nodes); fun update_keywords name nodes = nodes |> String_Graph.map_node name (fn node => if is_empty_node node then node else let val {master, header, errors} = get_header node; - val imports_keywords = map_filter (get_keywords o get_node nodes o #1) (#imports header); - val keywords = - Library.foldl Keyword.merge_keywords (Session.get_keywords (), imports_keywords); + val imports_keywords = map (node_keywords name o get_node nodes o #1) (#imports header); + val keywords = Library.foldl Keyword.merge_keywords (pure_keywords (), imports_keywords); val (keywords', errors') = (Keyword.add_keywords (#keywords header) keywords, errors) handle ERROR msg => (keywords, if member (op =) errors msg then errors else errors @ [msg]); in node |> set_header master header errors' |> set_keywords (SOME keywords') end); fun edit_keywords edits (Version nodes) = Version (fold update_keywords (String_Graph.all_succs nodes (map_filter (fn (a, Deps _) => SOME a | _ => NONE) edits)) nodes); fun suppressed_node (nodes: node String_Graph.T) (name, node) = String_Graph.is_maximal nodes name andalso is_empty_node node; fun put_node (name, node) (Version nodes) = let val nodes1 = update_node name (K node) nodes; val nodes2 = if suppressed_node nodes1 (name, node) then String_Graph.del_node name nodes1 else nodes1; in Version nodes2 end; end; (** main state -- document structure and execution process **) type blob_digest = {file_node: string, src_path: Path.T, digest: string option} Exn.result; type execution = {version_id: Document_ID.version, (*static version id*) execution_id: Document_ID.execution, (*dynamic execution id*) delay_request: unit future, (*pending event timer request*) parallel_prints: Command.exec list}; (*parallel prints for early execution*) val no_execution: execution = {version_id = Document_ID.none, execution_id = Document_ID.none, delay_request = Future.value (), parallel_prints = []}; fun new_execution version_id delay_request parallel_prints : execution = {version_id = version_id, execution_id = Execution.start (), delay_request = delay_request, parallel_prints = parallel_prints}; abstype state = State of {versions: version Inttab.table, (*version id -> document content*) blobs: (SHA1.digest * string list) Symtab.table, (*raw digest -> digest, lines*) commands: (string * blob_digest list * int * Token.T list lazy) Inttab.table, (*command id -> name, inlined files, token index of files, command span*) execution: execution} (*current execution process*) with fun make_state (versions, blobs, commands, execution) = State {versions = versions, blobs = blobs, commands = commands, execution = execution}; fun map_state f (State {versions, blobs, commands, execution}) = make_state (f (versions, blobs, commands, execution)); val init_state = make_state (Inttab.make [(Document_ID.none, empty_version)], Symtab.empty, Inttab.empty, no_execution); (* document versions *) fun parallel_prints_node node = iterate_entries (fn (_, opt_exec) => fn rev_result => (case opt_exec of SOME (eval, prints) => (case filter Command.parallel_print prints of [] => SOME rev_result | prints' => SOME ((eval, prints') :: rev_result)) | NONE 
=> NONE)) node [] |> rev; fun define_version version_id version assigned_nodes = map_state (fn (versions, blobs, commands, {delay_request, ...}) => let val version' = fold put_node assigned_nodes version; val versions' = Inttab.update_new (version_id, version') versions handle Inttab.DUP dup => err_dup "document version" dup; val parallel_prints = maps (parallel_prints_node o #2) assigned_nodes; val execution' = new_execution version_id delay_request parallel_prints; in (versions', blobs, commands, execution') end); fun the_version (State {versions, ...}) version_id = (case Inttab.lookup versions version_id of NONE => err_undef "document version" version_id | SOME version => version); fun delete_version version_id versions = Inttab.delete version_id versions handle Inttab.UNDEF _ => err_undef "document version" version_id; (* inlined files *) fun define_blob digest text = map_state (fn (versions, blobs, commands, execution) => let val blobs' = Symtab.update (digest, (SHA1.fake digest, split_lines text)) blobs in (versions, blobs', commands, execution) end); fun the_blob (State {blobs, ...}) digest = (case Symtab.lookup blobs digest of NONE => error ("Undefined blob: " ^ digest) | SOME content => content); fun resolve_blob state (blob_digest: blob_digest) = blob_digest |> Exn.map_res (fn {file_node, src_path, digest} => {file_node = file_node, src_path = src_path, content = Option.map (the_blob state) digest}); (* commands *) type command = {command_id: Document_ID.command, name: string, parents: string list, blobs_digests: blob_digest list, blobs_index: int, tokens: ((int * int) * string) list}; fun define_command {command_id, name, parents, blobs_digests, blobs_index, tokens} = map_state (fn (versions, blobs, commands, execution) => let val id = Document_ID.print command_id; val span = Lazy.lazy_name "Document.define_command" (fn () => Position.setmp_thread_data (Position.id_only id) (fn () => let val (tokens, _) = fold_map Token.make tokens (Position.id id); val imports = if name = Thy_Header.theoryN then (#imports (Thy_Header.read_tokens Position.none tokens) handle ERROR _ => []) else []; val _ = if length parents = length imports then (parents, imports) |> ListPair.app (fn (parent, (_, pos)) => let val markup = (case Thy_Info.lookup_theory parent of SOME thy => Theory.get_markup thy | NONE => (case try Url.explode parent of SOME (Url.File path) => Markup.path (Path.implode path) | _ => Markup.path parent)) in Position.report pos markup end) else (); val _ = if blobs_index < 0 then (*inlined errors*) map_filter Exn.get_exn blobs_digests |> List.app (Output.error_message o Runtime.exn_message) else (*auxiliary files*) let val pos = Token.pos_of (nth tokens blobs_index); fun reports (Exn.Res {file_node, ...}) = [(pos, Markup.path file_node)] | reports _ = []; in Position.reports (maps reports blobs_digests) end; in tokens end) ()); val commands' = Inttab.update_new (command_id, (name, blobs_digests, blobs_index, span)) commands handle Inttab.DUP dup => err_dup "command" dup; in (versions, blobs, commands', execution) end); fun the_command (State {commands, ...}) command_id = (case Inttab.lookup commands command_id of NONE => err_undef "command" command_id | SOME command => command); val the_command_name = #1 oo the_command; (* execution *) fun get_execution (State {execution, ...}) = execution; fun get_execution_version state = the_version state (#version_id (get_execution state)); fun command_exec state node_name command_id = let val version = get_execution_version state; val node = 
get_node (nodes_of version) node_name; in the_entry node command_id end; end; (* remove_versions *) fun remove_versions version_ids state = state |> map_state (fn (versions, _, _, execution) => let val _ = member (op =) version_ids (#version_id execution) andalso error ("Attempt to remove execution version " ^ Document_ID.print (#version_id execution)); val versions' = fold delete_version version_ids versions; val commands' = Inttab.build (versions' |> Inttab.fold (fn (_, version) => nodes_of version |> String_Graph.fold (fn (_, (node, _)) => node |> iterate_entries (fn ((_, command_id), _) => SOME o Inttab.insert (K true) (command_id, the_command state command_id))))); val blobs' = Symtab.build (commands' |> Inttab.fold (fn (_, (_, blobs, _, _)) => blobs |> fold (fn Exn.Res {digest = SOME b, ...} => Symtab.update (b, the_blob state b) | _ => I))); in (versions', blobs', commands', execution) end); (* document execution *) fun make_required nodes = let fun all_preds P = String_Graph.fold (fn (a, (node, _)) => P node ? cons a) nodes [] |> String_Graph.all_preds nodes |> Symtab.make_set; val all_visible = all_preds visible_node; val all_required = all_preds required_node; in Symtab.fold (fn (a, ()) => exists (Symtab.defined all_visible) (String_Graph.immediate_succs nodes a) ? Symtab.update (a, ())) all_visible all_required end; fun start_execution state = state |> map_state (fn (versions, blobs, commands, execution) => timeit "Document.start_execution" (fn () => let val {version_id, execution_id, delay_request, parallel_prints} = execution; val delay = seconds (Options.default_real "editor_execution_delay"); val _ = Future.cancel delay_request; val delay_request' = Event_Timer.future {physical = true} (Time.now () + delay); val delay = Future.task_of delay_request'; val parallel_prints' = parallel_prints |> map_filter (Command.exec_parallel_prints execution_id [delay]); fun finished_import (name, (node, _)) = - finished_result node orelse is_some (Thy_Info.lookup_theory name); + finished_result node orelse Thy_Info.defined_theory name; val nodes = nodes_of (the_version state version_id); val required = make_required nodes; val _ = nodes |> String_Graph.schedule (fn deps => fn (name, node) => if Symtab.defined required name orelse visible_node node orelse pending_result node then let val future = (singleton o Future.forks) {name = "theory:" ^ name, group = SOME (Future.new_group NONE), deps = delay :: Execution.active_tasks name @ maps (the_list o #2 o #2) deps, pri = 0, interrupts = false} (fn () => (Execution.worker_task_active true name; if forall finished_import deps then iterate_entries (fn (_, opt_exec) => fn () => (case opt_exec of SOME exec => if Execution.is_running execution_id then SOME (Command.exec execution_id exec) else NONE | NONE => NONE)) node () else (); Execution.worker_task_active false name) handle exn => (Output.system_message (Runtime.exn_message exn); Execution.worker_task_active false name; Exn.reraise exn)); in (node, SOME (Future.task_of future)) end else (node, NONE)); val execution' = {version_id = version_id, execution_id = execution_id, delay_request = delay_request', parallel_prints = parallel_prints'}; in (versions, blobs, commands, execution') end)); (** document update **) (* exec state assignment *) type assign_update = Command.exec option Inttab.table; (*command id -> exec*) val assign_update_empty: assign_update = Inttab.empty; fun assign_update_defined (tab: assign_update) command_id = Inttab.defined tab command_id; -fun assign_update_change entry 
(tab: assign_update) = Inttab.update entry tab; +fun assign_update_change entry (tab: assign_update) : assign_update = Inttab.update entry tab; fun assign_update_apply (tab: assign_update) node = Inttab.fold assign_entry tab node; fun assign_update_new upd (tab: assign_update) = Inttab.update_new upd tab handle Inttab.DUP dup => err_dup "exec state assignment" dup; fun assign_update_result (tab: assign_update) = Inttab.fold (fn (command_id, exec) => cons (command_id, Command.exec_ids exec)) tab []; (* update *) local fun init_theory deps node span = let val master_dir = master_directory node; val header = read_header node span; val imports = #imports header; fun maybe_eval_result eval = Command.eval_result_state eval - handle Fail _ => Toplevel.init_toplevel (); + handle Fail _ => Toplevel.make_state NONE; fun maybe_end_theory pos st = SOME (Toplevel.end_theory pos st) handle ERROR msg => (Output.error_message msg; NONE); val parents_reports = imports |> map_filter (fn (import, pos) => (case Thy_Info.lookup_theory import of NONE => maybe_end_theory pos (case get_result (snd (the (AList.lookup (op =) deps import))) of - NONE => Toplevel.init_toplevel () + NONE => Toplevel.make_state NONE | SOME (_, eval) => maybe_eval_result eval) |> Option.map (fn thy => (thy, (pos, Theory.get_markup thy))) | SOME thy => SOME (thy, (Position.none, Markup.empty)))); val parents = if null parents_reports then [Theory.get_pure ()] else map #1 parents_reports; val _ = List.app (fn (thy, r) => Context_Position.reports_global thy [r]) parents_reports; val thy = Resources.begin_theory master_dir header parents; val _ = Output.status [Markup.markup_only Markup.initialized]; in thy end; fun check_root_theory node = let val master_dir = master_directory node; val header = #header (get_header node); val header_name = #1 (#name header); val parent = if header_name = Sessions.root_name then SOME (Thy_Info.get_theory Sessions.theory_name) else if member (op =) Thy_Header.ml_roots header_name then SOME (Thy_Info.get_theory Thy_Header.ml_bootstrapN) else NONE; in parent |> Option.map (fn thy => Resources.begin_theory master_dir header [thy]) end; fun check_theory full name node = - is_some (Thy_Info.lookup_theory name) orelse + Thy_Info.defined_theory name orelse null (#errors (get_header node)) andalso (not full orelse is_some (get_result node)); fun last_common keywords state node_required node0 node = let fun update_flags prev (visible, initial) = let val visible' = visible andalso prev <> visible_last node; val initial' = initial andalso (case prev of NONE => true | SOME command_id => the_command_name state command_id <> Thy_Header.theoryN); in (visible', initial') end; fun get_common ((prev, command_id), opt_exec) (_, ok, flags, assign_update) = if ok then let val flags' as (visible', _) = update_flags prev flags; val ok' = (case (lookup_entry node0 command_id, opt_exec) of (SOME (eval0, _), SOME (eval, _)) => Command.eval_eq (eval0, eval) andalso (visible' orelse node_required orelse Command.eval_running eval) | _ => false); val assign_update' = assign_update |> ok' ? 
(case opt_exec of SOME (eval, prints) => let val command_visible = visible_command node command_id; val command_overlays = overlays node command_id; val command_name = the_command_name state command_id; + val command_print = + Command.print command_visible command_overlays keywords command_name eval prints; in - (case - Command.print command_visible command_overlays keywords command_name eval prints - of + (case command_print of SOME prints' => assign_update_new (command_id, SOME (eval, prints')) | NONE => I) end | NONE => I); in SOME (prev, ok', flags', assign_update') end else NONE; val (common, ok, flags, assign_update') = iterate_entries get_common node (NONE, true, (true, true), assign_update_empty); val (common', flags') = if ok then let val last = Entries.get_after (get_entries node) common in (last, update_flags last flags) end else (common, flags); in (assign_update', common', flags') end; fun illegal_init _ = error "Illegal theory header"; fun new_exec keywords state node proper_init command_id' (assign_update, command_exec, init) = if not proper_init andalso is_none init then NONE else let val command_visible = visible_command node command_id'; val command_overlays = overlays node command_id'; val (command_name, blob_digests, blobs_index, span0) = the_command state command_id'; val blobs = map (resolve_blob state) blob_digests; val span = Lazy.force span0; val eval' = Command.eval keywords (master_directory node) (fn () => the_default illegal_init init span) (blobs, blobs_index) command_id' span (#1 (#2 command_exec)); val prints' = perhaps (Command.print command_visible command_overlays keywords command_name eval') []; val exec' = (eval', prints'); val assign_update' = assign_update_new (command_id', SOME exec') assign_update; val init' = if command_name = Thy_Header.theoryN then NONE else init; in SOME (assign_update', (command_id', exec'), init') end; fun removed_execs node0 (command_id, exec_ids) = subtract (op =) exec_ids (Command.exec_ids (lookup_entry node0 command_id)); fun print_consolidation options the_command_span node_name (assign_update, node) = (case finished_result_theory node of SOME (result_id, thy) => - let - val active_tasks = - (node, false) |-> iterate_entries (fn (_, opt_exec) => fn active => - if active then NONE - else - (case opt_exec of - NONE => NONE - | SOME (eval, _) => - SOME (not (null (Execution.snapshot [Command.eval_exec_id eval]))))); - in - if not active_tasks then - let - val consolidation = - if Options.bool options "editor_presentation" then - let - val (_, offsets, rev_segments) = - iterate_entries (fn ((prev, _), opt_exec) => fn (offset, offsets, segments) => - (case opt_exec of - SOME (eval, _) => - let - val command_id = Command.eval_command_id eval; - val span = the_command_span command_id; - - val st = - (case try (#1 o the o the_entry node o the) prev of - NONE => Toplevel.init_toplevel () - | SOME prev_eval => Command.eval_result_state prev_eval); - - val exec_id = Command.eval_exec_id eval; - val tr = Command.eval_result_command eval; - val st' = Command.eval_result_state eval; + timeit "Document.print_consolidation" (fn () => + let + val active_tasks = + (node, false) |-> iterate_entries (fn (_, opt_exec) => fn active => + if active then NONE + else + (case opt_exec of + NONE => NONE + | SOME (eval, _) => + SOME (not (null (Execution.snapshot [Command.eval_exec_id eval]))))); + in + if not active_tasks then + let + fun commit_consolidated () = + (Lazy.force (get_consolidated node); + Output.status [Markup.markup_only 
Markup.consolidated]); + val consolidation = + if Options.bool options "editor_presentation" then + let + val (_, offsets, rev_segments) = + iterate_entries (fn ((prev, _), opt_exec) => fn (offset, offsets, segments) => + (case opt_exec of + SOME (eval, _) => + let + val command_id = Command.eval_command_id eval; + val span = the_command_span command_id; - val offset' = offset + the_default 0 (Command_Span.symbol_length span); - val offsets' = offsets - |> Inttab.update (command_id, offset) - |> Inttab.update (exec_id, offset); - val segments' = (span, (st, tr, st')) :: segments; - in SOME (offset', offsets', segments') end - | NONE => NONE)) node (0, Inttab.empty, []); - - val adjust = Inttab.lookup offsets; - val segments = - rev rev_segments |> map (fn (span, (st, tr, st')) => - {span = Command_Span.adjust_offsets adjust span, - prev_state = st, command = tr, state = st'}); + val st = + (case try (#1 o the o the_entry node o the) prev of + NONE => Toplevel.make_state NONE + | SOME prev_eval => Command.eval_result_state prev_eval); - val presentation_context: Thy_Info.presentation_context = - {options = options, - file_pos = Position.file node_name, - adjust_pos = Position.adjust_offsets adjust, - segments = segments}; - in - fn _ => + val exec_id = Command.eval_exec_id eval; + val tr = Command.eval_result_command eval; + val st' = Command.eval_result_state eval; + + val offset' = offset + the_default 0 (Command_Span.symbol_length span); + val offsets' = offsets + |> Inttab.update (command_id, offset) + |> Inttab.update (exec_id, offset); + val segments' = (span, (st, tr, st')) :: segments; + in SOME (offset', offsets', segments') end + | NONE => NONE)) node (0, Inttab.empty, []); + + val adjust = Inttab.lookup offsets; + val segments = + rev rev_segments |> map (fn (span, (st, tr, st')) => + {span = Command_Span.adjust_offsets adjust span, + prev_state = st, command = tr, state = st'}); + + val presentation_context: Thy_Info.presentation_context = + {options = options, + file_pos = Position.file node_name, + adjust_pos = Position.adjust_offsets adjust, + segments = segments}; + in + fn _ => + let + val _ = Output.status [Markup.markup_only Markup.consolidating]; + val res = Exn.capture (Thy_Info.apply_presentation presentation_context) thy; + val _ = commit_consolidated (); + in Exn.release res end + end + else fn _ => commit_consolidated (); + + val result_entry = + (case lookup_entry node result_id of + NONE => err_undef "result command entry" result_id + | SOME (eval, prints) => let - val _ = Output.status [Markup.markup_only Markup.consolidating]; - val res = Exn.capture (Thy_Info.apply_presentation presentation_context) thy; - val _ = commit_consolidated node; - in Exn.release res end - end - else fn _ => commit_consolidated node; + val print = eval |> Command.print0 + {pri = Task_Queue.urgent_pri + 1, print_fn = K consolidation}; + in (result_id, SOME (eval, print :: prints)) end); - val result_entry = - (case lookup_entry node result_id of - NONE => err_undef "result command entry" result_id - | SOME (eval, prints) => - let - val print = eval |> Command.print0 - {pri = Task_Queue.urgent_pri + 1, print_fn = K consolidation}; - in (result_id, SOME (eval, print :: prints)) end); - - val assign_update' = assign_update |> assign_update_change result_entry; - val node' = node |> assign_entry result_entry; - in (assign_update', node') end - else (assign_update, node) - end + val assign_update' = assign_update |> assign_update_change result_entry; + val node' = node |> assign_entry 
result_entry; + in (assign_update', node') end + else (assign_update, node) + end) | NONE => (assign_update, node)); in fun update old_version_id new_version_id edits consolidate state = Runtime.exn_trace_system (fn () => let val options = Options.default (); val the_command_span = Outer_Syntax.make_span o Lazy.force o #4 o the_command state; val old_version = the_version state old_version_id; val new_version = timeit "Document.edit_nodes" (fn () => old_version |> fold edit_nodes edits |> edit_keywords edits); val consolidate = Symtab.defined (Symtab.make_set consolidate); val nodes = nodes_of new_version; val required = make_required nodes; val required0 = make_required (nodes_of old_version); val edited = Symtab.build (edits |> fold (fn (name, _) => Symtab.update (name, ()))); val updated = timeit "Document.update" (fn () => nodes |> String_Graph.schedule (fn deps => fn (name, node) => (singleton o Future.forks) {name = "Document.update", group = NONE, deps = map (Future.task_of o #2) deps, pri = 1, interrupts = false} (fn () => timeit ("Document.update " ^ name) (fn () => Runtime.exn_trace_system (fn () => let val root_theory = check_root_theory node; - val keywords = the_default (Session.get_keywords ()) (get_keywords node); + val keywords = node_keywords name node; val maybe_consolidate = consolidate name andalso could_consolidate node; val imports = map (apsnd Future.join) deps; val imports_result_changed = exists (#4 o #1 o #2) imports; val node_required = Symtab.defined required name; in if Symtab.defined edited name orelse maybe_consolidate orelse visible_node node orelse imports_result_changed orelse Symtab.defined required0 name <> node_required then let val node0 = node_of old_version name; val init = init_theory imports node; val proper_init = is_some root_theory orelse check_theory false name node andalso forall (fn (name, (_, node)) => check_theory true name node) imports; val (print_execs, common, (still_visible, initial)) = if imports_result_changed then (assign_update_empty, NONE, (true, true)) else last_common keywords state node_required node0 node; val common_command_exec = (case common of SOME id => (id, the_default Command.no_exec (the_entry node id)) | NONE => (Document_ID.none, Command.init_exec root_theory)); val (updated_execs, (command_id', exec'), _) = (print_execs, common_command_exec, if initial then SOME init else NONE) |> (still_visible orelse node_required) ? iterate_entries_after common (fn ((prev, id), _) => fn res => if not node_required andalso prev = visible_last node then NONE else new_exec keywords state node proper_init id res) node; val assign_update = (node0, updated_execs) |-> iterate_entries_after common (fn ((_, command_id0), exec0) => fn res => if is_none exec0 then NONE else if assign_update_defined updated_execs command_id0 then SOME res else SOME (assign_update_new (command_id0, NONE) res)); val last_exec = if command_id' = Document_ID.none then NONE else SOME command_id'; val result = if is_none last_exec orelse is_some (after_entry node last_exec) then NONE else SOME (command_id', #1 exec'); val result_changed = not (eq_option (Command.eval_eq o apply2 #2) (get_result node0, result)); val (assign_update', node') = node |> assign_update_apply assign_update |> set_result result |> result_changed ? reset_consolidated |> pair assign_update |> (not result_changed andalso maybe_consolidate) ? 
print_consolidation options the_command_span name; val assign_result = assign_update_result assign_update'; val removed = maps (removed_execs node0) assign_result; val _ = List.app Execution.cancel removed; val assigned_node = SOME (name, node'); in ((removed, assign_result, assigned_node, result_changed), node') end else (([], [], NONE, false), node) end)))) |> Future.joins |> map #1); val removed = maps #1 updated; val assign_result = maps #2 updated; val assigned_nodes = map_filter #3 updated; val state' = state |> define_version new_version_id new_version assigned_nodes; in (Symtab.keys edited, removed, assign_result, state') end); end; (** global state **) val global_state = Synchronized.var "Document.global_state" init_state; fun state () = Synchronized.value global_state; val change_state = Synchronized.change global_state; end; diff --git a/src/Pure/PIDE/query_operation.ML b/src/Pure/PIDE/query_operation.ML --- a/src/Pure/PIDE/query_operation.ML +++ b/src/Pure/PIDE/query_operation.ML @@ -1,52 +1,51 @@ (* Title: Pure/PIDE/query_operation.ML Author: Makarius One-shot query operations via asynchronous print functions and temporary document overlays. *) signature QUERY_OPERATION = sig val register: {name: string, pri: int} -> ({state: Toplevel.state, args: string list, output_result: string -> unit, writeln_result: string -> unit} -> unit) -> unit end; structure Query_Operation: QUERY_OPERATION = struct fun register {name, pri} f = Command.print_function (name ^ "_query") (fn {args = instance :: args, ...} => SOME {delay = NONE, pri = pri, persistent = false, strict = false, print_fn = fn _ => Thread_Attributes.uninterruptible (fn restore_attributes => fn state => let fun output_result s = Output.result [(Markup.instanceN, instance)] [s]; fun status m = output_result (Markup.markup_only m); fun writeln_result s = output_result (Markup.markup Markup.writeln s); fun toplevel_error exn = output_result (Markup.markup Markup.error (Runtime.exn_message exn)); val _ = status Markup.running; fun run () = f {state = state, args = args, output_result = output_result, writeln_result = writeln_result}; val _ = (case Exn.capture (*sic!*) (restore_attributes run) () of Exn.Res () => () | Exn.Exn exn => toplevel_error exn); val _ = status Markup.finished; in () end)} | _ => NONE); +end; (* print_state *) val _ = - register {name = "print_state", pri = Task_Queue.urgent_pri} + Query_Operation.register {name = "print_state", pri = Task_Queue.urgent_pri} (fn {state = st, output_result, ...} => if Toplevel.is_proof st then output_result (Markup.markup Markup.state (Toplevel.string_of_state st)) else ()); - -end; diff --git a/src/Pure/PIDE/session.ML b/src/Pure/PIDE/session.ML --- a/src/Pure/PIDE/session.ML +++ b/src/Pure/PIDE/session.ML @@ -1,59 +1,45 @@ (* Title: Pure/PIDE/session.ML Author: Makarius Prover session: persistent state of logic image. 
*)

signature SESSION =
sig
  val init: string -> unit
  val get_name: unit -> string
  val welcome: unit -> string
- val get_keywords: unit -> Keyword.keywords
  val shutdown: unit -> unit
  val finish: unit -> unit
end;

structure Session: SESSION =
struct

(* session name *)

val session = Synchronized.var "Session.session" "";

fun init name = Synchronized.change session (K name);
fun get_name () = Synchronized.value session;

fun description () =
  (case get_name () of "" => "Isabelle" | name => "Isabelle/" ^ name);

fun welcome () =
  "Welcome to " ^ description () ^ Isabelle_System.isabelle_heading ();


-(* base syntax *)
-
-val keywords = Synchronized.var "Session.keywords" Keyword.empty_keywords;
-
-fun get_keywords () = Synchronized.value keywords;
-
-fun update_keywords () =
-  Synchronized.change keywords
-    (K (fold (curry Keyword.merge_keywords o Thy_Header.get_keywords o Thy_Info.get_theory)
-      (Thy_Info.get_names ()) Keyword.empty_keywords));
-
-
(* finish *)

fun shutdown () =
  (Execution.shutdown (); Event_Timer.shutdown (); Future.shutdown ());

fun finish () =
  (shutdown ();
   Par_List.map (Global_Theory.get_thm_names o Thy_Info.get_theory) (Thy_Info.get_names ());
   Thy_Info.finish ();
-  shutdown ();
-  update_keywords ());
+  shutdown ());

end;
diff --git a/src/Pure/PIDE/session.scala b/src/Pure/PIDE/session.scala
--- a/src/Pure/PIDE/session.scala
+++ b/src/Pure/PIDE/session.scala
@@ -1,751 +1,751 @@
/* Title: Pure/PIDE/session.scala
   Author: Makarius
   Options: :folding=explicit:

PIDE editor session, potentially with running prover process.
*/

package isabelle

import scala.collection.immutable.Queue
import scala.collection.mutable
import scala.annotation.tailrec

object Session {
  /* outlets */

  object Consumer {
    def apply[A](name: String)(consume: A => Unit): Consumer[A] =
      new Consumer[A](name, consume)
  }
  final class Consumer[-A] private(val name: String, val consume: A => Unit)

  class Outlet[A](dispatcher: Consumer_Thread[() => Unit]) {
    private val consumers = Synchronized[List[Consumer[A]]](Nil)

    def += (c: Consumer[A]): Unit = consumers.change(Library.update(c))
    def -= (c: Consumer[A]): Unit = consumers.change(Library.remove(c))

    def post(a: A): Unit = {
      for (c <- consumers.value.iterator) {
        dispatcher.send(() =>
          try { c.consume(a) }
          catch {
            case exn: Throwable =>
              Output.error_message("Consumer failed: " + quote(c.name) + "\n" + Exn.message(exn))
          })
      }
    }
  }


  /* change */

  sealed case class Change(
    previous: Document.Version,
    syntax_changed: List[Document.Node.Name],
    deps_changed: Boolean,
    doc_edits: List[Document.Edit_Command],
    consolidate: List[Document.Node.Name],
    version: Document.Version)

  case object Change_Flush


  /* events */

  //{{{
  case class Command_Timing(props: Properties.T)
  case class Theory_Timing(props: Properties.T)
  case class Runtime_Statistics(props: Properties.T)
  case class Task_Statistics(props: Properties.T)
  case class Global_Options(options: Options)
  case object Caret_Focus
  case class Raw_Edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text])
  case class Dialog_Result(id: Document_ID.Generic, serial: Long, result: String)
  case class Commands_Changed(
    assignment: Boolean, nodes: Set[Document.Node.Name], commands: Set[Command])

  sealed abstract class Phase {
    def print: String =
      this match {
        case Terminated(result) => if (result.ok) "finished" else "failed"
        case _ => Word.lowercase(this.toString)
      }
  }
  case object Inactive extends Phase  // stable
  case object Startup extends Phase  // transient
  case object Ready extends Phase  // metastable
  case object Shutdown extends Phase  // transient
  case class Terminated(result: Process_Result) extends Phase  // stable
  //}}}


  /* syslog */

  private[Session] class Syslog(limit: Int) {
    private var queue = Queue.empty[XML.Elem]
    private var length = 0

    def += (msg: XML.Elem): Unit = synchronized {
      queue = queue.enqueue(msg)
      length += 1
      if (length > limit) queue = queue.dequeue._2
    }

    def content: String = synchronized {
      cat_lines(queue.iterator.map(XML.content)) +
      (if (length > limit) "\n(A total of " + length + " messages...)" else "")
    }
  }


  /* protocol handlers */

  type Protocol_Function = Prover.Protocol_Output => Boolean
  type Protocol_Functions = List[(String, Protocol_Function)]

  abstract class Protocol_Handler extends Isabelle_System.Service {
    def init(session: Session): Unit = {}
    def exit(): Unit = {}
    def functions: Protocol_Functions = Nil
    def prover_options(options: Options): Options = options
  }
}


class Session(_session_options: => Options, val resources: Resources) extends Document.Session {
  session =>

  val cache: Term.Cache = Term.Cache.make()

  def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = Command.Blobs_Info.none


  /* global flags */

  @volatile var timing: Boolean = false
  @volatile var verbose: Boolean = false


  /* dynamic session options */

  def session_options: Options = _session_options

  def output_delay: Time = session_options.seconds("editor_output_delay")
  def consolidate_delay: Time = session_options.seconds("editor_consolidate_delay")
  def prune_delay: Time = session_options.seconds("editor_prune_delay")
  def prune_size: Int = session_options.int("editor_prune_size")
  def syslog_limit: Int = session_options.int("editor_syslog_limit")
  def reparse_limit: Int = session_options.int("editor_reparse_limit")


  /* dispatcher */

  private val dispatcher =
    Consumer_Thread.fork[() => Unit]("Session.dispatcher", daemon = true) { case e => e(); true }

  def assert_dispatcher[A](body: => A): A = {
    assert(dispatcher.check_thread())
    body
  }

  def require_dispatcher[A](body: => A): A = {
    require(dispatcher.check_thread(), "not on dispatcher thread")
    body
  }

  def send_dispatcher(body: => Unit): Unit = {
    if (dispatcher.check_thread()) body
    else dispatcher.send(() => body)
  }

  def send_wait_dispatcher(body: => Unit): Unit = {
    if (dispatcher.check_thread()) body
    else dispatcher.send_wait(() => body)
  }


  /* outlets */

  val finished_theories = new Session.Outlet[Document.Snapshot](dispatcher)
  val command_timings = new Session.Outlet[Session.Command_Timing](dispatcher)
  val theory_timings = new Session.Outlet[Session.Theory_Timing](dispatcher)
  val runtime_statistics = new Session.Outlet[Session.Runtime_Statistics](dispatcher)
  val task_statistics = new Session.Outlet[Session.Task_Statistics](dispatcher)
  val global_options = new Session.Outlet[Session.Global_Options](dispatcher)
  val caret_focus = new Session.Outlet[Session.Caret_Focus.type](dispatcher)
  val raw_edits = new Session.Outlet[Session.Raw_Edits](dispatcher)
  val commands_changed = new Session.Outlet[Session.Commands_Changed](dispatcher)
  val phase_changed = new Session.Outlet[Session.Phase](dispatcher)
  val syslog_messages = new Session.Outlet[Prover.Output](dispatcher)
  val raw_output_messages = new Session.Outlet[Prover.Output](dispatcher)
  val trace_events = new Session.Outlet[Simplifier_Trace.Event.type](dispatcher)
  val debugger_updates = new Session.Outlet[Debugger.Update.type](dispatcher)

  val all_messages = new Session.Outlet[Prover.Message](dispatcher)  // potential bottle-neck!
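  // Usage note on the Outlet/Consumer pattern above: external code subscribes by
  // registering a Consumer on one of these outlets; events posted via Outlet.post are
  // delivered one at a time on the session dispatcher thread.  A minimal sketch follows,
  // kept in comment form because it is illustrative only and not part of this changeset;
  // the consumer name "log_theory_timing" and the plain writeln output are assumptions.
  //
  //   val log_theory_timing =
  //     Session.Consumer[Session.Theory_Timing]("log_theory_timing") { timing =>
  //       Output.writeln("theory timing: " + timing.props.toString)
  //     }
  //   theory_timings += log_theory_timing   // subscribe
  //   theory_timings -= log_theory_timing   // unsubscribe when no longer needed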
/** main protocol manager **/ /* internal messages */ private case class Start(start_prover: Prover.Receiver => Prover) private case object Stop private case class Get_State(promise: Promise[Document.State]) private case class Cancel_Exec(exec_id: Document_ID.Exec) private case class Protocol_Command_Raw(name: String, args: List[Bytes]) private case class Protocol_Command_Args(name: String, args: List[String]) private case class Update_Options(options: Options) private case object Consolidate_Execution private case object Prune_History /* phase */ private def post_phase(new_phase: Session.Phase): Session.Phase = { phase_changed.post(new_phase) new_phase } private val _phase = Synchronized[Session.Phase](Session.Inactive) private def phase_=(new_phase: Session.Phase): Unit = _phase.change(_ => post_phase(new_phase)) def phase: Session.Phase = _phase.value def is_ready: Boolean = phase == Session.Ready /* syslog */ private val syslog = new Session.Syslog(syslog_limit) def syslog_content(): String = syslog.content /* pipelined change parsing */ private case class Text_Edits( previous: Future[Document.Version], doc_blobs: Document.Blobs, text_edits: List[Document.Edit_Text], consolidate: List[Document.Node.Name], version_result: Promise[Document.Version]) private val change_parser = Consumer_Thread.fork[Text_Edits]("change_parser", daemon = true) { case Text_Edits(previous, doc_blobs, text_edits, consolidate, version_result) => val prev = previous.get_finished val change = Timing.timeit("parse_change", timing) { resources.parse_change(reparse_limit, prev, doc_blobs, text_edits, consolidate) } version_result.fulfill(change.version) manager.send(change) true } /* buffered changes */ private object change_buffer { private var assignment: Boolean = false private var nodes: Set[Document.Node.Name] = Set.empty private var commands: Set[Command] = Set.empty def flush(): Unit = synchronized { if (assignment || nodes.nonEmpty || commands.nonEmpty) commands_changed.post(Session.Commands_Changed(assignment, nodes, commands)) if (nodes.nonEmpty) consolidation.update(more_nodes = nodes) assignment = false nodes = Set.empty commands = Set.empty } private val delay_flush = Delay.first(output_delay) { flush() } def invoke(assign: Boolean, edited_nodes: List[Document.Node.Name], cmds: List[Command]): Unit = synchronized { assignment |= assign for (node <- edited_nodes) { nodes += node } for (command <- cmds) { nodes += command.node_name command.blobs_names.foreach(nodes += _) commands += command } delay_flush.invoke() } def shutdown(): Unit = { delay_flush.revoke() flush() } } /* postponed changes */ private object postponed_changes { private var postponed: List[Session.Change] = Nil def store(change: Session.Change): Unit = synchronized { postponed ::= change } def flush(state: Document.State): List[Session.Change] = synchronized { val (assigned, unassigned) = postponed.partition(change => state.is_assigned(change.previous)) postponed = unassigned assigned.reverse } } /* node consolidation */ private object consolidation { private val delay = Delay.first(consolidate_delay) { manager.send(Consolidate_Execution) } private val init_state: Option[Set[Document.Node.Name]] = Some(Set.empty) private val state = Synchronized(init_state) def exit(): Unit = { delay.revoke() state.change(_ => None) } def update(more_nodes: Set[Document.Node.Name] = Set.empty): Unit = { val active = state.change_result(st => (st.isDefined, st.map(nodes => if (nodes.isEmpty) more_nodes else nodes ++ more_nodes))) if (active) 
delay.invoke() } def flush(): Set[Document.Node.Name] = state.change_result(st => if (st.isDefined) (st.get, init_state) else (Set.empty, None)) } /* prover process */ private object prover { private val variable = Synchronized[Option[Prover]](None) def defined: Boolean = variable.value.isDefined def get: Prover = variable.value.get def set(p: Prover): Unit = variable.change(_ => Some(p)) def reset(): Unit = variable.change(_ => None) def await_reset(): Unit = variable.guarded_access({ case None => Some((), None) case _ => None }) } /* file formats and protocol handlers */ private lazy val file_formats: File_Format.Session = File_Format.registry.start_session(session) private val protocol_handlers = Protocol_Handlers.init(session) def get_protocol_handler[C <: Session.Protocol_Handler](c: Class[C]): Option[C] = protocol_handlers.get(c.getName) match { case Some(h) if Library.is_subclass(h.getClass, c) => Some(h.asInstanceOf[C]) case _ => None } def init_protocol_handler(handler: Session.Protocol_Handler): Unit = protocol_handlers.init(handler) def prover_options(options: Options): Options = protocol_handlers.prover_options(file_formats.prover_options(options)) /* debugger */ private val debugger_handler = new Debugger.Handler(this) init_protocol_handler(debugger_handler) def debugger: Debugger = debugger_handler.debugger /* manager thread */ - private val delay_prune = + private lazy val delay_prune = Delay.first(prune_delay) { manager.send(Prune_History) } private val manager: Consumer_Thread[Any] = { /* global state */ val global_state = Synchronized(Document.State.init) /* raw edits */ def handle_raw_edits( doc_blobs: Document.Blobs = Document.Blobs.empty, edits: List[Document.Edit_Text] = Nil, consolidate: List[Document.Node.Name] = Nil): Unit = { //{{{ require(prover.defined, "prover process not defined (handle_raw_edits)") if (edits.nonEmpty) prover.get.discontinue_execution() val previous = global_state.value.history.tip.version val version = Future.promise[Document.Version] global_state.change(_.continue_history(previous, edits, version)) raw_edits.post(Session.Raw_Edits(doc_blobs, edits)) change_parser.send(Text_Edits(previous, doc_blobs, edits, consolidate, version)) //}}} } /* resulting changes */ def handle_change(change: Session.Change): Unit = { //{{{ require(prover.defined, "prover process not defined (handle_change)") // define commands { val id_commands = new mutable.ListBuffer[Command] def id_command(command: Command): Unit = { for { (name, digest) <- command.blobs_defined if !global_state.value.defined_blob(digest) } { change.version.nodes(name).get_blob match { case Some(blob) => global_state.change(_.define_blob(digest)) prover.get.define_blob(digest, blob.bytes) case None => Output.error_message("Missing blob " + quote(name.toString)) } } if (!global_state.value.defined_command(command.id)) { global_state.change(_.define_command(command)) id_commands += command } } for { (_, edit) <- change.doc_edits } { edit.foreach({ case (c1, c2) => c1.foreach(id_command); c2.foreach(id_command) }) } if (id_commands.nonEmpty) { prover.get.define_commands_bulk(resources, id_commands.toList) } } val assignment = global_state.value.the_assignment(change.previous).check_finished global_state.change(_.define_version(change.version, assignment)) prover.get.update(change.previous.id, change.version.id, change.doc_edits, change.consolidate) resources.commit(change) //}}} } /* prover output */ def handle_output(output: Prover.Output): Unit = { //{{{ def bad_output(): Unit = { if (verbose) 
Output.warning("Ignoring bad prover output: " + output.message.toString) } def change_command(f: Document.State => (Command.State, Document.State)): Unit = { try { val st = global_state.change_result(f) if (!st.command.span.is_theory) { change_buffer.invoke(false, Nil, List(st.command)) } } catch { case _: Document.State.Fail => bad_output() } } output match { case msg: Prover.Protocol_Output => val handled = protocol_handlers.invoke(msg) if (!handled) { msg.properties match { case Protocol.Command_Timing(props, state_id, timing) if prover.defined => command_timings.post(Session.Command_Timing(props)) val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil))) change_command(_.accumulate(state_id, cache.elem(message), cache)) case Markup.Theory_Timing(props) => theory_timings.post(Session.Theory_Timing(props)) case Markup.Task_Statistics(props) => task_statistics.post(Session.Task_Statistics(props)) case Protocol.Export(args) if args.id.isDefined && Value.Long.unapply(args.id.get).isDefined => val id = Value.Long.unapply(args.id.get).get val entry = Export.make_entry(Sessions.DRAFT, args, msg.chunk, cache) change_command(_.add_export(id, (args.serial, entry))) case Protocol.Loading_Theory(node_name, id) => val blobs_info = build_blobs_info(node_name) try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) } catch { case _: Document.State.Fail => bad_output() } case List(Markup.Commands_Accepted.THIS) => msg.text match { case Protocol.Commands_Accepted(ids) => ids.foreach(id => change_command(_.accumulate(id, Protocol.Commands_Accepted.message, cache))) case _ => bad_output() } case List(Markup.Assign_Update.THIS) => msg.text match { case Protocol.Assign_Update(id, edited, update) => try { val (edited_nodes, cmds) = global_state.change_result(_.assign(id, edited, update)) change_buffer.invoke(true, edited_nodes, cmds) manager.send(Session.Change_Flush) } catch { case _: Document.State.Fail => bad_output() } case _ => bad_output() } delay_prune.invoke() case List(Markup.Removed_Versions.THIS) => msg.text match { case Protocol.Removed(removed) => try { global_state.change(_.removed_versions(removed)) manager.send(Session.Change_Flush) } catch { case _: Document.State.Fail => bad_output() } case _ => bad_output() } case _ => bad_output() } } case _ => output.properties match { case Position.Id(state_id) => change_command(_.accumulate(state_id, output.message, cache)) case _ if output.is_init => val init_ok = try { Isabelle_System.make_services(classOf[Session.Protocol_Handler]) .foreach(init_protocol_handler) true } catch { case exn: Throwable => prover.get.protocol_command("Prover.stop", "1", Exn.message(exn)) false } if (init_ok) { prover.get.options(prover_options(session_options)) prover.get.init_session(resources) phase = Session.Ready debugger.ready() } case Markup.Process_Result(result) if output.is_exit => if (prover.defined) protocol_handlers.exit() for (id <- global_state.value.theories.keys) { val snapshot = global_state.change_result(_.end_theory(id)) finished_theories.post(snapshot) } file_formats.stop_session phase = Session.Terminated(result) prover.reset() case _ => raw_output_messages.post(output) } } //}}} } /* main thread */ Consumer_Thread.fork[Any]("Session.manager", daemon = true) { case arg: Any => //{{{ arg match { case output: Prover.Output => if (output.is_syslog) { syslog += output.message syslog_messages.post(output) } if (output.is_stdout || output.is_stderr) raw_output_messages.post(output) else handle_output(output) 
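/* independently of the specific dispatch above, every prover message is
   also posted to all_messages for attached session consumers */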
all_messages.post(output) case input: Prover.Input => all_messages.post(input) case Start(start_prover) if !prover.defined => prover.set(start_prover(manager.send(_))) case Stop => consolidation.exit() delay_prune.revoke() if (prover.defined) { global_state.change(_ => Document.State.init) prover.get.terminate() } case Get_State(promise) => promise.fulfill(global_state.value) case Consolidate_Execution => if (prover.defined) { val state = global_state.value state.stable_tip_version match { case None => consolidation.update() case Some(version) => val consolidate = consolidation.flush().iterator.filter(name => !resources.session_base.loaded_theory(name) && !state.node_consolidated(version, name) && state.node_maybe_consolidated(version, name)).toList if (consolidate.nonEmpty) handle_raw_edits(consolidate = consolidate) } } case Prune_History => if (prover.defined) { val old_versions = global_state.change_result(_.remove_versions(prune_size)) if (old_versions.nonEmpty) prover.get.remove_versions(old_versions) } case Update_Options(options) => if (prover.defined && is_ready) { prover.get.options(prover_options(options)) handle_raw_edits() } global_options.post(Session.Global_Options(options)) case Cancel_Exec(exec_id) if prover.defined => prover.get.cancel_exec(exec_id) case Session.Raw_Edits(doc_blobs, edits) if prover.defined => handle_raw_edits(doc_blobs = doc_blobs, edits = edits) case Session.Dialog_Result(id, serial, result) if prover.defined => prover.get.dialog_result(serial, result) handle_output(new Prover.Output(Protocol.Dialog_Result(id, serial, result))) case Protocol_Command_Raw(name, args) if prover.defined => prover.get.protocol_command_raw(name, args) case Protocol_Command_Args(name, args) if prover.defined => prover.get.protocol_command_args(name, args) case change: Session.Change if prover.defined => val state = global_state.value if (!state.removing_versions && state.is_assigned(change.previous)) handle_change(change) else postponed_changes.store(change) case Session.Change_Flush if prover.defined => val state = global_state.value if (!state.removing_versions) postponed_changes.flush(state).foreach(handle_change) case bad => if (verbose) Output.warning("Ignoring bad message: " + bad.toString) } true //}}} } } /* main operations */ def get_state(): Document.State = { if (manager.is_active()) { val promise = Future.promise[Document.State] manager.send_wait(Get_State(promise)) promise.join } else Document.State.init } def snapshot(name: Document.Node.Name = Document.Node.Name.empty, pending_edits: List[Text.Edit] = Nil): Document.Snapshot = get_state().snapshot(name, pending_edits) def recent_syntax(name: Document.Node.Name): Outer_Syntax = get_state().recent_finished.version.get_finished.nodes(name).syntax getOrElse resources.session_base.overall_syntax @tailrec final def await_stable_snapshot(): Document.Snapshot = { val snapshot = this.snapshot() if (snapshot.is_outdated) { output_delay.sleep() await_stable_snapshot() } else snapshot } def start(start_prover: Prover.Receiver => Prover): Unit = { file_formats _phase.change( { case Session.Inactive => manager.send(Start(start_prover)) post_phase(Session.Startup) case phase => error("Cannot start prover in phase " + quote(phase.print)) }) } def stop(): Process_Result = { val was_ready = _phase.guarded_access( { case Session.Startup | Session.Shutdown => None case Session.Terminated(_) => Some((false, phase)) case Session.Inactive => Some((false, post_phase(Session.Terminated(Process_Result(0))))) case Session.Ready => 
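/* only a session in phase Ready initiates an orderly shutdown; Startup and
   Shutdown wait for the phase to settle, while Inactive and Terminated
   return immediately, so no Stop message is sent */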
Some((true, post_phase(Session.Shutdown))) }) if (was_ready) manager.send(Stop) prover.await_reset() change_parser.shutdown() change_buffer.shutdown() manager.shutdown() dispatcher.shutdown() phase match { case Session.Terminated(result) => result case phase => error("Bad session phase after shutdown: " + quote(phase.print)) } } def protocol_command_raw(name: String, args: List[Bytes]): Unit = manager.send(Protocol_Command_Raw(name, args)) def protocol_command_args(name: String, args: List[String]): Unit = manager.send(Protocol_Command_Args(name, args)) def protocol_command(name: String, args: String*): Unit = protocol_command_args(name, args.toList) def cancel_exec(exec_id: Document_ID.Exec): Unit = manager.send(Cancel_Exec(exec_id)) def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]): Unit = if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits)) def update_options(options: Options): Unit = manager.send_wait(Update_Options(options)) def dialog_result(id: Document_ID.Generic, serial: Long, result: String): Unit = manager.send(Session.Dialog_Result(id, serial, result)) } diff --git a/src/Pure/Thy/document_output.ML b/src/Pure/Thy/document_output.ML --- a/src/Pure/Thy/document_output.ML +++ b/src/Pure/Thy/document_output.ML @@ -1,593 +1,589 @@ (* Title: Pure/Thy/document_output.ML Author: Makarius Theory document output. *) signature DOCUMENT_OUTPUT = sig val document_reports: Input.source -> Position.report list val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text val document_output: {markdown: bool, markup: Latex.text -> Latex.text} -> (xstring * Position.T) option * Input.source -> Toplevel.transition -> Toplevel.transition val check_comments: Proof.context -> Symbol_Pos.T list -> unit val output_token: Proof.context -> Token.T -> Latex.text val output_source: Proof.context -> string -> Latex.text type segment = {span: Command_Span.span, command: Toplevel.transition, prev_state: Toplevel.state, state: Toplevel.state} - val present_thy: Options.T -> theory -> segment list -> Latex.text + val present_thy: Options.T -> Keyword.keywords -> segment list -> Latex.text val pretty_term: Proof.context -> term -> Pretty.T val pretty_thm: Proof.context -> thm -> Pretty.T val isabelle: Proof.context -> Latex.text -> Latex.text val isabelle_typewriter: Proof.context -> Latex.text -> Latex.text val typewriter: Proof.context -> string -> Latex.text val verbatim: Proof.context -> string -> Latex.text val source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text val pretty: Proof.context -> Pretty.T -> Latex.text val pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text val pretty_items: Proof.context -> Pretty.T list -> Latex.text val pretty_items_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T list -> Latex.text val antiquotation_pretty: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_pretty_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_pretty_source: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_pretty_source_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_raw: binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory val antiquotation_raw_embedded: binding -> 'a context_parser -> (Proof.context 
-> 'a -> Latex.text) -> theory -> theory val antiquotation_verbatim: binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory val antiquotation_verbatim_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory end; structure Document_Output: DOCUMENT_OUTPUT = struct (* output document source *) fun document_reports txt = let val pos = Input.pos_of txt in [(pos, Markup.language_document (Input.is_delimited txt)), (pos, Markup.plain_text)] end; fun output_comment ctxt (kind, syms) = (case kind of Comment.Comment => Input.cartouche_content syms |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false) {markdown = false} |> XML.enclose "%\n\\isamarkupcmt{" "%\n}" | Comment.Cancel => Symbol_Pos.cartouche_content syms |> Latex.symbols_output |> XML.enclose "%\n\\isamarkupcancel{" "}" | Comment.Latex => Latex.symbols (Symbol_Pos.cartouche_content syms) | Comment.Marker => []) and output_comment_document ctxt (comment, syms) = (case comment of SOME kind => output_comment ctxt (kind, syms) | NONE => Latex.symbols syms) and output_document_text ctxt syms = Comment.read_body syms |> maps (output_comment_document ctxt) and output_document ctxt {markdown} source = let val pos = Input.pos_of source; val syms = Input.source_explode source; val output_antiquotes = maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt); fun output_line line = (if Markdown.line_is_item line then Latex.string "\\item " else []) @ output_antiquotes (Markdown.line_content line); fun output_block (Markdown.Par lines) = separate (XML.Text "\n") (map (Latex.block o output_line) lines) | output_block (Markdown.List {kind, body, ...}) = Latex.environment (Markdown.print_kind kind) (output_blocks body) and output_blocks blocks = separate (XML.Text "\n\n") (map (Latex.block o output_block) blocks); in if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then [] else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms then let val ants = Antiquote.parse_comments pos syms; val reports = Antiquote.antiq_reports ants; val blocks = Markdown.read_antiquotes ants; val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks); in output_blocks blocks end else let val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms); val reports = Antiquote.antiq_reports ants; val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants); in output_antiquotes ants end end; fun document_output {markdown, markup} (loc, txt) = let fun output st = let val ctxt = Toplevel.presentation_context st; val _ = Context_Position.reports ctxt (document_reports txt); in txt |> output_document ctxt {markdown = markdown} |> markup end; in Toplevel.present (fn st => (case loc of NONE => output st | SOME (_, pos) => error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o Toplevel.present_local_theory loc output end; (* output tokens with formal comments *) local val output_symbols_antiq = (fn Antiquote.Text syms => Latex.symbols_output syms | Antiquote.Control {name = (name, _), body, ...} => Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) @ Latex.symbols_output body | Antiquote.Antiq {body, ...} => XML.enclose "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (Latex.symbols_output body)); fun output_comment_symbols ctxt {antiq} (comment, syms) = (case (comment, antiq) of (NONE, false) => Latex.symbols_output syms | (NONE, 
true) => Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms |> maps output_symbols_antiq | (SOME comment, _) => output_comment ctxt (comment, syms)); fun output_body ctxt antiq bg en syms = Comment.read_body syms |> maps (output_comment_symbols ctxt {antiq = antiq}) |> XML.enclose bg en; in fun output_token ctxt tok = let fun output antiq bg en = output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok)); in (case Token.kind_of tok of Token.Comment NONE => [] | Token.Comment (SOME Comment.Marker) => [] | Token.Command => output false "\\isacommand{" "}" | Token.Keyword => if Symbol.is_ascii_identifier (Token.content_of tok) then output false "\\isakeyword{" "}" else output false "" "" | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}" | Token.Control control => output_body ctxt false "" "" (Antiquote.control_symbols control) | _ => output false "" "") end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok)); fun output_source ctxt s = output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none)); fun check_comments ctxt = Comment.read_body #> List.app (fn (comment, syms) => let val pos = #1 (Symbol_Pos.range syms); val _ = comment |> Option.app (fn kind => Context_Position.reports ctxt (map (pair pos) (Comment.kind_markups kind))); val _ = output_comment_symbols ctxt {antiq = false} (comment, syms); in if comment = SOME Comment.Comment then check_comments ctxt syms else () end); end; (** present theory source **) (* presentation tokens *) datatype token = Ignore | Token of Token.T | Output of Latex.text; fun token_with pred (Token tok) = pred tok | token_with _ _ = false; val white_token = token_with Document_Source.is_white; val white_comment_token = token_with Document_Source.is_white_comment; val blank_token = token_with Token.is_blank; val newline_token = token_with Token.is_newline; fun present_token ctxt tok = (case tok of Ignore => [] | Token tok => output_token ctxt tok | Output output => output); (* command spans *) type command = string * Position.T; (*name, position*) type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*) datatype span = Span of command * (source * source * source * source) * bool; fun make_span cmd src = let fun chop_newline (tok :: toks) = if newline_token (fst tok) then ([tok], toks, true) else ([], tok :: toks, false) | chop_newline [] = ([], [], false); val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = src |> chop_prefix (white_token o fst) ||>> chop_suffix (white_token o fst) ||>> chop_prefix (white_comment_token o fst) ||> chop_newline; in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; (* present spans *) local fun err_bad_nesting here = error ("Bad nesting of commands in presentation" ^ here); fun edge which f (x: string option, y) = if x = y then I else (case which (x, y) of NONE => I | SOME txt => fold cons (Latex.string (f txt))); val markup_tag = YXML.output_markup o Markup.latex_tag; val markup_delim = YXML.output_markup o Markup.latex_delim; val bg_delim = #1 o markup_delim; val en_delim = #2 o markup_delim; val begin_tag = edge #2 (#1 o markup_tag); val end_tag = edge #1 (#2 o markup_tag); fun open_delim delim e = edge #2 bg_delim e #> delim #> edge #2 en_delim e; fun close_delim delim e = 
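(* dual of open_delim: emits the delimiter markup for the previous tag (side #1
   of the edge) and, like begin_tag/end_tag, only when the active tag changes *)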
edge #1 bg_delim e #> delim #> edge #1 en_delim e; fun document_tag cmd_pos state state' tagging_stack = let val ctxt' = Toplevel.presentation_context state'; val nesting = Toplevel.level state' - Toplevel.level state; val (tagging, taggings) = tagging_stack; val (tag', tagging') = Document_Source.update_tagging ctxt' tagging; val tagging_stack' = if nesting = 0 andalso not (Toplevel.is_proof state) then tagging_stack else if nesting >= 0 then (tagging', replicate nesting tagging @ taggings) else (case drop (~ nesting - 1) taggings of tg :: tgs => (tg, tgs) | [] => err_bad_nesting (Position.here cmd_pos)); in (tag', tagging_stack') end; fun read_tag s = (case space_explode "%" s of ["", b] => (SOME b, NONE) | [a, b] => (NONE, SOME (a, b)) | _ => error ("Bad document_tags specification: " ^ quote s)); in fun make_command_tag options keywords = let val document_tags = map read_tag (space_explode "," (Options.string options \<^system_option>\document_tags\)); val document_tags_default = map_filter #1 document_tags; val document_tags_command = map_filter #2 document_tags; in - fn cmd_name => fn state => fn state' => fn active_tag => + fn name => fn st => fn st' => fn active_tag => let val keyword_tags = - if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"] - else Keyword.command_tags keywords cmd_name; + if Keyword.is_theory_end keywords name andalso Toplevel.is_end_theory st' then ["theory"] + else Keyword.command_tags keywords name; val command_tags = - the_list (AList.lookup (op =) document_tags_command cmd_name) @ + the_list (AList.lookup (op =) document_tags_command name) @ keyword_tags @ document_tags_default; val active_tag' = (case command_tags of default_tag :: _ => SOME default_tag | [] => - if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state - then active_tag - else NONE); + if Keyword.is_vacuous keywords name andalso Toplevel.is_proof st + then active_tag else NONE); in active_tag' end end; fun present_span command_tag span state state' (tagging_stack, active_tag, newline, latex, present_cont) = let val ctxt' = Toplevel.presentation_context state'; val present = fold (fn (tok, (flag, 0)) => fold cons (present_token ctxt' tok) #> fold cons (Latex.string flag) | _ => I); val Span ((cmd_name, cmd_pos), srcs, span_newline) = span; val (tag', tagging_stack') = document_tag cmd_pos state state' tagging_stack; val active_tag' = if is_some tag' then Option.map #1 tag' else command_tag cmd_name state state' active_tag; val edge = (active_tag, active_tag'); val newline' = if is_none active_tag' then span_newline else newline; val latex' = latex |> end_tag edge |> close_delim (fst present_cont) edge |> snd present_cont |> open_delim (present (#1 srcs)) edge |> begin_tag edge |> present (#2 srcs); val present_cont' = if newline then (present (#3 srcs), present (#4 srcs)) else (I, present (#3 srcs) #> present (#4 srcs)); in (tagging_stack', active_tag', newline', latex', present_cont') end; fun present_trailer ((_, tags), active_tag, _, latex, present_cont) = if not (null tags) then err_bad_nesting " at end of theory" else latex |> end_tag (active_tag, NONE) |> close_delim (fst present_cont) (active_tag, NONE) |> snd present_cont; end; (* present_thy *) type segment = {span: Command_Span.span, command: Toplevel.transition, prev_state: Toplevel.state, state: Toplevel.state}; local val markup_true = "\\isamarkuptrue%\n"; val markup_false = "\\isamarkupfalse%\n"; fun command_output output tok = if Token.is_command tok then SOME (Token.put_output output 
tok) else NONE; fun segment_content (segment: segment) = let val toks = Command_Span.content (#span segment) in (case Toplevel.output_of (#state segment) of NONE => toks | SOME output => map_filter (command_output output) toks) end; fun output_command keywords = Scan.some (fn tok => if Token.is_command tok then let val name = Token.content_of tok; val is_document = Keyword.is_document keywords name; val is_document_raw = Keyword.is_document_raw keywords name; val flag = if is_document andalso not is_document_raw then markup_true else ""; in if is_document andalso is_some (Token.get_output tok) then SOME ((name, Token.pos_of tok), the (Token.get_output tok), flag) else NONE end else NONE); val opt_newline = Scan.option (Scan.one Token.is_newline); val ignore = Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore >> pair (d + 1)) || Scan.depend (fn d => Scan.one Token.is_end_ignore --| (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) >> pair (d - 1)); in -fun present_thy options thy (segments: segment list) = +fun present_thy options keywords (segments: segment list) = let - val keywords = Thy_Header.get_keywords thy; - - (* tokens *) val ignored = Scan.state --| ignore >> (fn d => [(NONE, (Ignore, ("", d)))]); val output = Scan.peek (fn d => Document_Source.improper |-- output_command keywords --| Document_Source.improper_end >> (fn (kind, body, flag) => [(SOME kind, (Output body, (flag, d)))])); val command = Scan.peek (fn d => Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] -- Scan.one Token.is_command --| Document_Source.annotation >> (fn (cmd_mod, cmd) => map (fn tok => (NONE, (Token tok, ("", d)))) cmd_mod @ [(SOME (Token.content_of cmd, Token.pos_of cmd), (Token cmd, (markup_false, d)))])); val cmt = Scan.peek (fn d => Scan.one Document_Source.is_black_comment >> (fn tok => [(NONE, (Token tok, ("", d)))])); val other = Scan.peek (fn d => Parse.not_eof >> (fn tok => [(NONE, (Token tok, ("", d)))])); val tokens = ignored || output || command || cmt || other; (* spans *) val is_eof = fn (_, (Token x, _)) => Token.is_eof x | _ => false; val stopper = Scan.stopper (K (NONE, (Token Token.eof, ("", 0)))) is_eof; val cmd = Scan.one (is_some o fst); val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; val white_comments = Scan.many (white_comment_token o fst o snd); val blank = Scan.one (blank_token o fst o snd); val newline = Scan.one (newline_token o fst o snd); val before_cmd = Scan.option (newline -- white_comments) -- Scan.option (newline -- white_comments) -- Scan.option (blank -- white_comments) -- cmd; val span = Scan.repeat non_cmd -- cmd -- Scan.repeat (Scan.unless before_cmd non_cmd) -- Scan.option (newline >> (single o snd)) >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); val spans = segments |> maps segment_content |> drop_suffix Token.is_space |> Source.of_list |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat)) |> Source.source stopper (Scan.error (Scan.bulk span)) |> Source.exhaust; val command_results = segments |> map_filter (fn {command, state, ...} => if Toplevel.is_ignored command then NONE else SOME (command, state)); (* present commands *) val command_tag = make_command_tag options keywords; fun present_command tr span st st' = Toplevel.setmp_thread_position tr (present_span command_tag span st st'); fun present _ [] = I | present st ((span, (tr, st')) :: rest) = 
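(* fold over the spans paired with their command transitions and result states,
   threading each result state into the presentation of the next span *)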
present_command tr span st st' #> present st' rest; in if length command_results = length spans then (([], []), NONE, true, [], (I, I)) - |> present (Toplevel.init_toplevel ()) (spans ~~ command_results) + |> present (Toplevel.make_state NONE) (spans ~~ command_results) |> present_trailer |> rev else error "Messed-up outer syntax for presentation" end; end; (** standard output operations **) (* pretty printing *) fun pretty_term ctxt t = Syntax.pretty_term (Proof_Context.augment t ctxt) t; fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; (* default output *) fun isabelle ctxt body = if Config.get ctxt Document_Antiquotation.thy_output_display then Latex.environment "isabelle" body else Latex.macro "isa" body; fun isabelle_typewriter ctxt body = if Config.get ctxt Document_Antiquotation.thy_output_display then Latex.environment "isabellett" body else Latex.macro "isatt" body; fun typewriter ctxt s = isabelle_typewriter ctxt (Latex.string (Latex.output_ascii s)); fun verbatim ctxt = if Config.get ctxt Document_Antiquotation.thy_output_display then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt else split_lines #> map (typewriter ctxt #> Latex.block) #> separate (XML.Text "\\isanewline%\n"); fun token_source ctxt {embedded} tok = if Token.is_kind Token.Cartouche tok andalso embedded andalso not (Config.get ctxt Document_Antiquotation.thy_output_source_cartouche) then Token.content_of tok else Token.unparse tok; fun is_source ctxt = Config.get ctxt Document_Antiquotation.thy_output_source orelse Config.get ctxt Document_Antiquotation.thy_output_source_cartouche; fun source ctxt embedded = Token.args_of_src #> map (token_source ctxt embedded #> Document_Antiquotation.prepare_lines ctxt) #> space_implode " " #> output_source ctxt #> isabelle ctxt; fun pretty ctxt = Document_Antiquotation.output ctxt #> Latex.string #> isabelle ctxt; fun pretty_source ctxt embedded src prt = if is_source ctxt then source ctxt embedded src else pretty ctxt prt; fun pretty_items ctxt = map (Document_Antiquotation.output ctxt #> XML.Text) #> separate (XML.Text "\\isasep\\isanewline%\n") #> isabelle ctxt; fun pretty_items_source ctxt embedded src prts = if is_source ctxt then source ctxt embedded src else pretty_items ctxt prts; (* antiquotation variants *) local fun gen_setup name embedded = if embedded then Document_Antiquotation.setup_embedded name else Document_Antiquotation.setup name; fun gen_antiquotation_pretty name embedded scan f = gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x)); fun gen_antiquotation_pretty_source name embedded scan f = gen_setup name embedded scan (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt {embedded = embedded} src (f ctxt x)); fun gen_antiquotation_raw name embedded scan f = gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => f ctxt x); fun gen_antiquotation_verbatim name embedded scan f = gen_antiquotation_raw name embedded scan (fn ctxt => verbatim ctxt o f ctxt); in fun antiquotation_pretty name = gen_antiquotation_pretty name false; fun antiquotation_pretty_embedded name = gen_antiquotation_pretty name true; fun antiquotation_pretty_source name = gen_antiquotation_pretty_source name false; fun antiquotation_pretty_source_embedded name = gen_antiquotation_pretty_source name true; fun antiquotation_raw name = gen_antiquotation_raw name false; fun antiquotation_raw_embedded name = gen_antiquotation_raw name true; fun antiquotation_verbatim name = 
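(* hypothetical usage sketch, not taken from the sources: a document
   antiquotation built from the combinators above, assuming the standard
   Args.term parser and a made-up binding "my_term" --

     val _ =
       Theory.setup
         (Document_Output.antiquotation_pretty (Binding.name "my_term")
           Args.term Document_Output.pretty_term);
*)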
gen_antiquotation_verbatim name false; fun antiquotation_verbatim_embedded name = gen_antiquotation_verbatim name true; end; end; diff --git a/src/Pure/Thy/thy_info.ML b/src/Pure/Thy/thy_info.ML --- a/src/Pure/Thy/thy_info.ML +++ b/src/Pure/Thy/thy_info.ML @@ -1,460 +1,463 @@ (* Title: Pure/Thy/thy_info.ML Author: Makarius Global theory info database, with auto-loading according to theory and file dependencies, and presentation of theory documents. *) signature THY_INFO = sig type presentation_context = {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T, segments: Document_Output.segment list} val adjust_pos_properties: presentation_context -> Position.T -> Properties.T val apply_presentation: presentation_context -> theory -> unit val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory val get_names: unit -> string list val lookup_theory: string -> theory option + val defined_theory: string -> bool val get_theory: string -> theory val master_directory: string -> Path.T val remove_thy: string -> unit val use_theories: Options.T -> string -> (string * Position.T) list -> (theory * Document_Output.segment list) list val use_thy: string -> unit val script_thy: Position.T -> string -> theory -> theory val register_thy: theory -> unit val finish: unit -> unit end; structure Thy_Info: THY_INFO = struct (** theory presentation **) (* hook for consolidated theory *) type presentation_context = {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T, segments: Document_Output.segment list}; fun adjust_pos_properties (context: presentation_context) pos = Position.offset_properties_of (#adjust_pos context pos) @ filter (fn (a, _) => a = Markup.idN orelse a = Markup.fileN) (Position.get_props pos); structure Presentation = Theory_Data ( type T = ((presentation_context -> theory -> unit) * stamp) list; val empty = []; fun merge data : T = Library.merge (eq_snd op =) data; ); fun apply_presentation (context: presentation_context) thy = ignore (Par_List.map (fn (f, _) => f context thy) (Presentation.get thy)); fun add_presentation f = Presentation.map (cons (f, stamp ())); val _ = Theory.setup (add_presentation (fn {options, segments, ...} => fn thy => if exists (Toplevel.is_skipped_proof o #state) segments then () else let - val body = Document_Output.present_thy options thy segments; + val body = Document_Output.present_thy options (Thy_Header.get_keywords thy) segments; in if Options.string options "document" = "false" then () else let val thy_name = Context.theory_name thy; val latex = Latex.isabelle_body thy_name body; in Export.export thy \<^path_binding>\document/latex\ latex end end)); (** thy database **) (* messages *) val show_path = space_implode " via " o map quote; fun cycle_msg names = "Cyclic dependency of " ^ show_path names; (* derived graph operations *) fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess)); fun new_entry name parents entry = String_Graph.new_node (name, entry) #> add_deps name parents; (* global thys *) type deps = {master: (Path.T * SHA1.digest), (*master dependencies for thy file*) imports: (string * Position.T) list}; (*source specification of imports (partially qualified)*) fun make_deps master imports : deps = {master = master, imports = imports}; fun master_dir_deps (d: deps option) = the_default Path.current (Option.map (Path.dir o #1 o #master) d); local val global_thys = 
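(* the global theory graph: each node records the optional master/import
   dependencies together with the loaded theory value, once it is finished *)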
Synchronized.var "Thy_Info.thys" (String_Graph.empty: (deps option * theory option) String_Graph.T); in fun get_thys () = Synchronized.value global_thys; fun change_thys f = Synchronized.change global_thys f; end; fun get_names () = String_Graph.topological_order (get_thys ()); (* access thy *) fun lookup thys name = try (String_Graph.get_node thys) name; fun lookup_thy name = lookup (get_thys ()) name; fun get thys name = (case lookup thys name of SOME thy => thy | NONE => error ("Theory loader: nothing known about theory " ^ quote name)); fun get_thy name = get (get_thys ()) name; (* access deps *) val lookup_deps = Option.map #1 o lookup_thy; val master_directory = master_dir_deps o #1 o get_thy; (* access theory *) fun lookup_theory name = (case lookup_thy name of SOME (_, SOME theory) => SOME theory | _ => NONE); +val defined_theory = is_some o lookup_theory; + fun get_theory name = (case lookup_theory name of SOME theory => theory | _ => error ("Theory loader: undefined entry for theory " ^ quote name)); val get_imports = Resources.imports_of o get_theory; (** thy operations **) (* remove *) fun remove name thys = (case lookup thys name of NONE => thys | SOME (NONE, _) => error ("Cannot update finished theory " ^ quote name) | SOME _ => let val succs = String_Graph.all_succs thys [name]; val _ = writeln ("Theory loader: removing " ^ commas_quote succs); in fold String_Graph.del_node succs thys end); val remove_thy = change_thys o remove; (* update *) fun update deps theory thys = let val name = Context.theory_long_name theory; val parents = map Context.theory_long_name (Theory.parents_of theory); val thys' = remove name thys; val _ = map (get thys') parents; in new_entry name parents (SOME deps, SOME theory) thys' end; fun update_thy deps theory = change_thys (update deps theory); (* scheduling loader tasks *) datatype result = Result of {theory: theory, exec_id: Document_ID.exec, present: unit -> presentation_context option, commit: unit -> unit}; fun theory_result theory = Result {theory = theory, exec_id = Document_ID.none, present = K NONE, commit = I}; fun result_theory (Result {theory, ...}) = theory; fun result_commit (Result {commit, ...}) = commit; datatype task = Task of string list * (theory list -> result) | Finished of theory; local fun consolidate_theory (Exn.Exn exn) = [Exn.Exn exn] | consolidate_theory (Exn.Res (Result {theory, exec_id, ...})) = let val _ = Execution.join [exec_id]; val res = Exn.capture Thm.consolidate_theory theory; val exns = maps Task_Queue.group_status (Execution.peek exec_id); in res :: map Exn.Exn exns end; fun present_theory (Exn.Exn exn) = [Exn.Exn exn] | present_theory (Exn.Res (Result {theory, present, ...})) = (case present () of NONE => [] | SOME (context as {segments, ...}) => [Exn.capture (fn () => (apply_presentation context theory; (theory, segments))) ()]); in val schedule_theories = Thread_Attributes.uninterruptible (fn _ => fn tasks => let fun join_parents deps name parents = (case map #1 (filter (not o can Future.join o #2) deps) of [] => map (result_theory o Future.join o the o AList.lookup (op =) deps) parents | bad => error ("Failed to load theory " ^ quote name ^ " (unresolved " ^ commas_quote bad ^ ")")); val futures = tasks |> String_Graph.schedule (fn deps => fn (name, task) => (case task of Task (parents, body) => if Multithreading.max_threads () > 1 then (singleton o Future.forks) {name = "theory:" ^ name, group = NONE, deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true} (fn () => body (join_parents deps 
name parents)) else Future.value_result (Exn.capture (fn () => body (join_parents deps name parents)) ()) | Finished theory => Future.value (theory_result theory))); val results1 = futures |> maps (consolidate_theory o Future.join_result); val present_results = futures |> maps (present_theory o Future.join_result); val results2 = (map o Exn.map_res) (K ()) present_results; val results3 = futures |> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ()); val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ())); val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4); in Par_Exn.release_all present_results end); end; (* eval theory *) fun eval_thy options master_dir header text_pos text parents = let val (name, _) = #name header; val keywords = fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents (Keyword.add_keywords (#keywords header) Keyword.empty_keywords); val spans = Outer_Syntax.parse_spans (Token.explode keywords text_pos text); val elements = Thy_Element.parse_elements keywords spans; val text_id = Position.copy_id text_pos Position.none; fun init () = Resources.begin_theory master_dir header parents; fun excursion () = let fun element_result span_elem (st, _) = let fun prepare span = let val tr = Position.setmp_thread_data text_id (fn () => Command.read_span keywords st master_dir init span) (); in Toplevel.timing (Resources.last_timing tr) tr end; val elem = Thy_Element.map_element prepare span_elem; val (results, st') = Toplevel.element_result keywords elem st; val pos' = Toplevel.pos_of (Thy_Element.last_element elem); in (results, (st', pos')) end; val (results, (end_state, end_pos)) = - fold_map element_result elements (Toplevel.init_toplevel (), Position.none); + fold_map element_result elements (Toplevel.make_state NONE, Position.none); val thy = Toplevel.end_theory end_pos end_state; in (results, thy) end; val (results, thy) = cond_timeit true ("theory " ^ quote name) excursion; fun present () : presentation_context = let val segments = (spans ~~ maps Toplevel.join_results results) |> map (fn (span, (st, tr, st')) => {span = span, prev_state = st, command = tr, state = st'}); in {options = options, file_pos = text_pos, adjust_pos = I, segments = segments} end; in (thy, present) end; (* require_thy -- checking database entries wrt. 
the file-system *) local fun required_by _ [] = "" | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")"; fun load_thy options initiators deps text (name, header_pos) keywords parents = let val {master = (thy_path, _), imports} = deps; val dir = Path.dir thy_path; val exec_id = Document_ID.make (); val id = Document_ID.print exec_id; val put_id = Position.put_id id; val _ = Execution.running Document_ID.none exec_id [] orelse raise Fail ("Failed to register execution: " ^ id); val text_pos = put_id (Path.position thy_path); val text_props = Position.properties_of text_pos; val _ = remove_thy name; val _ = writeln ("Loading theory " ^ quote name ^ required_by " " initiators); val _ = Output.try_protocol_message (Markup.loading_theory name @ text_props) [XML.blob [text]]; val _ = Position.setmp_thread_data (Position.id_only id) (fn () => (parents, map #2 imports) |> ListPair.app (fn (thy, pos) => Context_Position.reports_global thy [(put_id pos, Theory.get_markup thy)])) (); val timing_start = Timing.start (); val header = Thy_Header.make (name, put_id header_pos) imports keywords; val (theory, present) = eval_thy options dir header text_pos text (if name = Context.PureN then [Context.the_global_context ()] else parents); val timing_result = Timing.result timing_start; val timing_props = [Markup.theory_timing, (Markup.nameN, name)]; val _ = Output.try_protocol_message (timing_props @ Markup.timing_properties timing_result) [] fun commit () = update_thy deps theory; in Result {theory = theory, exec_id = exec_id, present = SOME o present, commit = commit} end; fun check_thy_deps dir name = (case lookup_deps name of SOME NONE => (true, NONE, Position.none, get_imports name, []) | NONE => let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end | SOME (SOME {master, ...}) => let val {master = master', text = text', theory_pos = theory_pos', imports = imports', keywords = keywords'} = Resources.check_thy dir name; val deps' = SOME (make_deps master' imports', text'); val current = #2 master = #2 master' andalso (case lookup_theory name of NONE => false | SOME theory => Resources.loaded_files_current theory); in (current, deps', theory_pos', imports', keywords') end); fun task_finished (Task _) = false | task_finished (Finished _) = true; in fun require_thys options initiators qualifier dir strs tasks = fold_map (require_thy options initiators qualifier dir) strs tasks |>> forall I and require_thy options initiators qualifier dir (s, require_pos) tasks = let val {master_dir, theory_name, ...} = Resources.import_name qualifier dir s; in (case try (String_Graph.get_node tasks) theory_name of SOME task => (task_finished task, tasks) | NONE => let val _ = member (op =) initiators theory_name andalso error (cycle_msg initiators); val (current, deps, theory_pos, imports, keywords) = check_thy_deps master_dir theory_name handle ERROR msg => cat_error msg ("The error(s) above occurred for theory " ^ quote theory_name ^ Position.here require_pos ^ required_by "\n" initiators); val qualifier' = Resources.theory_qualifier theory_name; val dir' = dir + master_dir_deps (Option.map #1 deps); val parents = map (#theory_name o Resources.import_name qualifier' dir' o #1) imports; val (parents_current, tasks') = require_thys options (theory_name :: initiators) qualifier' dir' imports tasks; val all_current = current andalso parents_current; val task = if all_current then 
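(* a theory that is current, with all parents current, is reused as Finished;
   otherwise a loading Task is created from the freshly checked dependencies *)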
Finished (get_theory theory_name) else (case deps of NONE => raise Fail "Malformed deps" | SOME (dep, text) => Task (parents, load_thy options initiators dep text (theory_name, theory_pos) keywords)); val tasks'' = new_entry theory_name parents task tasks'; in (all_current, tasks'') end) end; end; (* use theories *) fun use_theories options qualifier imports = schedule_theories (#2 (require_thys options [] qualifier Path.current imports String_Graph.empty)); fun use_thy name = ignore (use_theories (Options.default ()) Resources.default_qualifier [(name, Position.none)]); (* toplevel scripting -- without maintaining database *) fun script_thy pos txt thy = let val trs = Outer_Syntax.parse_text thy (K thy) pos txt; val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs); - val end_state = fold (Toplevel.command_exception true) trs (Toplevel.init_toplevel ()); + val end_state = fold (Toplevel.command_exception true) trs (Toplevel.make_state NONE); in Toplevel.end_theory end_pos end_state end; (* register theory *) fun register_thy theory = let val name = Context.theory_long_name theory; val {master, ...} = Resources.check_thy (Resources.master_directory theory) name; val imports = Resources.imports_of theory; in change_thys (fn thys => let val thys' = remove name thys; val _ = writeln ("Registering theory " ^ quote name); in update (make_deps master imports) theory thys' end) end; (* finish all theories *) fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry))); end; fun use_thy name = Runtime.toplevel_program (fn () => Thy_Info.use_thy name); diff --git a/src/Pure/Tools/print_operation.ML b/src/Pure/Tools/print_operation.ML --- a/src/Pure/Tools/print_operation.ML +++ b/src/Pure/Tools/print_operation.ML @@ -1,73 +1,74 @@ (* Title: Pure/Tools/print_operation.ML Author: Makarius Print operations as asynchronous query. 
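Registered printers are kept in a synchronized table and announced to the
front-end via the "print_operations" protocol message; registering an existing
name overwrites the entry and emits a warning.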
*) signature PRINT_OPERATION = sig val register: string -> string -> (Toplevel.state -> Pretty.T list) -> unit end; structure Print_Operation: PRINT_OPERATION = struct (* maintain print operations *) local val print_operations = Synchronized.var "print_operations" ([]: (string * (string * (Toplevel.state -> Pretty.T list))) list); fun report () = Output.try_protocol_message Markup.print_operations [Synchronized.value print_operations |> map (fn (x, (y, _)) => (x, y)) |> rev |> let open XML.Encode in list (pair string string) end]; val _ = Protocol_Command.define "print_operations" (fn [] => report ()); in fun register name description pr = (Synchronized.change print_operations (fn tab => (if not (AList.defined (op =) tab name) then () else warning ("Redefining print operation: " ^ quote name); AList.update (op =) (name, (description, pr)) tab)); report ()); val _ = Query_Operation.register {name = "print_operation", pri = Task_Queue.urgent_pri} (fn {state, args, writeln_result, ...} => let val _ = Toplevel.context_of state handle Toplevel.UNDEF => error "Unknown context"; fun err s = Pretty.mark_str (Markup.bad (), s); fun print name = (case AList.lookup (op =) (Synchronized.value print_operations) name of SOME (_, pr) => (pr state handle Toplevel.UNDEF => [err "Unknown context"]) | NONE => [err ("Unknown print operation: " ^ quote name)]); in writeln_result (Pretty.string_of (Pretty.chunks (maps print args))) end); end; +end; + (* common print operations *) val _ = - register "context" "context of local theory target" Toplevel.pretty_context; + Print_Operation.register "context" "context of local theory target" + Toplevel.pretty_context; val _ = - register "cases" "cases of proof context" + Print_Operation.register "cases" "cases of proof context" (Proof_Context.pretty_cases o Toplevel.context_of); val _ = - register "terms" "term bindings of proof context" + Print_Operation.register "terms" "term bindings of proof context" (Proof_Context.pretty_term_bindings o Toplevel.context_of); val _ = - register "theorems" "theorems of local theory or proof context" + Print_Operation.register "theorems" "theorems of local theory or proof context" (Isar_Cmd.pretty_theorems false); - -end; diff --git a/src/ZF/WF.thy b/src/ZF/WF.thy --- a/src/ZF/WF.thy +++ b/src/ZF/WF.thy @@ -1,372 +1,371 @@ (* Title: ZF/WF.thy Author: Tobias Nipkow and Lawrence C Paulson Copyright 1994 University of Cambridge Derived first for transitive relations, and finally for arbitrary WF relations via wf_trancl and trans_trancl. It is difficult to derive this general case directly, using r^+ instead of r. In is_recfun, the two occurrences of the relation must have the same form. Inserting r^+ in the_recfun or wftrec yields a recursion rule with r^+ -`` {a} instead of r-``{a}. This recursion rule is stronger in principle, but harder to use, especially to prove wfrec_eclose_eq in epsilon.ML. Expanding out the definition of wftrec in wfrec would yield a mess. *) section\Well-Founded Recursion\ theory WF imports Trancl begin definition wf :: "i\o" where (*r is a well-founded relation*) "wf(r) \ \Z. Z=0 | (\x\Z. \y. \y,x\:r \ \ y \ Z)" definition wf_on :: "[i,i]\o" (\wf[_]'(_')\) where (*r is well-founded on A*) "wf_on(A,r) \ wf(r \ A*A)" definition is_recfun :: "[i, i, [i,i]\i, i] \o" where "is_recfun(r,a,H,f) \ (f = (\x\r-``{a}. H(x, restrict(f, r-``{x}))))" definition the_recfun :: "[i, i, [i,i]\i] \i" where "the_recfun(r,a,H) \ (THE f. 
is_recfun(r,a,H,f))" definition wftrec :: "[i, i, [i,i]\i] \i" where "wftrec(r,a,H) \ H(a, the_recfun(r,a,H))" definition wfrec :: "[i, i, [i,i]\i] \i" where (*public version. Does not require r to be transitive*) "wfrec(r,a,H) \ wftrec(r^+, a, \x f. H(x, restrict(f,r-``{x})))" definition wfrec_on :: "[i, i, i, [i,i]\i] \i" (\wfrec[_]'(_,_,_')\) where "wfrec[A](r,a,H) \ wfrec(r \ A*A, a, H)" subsection\Well-Founded Relations\ subsubsection\Equivalences between \<^term>\wf\ and \<^term>\wf_on\\ lemma wf_imp_wf_on: "wf(r) \ wf[A](r)" by (unfold wf_def wf_on_def, force) lemma wf_on_imp_wf: "\wf[A](r); r \ A*A\ \ wf(r)" by (simp add: wf_on_def subset_Int_iff) lemma wf_on_field_imp_wf: "wf[field(r)](r) \ wf(r)" by (unfold wf_def wf_on_def, fast) lemma wf_iff_wf_on_field: "wf(r) \ wf[field(r)](r)" by (blast intro: wf_imp_wf_on wf_on_field_imp_wf) lemma wf_on_subset_A: "\wf[A](r); B<=A\ \ wf[B](r)" by (unfold wf_on_def wf_def, fast) lemma wf_on_subset_r: "\wf[A](r); s<=r\ \ wf[A](s)" by (unfold wf_on_def wf_def, fast) lemma wf_subset: "\wf(s); r<=s\ \ wf(r)" by (simp add: wf_def, fast) subsubsection\Introduction Rules for \<^term>\wf_on\\ text\If every non-empty subset of \<^term>\A\ has an \<^term>\r\-minimal element then we have \<^term>\wf[A](r)\.\ lemma wf_onI: assumes prem: "\Z u. \Z<=A; u \ Z; \x\Z. \y\Z. \y,x\:r\ \ False" shows "wf[A](r)" unfolding wf_on_def wf_def apply (rule equals0I [THEN disjCI, THEN allI]) apply (rule_tac Z = Z in prem, blast+) done text\If \<^term>\r\ allows well-founded induction over \<^term>\A\ then we have \<^term>\wf[A](r)\. Premise is equivalent to \<^prop>\\B. \x\A. (\y. \y,x\: r \ y \ B) \ x \ B \ A<=B\\ lemma wf_onI2: assumes prem: "\y B. \\x\A. (\y\A. \y,x\:r \ y \ B) \ x \ B; y \ A\ \ y \ B" shows "wf[A](r)" apply (rule wf_onI) apply (rule_tac c=u in prem [THEN DiffE]) prefer 3 apply blast apply fast+ done subsubsection\Well-founded Induction\ text\Consider the least \<^term>\z\ in \<^term>\domain(r)\ such that \<^term>\P(z)\ does not hold...\ lemma wf_induct_raw: "\wf(r); \x.\\y. \y,x\: r \ P(y)\ \ P(x)\ \ P(a)" unfolding wf_def apply (erule_tac x = "{z \ domain(r). \ P(z)}" in allE) apply blast done lemmas wf_induct = wf_induct_raw [rule_format, consumes 1, case_names step, induct set: wf] text\The form of this rule is designed to match \wfI\\ lemma wf_induct2: "\wf(r); a \ A; field(r)<=A; \x.\x \ A; \y. \y,x\: r \ P(y)\ \ P(x)\ \ P(a)" apply (erule_tac P="a \ A" in rev_mp) apply (erule_tac a=a in wf_induct, blast) done lemma field_Int_square: "field(r \ A*A) \ A" by blast lemma wf_on_induct_raw [consumes 2, induct set: wf_on]: "\wf[A](r); a \ A; \x.\x \ A; \y\A. \y,x\: r \ P(y)\ \ P(x) \ \ P(a)" unfolding wf_on_def apply (erule wf_induct2, assumption) apply (rule field_Int_square, blast) done lemma wf_on_induct [consumes 2, case_names step, induct set: wf_on]: "wf[A](r) \ a \ A \ (\x. x \ A \ (\y. y \ A \ \y, x\ \ r \ P(y)) \ P(x)) \ P(a)" using wf_on_induct_raw [of A r a P] by simp text\If \<^term>\r\ allows well-founded induction then we have \<^term>\wf(r)\.\ lemma wfI: "\field(r)<=A; \y B. \\x\A. (\y\A. \y,x\:r \ y \ B) \ x \ B; y \ A\ \ y \ B\ \ wf(r)" apply (rule wf_on_subset_A [THEN wf_on_field_imp_wf]) apply (rule wf_onI2) prefer 2 apply blast apply blast done subsection\Basic Properties of Well-Founded Relations\ lemma wf_not_refl: "wf(r) \ \a,a\ \ r" by (erule_tac a=a in wf_induct, blast) lemma wf_not_sym [rule_format]: "wf(r) \ \x. 
\a,x\:r \ \x,a\ \ r" by (erule_tac a=a in wf_induct, blast) (* @{term"\wf(r); \a,x\ \ r; \P \ \x,a\ \ r\ \ P"} *) lemmas wf_asym = wf_not_sym [THEN swap] lemma wf_on_not_refl: "\wf[A](r); a \ A\ \ \a,a\ \ r" by (erule_tac a=a in wf_on_induct, assumption, blast) lemma wf_on_not_sym: "\wf[A](r); a \ A\ \ (\b. b\A \ \a,b\:r \ \b,a\\r)" apply (atomize (full), intro impI) apply (erule_tac a=a in wf_on_induct, assumption, blast) done lemma wf_on_asym: "\wf[A](r); \Z \ \a,b\ \ r; \b,a\ \ r \ Z; \Z \ a \ A; \Z \ b \ A\ \ Z" by (blast dest: wf_on_not_sym) (*Needed to prove well_ordI. Could also reason that wf[A](r) means wf(r \ A*A); thus wf( (r \ A*A)^+ ) and use wf_not_refl *) lemma wf_on_chain3: "\wf[A](r); \a,b\:r; \b,c\:r; \c,a\:r; a \ A; b \ A; c \ A\ \ P" apply (subgoal_tac "\y\A. \z\A. \a,y\:r \ \y,z\:r \ \z,a\:r \ P", blast) apply (erule_tac a=a in wf_on_induct, assumption, blast) done text\transitive closure of a WF relation is WF provided \<^term>\A\ is downward closed\ lemma wf_on_trancl: "\wf[A](r); r-``A \ A\ \ wf[A](r^+)" apply (rule wf_onI2) apply (frule bspec [THEN mp], assumption+) apply (erule_tac a = y in wf_on_induct, assumption) apply (blast elim: tranclE, blast) done lemma wf_trancl: "wf(r) \ wf(r^+)" apply (simp add: wf_iff_wf_on_field) apply (rule wf_on_subset_A) apply (erule wf_on_trancl) apply blast apply (rule trancl_type [THEN field_rel_subset]) done text\\<^term>\r-``{a}\ is the set of everything under \<^term>\a\ in \<^term>\r\\ lemmas underI = vimage_singleton_iff [THEN iffD2] lemmas underD = vimage_singleton_iff [THEN iffD1] subsection\The Predicate \<^term>\is_recfun\\ lemma is_recfun_type: "is_recfun(r,a,H,f) \ f \ r-``{a} -> range(f)" unfolding is_recfun_def apply (erule ssubst) apply (rule lamI [THEN rangeI, THEN lam_type], assumption) done lemmas is_recfun_imp_function = is_recfun_type [THEN fun_is_function] lemma apply_recfun: "\is_recfun(r,a,H,f); \x,a\:r\ \ f`x = H(x, restrict(f,r-``{x}))" unfolding is_recfun_def txt\replace f only on the left-hand side\ apply (erule_tac P = "\x. t(x) = u" for t u in ssubst) apply (simp add: underI) done lemma is_recfun_equal [rule_format]: "\wf(r); trans(r); is_recfun(r,a,H,f); is_recfun(r,b,H,g)\ \ \x,a\:r \ \x,b\:r \ f`x=g`x" apply (frule_tac f = f in is_recfun_type) apply (frule_tac f = g in is_recfun_type) apply (simp add: is_recfun_def) apply (erule_tac a=x in wf_induct) apply (intro impI) apply (elim ssubst) apply (simp (no_asm_simp) add: vimage_singleton_iff restrict_def) apply (rule_tac t = "\z. H (x, z)" for x in subst_context) apply (subgoal_tac "\y\r-``{x}. \z. 
\y,z\:f \ \y,z\:g") apply (blast dest: transD) apply (simp add: apply_iff) apply (blast dest: transD intro: sym) done lemma is_recfun_cut: "\wf(r); trans(r); is_recfun(r,a,H,f); is_recfun(r,b,H,g); \b,a\:r\ \ restrict(f, r-``{b}) = g" apply (frule_tac f = f in is_recfun_type) apply (rule fun_extension) apply (blast dest: transD intro: restrict_type2) apply (erule is_recfun_type, simp) apply (blast dest: transD intro: is_recfun_equal) done subsection\Recursion: Main Existence Lemma\ lemma is_recfun_functional: "\wf(r); trans(r); is_recfun(r,a,H,f); is_recfun(r,a,H,g)\ \ f=g" by (blast intro: fun_extension is_recfun_type is_recfun_equal) lemma the_recfun_eq: "\is_recfun(r,a,H,f); wf(r); trans(r)\ \ the_recfun(r,a,H) = f" unfolding the_recfun_def apply (blast intro: is_recfun_functional) done (*If some f satisfies is_recfun(r,a,H,-) then so does the_recfun(r,a,H) *) lemma is_the_recfun: "\is_recfun(r,a,H,f); wf(r); trans(r)\ \ is_recfun(r, a, H, the_recfun(r,a,H))" by (simp add: the_recfun_eq) lemma unfold_the_recfun: "\wf(r); trans(r)\ \ is_recfun(r, a, H, the_recfun(r,a,H))" apply (rule_tac a=a in wf_induct, assumption) apply (rename_tac a1) apply (rule_tac f = "\y\r-``{a1}. wftrec (r,y,H)" in is_the_recfun) apply typecheck unfolding is_recfun_def wftrec_def \ \Applying the substitution: must keep the quantified assumption!\ apply (rule lam_cong [OF refl]) apply (drule underD) apply (fold is_recfun_def) apply (rule_tac t = "\z. H(x, z)" for x in subst_context) apply (rule fun_extension) apply (blast intro: is_recfun_type) apply (rule lam_type [THEN restrict_type2]) apply blast apply (blast dest: transD) apply atomize apply (frule spec [THEN mp], assumption) apply (subgoal_tac "\xa,a1\ \ r") apply (drule_tac x1 = xa in spec [THEN mp], assumption) apply (simp add: vimage_singleton_iff apply_recfun is_recfun_cut) apply (blast dest: transD) done subsection\Unfolding \<^term>\wftrec(r,a,H)\\ lemma the_recfun_cut: "\wf(r); trans(r); \b,a\:r\ \ restrict(the_recfun(r,a,H), r-``{b}) = the_recfun(r,b,H)" by (blast intro: is_recfun_cut unfold_the_recfun) (*NOT SUITABLE FOR REWRITING: it is recursive!*) lemma wftrec: "\wf(r); trans(r)\ \ wftrec(r,a,H) = H(a, \x\r-``{a}. wftrec(r,x,H))" unfolding wftrec_def apply (subst unfold_the_recfun [unfolded is_recfun_def]) apply (simp_all add: vimage_singleton_iff [THEN iff_sym] the_recfun_cut) done subsubsection\Removal of the Premise \<^term>\trans(r)\\ (*NOT SUITABLE FOR REWRITING: it is recursive!*) lemma wfrec: "wf(r) \ wfrec(r,a,H) = H(a, \x\r-``{a}. wfrec(r,x,H))" unfolding wfrec_def apply (erule wf_trancl [THEN wftrec, THEN ssubst]) apply (rule trans_trancl) apply (rule vimage_pair_mono [THEN restrict_lam_eq, THEN subst_context]) apply (erule r_into_trancl) apply (rule subset_refl) done (*This form avoids giant explosions in proofs. NOTE USE OF \ *) lemma def_wfrec: "\\x. h(x)\wfrec(r,x,H); wf(r)\ \ h(a) = H(a, \x\r-``{a}. h(x))" apply simp apply (elim wfrec) done lemma wfrec_type: "\wf(r); a \ A; field(r)<=A; \x u. \x \ A; u \ Pi(r-``{x}, B)\ \ H(x,u) \ B(x) \ \ wfrec(r,a,H) \ B(a)" apply (rule_tac a = a in wf_induct2, assumption+) apply (subst wfrec, assumption) apply (simp add: lam_type underD) done lemma wfrec_on: "\wf[A](r); a \ A\ \ wfrec[A](r,a,H) = H(a, \x\(r-``{a}) \ A. wfrec[A](r,x,H))" unfolding wf_on_def wfrec_on_def apply (erule wfrec [THEN trans]) -apply (simp add: vimage_Int_square cons_subset_iff) +apply (simp add: vimage_Int_square) done text\Minimal-element characterization of well-foundedness\ -lemma wf_eq_minimal: - "wf(r) \ (\Q x. 
x \ Q \ (\z\Q. \y. \y,z\:r \ y\Q))" -by (unfold wf_def, blast) +lemma wf_eq_minimal: "wf(r) \ (\Q x. x \ Q \ (\z\Q. \y. \y,z\:r \ y\Q))" + unfolding wf_def by blast end
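(*  The minimal-element characterisation wf_eq_minimal above, restated in
    conventional (LaTeX) notation -- a plain rephrasing of the lemma, nothing more:

      \mathrm{wf}(r) \;\longleftrightarrow\;
        \forall Q\,x.\; x \in Q \longrightarrow
          (\exists z \in Q.\; \forall y.\; \langle y, z\rangle \in r \longrightarrow y \notin Q)
*)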