diff --git a/etc/options b/etc/options
--- a/etc/options
+++ b/etc/options
@@ -1,404 +1,404 @@
 (* :mode=isabelle-options: *)

 section "Document Preparation"

 option browser_info : bool = false for build
   -- "generate theory browser information"

 option document : string = "" (standard "true") for build
   -- "build PDF document (enabled for \"pdf\" or \"true\", disabled for \"\" or \"false\")"

 option document_output : string = "" (standard "output") for build
   -- "document output directory"

 option document_echo : bool = false for build
   -- "inform about document file names during session presentation"

 option document_variants : string = "document" for build document
   -- "alternative document variants (separated by colons)"

 option document_tags : string = "" for document
   -- "default command tags (separated by commas)"

 option document_bibliography : bool = false for document
   -- "explicitly enable use of bibtex (default: according to presence of root.bib)"

 option document_build : string = "lualatex" (standard "build") for document
   -- "document build engine (e.g. build, lualatex, pdflatex)"

 option document_logo : string = "" (standard "_") for document
   -- "generate named instance of Isabelle logo (underscore means unnamed variant)"

 option document_heading_prefix : string = "isamarkup" (standard) for document
   -- "prefix for LaTeX macros generated from 'chapter', 'section' etc."

 option document_comment_latex : bool = false for document
   -- "use regular LaTeX version of comment.sty, instead of historic plain TeX version"

 option document_cite_commands : string = "cite,nocite,citet,citep" for document
   -- "antiquotation commands to determine the structure of bibliography"

 option thy_output_display : bool = false for content
   -- "indicate output as multi-line display-style material"

 option thy_output_break : bool = false for content
   -- "control line breaks in non-display material"

 option thy_output_cartouche : bool = false for content
   -- "indicate if the output should be delimited as cartouche"

 option thy_output_quotes : bool = false for content
   -- "indicate if the output should be delimited via double quotes"

 option thy_output_margin : int = 76 for content
   -- "right margin / page width for printing of display material"

 option thy_output_indent : int = 0 for content
   -- "indentation for pretty printing of display material"

 option thy_output_source : bool = false for content
   -- "print original source text rather than internal representation"

 option thy_output_source_cartouche : bool = false for content
   -- "print original source text rather than internal representation, preserve cartouches"

 option thy_output_modes : string = "" for content
   -- "additional print modes for document output (separated by commas)"


 section "Prover Output"

 option pide_reports : bool = true for content
   -- "enable reports of PIDE markup"

 option show_types : bool = false for content
   -- "show type constraints when printing terms"

 option show_sorts : bool = false for content
   -- "show sort constraints when printing types"

 option show_brackets : bool = false for content
   -- "show extra brackets when printing terms/types"

 option show_question_marks : bool = true for content
   -- "show leading question mark of schematic variables"

 option show_consts : bool = false for content
   -- "show constants with types when printing proof state"

 option show_main_goal : bool = false for content
   -- "show main goal when printing proof state"

 option goals_limit : int = 10 for content
   -- "maximum number of subgoals to be printed"

 option show_states : bool = false for content
   -- "show toplevel states even if outside of interactive mode"

 option names_long : bool = false for content
   -- "show fully qualified names"

 option names_short : bool = false for content
   -- "show base names only"

 option names_unique : bool = true for content
   -- "show partially qualified names, as required for unique name resolution"

 option eta_contract : bool = true for content
   -- "print terms in eta-contracted form"

 option print_mode : string = "" for content
   -- "additional print modes for prover output (separated by commas)"


 section "Parallel Processing and Timing"

 public option threads : int = 0 for build
   -- "maximum number of worker threads for prover process (0 = hardware max.)"

 option threads_trace : int = 0
   -- "level of tracing information for multithreading"

 option threads_stack_limit : real = 0.25
   -- "maximum stack size for worker threads (in giga words, 0 = unlimited)"

 public option parallel_limit : int = 0 for build
   -- "approximative limit for parallel tasks (0 = unlimited)"

 public option parallel_print : bool = true for build
   -- "parallel and asynchronous printing of results"

 public option parallel_proofs : int = 1 for build
   -- "level of parallel proof checking: 0, 1, 2"

 option parallel_subproofs_threshold : real = 0.01 for build
   -- "lower bound of timing estimate for forked nested proofs (seconds)"

 option command_timing_threshold : real = 0.1 for build
   -- "default threshold for persistent command timing (seconds)"

 public option timeout_scale : real = 1.0 for build
   -- "scale factor for timeout in Isabelle/ML and session build"


 section "Detail of Proof Checking"

 option record_proofs : int = -1 for content
   -- "set level of proofterm recording: 0, 1, 2, negative means unchanged"

 option quick_and_dirty : bool = false for content
   -- "if true then some tools will OMIT some proofs"

 option skip_proofs : bool = false for content
   -- "skip over proofs (implicit 'sorry')"

 option strict_facts : bool = false for content
   -- "force lazy facts when defined in context"


 section "Global Session Parameters"

 option condition : string = "" for content
   -- "required environment variables for subsequent theories (separated by commas)"

 option timeout : real = 0 for build
   -- "timeout for session build job (seconds > 0)"

 option timeout_build : bool = true for build
   -- "observe timeout for session build"

 option process_policy : string = ""
   -- "command prefix for underlying process, notably ML with NUMA policy"

 option process_output_tail : int = 40 for build
   -- "build process output tail shown to user (in lines, 0 = unlimited)"

 option profiling : string = "" (standard "time") for build
   -- "ML profiling (possible values: time, time_thread, allocations)"

 option system_log : string = "" (standard "-") for build
   -- "output for system messages (log file or \"-\" for console progress)"

 option system_heaps : bool = false for build
   -- "store session heaps in $ISABELLE_HEAPS_SYSTEM, not $ISABELLE_HEAPS"


 section "ML System"

 option ML_print_depth : int = 20 for content
   -- "ML print depth for toplevel pretty-printing"

 public option ML_exception_trace : bool = false
   -- "ML exception trace for toplevel command execution"

 public option ML_exception_debugger : bool = false
   -- "ML debugger exception trace for toplevel command execution"

 public option ML_debugger : bool = false
   -- "ML debugger instrumentation for newly compiled code"

 public option ML_system_64 : bool = false for build
   -- "prefer native 64bit platform (change requires restart)"

 public option ML_system_apple : bool = true for build
   -- "prefer native Apple/ARM64 platform (change requires restart)"


 section "Build Process"

 option build_thorough : bool = false
   -- "observe dependencies on options with tag 'content' or 'document'"

 option build_hostname : string = ""
   -- "alternative hostname for build process (default $ISABELLE_HOSTNAME)"

 option build_engine : string = ""
   -- "alternative session build engine"

 option build_database_test : bool = false
   -- "expose state of build process via central database"


 section "Editor Session"

 public option editor_load_delay : real = 0.5
   -- "delay for file load operations (new buffers etc.)"

 public option editor_input_delay : real = 0.2
   -- "delay for user input (text edits, cursor movement etc.)"

 public option editor_generated_input_delay : real = 1.0
   -- "delay for machine-generated input that may outperform user edits"

 public option editor_output_delay : real = 0.1
   -- "delay for prover output (markup, common messages etc.)"

 public option editor_consolidate_delay : real = 1.0
   -- "delay to consolidate status of command evaluation (execution forks)"

 public option editor_prune_delay : real = 15
   -- "delay to prune history (delete old versions)"

 option editor_prune_size : int = 0
   -- "retained size of pruned history (delete old versions)"

 public option editor_update_delay : real = 0.5
   -- "delay for physical GUI updates"

 public option editor_reparse_limit : int = 10000
   -- "maximum amount of reparsed text outside perspective"

 public option editor_tracing_messages : int = 1000
   -- "initial number of tracing messages for each command transaction (0: unbounded)"

 public option editor_chart_delay : real = 3.0
   -- "delay for chart repainting"

 public option editor_continuous_checking : bool = true
   -- "continuous checking of proof document (visible and required parts)"

 public option editor_output_state : bool = false
   -- "implicit output of proof state"

 public option editor_document_session : string = ""
   -- "session for interactive document preparation"

 public option editor_document_auto : bool = false
   -- "automatically build document when selected theories are finished"

 public option editor_document_delay : real = 2.0
   -- "delay for document auto build"

 option editor_execution_delay : real = 0.02
   -- "delay for start of execution process after document update (seconds)"

 option editor_syslog_limit : int = 100
   -- "maximum amount of buffered syslog messages"


 section "Headless Session"

 option headless_consolidate_delay : real = 15
   -- "delay to consolidate status of command evaluation (execution forks)"

 option headless_prune_delay : real = 60
   -- "delay to prune history (delete old versions)"

 option headless_check_delay : real = 0.5
   -- "delay for theory status check during PIDE processing (seconds)"

 option headless_check_limit : int = 0
   -- "maximum number of theory status checks (0 = unlimited)"

 option headless_nodes_status_delay : real = -1
   -- "delay for overall nodes status check during PIDE processing (seconds, disabled for < 0)"

 option headless_watchdog_timeout : real = 600
   -- "watchdog timeout for PIDE processing of broken theories (seconds, 0 = disabled)"

 option headless_commit_cleanup_delay : real = 60
   -- "delay for cleanup of already imported theories (seconds, 0 = disabled)"

 option headless_load_limit : real = 5.0
-  -- "limit in MB for loaded theory files (0 = unlimited)"
+  -- "limit in MiB for loaded theory files (0 = unlimited)"


 section "Miscellaneous Tools"

 public option find_theorems_limit : int = 40
   -- "limit of displayed results"

 public option find_theorems_tactic_limit : int = 5
   -- "limit of tactic search for 'solves' criterion"


 section "Completion"

 public option completion_limit : int = 40
   -- "limit for completion within the formal context"

 public option completion_path_ignore : string = "*~:*.marks:*.orig:*.rej:.DS_Store"
   -- "glob patterns to ignore in file-system path completion (separated by colons)"


 section "Spell Checker"

 public option spell_checker : bool = true
   -- "enable spell-checker for prose words within document text, comments etc."

 public option spell_checker_dictionary : string = "en"
   -- "spell-checker dictionary name"

 public option spell_checker_include : string = "words,comment,comment1,comment2,comment3,ML_comment,SML_comment"
   -- "included markup elements for spell-checker (separated by commas)"

 public option spell_checker_exclude : string = "document_marker,antiquoted,raw_text"
   -- "excluded markup elements for spell-checker (separated by commas)"


 section "Secure Shell (OpenSSH)"

 public option ssh_batch_mode : bool = true
   -- "enable SSH batch mode (no user interaction)"

 public option ssh_multiplexing : bool = true
   -- "enable multiplexing of SSH sessions (ignored on Windows)"

 public option ssh_compression : bool = true
   -- "enable SSH compression"

 public option ssh_alive_interval : real = 30
   -- "time interval to keep SSH server connection alive (seconds, ignore value < 0)"

 public option ssh_alive_count_max : int = 3
   -- "maximum number of messages to keep SSH server connection alive (ignore value < 0)"


 section "Phabricator"

 option phabricator_version_arcanist : string = "4f70fcffa8a5393e210d64f237ffdaa256256d6a"
   -- "repository version for arcanist"

 option phabricator_version_phabricator : string = "193798385bd3a7f72dca255e44f8112f4f8fc155"
   -- "repository version for phabricator"


 section "Theory Export"

 option export_theory : bool = false for content
   -- "export theory content to Isabelle/Scala"

 option export_standard_proofs : bool = false for content
   -- "export standardized proof terms to Isabelle/Scala (not scalable)"

 option export_proofs : bool = false for content
   -- "export proof terms to Isabelle/Scala"

 option prune_proofs : bool = false for content
   -- "prune proof terms after export (do not store in Isabelle/ML)"


 section "Theory update"

 option update_inner_syntax_cartouches : bool = false for content update
   -- "update inner syntax (types, terms, etc.) to use cartouches"

 option update_mixfix_cartouches : bool = false for content update
   -- "update mixfix templates to use cartouches instead of double quotes"

 option update_control_cartouches : bool = false for content update
   -- "update antiquotations to use control symbol with cartouche argument"

 option update_path_cartouches : bool = false for content update
   -- "update file-system paths to use cartouches"

 option update_cite : bool = false for content update
   -- "update cite commands and antiquotations"


 section "Build Database"

 option build_database_server : bool = false for connection
 option build_database_user : string = "" for connection
 option build_database_password : string = "" for connection
 option build_database_name : string = "" for connection
 option build_database_host : string = "" for connection
 option build_database_port : int = 0 for connection
 option build_database_ssh_host : string = "" for connection
 option build_database_ssh_user : string = "" for connection
 option build_database_ssh_port : int = 0 for connection


 section "Build Log Database"

 option build_log_database_user : string = "" for connection
 option build_log_database_password : string = "" for connection
 option build_log_database_name : string = "" for connection
 option build_log_database_host : string = "" for connection
 option build_log_database_port : int = 0 for connection
 option build_log_ssh_host : string = "" for connection
 option build_log_ssh_user : string = "" for connection
 option build_log_ssh_port : int = 0 for connection
 option build_log_history : int = 30  -- "length of relevant history (in days)"
 option build_log_transaction_size : int = 1  -- "number of log files for each db update"


 section "Isabelle/Scala/ML system channel"

 option system_channel_address : string = "" for connection
 option system_channel_password : string = "" for connection


 section "Bash process execution server"

 option bash_process_debugging : bool = false for connection
 option bash_process_address : string = "" for connection
 option bash_process_password : string = "" for connection
diff --git a/src/Pure/PIDE/headless.scala b/src/Pure/PIDE/headless.scala
--- a/src/Pure/PIDE/headless.scala
+++ b/src/Pure/PIDE/headless.scala
@@ -1,705 +1,706 @@
 /*  Title:      Pure/PIDE/headless.scala
     Author:     Makarius

 Headless PIDE session and resources from file-system.
 */

 package isabelle


 import java.io.{File => JFile}

 import scala.annotation.tailrec
 import scala.collection.mutable


 object Headless {
   /** session **/

   private def stable_snapshot(
     state: Document.State,
     version: Document.Version,
     name: Document.Node.Name
   ): Document.Snapshot = {
     val snapshot = state.snapshot(name)
     assert(version.id == snapshot.version.id)
     snapshot
   }

   class Use_Theories_Result private[Headless](
     val state: Document.State,
     val version: Document.Version,
     val nodes: List[(Document.Node.Name, Document_Status.Node_Status)],
     val nodes_committed: List[(Document.Node.Name, Document_Status.Node_Status)]
   ) {
     def nodes_pending: List[(Document.Node.Name, Document_Status.Node_Status)] = {
       val committed = nodes_committed.iterator.map(_._1).toSet
       nodes.filter(p => !committed(p._1))
     }

     def snapshot(name: Document.Node.Name): Document.Snapshot =
       stable_snapshot(state, version, name)

     def ok: Boolean =
       (nodes.iterator ++ nodes_committed.iterator).forall({ case (_, st) => st.ok })
   }

   class Session private[Headless](
     session_name: String,
     _session_options: => Options,
     override val resources: Resources)
   extends isabelle.Session(_session_options, resources) {
     session =>

     private def loaded_theory(name: Document.Node.Name): Boolean =
       resources.session_base.loaded_theory(name.theory)


     /* options */

     override def consolidate_delay: Time = session_options.seconds("headless_consolidate_delay")
     override def prune_delay: Time = session_options.seconds("headless_prune_delay")

     def default_check_delay: Time = session_options.seconds("headless_check_delay")
     def default_check_limit: Int = session_options.int("headless_check_limit")
     def default_nodes_status_delay: Time = session_options.seconds("headless_nodes_status_delay")
     def default_watchdog_timeout: Time = session_options.seconds("headless_watchdog_timeout")
     def default_commit_cleanup_delay: Time = session_options.seconds("headless_commit_cleanup_delay")
     def show_states: Boolean = session_options.bool("show_states")


     /* temporary directory */

     val tmp_dir: JFile = Isabelle_System.tmp_dir("server_session")
     val tmp_dir_name: String = File.path(tmp_dir).implode

     def master_directory(master_dir: String): String =
       proper_string(master_dir) getOrElse tmp_dir_name

     override def toString: String = session_name

     override def stop(): Process_Result = {
       try { super.stop() }
       finally { Isabelle_System.rm_tree(tmp_dir) }
     }


     /* theories */

     private object Load_State {
-      def finished: Load_State = Load_State(Nil, Nil, 0)
+      def finished: Load_State = Load_State(Nil, Nil, Space.zero)

       def count_file(name: Document.Node.Name): Long =
         if (loaded_theory(name)) 0 else File.space(name.path).bytes
     }

     private case class Load_State(
       pending: List[Document.Node.Name],
       rest: List[Document.Node.Name],
-      load_limit: Long
+      load_limit: Space
     ) {
       def next(
         dep_graph: Document.Node.Name.Graph[Unit],
         consolidated: Document.Node.Name => Boolean
       ): (List[Document.Node.Name], Load_State) = {
         def load_requirements(
           pending1: List[Document.Node.Name],
           rest1: List[Document.Node.Name]
         ) : (List[Document.Node.Name], Load_State) = {
           val load_theories = dep_graph.all_preds_rev(pending1)
           (load_theories, Load_State(pending1, rest1, load_limit))
         }

         if (!pending.forall(consolidated)) (Nil, this)
         else if (rest.isEmpty) (Nil, Load_State.finished)
-        else if (load_limit == 0) load_requirements(rest, Nil)
+        else if (!load_limit.is_proper) load_requirements(rest, Nil)
         else {
           val reachable =
-            dep_graph.reachable_limit(load_limit, Load_State.count_file, dep_graph.imm_preds, rest)
+            dep_graph.reachable_limit(
+              load_limit.bytes, Load_State.count_file, dep_graph.imm_preds, rest)
           val (pending1, rest1) = rest.partition(reachable)
           load_requirements(pending1, rest1)
         }
       }
     }

     private sealed case class Use_Theories_State(
       dep_graph: Document.Node.Name.Graph[Unit],
       load_state: Load_State,
       watchdog_timeout: Time,
       commit: Option[(Document.Snapshot, Document_Status.Node_Status) => Unit],
       last_update: Time = Time.now(),
       nodes_status: Document_Status.Nodes_Status = Document_Status.Nodes_Status.empty,
       already_committed: Map[Document.Node.Name, Document_Status.Node_Status] = Map.empty,
       changed_nodes: Set[Document.Node.Name] = Set.empty,
       changed_assignment: Boolean = false,
       result: Option[Exn.Result[Use_Theories_Result]] = None
     ) {
       def nodes_status_update(state: Document.State, version: Document.Version,
         domain: Option[Set[Document.Node.Name]] = None,
         trim: Boolean = false
       ): (Boolean, Use_Theories_State) = {
         val (nodes_status_changed, nodes_status1) =
           nodes_status.update(resources, state, version, domain = domain, trim = trim)
         val st1 = copy(last_update = Time.now(), nodes_status = nodes_status1)
         (nodes_status_changed, st1)
       }

       def changed(
         nodes: IterableOnce[Document.Node.Name],
         assignment: Boolean
       ): Use_Theories_State = {
         copy(
           changed_nodes = changed_nodes ++ nodes,
           changed_assignment = changed_assignment || assignment)
       }

       def reset_changed: Use_Theories_State =
         if (changed_nodes.isEmpty && !changed_assignment) this
         else copy(changed_nodes = Set.empty, changed_assignment = false)

       def watchdog: Boolean =
         watchdog_timeout > Time.zero && Time.now() - last_update > watchdog_timeout

       def finished_result: Boolean = result.isDefined

       def join_result: Option[(Exn.Result[Use_Theories_Result], Use_Theories_State)] =
         if (finished_result) Some((result.get, this)) else None

       def cancel_result: Use_Theories_State =
         if (finished_result) this else copy(result = Some(Exn.Exn(Exn.Interrupt())))

       def clean_theories: (List[Document.Node.Name], Use_Theories_State) = {
         @tailrec def frontier(
           base: List[Document.Node.Name],
           front: Set[Document.Node.Name]
         ) : Set[Document.Node.Name] = {
           val add = base.filter(name => dep_graph.imm_succs(name).forall(front))
           if (add.isEmpty) front
           else {
             val preds = add.map(dep_graph.imm_preds)
             val base1 =
               preds.tail.foldLeft(preds.head)(_ ++ _).toList.filter(already_committed.keySet)
             frontier(base1, front ++ add)
           }
         }

         if (already_committed.isEmpty) (Nil, this)
         else {
           val base =
             (for {
               (name, (_, (_, succs))) <- dep_graph.iterator
               if succs.isEmpty && already_committed.isDefinedAt(name)
             } yield name).toList
           val clean = frontier(base, Set.empty)
           if (clean.isEmpty) (Nil, this)
           else {
             (dep_graph.topological_order.filter(clean),
               copy(dep_graph = dep_graph.exclude(clean)))
           }
         }
       }

       private def consolidated(
         state: Document.State,
         version: Document.Version,
         name: Document.Node.Name
       ): Boolean = {
         loaded_theory(name) ||
         nodes_status.quasi_consolidated(name) ||
         (if (commit.isDefined) already_committed.isDefinedAt(name)
          else state.node_consolidated(version, name))
       }

       def check(
         state: Document.State,
         version: Document.Version,
         beyond_limit: Boolean
       ) : (List[Document.Node.Name], Use_Theories_State) = {
         val st1 =
           commit match {
             case None => this
             case Some(commit_fn) =>
               copy(already_committed =
                 dep_graph.topological_order.foldLeft(already_committed) {
                   case (committed, name) =>
                     def parents_committed: Boolean =
                       version.nodes(name).header.imports.forall(parent =>
                         loaded_theory(parent) || committed.isDefinedAt(parent))
                     if (!committed.isDefinedAt(name) && parents_committed &&
                         state.node_consolidated(version, name)) {
                       val snapshot = stable_snapshot(state, version, name)
                       val status = Document_Status.Node_Status.make(state, version, name)
                       commit_fn(snapshot, status)
                       committed + (name -> status)
                     }
                     else committed
                 })
           }

         def committed(name: Document.Node.Name): Boolean =
           loaded_theory(name) || st1.already_committed.isDefinedAt(name)

         val (load_theories0, load_state1) = load_state.next(dep_graph, consolidated(state, version, _))
         val load_theories = load_theories0.filterNot(committed)

         val result1 = {
           val stopped = beyond_limit || watchdog
           if (!finished_result && load_theories.isEmpty &&
             (stopped || dep_graph.keys_iterator.forall(consolidated(state, version, _)))
           ) {
             @tailrec def make_nodes(
               input: List[Document.Node.Name],
               output: List[(Document.Node.Name, Document_Status.Node_Status)]
             ): Option[List[(Document.Node.Name, Document_Status.Node_Status)]] = {
               input match {
                 case name :: rest =>
                   if (loaded_theory(name)) make_nodes(rest, output)
                   else {
                     val status = Document_Status.Node_Status.make(state, version, name)
                     val ok = if (commit.isDefined) committed(name) else status.consolidated
                     if (stopped || ok) make_nodes(rest, (name -> status) :: output)
                     else None
                   }
                 case Nil => Some(output)
               }
             }
             for (nodes <- make_nodes(dep_graph.topological_order.reverse, Nil)) yield {
               val nodes_committed =
                 (for {
                   name <- dep_graph.keys_iterator
                   status <- st1.already_committed.get(name)
                 } yield name -> status).toList
               Exn.Res(new Use_Theories_Result(state, version, nodes, nodes_committed))
             }
           }
           else result
         }

         (load_theories, st1.copy(result = result1, load_state = load_state1))
       }
     }

     def use_theories(
       theories: List[String],
       qualifier: String = Sessions.DRAFT,
       master_dir: String = "",
       unicode_symbols: Boolean = false,
       check_delay: Time = default_check_delay,
       check_limit: Int = default_check_limit,
       watchdog_timeout: Time = default_watchdog_timeout,
       nodes_status_delay: Time = default_nodes_status_delay,
       id: UUID.T = UUID.random(),
       // commit: must not block, must not fail
       commit: Option[(Document.Snapshot, Document_Status.Node_Status) => Unit] = None,
       commit_cleanup_delay: Time = default_commit_cleanup_delay,
       progress: Progress = new Progress
     ): Use_Theories_Result = {
       val dependencies = {
         val import_names =
           theories.map(thy =>
             resources.import_name(qualifier, master_directory(master_dir), thy) -> Position.none)
         resources.dependencies(import_names, progress = progress).check_errors
       }
       val dep_theories = dependencies.theories
       val dep_theories_set = dep_theories.toSet
       val dep_files = dependencies.loaded_files

       val use_theories_state = {
         val dep_graph = dependencies.theory_graph

         val maximals = dep_graph.maximals
         val rest =
           if (maximals.isEmpty || maximals.tail.isEmpty) maximals
           else {
             val depth = dep_graph.node_depth(Load_State.count_file)
             maximals.sortBy(node => - depth(node))
           }
         val load_limit =
-          if (commit.isDefined) (session_options.real("headless_load_limit") * 1024 * 1024).round
-          else 0
+          if (commit.isDefined) Space.MiB(session_options.real("headless_load_limit"))
+          else Space.zero
         val load_state = Load_State(Nil, rest, load_limit)

         Synchronized(Use_Theories_State(dep_graph, load_state, watchdog_timeout, commit))
       }

       def check_state(
         beyond_limit: Boolean = false,
         state: Document.State = session.get_state()
       ): Unit = {
         for {
           version <- state.stable_tip_version
           load_theories = use_theories_state.change_result(_.check(state, version, beyond_limit))
           if load_theories.nonEmpty
         } resources.load_theories(session, id, load_theories, dep_files, unicode_symbols, progress)
       }

       lazy val check_progress = {
         var check_count = 0
         Event_Timer.request(Time.now(), repeat = Some(check_delay)) {
           if (progress.stopped) use_theories_state.change(_.cancel_result)
           else {
             check_count += 1
             check_state(check_limit > 0 && check_count > check_limit)
           }
         }
       }

       val consumer = {
         val delay_nodes_status =
           Delay.first(nodes_status_delay max Time.zero) {
             progress.nodes_status(use_theories_state.value.nodes_status)
           }

         val delay_commit_clean =
           Delay.first(commit_cleanup_delay max Time.zero) {
             val clean_theories = use_theories_state.change_result(_.clean_theories)
             if (clean_theories.nonEmpty && session.is_ready) {
               progress.echo("Removing " + clean_theories.length + " theories ...")
               resources.clean_theories(session, id, clean_theories)
             }
           }

         isabelle.Session.Consumer[isabelle.Session.Commands_Changed](getClass.getName) { changed =>
           val state = session.get_state()

           def apply_changed(st: Use_Theories_State): Use_Theories_State =
             st.changed(changed.nodes.iterator.filter(dep_theories_set), changed.assignment)

           state.stable_tip_version match {
             case None => use_theories_state.change(apply_changed)
             case Some(version) =>
               val theory_progress =
                 use_theories_state.change_result { st =>
                   val changed_st = apply_changed(st)

                   val domain =
                     if (st.nodes_status.is_empty) dep_theories_set
                     else changed_st.changed_nodes

                   val (nodes_status_changed, st1) =
                     st.reset_changed.nodes_status_update(state, version,
                       domain = Some(domain), trim = changed_st.changed_assignment)

                   if (nodes_status_delay >= Time.zero && nodes_status_changed) {
                     delay_nodes_status.invoke()
                   }

                   val theory_progress =
                     (for {
                       (name, node_status) <- st1.nodes_status.present().iterator
                       if !node_status.is_empty && changed_st.changed_nodes(name) &&
                         !st.already_committed.isDefinedAt(name)
                       p1 = node_status.percentage
                       if p1 > 0 && !st.nodes_status.get(name).map(_.percentage).contains(p1)
                     } yield Progress.Theory(name.theory, percentage = Some(p1))).toList

                   if (commit.isDefined && commit_cleanup_delay > Time.zero) {
                     if (st1.finished_result) delay_commit_clean.revoke()
                     else delay_commit_clean.invoke()
                   }

                   (theory_progress, st1)
                 }

               theory_progress.foreach(progress.theory)

               check_state(state = state)
           }
         }
       }

       try {
         session.commands_changed += consumer
         check_progress
         use_theories_state.guarded_access(_.join_result)
         check_progress.cancel()
       }
       finally {
         session.commands_changed -= consumer
         resources.unload_theories(session, id, dep_theories)
       }

       Exn.release(use_theories_state.guarded_access(_.join_result))
     }

     def purge_theories(
       theories: List[String],
       qualifier: String = Sessions.DRAFT,
       master_dir: String = "",
       all: Boolean = false
     ): (List[Document.Node.Name], List[Document.Node.Name]) = {
       val nodes =
         if (all) None
         else Some(theories.map(resources.import_name(qualifier, master_directory(master_dir), _)))
       resources.purge_theories(nodes)
     }
   }



   /** resources **/

   object Resources {
     def apply(
       options: Options,
       session_background: Sessions.Background,
       log: Logger = No_Logger
     ): Resources = new Resources(options, session_background, log = log)

     def make(
       options: Options,
       session_name: String,
       session_dirs: List[Path] = Nil,
       include_sessions: List[String] = Nil,
       progress: Progress = new Progress,
       log: Logger = No_Logger
     ): Resources = {
       val session_background =
         Sessions.background(options, session_name, dirs = session_dirs,
           include_sessions = include_sessions, progress = progress)
       apply(options, session_background, log = log)
     }

     final class Theory private[Headless](
       val node_name: Document.Node.Name,
       val node_header: Document.Node.Header,
       val text: String,
       val node_required: Boolean
     ) {
       override def toString: String = node_name.toString

       def node_perspective: Document.Node.Perspective_Text.T =
         Document.Node.Perspective(node_required, Text.Perspective.empty, Document.Node.Overlays.empty)

       def make_edits(text_edits: List[Text.Edit]): List[Document.Edit_Text] =
         List(node_name -> Document.Node.Deps(node_header),
           node_name -> Document.Node.Edits(text_edits),
           node_name -> node_perspective)

       def node_edits(old: Option[Theory]): List[Document.Edit_Text] = {
         val (text_edits, old_required) =
           if (old.isEmpty) (Text.Edit.inserts(0, text), false)
           else (Text.Edit.replace(0, old.get.text, text), old.get.node_required)

         if (text_edits.isEmpty && node_required == old_required) Nil
         else make_edits(text_edits)
       }

       def purge_edits: List[Document.Edit_Text] =
         make_edits(Text.Edit.removes(0, text))

       def set_required(required: Boolean): Theory =
         if (required == node_required) this
         else new Theory(node_name, node_header, text, required)
     }

     sealed case class State(
       blobs: Map[Document.Node.Name, Document.Blobs.Item] = Map.empty,
       theories: Map[Document.Node.Name, Theory] = Map.empty,
       required: Multi_Map[Document.Node.Name, UUID.T] = Multi_Map.empty
     ) {
       /* blobs */

       def doc_blobs: Document.Blobs = Document.Blobs(blobs)

       def update_blobs(names: List[Document.Node.Name]): (Document.Blobs, State) = {
         val new_blobs =
           names.flatMap { name =>
             val bytes = Bytes.read(name.path)
             blobs.get(name) match {
               case Some(blob) if blob.bytes == bytes => None
               case _ =>
                 val text = bytes.text
                 val blob = Document.Blobs.Item(bytes, text, Symbol.Text_Chunk(text), changed = true)
                 Some(name -> blob)
             }
           }
         val blobs1 = new_blobs.foldLeft(blobs)(_ + _)
         val blobs2 = new_blobs.foldLeft(blobs) { case (map, (a, b)) => map + (a -> b.unchanged) }
         (Document.Blobs(blobs1), copy(blobs = blobs2))
       }

       def blob_edits(
         name: Document.Node.Name,
         old_blob: Option[Document.Blobs.Item]
       ) : List[Document.Edit_Text] = {
         val blob = blobs.getOrElse(name, error("Missing blob " + quote(name.toString)))

         val text_edits =
           old_blob match {
             case None => List(Text.Edit.insert(0, blob.source))
             case Some(blob0) => Text.Edit.replace(0, blob0.source, blob.source)
           }
         if (text_edits.isEmpty) Nil
         else List(name -> Document.Node.Blob(blob), name -> Document.Node.Edits(text_edits))
       }


       /* theories */

       lazy val theory_graph: Document.Node.Name.Graph[Unit] =
         Document.Node.Name.make_graph(
           for ((name, theory) <- theories.toList)
             yield ((name, ()), theory.node_header.imports.filter(theories.isDefinedAt)))

       def is_required(name: Document.Node.Name): Boolean = required.isDefinedAt(name)

       def insert_required(id: UUID.T, names: List[Document.Node.Name]): State =
         copy(required = names.foldLeft(required)(_.insert(_, id)))

       def remove_required(id: UUID.T, names: List[Document.Node.Name]): State =
         copy(required = names.foldLeft(required)(_.remove(_, id)))

       def update_theories(update: List[Theory]): State =
         copy(theories =
           update.foldLeft(theories) {
             case (thys, thy) =>
               thys.get(thy.node_name) match {
                 case Some(thy1) if thy1 == thy => thys
                 case _ => thys + (thy.node_name -> thy)
               }
           })

       def remove_theories(remove: List[Document.Node.Name]): State = {
         require(remove.forall(name => !is_required(name)), "attempt to remove required nodes")
         copy(theories = theories -- remove)
       }

       def unload_theories(
         id: UUID.T,
         theories: List[Document.Node.Name]
       ) : (List[Document.Edit_Text], State) = {
         val st1 = remove_required(id, theories)
         val theory_edits =
           for {
             node_name <- theories
             theory <- st1.theories.get(node_name)
           }
           yield {
             val theory1 = theory.set_required(st1.is_required(node_name))
             val edits = theory1.node_edits(Some(theory))
             (theory1, edits)
           }
         (theory_edits.flatMap(_._2), st1.update_theories(theory_edits.map(_._1)))
       }

       def purge_theories(
         nodes: Option[List[Document.Node.Name]]
       ) : ((List[Document.Node.Name], List[Document.Node.Name], List[Document.Edit_Text]), State) = {
         val all_nodes = theory_graph.topological_order
         val purge = nodes.getOrElse(all_nodes).filterNot(is_required).toSet

         val retain = theory_graph.all_preds(all_nodes.filterNot(purge)).toSet
         val (retained, purged) = all_nodes.partition(retain)

         val purge_edits = purged.flatMap(name => theories(name).purge_edits)
         ((purged, retained, purge_edits), remove_theories(purged))
       }
     }
   }

   class Resources private[Headless](
     val options: Options,
     session_background: Sessions.Background,
     log: Logger = No_Logger)
   extends isabelle.Resources(session_background.check_errors, log = log) {
     resources =>

     val store: Sessions.Store = Sessions.store(options)


     /* session */

     def start_session(
       print_mode: List[String] = Nil,
       progress: Progress = new Progress
     ): Session = {
       val session_name = session_background.session_name
       val session = new Session(session_name, options, resources)

       val session_heaps =
         ML_Process.session_heaps(store, session_background, logic = session_name)

       progress.echo("Starting session " + session_name + " ...")
       Isabelle_Process.start(
         options, session, session_background, session_heaps, modes = print_mode).await_startup()

       session
     }


     /* theories */

     private val state = Synchronized(Resources.State())

     def load_theories(
       session: Session,
       id: UUID.T,
       theories: List[Document.Node.Name],
       files: List[Document.Node.Name],
       unicode_symbols: Boolean,
       progress: Progress
     ): Unit = {
       val loaded_theories =
         for (node_name <- theories)
         yield {
           val path = node_name.path
           if (!node_name.is_theory) error("Not a theory file: " + path)

           progress.expose_interrupt()
           val text = Symbol.output(unicode_symbols, File.read(path))
           val node_header = resources.check_thy(node_name, Scan.char_reader(text))
           new Resources.Theory(node_name, node_header, text, true)
         }

       val loaded = loaded_theories.length
       if (loaded > 1) progress.echo("Loading " + loaded + " theories ...")

       state.change { st =>
         val (doc_blobs1, st1) = st.insert_required(id, theories).update_blobs(files)
         val theory_edits =
           for (theory <- loaded_theories)
           yield {
             val node_name = theory.node_name
             val old_theory = st.theories.get(node_name)
             val theory1 = theory.set_required(st1.is_required(node_name))
             val edits = theory1.node_edits(old_theory)
             (theory1, edits)
           }
         val file_edits =
           for { node_name <- files if doc_blobs1.changed(node_name) }
           yield st1.blob_edits(node_name, st.blobs.get(node_name))

         session.update(doc_blobs1, theory_edits.flatMap(_._2) ::: file_edits.flatten)
         st1.update_theories(theory_edits.map(_._1))
       }
     }

     def unload_theories(session: Session, id: UUID.T, theories: List[Document.Node.Name]): Unit = {
       state.change { st =>
         val (edits, st1) = st.unload_theories(id, theories)
         session.update(st.doc_blobs, edits)
         st1
       }
     }

     def clean_theories(session: Session, id: UUID.T, theories: List[Document.Node.Name]): Unit = {
       state.change { st =>
         val (edits1, st1) = st.unload_theories(id, theories)
         val ((_, _, edits2), st2) = st1.purge_theories(None)
         session.update(st.doc_blobs, edits1 ::: edits2)
         st2
       }
     }

     def purge_theories(
       nodes: Option[List[Document.Node.Name]]
     ) : (List[Document.Node.Name], List[Document.Node.Name]) = {
       state.change_result { st =>
         val ((purged, retained, _), st1) = st.purge_theories(nodes)
         ((purged, retained), st1)
       }
     }
   }
 }