diff --git a/src/Pure/PIDE/session.scala b/src/Pure/PIDE/session.scala
--- a/src/Pure/PIDE/session.scala
+++ b/src/Pure/PIDE/session.scala
@@ -1,784 +1,784 @@
/* Title: Pure/PIDE/session.scala
Author: Makarius
Options: :folding=explicit:
PIDE editor session, potentially with running prover process.
*/
package isabelle
import scala.collection.immutable.Queue
import scala.collection.mutable
import scala.annotation.tailrec
object Session
{
/* outlets */
/* Named event consumer: "name" identifies the callback in error messages,
   "consume" is invoked for each posted event. */
object Consumer
{
// factory method; the class constructor itself is private
def apply[A](name: String)(consume: A => Unit): Consumer[A] =
new Consumer[A](name, consume)
}
// contravariant in A: a consumer of a supertype may serve for a subtype
final class Consumer[-A] private(val name: String, val consume: A => Unit)
/* Event outlet: fan-out of events of type A to a dynamic set of consumers.
   Delivery happens asynchronously on the given dispatcher thread; a failing
   consumer is logged and does not affect the others. */
class Outlet[A](dispatcher: Consumer_Thread[() => Unit])
{
private val consumers = Synchronized[List[Consumer[A]]](Nil)

def += (c: Consumer[A]): Unit = consumers.change(Library.update(c))
def -= (c: Consumer[A]): Unit = consumers.change(Library.remove(c))

def post(a: A): Unit =
{
// snapshot the consumer list, then enqueue one dispatcher task per consumer
consumers.value.foreach(consumer =>
dispatcher.send { () =>
try { consumer.consume(a) }
catch {
case exn: Throwable =>
Output.error_message("Consumer failed: " + quote(consumer.name) + "\n" + Exn.message(exn))
}
})
}
}
/* change */
/* A parsed document change: transition from "previous" to "version",
   with the edits required to get there and nodes affected on the way. */
sealed case class Change(
previous: Document.Version,
syntax_changed: List[Document.Node.Name],
deps_changed: Boolean,
doc_edits: List[Document.Edit_Command],
consolidate: List[Document.Node.Name],
version: Document.Version)
// internal trigger: re-check postponed changes against the current state
case object Change_Flush
/* events */
//{{{
// event messages posted on the corresponding outlets below
case class Command_Timing(props: Properties.T)
case class Theory_Timing(props: Properties.T)
case class Runtime_Statistics(props: Properties.T)
case class Task_Statistics(props: Properties.T)
case class Global_Options(options: Options)
case object Caret_Focus
case class Raw_Edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text])
case class Dialog_Result(id: Document_ID.Generic, serial: Long, result: String)
case class Build_Theories(id: String, master_dir: Path, theories: List[(Options, List[Path])])
// notification that the given commands/nodes changed (assignment = version assigned)
case class Commands_Changed(
assignment: Boolean, nodes: Set[Document.Node.Name], commands: Set[Command])
/* Session life-cycle phase. */
sealed abstract class Phase
{
// human-readable phase name: Terminated prints "finished"/"failed"
// depending on the process result, all others print their lowercase name
def print: String =
this match {
case Terminated(result) => if (result.ok) "finished" else "failed"
case _ => Word.lowercase(this.toString)
}
}
case object Inactive extends Phase // stable
case object Startup extends Phase // transient
case object Ready extends Phase // metastable
case object Shutdown extends Phase // transient
case class Terminated(result: Process_Result) extends Phase // stable
//}}}
/* syslog */
/* Bounded syslog buffer: the queue keeps at most "limit" recent messages,
   while "length" counts all messages ever added. */
private[Session] class Syslog(limit: Int)
{
private var queue = Queue.empty[XML.Elem]
private var length = 0
def += (msg: XML.Elem): Unit = synchronized {
queue = queue.enqueue(msg)
length += 1
// drop the oldest entry once over the limit, so the queue size stays <= limit
if (length > limit) queue = queue.dequeue._2
}
// rendered content; a trailer indicates how many messages were seen in total
def content: String = synchronized {
cat_lines(queue.iterator.map(XML.content)) +
(if (length > limit) "\n(A total of " + length + " messages...)" else "")
}
}
/* protocol handlers */
// a protocol function consumes a protocol message; result indicates whether it was handled
type Protocol_Function = Prover.Protocol_Output => Boolean
/* Plug-in for handling prover protocol messages; discovered as a service.
   Subclasses override the hooks they need. */
abstract class Protocol_Handler extends Isabelle_System.Service
{
def init(session: Session): Unit = {}
def exit(): Unit = {}
// message-name -> handler associations contributed by this plug-in
def functions: List[(String, Protocol_Function)] = Nil
// opportunity to adjust prover options before process startup
def prover_options(options: Options): Options = options
}
}
class Session(_session_options: => Options, val resources: Resources) extends Document.Session
{
session =>
- val cache: XML.Cache = XML.Cache.make()
+ val cache: Term.Cache = Term.Cache.make()
// blob information for a node; overridden by subclasses (default: none)
def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
Command.Blobs_Info.none
/* global flags */
@volatile var timing: Boolean = false
@volatile var verbose: Boolean = false
/* dynamic session options */
// re-evaluated on each access: _session_options is a by-name parameter
def session_options: Options = _session_options
def output_delay: Time = session_options.seconds("editor_output_delay")
def consolidate_delay: Time = session_options.seconds("editor_consolidate_delay")
def prune_delay: Time = session_options.seconds("editor_prune_delay")
def prune_size: Int = session_options.int("editor_prune_size")
def syslog_limit: Int = session_options.int("editor_syslog_limit")
def reparse_limit: Int = session_options.int("editor_reparse_limit")
/* dispatcher */
// single daemon thread that executes queued thunks in order (event delivery)
private val dispatcher =
Consumer_Thread.fork[() => Unit]("Session.dispatcher", daemon = true) { case e => e(); true }
// assert (debug builds) that we are on the dispatcher thread
def assert_dispatcher[A](body: => A): A =
{
assert(dispatcher.check_thread())
body
}
// require (always checked) that we are on the dispatcher thread
def require_dispatcher[A](body: => A): A =
{
require(dispatcher.check_thread(), "not on dispatcher thread")
body
}
// run body on the dispatcher: directly if already there, else asynchronously
def send_dispatcher(body: => Unit): Unit =
{
if (dispatcher.check_thread()) body
else dispatcher.send(() => body)
}
// like send_dispatcher, but block until the body has run
def send_wait_dispatcher(body: => Unit): Unit =
{
if (dispatcher.check_thread()) body
else dispatcher.send_wait(() => body)
}
/* outlets */
// event outlets: clients register Session.Consumer callbacks on these;
// all deliveries happen on the dispatcher thread
val finished_theories = new Session.Outlet[Document.Snapshot](dispatcher)
val command_timings = new Session.Outlet[Session.Command_Timing](dispatcher)
val theory_timings = new Session.Outlet[Session.Theory_Timing](dispatcher)
val runtime_statistics = new Session.Outlet[Session.Runtime_Statistics](dispatcher)
val task_statistics = new Session.Outlet[Session.Task_Statistics](dispatcher)
val global_options = new Session.Outlet[Session.Global_Options](dispatcher)
val caret_focus = new Session.Outlet[Session.Caret_Focus.type](dispatcher)
val raw_edits = new Session.Outlet[Session.Raw_Edits](dispatcher)
val commands_changed = new Session.Outlet[Session.Commands_Changed](dispatcher)
val phase_changed = new Session.Outlet[Session.Phase](dispatcher)
val syslog_messages = new Session.Outlet[Prover.Output](dispatcher)
val raw_output_messages = new Session.Outlet[Prover.Output](dispatcher)
val trace_events = new Session.Outlet[Simplifier_Trace.Event.type](dispatcher)
val debugger_updates = new Session.Outlet[Debugger.Update.type](dispatcher)
val all_messages = new Session.Outlet[Prover.Message](dispatcher) // potential bottle-neck!
/** main protocol manager **/
/* internal messages */
// messages understood by the manager thread (see main loop below)
private case class Start(start_prover: Prover.Receiver => Prover)
private case object Stop
private case class Get_State(promise: Promise[Document.State])
private case class Cancel_Exec(exec_id: Document_ID.Exec)
private case class Protocol_Command_Raw(name: String, args: List[Bytes])
private case class Protocol_Command_Args(name: String, args: List[String])
private case class Update_Options(options: Options)
private case object Consolidate_Execution
private case object Prune_History
/* phase */
// publish a phase transition on the phase_changed outlet and return it
private def post_phase(new_phase: Session.Phase): Session.Phase =
{
phase_changed.post(new_phase)
new_phase
}
private val _phase = Synchronized[Session.Phase](Session.Inactive)
private def phase_=(new_phase: Session.Phase): Unit = _phase.change(_ => post_phase(new_phase))
def phase: Session.Phase = _phase.value
def is_ready: Boolean = phase == Session.Ready
/* syslog */
private val syslog = new Session.Syslog(syslog_limit)
def syslog_content(): String = syslog.content
/* pipelined change parsing */
/* One unit of work for the change parser: text edits relative to "previous",
   with "version_result" fulfilled once the new version is parsed. */
private case class Text_Edits(
previous: Future[Document.Version],
doc_blobs: Document.Blobs,
text_edits: List[Document.Edit_Text],
consolidate: List[Document.Node.Name],
version_result: Promise[Document.Version])
// dedicated thread: parses edits into Session.Change values and forwards
// them to the manager, pipelined with further incoming edits
private val change_parser = Consumer_Thread.fork[Text_Edits]("change_parser", daemon = true)
{
case Text_Edits(previous, doc_blobs, text_edits, consolidate, version_result) =>
val prev = previous.get_finished
val change =
Timing.timeit("parse_change", timing) {
resources.parse_change(reparse_limit, prev, doc_blobs, text_edits, consolidate)
}
version_result.fulfill(change.version)
manager.send(change)
true
}
/* buffered changes */
/* Buffer of changed commands/nodes, flushed at most once per output_delay
   as a single Commands_Changed event. */
private object change_buffer
{
private var assignment: Boolean = false
private var nodes: Set[Document.Node.Name] = Set.empty
private var commands: Set[Command] = Set.empty
def flush(): Unit = synchronized {
if (assignment || nodes.nonEmpty || commands.nonEmpty)
commands_changed.post(Session.Commands_Changed(assignment, nodes, commands))
// changed nodes also feed the consolidation machinery
if (nodes.nonEmpty) consolidation.update(nodes)
assignment = false
nodes = Set.empty
commands = Set.empty
}
private val delay_flush = Delay.first(output_delay) { flush() }
def invoke(assign: Boolean, edited_nodes: List[Document.Node.Name], cmds: List[Command]): Unit =
synchronized {
assignment |= assign
for (node <- edited_nodes) {
nodes += node
}
for (command <- cmds) {
nodes += command.node_name
command.blobs_names.foreach(nodes += _)
commands += command
}
delay_flush.invoke()
}
// final synchronous flush on shutdown, cancelling any pending delayed one
def shutdown(): Unit =
{
delay_flush.revoke()
flush()
}
}
/* postponed changes */
/* Changes whose base version is not yet assigned: stored here until the
   prover catches up, then released (oldest first) by flush. */
private object postponed_changes
{
private var postponed: List[Session.Change] = Nil

def store(change: Session.Change): Unit = synchronized { postponed ::= change }

def flush(state: Document.State): List[Session.Change] = synchronized {
val (ready, waiting) = postponed.partition(ch => state.is_assigned(ch.previous))
postponed = waiting
// list was built by prepending, so reverse restores arrival order
ready.reverse
}
}
/* node consolidation */
/* Pending node consolidation: accumulates changed nodes and periodically
   triggers Consolidate_Execution on the manager. State None means exited. */
private object consolidation
{
private val delay =
Delay.first(consolidate_delay) { manager.send(Consolidate_Execution) }
private val init_state: Option[Set[Document.Node.Name]] = Some(Set.empty)
private val state = Synchronized(init_state)
def exit(): Unit =
{
delay.revoke()
state.change(_ => None)
}
def update(new_nodes: Set[Document.Node.Name] = Set.empty): Unit =
{
// add nodes (if still active) and remember whether we were active
val active =
state.change_result(st =>
(st.isDefined, st.map(nodes => if (nodes.isEmpty) new_nodes else nodes ++ new_nodes)))
if (active) delay.invoke()
}
// take the accumulated nodes, resetting to empty (or stay exited)
def flush(): Set[Document.Node.Name] =
state.change_result(st => if (st.isDefined) (st.get, init_state) else (Set.empty, None))
}
/* prover process */
/* Holder for the running prover process (None while inactive). */
private object prover
{
private val variable = Synchronized[Option[Prover]](None)
def defined: Boolean = variable.value.isDefined
// NOTE: only valid while defined — guarded by callers
def get: Prover = variable.value.get
def set(p: Prover): Unit = variable.change(_ => Some(p))
def reset: Unit = variable.change(_ => None)
// block until the prover has been reset to None
def await_reset(): Unit = variable.guarded_access({ case None => Some((), None) case _ => None })
}
/* file formats and protocol handlers */
// file-format plug-ins; lazy so the registry starts on first use
private lazy val file_formats: File_Format.Session =
File_Format.registry.start_session(session)
private val protocol_handlers = Protocol_Handlers.init(session)
// look up a registered handler by class, checking subclass compatibility
def get_protocol_handler[C <: Session.Protocol_Handler](c: Class[C]): Option[C] =
protocol_handlers.get(c.getName) match {
case Some(h) if Library.is_subclass(h.getClass, c) => Some(h.asInstanceOf[C])
case _ => None
}
def init_protocol_handler(handler: Session.Protocol_Handler): Unit =
protocol_handlers.init(handler)
// options pipeline: file formats first, then protocol handlers
def prover_options(options: Options): Options =
protocol_handlers.prover_options(file_formats.prover_options(options))
/* debugger */
private val debugger_handler = new Debugger.Handler(this)
init_protocol_handler(debugger_handler)
def debugger: Debugger = debugger_handler.debugger
/* manager thread */
private val delay_prune =
Delay.first(prune_delay) { manager.send(Prune_History) }
private val manager: Consumer_Thread[Any] =
{
/* global state */
val global_state = Synchronized(Document.State.init)
/* raw edits */
// record raw text edits in the history and hand them to the change parser;
// statement order matters: history must be continued before posting/parsing
def handle_raw_edits(
doc_blobs: Document.Blobs = Document.Blobs.empty,
edits: List[Document.Edit_Text] = Nil,
consolidate: List[Document.Node.Name] = Nil): Unit =
//{{{
{
require(prover.defined, "prover process not defined (handle_raw_edits)")
// stop ongoing execution early, the document is about to change
if (edits.nonEmpty) prover.get.discontinue_execution()
val previous = global_state.value.history.tip.version
val version = Future.promise[Document.Version]
global_state.change(_.continue_history(previous, edits, version))
raw_edits.post(Session.Raw_Edits(doc_blobs, edits))
change_parser.send(Text_Edits(previous, doc_blobs, edits, consolidate, version))
}
//}}}
/* resulting changes */
// apply a parsed change: define new blobs/commands in the prover, then
// define the new version and send the document update
def handle_change(change: Session.Change): Unit =
//{{{
{
require(prover.defined, "prover process not defined (handle_change)")
// define commands
{
val id_commands = new mutable.ListBuffer[Command]
def id_command(command: Command): Unit =
{
// define any blobs this command depends on, exactly once per digest
for {
(name, digest) <- command.blobs_defined
if !global_state.value.defined_blob(digest)
} {
change.version.nodes(name).get_blob match {
case Some(blob) =>
global_state.change(_.define_blob(digest))
prover.get.define_blob(digest, blob.bytes)
case None =>
Output.error_message("Missing blob " + quote(name.toString))
}
}
if (!global_state.value.defined_command(command.id)) {
global_state.change(_.define_command(command))
id_commands += command
}
}
// collect all new commands referenced by the edits (removed and inserted sides)
for { (_, edit) <- change.doc_edits } {
edit.foreach({ case (c1, c2) => c1.foreach(id_command); c2.foreach(id_command) })
}
if (id_commands.nonEmpty) {
prover.get.define_commands_bulk(resources, id_commands.toList)
}
}
val assignment = global_state.value.the_assignment(change.previous).check_finished
global_state.change(_.define_version(change.version, assignment))
prover.get.update(change.previous.id, change.version.id, change.doc_edits, change.consolidate)
resources.commit(change)
}
//}}}
/* prover output */
// dispatch one prover output message; malformed messages are ignored
// (with a warning only in verbose mode)
def handle_output(output: Prover.Output): Unit =
//{{{
{
def bad_output(): Unit =
{
if (verbose)
Output.warning("Ignoring bad prover output: " + output.message.toString)
}
// update a command state in the global state; non-theory commands also
// feed the change buffer so the editor repaints them
def change_command(f: Document.State => (Command.State, Document.State)): Unit =
{
try {
val st = global_state.change_result(f)
if (!st.command.span.is_theory) {
change_buffer.invoke(false, Nil, List(st.command))
}
}
catch { case _: Document.State.Fail => bad_output() }
}
output match {
case msg: Prover.Protocol_Output =>
// registered protocol handlers get the first chance
val handled = protocol_handlers.invoke(msg)
if (!handled) {
msg.properties match {
case Protocol.Command_Timing(props, state_id, timing) if prover.defined =>
command_timings.post(Session.Command_Timing(props))
val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil)))
change_command(_.accumulate(state_id, cache.elem(message), cache))
case Markup.Theory_Timing(props) =>
theory_timings.post(Session.Theory_Timing(props))
case Markup.Task_Statistics(props) =>
task_statistics.post(Session.Task_Statistics(props))
case Protocol.Export(args)
if args.id.isDefined && Value.Long.unapply(args.id.get).isDefined =>
val id = Value.Long.unapply(args.id.get).get
val entry = Export.make_entry("", args, msg.chunk, cache)
change_command(_.add_export(id, (args.serial, entry)))
case Protocol.Loading_Theory(node_name, id) =>
val blobs_info = build_blobs_info(node_name)
try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) }
catch { case _: Document.State.Fail => bad_output() }
case List(Markup.Commands_Accepted.PROPERTY) =>
msg.text match {
case Protocol.Commands_Accepted(ids) =>
ids.foreach(id =>
change_command(_.accumulate(id, Protocol.Commands_Accepted.message, cache)))
case _ => bad_output()
}
case List(Markup.Assign_Update.PROPERTY) =>
msg.text match {
case Protocol.Assign_Update(id, edited, update) =>
try {
val (edited_nodes, cmds) =
global_state.change_result(_.assign(id, edited, update))
change_buffer.invoke(true, edited_nodes, cmds)
manager.send(Session.Change_Flush)
}
catch { case _: Document.State.Fail => bad_output() }
case _ => bad_output()
}
// a new assignment is a good moment to schedule history pruning
delay_prune.invoke()
case List(Markup.Removed_Versions.PROPERTY) =>
msg.text match {
case Protocol.Removed(removed) =>
try {
global_state.change(_.removed_versions(removed))
manager.send(Session.Change_Flush)
}
catch { case _: Document.State.Fail => bad_output() }
case _ => bad_output()
}
case _ => bad_output()
}
}
case _ =>
output.properties match {
case Position.Id(state_id) =>
change_command(_.accumulate(state_id, output.message, cache))
case _ if output.is_init =>
// prover signalled readiness: initialize services, then the session
val init_ok =
try {
Isabelle_System.make_services(classOf[Session.Protocol_Handler])
.foreach(init_protocol_handler)
true
}
catch {
case exn: Throwable =>
prover.get.protocol_command("Prover.stop", "1", Exn.message(exn))
false
}
if (init_ok) {
prover.get.options(prover_options(session_options))
prover.get.init_session(resources)
phase = Session.Ready
debugger.ready()
}
case Markup.Process_Result(result) if output.is_exit =>
// prover terminated: wind down handlers, close open theories
if (prover.defined) protocol_handlers.exit()
for (id <- global_state.value.theories.keys) {
val snapshot = global_state.change_result(_.end_theory(id))
finished_theories.post(snapshot)
}
file_formats.stop_session
phase = Session.Terminated(result)
prover.reset
case _ =>
raw_output_messages.post(output)
}
}
}
//}}}
/* main thread */
// the manager main loop: single thread serializing all state transitions;
// messages arriving while no prover is defined are mostly ignored (guards)
Consumer_Thread.fork[Any]("Session.manager", daemon = true)
{
case arg: Any =>
//{{{
arg match {
case output: Prover.Output =>
if (output.is_syslog) {
syslog += output.message
syslog_messages.post(output)
}
if (output.is_stdout || output.is_stderr)
raw_output_messages.post(output)
else handle_output(output)
all_messages.post(output)
case input: Prover.Input =>
all_messages.post(input)
case Start(start_prover) if !prover.defined =>
// the prover sends its output back into this very manager
prover.set(start_prover(manager.send(_)))
case Stop =>
consolidation.exit()
delay_prune.revoke()
if (prover.defined) {
global_state.change(_ => Document.State.init)
prover.get.terminate()
}
case Get_State(promise) =>
promise.fulfill(global_state.value)
case Consolidate_Execution =>
if (prover.defined) {
val state = global_state.value
state.stable_tip_version match {
case None => consolidation.update()
case Some(version) =>
// consolidate only nodes that are not yet done but could be
val consolidate =
consolidation.flush().iterator.filter(name =>
!resources.session_base.loaded_theory(name) &&
!state.node_consolidated(version, name) &&
state.node_maybe_consolidated(version, name)).toList
if (consolidate.nonEmpty) handle_raw_edits(consolidate = consolidate)
}
}
case Prune_History =>
if (prover.defined) {
val old_versions = global_state.change_result(_.remove_versions(prune_size))
if (old_versions.nonEmpty) prover.get.remove_versions(old_versions)
}
case Update_Options(options) =>
if (prover.defined && is_ready) {
prover.get.options(prover_options(options))
handle_raw_edits()
}
global_options.post(Session.Global_Options(options))
case Cancel_Exec(exec_id) if prover.defined =>
prover.get.cancel_exec(exec_id)
case Session.Raw_Edits(doc_blobs, edits) if prover.defined =>
handle_raw_edits(doc_blobs = doc_blobs, edits = edits)
case Session.Dialog_Result(id, serial, result) if prover.defined =>
prover.get.dialog_result(serial, result)
handle_output(new Prover.Output(Protocol.Dialog_Result(id, serial, result)))
case Protocol_Command_Raw(name, args) if prover.defined =>
prover.get.protocol_command_raw(name, args)
case Protocol_Command_Args(name, args) if prover.defined =>
prover.get.protocol_command_args(name, args)
case change: Session.Change if prover.defined =>
// apply immediately if the base version is assigned, else postpone
val state = global_state.value
if (!state.removing_versions && state.is_assigned(change.previous))
handle_change(change)
else postponed_changes.store(change)
case Session.Change_Flush if prover.defined =>
val state = global_state.value
if (!state.removing_versions)
postponed_changes.flush(state).foreach(handle_change)
case bad =>
if (verbose) Output.warning("Ignoring bad message: " + bad.toString)
}
true
//}}}
}
}
/* main operations */
// current document state, obtained synchronously via the manager thread;
// falls back to the initial state if the manager is no longer active
def get_state(): Document.State =
{
if (manager.is_active()) {
val promise = Future.promise[Document.State]
manager.send_wait(Get_State(promise))
promise.join
}
else Document.State.init
}
// snapshot of one node (default: the empty name) with optional pending edits
def snapshot(name: Document.Node.Name = Document.Node.Name.empty,
pending_edits: List[Text.Edit] = Nil): Document.Snapshot =
get_state().snapshot(name, pending_edits)
// syntax of the most recently finished version of the node, or the overall base syntax
def recent_syntax(name: Document.Node.Name): Outer_Syntax =
get_state().recent_finished.version.get_finished.nodes(name).syntax getOrElse
resources.session_base.overall_syntax
// poll (sleeping output_delay between attempts) until the snapshot is up-to-date
@tailrec final def await_stable_snapshot(): Document.Snapshot =
{
val snapshot = this.snapshot()
if (snapshot.is_outdated) {
output_delay.sleep()
await_stable_snapshot()
}
else snapshot
}
// start the prover process; only legal from phase Inactive
def start(start_prover: Prover.Receiver => Prover): Unit =
{
// force initialization of lazy file formats before startup
file_formats
_phase.change(
{
case Session.Inactive =>
manager.send(Start(start_prover))
post_phase(Session.Startup)
case phase => error("Cannot start prover in phase " + quote(phase.print))
})
}
// orderly shutdown: drive the phase machine, stop the prover if it was
// running, then shut down all worker threads; returns the process result
def stop(): Process_Result =
{
val was_ready =
_phase.guarded_access(
{
// transient phases: wait until they settle
case Session.Startup | Session.Shutdown => None
case Session.Terminated(_) => Some((false, phase))
case Session.Inactive => Some((false, post_phase(Session.Terminated(Process_Result(0)))))
case Session.Ready => Some((true, post_phase(Session.Shutdown)))
})
if (was_ready) manager.send(Stop)
prover.await_reset()
change_parser.shutdown()
change_buffer.shutdown()
manager.shutdown()
dispatcher.shutdown()
phase match {
case Session.Terminated(result) => result
case phase => error("Bad session phase after shutdown: " + quote(phase.print))
}
}
// public API: all operations are forwarded as messages to the manager thread
def protocol_command_raw(name: String, args: List[Bytes]): Unit =
manager.send(Protocol_Command_Raw(name, args))
def protocol_command_args(name: String, args: List[String]): Unit =
manager.send(Protocol_Command_Args(name, args))
def protocol_command(name: String, args: String*): Unit =
protocol_command_args(name, args.toList)
def cancel_exec(exec_id: Document_ID.Exec): Unit =
manager.send(Cancel_Exec(exec_id))
// send_wait: the caller blocks until the edits have been accepted
def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]): Unit =
if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits))
def update_options(options: Options): Unit =
manager.send_wait(Update_Options(options))
def dialog_result(id: Document_ID.Generic, serial: Long, result: String): Unit =
manager.send(Session.Dialog_Result(id, serial, result))
}
diff --git a/src/Pure/PIDE/xml.scala b/src/Pure/PIDE/xml.scala
--- a/src/Pure/PIDE/xml.scala
+++ b/src/Pure/PIDE/xml.scala
@@ -1,459 +1,459 @@
/* Title: Pure/PIDE/xml.scala
Author: Makarius
Untyped XML trees and basic data representation.
*/
package isabelle
object XML
{
/** XML trees **/
/* datatype representation */
// attributes are plain name/value property lists
type Attribute = Properties.Entry
type Attributes = Properties.T
/* XML tree: either an element with markup and body, or plain text */
sealed abstract class Tree { override def toString: String = string_of_tree(this) }
type Body = List[Tree]
case class Elem(markup: Markup, body: Body) extends Tree
{
// cache the hash: trees are immutable and hashed frequently (see Cache below)
private lazy val hash: Int = (markup, body).hashCode()
override def hashCode(): Int = hash
def name: String = markup.name
def update_attributes(more_attributes: Attributes): Elem =
if (more_attributes.isEmpty) this
else Elem(markup.update_properties(more_attributes), body)
def + (att: Attribute): Elem = Elem(markup + att, body)
}
case class Text(content: String) extends Tree
{
private lazy val hash: Int = content.hashCode()
override def hashCode(): Int = hash
}
// convenience constructors
def elem(markup: Markup): XML.Elem = XML.Elem(markup, Nil)
def elem(name: String, body: Body): XML.Elem = XML.Elem(Markup(name, Nil), body)
def elem(name: String): XML.Elem = XML.Elem(Markup(name, Nil), Nil)
val no_text: Text = Text("")
val newline: Text = Text("\n")
/* name space */
/* XML namespace: qualifies names with a prefix and provides the
   corresponding xmlns declaration attribute. */
object Namespace
{
def apply(prefix: String, target: String): Namespace =
new Namespace(prefix, target)
}
final class Namespace private(prefix: String, target: String)
{
// qualified name: "prefix:name"
def apply(name: String): String = prefix + ":" + name
val attribute: XML.Attribute = ("xmlns:" + prefix, target)
override def toString: String = attribute.toString
}
/* wrapped elements */
// reserved element/attribute names for wrapping arbitrary markup inside XML
val XML_ELEM = "xml_elem"
val XML_NAME = "xml_name"
val XML_BODY = "xml_body"
/* Element wrapped so its own markup travels as data: the original markup
   name/properties are stored in attributes, body1 inside an xml_body child. */
object Wrapped_Elem
{
def apply(markup: Markup, body1: Body, body2: Body): XML.Elem =
XML.Elem(Markup(XML_ELEM, (XML_NAME, markup.name) :: markup.properties),
XML.Elem(Markup(XML_BODY, Nil), body1) :: body2)
def unapply(tree: Tree): Option[(Markup, Body, Body)] =
tree match {
case
XML.Elem(Markup(XML_ELEM, (XML_NAME, name) :: props),
XML.Elem(Markup(XML_BODY, Nil), body1) :: body2) =>
Some(Markup(name, props), body1, body2)
case _ => None
}
}
/* Anonymous root element holding a body, without meaningful markup. */
object Root_Elem
{
def apply(body: Body): XML.Elem = XML.Elem(Markup(XML_ELEM, Nil), body)
def unapply(tree: Tree): Option[Body] =
tree match {
case XML.Elem(Markup(XML_ELEM, Nil), body) => Some(body)
case _ => None
}
}
/* traverse text */
// fold "op" over all text pieces of a body, in document order;
// NOTE: the Wrapped_Elem case must come before the plain Elem case, so that
// wrapped markup (stored in body1) is excluded from the traversal
def traverse_text[A](body: Body)(a: A)(op: (A, String) => A): A =
{
def traverse(x: A, t: Tree): A =
t match {
case XML.Wrapped_Elem(_, _, ts) => ts.foldLeft(x)(traverse)
case XML.Elem(_, ts) => ts.foldLeft(x)(traverse)
case XML.Text(s) => op(x, s)
}
body.foldLeft(a)(traverse)
}
def text_length(body: Body): Int = traverse_text(body)(0) { case (n, s) => n + s.length }
def symbol_length(body: Body): Int = traverse_text(body)(0) { case (n, s) => n + Symbol.length(s) }
/* text content */
// concatenation of all text pieces, pre-sized via text_length
def content(body: Body): String =
{
val text = new StringBuilder(text_length(body))
traverse_text(body)(()) { case (_, s) => text.append(s) }
text.toString
}
def content(tree: Tree): String = content(List(tree))
/** string representation **/
// standard XML document header: declaration plus terminating newline
// (the previous value had the "<?xml ...?>" declaration stripped by
// text extraction, leaving only "\n")
val header: String = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
/* Append character c to s, escaped for XML output.
   The five predefined XML entities replace markup-significant characters;
   in "permissive" mode the quote characters pass through unescaped (only
   < > & are significant outside attribute values).
   Fix: the entity replacement strings had been garbled by HTML-decoding
   ("&lt;" -> "<", "&quot;" -> an unterminated string literal, ...), which
   made the escaper an identity map and the file unparsable; the standard
   entities are restored. */
def output_char(s: StringBuilder, c: Char, permissive: Boolean = false): Unit =
{
  c match {
    case '<' => s ++= "&lt;"
    case '>' => s ++= "&gt;"
    case '&' => s ++= "&amp;"
    case '"' if !permissive => s ++= "&quot;"
    case '\'' if !permissive => s ++= "&apos;"
    case _ => s += c
  }
}

/* Append string str to s with each character escaped via output_char.
   A null string is appended as-is: StringBuilder renders it as "null",
   matching the underlying Java API behavior. */
def output_string(s: StringBuilder, str: String, permissive: Boolean = false): Unit =
{
  if (str == null) s ++= str
  else str.iterator.foreach(output_char(s, _, permissive = permissive))
}
// emit a start tag (or self-closing tag when end = true); attribute values
// are escaped via output_string
def output_elem(s: StringBuilder, markup: Markup, end: Boolean = false): Unit =
{
s += '<'
s ++= markup.name
for ((a, b) <- markup.properties) {
s += ' '
s ++= a
s += '='
s += '"'
output_string(s, b)
s += '"'
}
if (end) s += '/'
s += '>'
}
// emit a closing tag for the given element name
def output_elem_end(s: StringBuilder, name: String): Unit =
{
s += '<'
s += '/'
s ++= name
s += '>'
}
// render a body as an XML string; empty elements become self-closing tags
def string_of_body(body: Body): String =
{
val s = new StringBuilder
def tree(t: Tree): Unit =
t match {
case XML.Elem(markup, Nil) =>
output_elem(s, markup, end = true)
case XML.Elem(markup, ts) =>
output_elem(s, markup)
ts.foreach(tree)
output_elem_end(s, markup.name)
case XML.Text(txt) => output_string(s, txt)
}
body.foreach(tree)
s.toString
}
def string_of_tree(tree: XML.Tree): String = string_of_body(List(tree))
// escaped text representation of a plain string
def text(s: String): String = string_of_tree(XML.Text(s))
/** cache **/
object Cache
{
def make(
- xz: XZ.Cache = XZ.Cache.make(),
- max_string: Int = isabelle.Cache.default_max_string,
+ xz: XZ.Cache = XZ.Cache.make(),
+ max_string: Int = isabelle.Cache.default_max_string,
initial_size: Int = isabelle.Cache.default_initial_size): Cache =
new Cache(xz, max_string, initial_size)
val none: Cache = make(XZ.Cache.none, max_string = 0)
}
- class Cache private[XML](val xz: XZ.Cache, max_string: Int, initial_size: Int)
+ class Cache(val xz: XZ.Cache, max_string: Int, initial_size: Int)
extends isabelle.Cache(max_string, initial_size)
{
// intern a property list: names are isolated and interned as JVM strings,
// values go through the string cache; empty lists are returned unchanged
protected def cache_props(x: Properties.T): Properties.T =
{
if (x.isEmpty) x
else
lookup(x) match {
case Some(y) => y
case None => store(x.map(p => (Library.isolate_substring(p._1).intern, cache_string(p._2))))
}
}
// intern a markup value via the cached name and property list
protected def cache_markup(x: Markup): Markup =
{
lookup(x) match {
case Some(y) => y
case None =>
x match {
case Markup(name, props) =>
store(Markup(cache_string(name), cache_props(props)))
}
}
}
// intern a whole XML tree recursively (markup, body, and text content)
protected def cache_tree(x: XML.Tree): XML.Tree =
{
lookup(x) match {
case Some(y) => y
case None =>
x match {
case XML.Elem(markup, body) =>
store(XML.Elem(cache_markup(markup), cache_body(body)))
case XML.Text(text) => store(XML.Text(cache_string(text)))
}
}
}
// intern a tree list element-wise; note the mapped list itself is not
// stored, only looked up — empty lists are returned unchanged
protected def cache_body(x: XML.Body): XML.Body =
{
if (x.isEmpty) x
else
lookup(x) match {
case Some(y) => y
case None => x.map(cache_tree)
}
}
// support hash-consing
// shallow variant: share the tree node itself without rebuilding it
def tree0(x: XML.Tree): XML.Tree =
if (no_cache) x else synchronized { lookup(x) getOrElse store(x) }
// main methods
// all public entry points are no-ops when caching is disabled and
// synchronize on the cache otherwise
def props(x: Properties.T): Properties.T =
if (no_cache) x else synchronized { cache_props(x) }
def markup(x: Markup): Markup =
if (no_cache) x else synchronized { cache_markup(x) }
def tree(x: XML.Tree): XML.Tree =
if (no_cache) x else synchronized { cache_tree(x) }
def body(x: XML.Body): XML.Body =
if (no_cache) x else synchronized { cache_body(x) }
// cache_tree preserves the Elem constructor, so the cast is safe here
def elem(x: XML.Elem): XML.Elem =
if (no_cache) x else synchronized { cache_tree(x).asInstanceOf[XML.Elem] }
}
/** XML as data representation language **/
// errors of the typed data representation below: bad atom vs. bad tree shape
abstract class Error(s: String) extends Exception(s)
class XML_Atom(s: String) extends Error(s)
class XML_Body(body: XML.Body) extends Error("")
/* Encoding of typed values into XML bodies (inverse of Decode below). */
object Encode
{
type T[A] = A => XML.Body
type V[A] = PartialFunction[A, (List[String], XML.Body)]
type P[A] = PartialFunction[A, List[String]]
/* atomic values */
def long_atom(i: Long): String = Library.signed_string_of_long(i)
def int_atom(i: Int): String = Library.signed_string_of_int(i)
def bool_atom(b: Boolean): String = if (b) "1" else "0"
def unit_atom(u: Unit) = ""
/* structural nodes */
// ":" is the reserved element name for structural nesting
private def node(ts: XML.Body): XML.Tree = XML.Elem(Markup(":", Nil), ts)
// position-indexed attributes: ("0", x0), ("1", x1), ...
private def vector(xs: List[String]): XML.Attributes =
xs.zipWithIndex.map({ case (x, i) => (int_atom(i), x) })
private def tagged(tag: Int, data: (List[String], XML.Body)): XML.Tree =
XML.Elem(Markup(int_atom(tag), vector(data._1)), data._2)
/* representation of standard types */
val tree: T[XML.Tree] = (t => List(t))
val properties: T[Properties.T] =
(props => List(XML.Elem(Markup(":", props), Nil)))
// the empty string is represented by the empty body
val string: T[String] = (s => if (s.isEmpty) Nil else List(XML.Text(s)))
val long: T[Long] = (x => string(long_atom(x)))
val int: T[Int] = (x => string(int_atom(x)))
val bool: T[Boolean] = (x => string(bool_atom(x)))
val unit: T[Unit] = (x => string(unit_atom(x)))
def pair[A, B](f: T[A], g: T[B]): T[(A, B)] =
(x => List(node(f(x._1)), node(g(x._2))))
def triple[A, B, C](f: T[A], g: T[B], h: T[C]): T[(A, B, C)] =
(x => List(node(f(x._1)), node(g(x._2)), node(h(x._3))))
def list[A](f: T[A]): T[List[A]] =
(xs => xs.map((x: A) => node(f(x))))
def option[A](f: T[A]): T[Option[A]] =
{
case None => Nil
case Some(x) => List(node(f(x)))
}
// sum type: the index of the first matching variant becomes the tag
def variant[A](fs: List[V[A]]): T[A] =
{
case x =>
val (f, tag) = fs.iterator.zipWithIndex.find(p => p._1.isDefinedAt(x)).get
List(tagged(tag, f(x)))
}
}
/* Decoding of XML bodies into typed values (inverse of Encode above);
   malformed input raises XML_Atom or XML_Body. */
object Decode
{
type T[A] = XML.Body => A
type V[A] = (List[String], XML.Body) => A
type P[A] = PartialFunction[List[String], A]
/* atomic values */
def long_atom(s: String): Long =
try { java.lang.Long.parseLong(s) }
catch { case e: NumberFormatException => throw new XML_Atom(s) }
def int_atom(s: String): Int =
try { Integer.parseInt(s) }
catch { case e: NumberFormatException => throw new XML_Atom(s) }
def bool_atom(s: String): Boolean =
if (s == "1") true
else if (s == "0") false
else throw new XML_Atom(s)
def unit_atom(s: String): Unit =
if (s == "") () else throw new XML_Atom(s)
/* structural nodes */
// unwrap a ":" nesting node (see Encode.node)
private def node(t: XML.Tree): XML.Body =
t match {
case XML.Elem(Markup(":", Nil), ts) => ts
case _ => throw new XML_Body(List(t))
}
// check that attribute names are the consecutive indices "0", "1", ...
private def vector(atts: XML.Attributes): List[String] =
atts.iterator.zipWithIndex.map(
{ case ((a, x), i) => if (int_atom(a) == i) x else throw new XML_Atom(a) }).toList
private def tagged(t: XML.Tree): (Int, (List[String], XML.Body)) =
t match {
case XML.Elem(Markup(name, atts), ts) => (int_atom(name), (vector(atts), ts))
case _ => throw new XML_Body(List(t))
}
/* representation of standard types */
val tree: T[XML.Tree] =
{
case List(t) => t
case ts => throw new XML_Body(ts)
}
val properties: T[Properties.T] =
{
case List(XML.Elem(Markup(":", props), Nil)) => props
case ts => throw new XML_Body(ts)
}
// the empty body decodes to the empty string (see Encode.string)
val string: T[String] =
{
case Nil => ""
case List(XML.Text(s)) => s
case ts => throw new XML_Body(ts)
}
val long: T[Long] = (x => long_atom(string(x)))
val int: T[Int] = (x => int_atom(string(x)))
val bool: T[Boolean] = (x => bool_atom(string(x)))
val unit: T[Unit] = (x => unit_atom(string(x)))
def pair[A, B](f: T[A], g: T[B]): T[(A, B)] =
{
case List(t1, t2) => (f(node(t1)), g(node(t2)))
case ts => throw new XML_Body(ts)
}
def triple[A, B, C](f: T[A], g: T[B], h: T[C]): T[(A, B, C)] =
{
case List(t1, t2, t3) => (f(node(t1)), g(node(t2)), h(node(t3)))
case ts => throw new XML_Body(ts)
}
def list[A](f: T[A]): T[List[A]] =
(ts => ts.map(t => f(node(t))))
def option[A](f: T[A]): T[Option[A]] =
{
case Nil => None
case List(t) => Some(f(node(t)))
case ts => throw new XML_Body(ts)
}
// sum type: the tag selects the decoder; an out-of-range tag is a body error
def variant[A](fs: List[V[A]]): T[A] =
{
case List(t) =>
val (tag, (xs, ts)) = tagged(t)
val f =
try { fs(tag) }
catch { case _: IndexOutOfBoundsException => throw new XML_Body(List(t)) }
f(xs, ts)
case ts => throw new XML_Body(ts)
}
}
}
diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala
--- a/src/Pure/Thy/presentation.scala
+++ b/src/Pure/Thy/presentation.scala
@@ -1,622 +1,621 @@
/* Title: Pure/Thy/presentation.scala
Author: Makarius
HTML presentation of PIDE document content.
*/
package isabelle
import scala.annotation.tailrec
import scala.collection.immutable.SortedMap
import scala.collection.mutable
object Presentation
{
/** HTML documents **/
/* HTML context */
sealed case class HTML_Document(title: String, content: String)
- def html_context(): HTML_Context = new HTML_Context
+ def html_context(cache: Term.Cache = Term.Cache.make()): HTML_Context =
+ new HTML_Context(cache)
- final class HTML_Context private[Presentation]
+ final class HTML_Context private[Presentation](val cache: Term.Cache)
{
- val term_cache: Term.Cache = Term.Cache.make()
-
private val already_presented = Synchronized(Set.empty[String])
/* Atomically record the given nodes as presented and return only those whose
   theory had not been registered before (presumably thread-safe via the
   Synchronized wrapper -- confirm against its implementation). */
def register_presented(nodes: List[Document.Node.Name]): List[Document.Node.Name] =
already_presented.change_result(presented =>
(nodes.filterNot(name => presented.contains(name.theory)),
presented ++ nodes.iterator.map(_.theory)))
private val theory_cache = Synchronized(Map.empty[String, Export_Theory.Theory])
/* Memoize the export theory for thy_name: evaluate the by-name make_thy at
   most once per name and keep the result in the shared cache. */
def cache_theory(thy_name: String, make_thy: => Export_Theory.Theory): Export_Theory.Theory =
  theory_cache.change_result(cached =>
    cached.get(thy_name) match {
      case Some(thy) => (thy, cached)
      case None =>
        val thy = make_thy
        (thy, cached.updated(thy_name, thy))
    })
// Standard page header: chapter title followed by extra trees, in a "head" div.
def head(title: String, rest: XML.Body = Nil): XML.Tree =
HTML.div("head", HTML.chapter(title) :: rest)
// Render source text as a preformatted "source" block.
def source(body: XML.Body): XML.Tree = HTML.pre("source", body)
// Anchor name for a text range, e.g. "offset_10..20".
def physical_ref(range: Text.Range): String =
"offset_" + range.start + ".." + range.stop
/* Itemized table of contents under a section heading; an empty item list
   yields no output at all. */
def contents(heading: String, items: List[XML.Body], css_class: String = "contents")
  : List[XML.Elem] =
  items match {
    case Nil => Nil
    case _ =>
      val inner = List(HTML.section(heading), HTML.itemize(items))
      List(HTML.div(css_class, inner))
  }
/* Wrap a pre-rendered body into a complete standalone HTML document,
   combining the given font CSS with the standard Isabelle stylesheet. */
def html_document(title: String, body: XML.Body, fonts_css: String): HTML_Document =
{
  val css_text = fonts_css + "\n\n" + File.read(HTML.isabelle_css)
  val head_elems = List(HTML.style(css_text), HTML.title(title))
  val content =
    HTML.output_document(head_elems, List(HTML.source(body)), css = "", structural = false)
  HTML_Document(title, content)
}
}
/* presentation elements */
/* Markup element selections driving HTML presentation:
   html -- elements rendered at all; entity -- entity kinds turned into links;
   language -- embedded languages rendered with their own CSS class. */
sealed case class Elements(
html: Markup.Elements = Markup.Elements.empty,
entity: Markup.Elements = Markup.Elements.empty,
language: Markup.Elements = Markup.Elements.empty)
// Basic element set: foreground/text colors, numerals, comments, entities, language.
val elements1: Elements =
Elements(
html = Rendering.foreground_elements ++ Rendering.text_color_elements +
Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE,
entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT,
Markup.CLASS, Markup.LOCALE, Markup.FREE))
// Extended set: additionally renders Markdown structure and document language.
val elements2: Elements =
Elements(
html = elements1.html ++ Rendering.markdown_elements,
language = Markup.Elements(Markup.Language.DOCUMENT))
/* formal entities */
// Formal entity without content payload, as provided by Export_Theory.
type Entity = Export_Theory.Entity[Export_Theory.No_Content]
/* Mutable per-node rendering state: ranges that have already been given
   physical anchors during HTML generation. */
case class Entity_Context(node: Document.Node.Name)
{
val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
}
/* HTML output */
// HTML elements that force block-level (div) rather than inline (span) wrapping.
private val div_elements =
Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name,
HTML.descr.name)
/* Translate PIDE markup XML into HTML.  Symbol offsets are threaded
   backwards (right-to-left) through the body so that each text node can
   reconstruct its own symbol range for entity anchors, even though some
   markup (Markdown bullets) is dropped from the output. */
def make_html(
name: Document.Node.Name,
elements: Elements,
entity_anchor: (Entity_Context, Symbol.Range, XML.Body) => Option[XML.Tree],
entity_link: (Entity_Context, Properties.T, XML.Body) => Option[XML.Tree],
xml: XML.Body): XML.Body =
{
val context = Entity_Context(name)
// Does this body contain any block-level element (recursively)?
def html_div(html: XML.Body): Boolean =
html exists {
case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
case XML.Text(_) => false
}
// Wrap in a div (if block content) or span with CSS class c; "" is a no-op.
def html_class(c: String, html: XML.Body): XML.Body =
if (c == "") html
else if (html_div(html)) List(HTML.div(c, html))
else List(HTML.span(c, html))
// Fold right-to-left so the end offset of each element is known before it.
def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
val (res1, offset) = html_body_single(tree, end_offset1)
(res1 ++ res, offset)
}
@tailrec
def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
xml_tree match {
case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
// Entity markup: becomes a hyperlink when its kind is selected.
case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
val (body1, offset) = html_body(body, end_offset)
if (elements.entity(kind)) {
entity_link(context, props, body1) match {
case Some(link) => (List(link), offset)
case None => (body1, offset)
}
}
else (body1, offset)
// Language markup: CSS class only for selected languages.
case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
val (body1, offset) = html_body(body, end_offset)
(html_class(if (elements.language(name)) name else "", body1), offset)
case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
val (body1, offset) = html_body(body, end_offset)
(List(HTML.par(body1)), offset)
case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
val (body1, offset) = html_body(body, end_offset)
(List(HTML.item(body1)), offset)
// Bullets are dropped from output, but their symbols still count for offsets.
case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
(Nil, end_offset - XML.symbol_length(text))
case XML.Elem(Markup.Markdown_List(kind), body) =>
val (body1, offset) = html_body(body, end_offset)
if (kind == Markup.ENUMERATE) (List(HTML.enum(body1)), offset)
else (List(HTML.list(body1)), offset)
// Generic markup: command/keyword kinds and rendering colors become CSS classes.
case XML.Elem(markup, body) =>
val name = markup.name
val (body1, offset) = html_body(body, end_offset)
val html =
markup.properties match {
case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
html_class(kind, body1)
case _ =>
body1
}
Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
case Some(c) => (html_class(c.toString, html), offset)
case None => (html_class(name, html), offset)
}
// Text: compute its symbol range and offer it to the anchor callback.
case XML.Text(text) =>
val offset = end_offset - Symbol.length(text)
val body = HTML.text(Symbol.decode(text))
entity_anchor(context, Text.Range(offset, end_offset), body) match {
case Some(body1) => (List(body1), offset)
case None => (body, offset)
}
}
// Start past the end: symbol offsets are 1-based.
html_body(xml, XML.symbol_length(xml) + 1)._1
}
/* PIDE HTML document */
/* Render a document snapshot as a standalone HTML page: either plain source
   text, a file-format-specific rendering (resources.html_document), or the
   default PIDE markup rendering without entity links/anchors. */
def html_document(
resources: Resources,
snapshot: Document.Snapshot,
html_context: HTML_Context,
elements: Elements,
plain_text: Boolean = false,
fonts_css: String = HTML.fonts_css()): HTML_Document =
{
require(!snapshot.is_outdated, "document snapshot outdated")
val name = snapshot.node_name
if (plain_text) {
val title = "File " + Symbol.cartouche_decoded(name.path.file_name)
val body = HTML.text(snapshot.node.source)
html_context.html_document(title, body, fonts_css)
}
else {
resources.html_document(snapshot) getOrElse {
val title =
if (name.is_theory) "Theory " + quote(name.theory_base_name)
else "File " + Symbol.cartouche_decoded(name.path.file_name)
val xml = snapshot.xml_markup(elements = elements.html)
// No entity anchors/links in this standalone rendering.
val body = make_html(name, elements, (_, _, _) => None, (_, _, _) => None, xml)
html_context.html_document(title, body, fonts_css)
}
}
}
/** HTML presentation **/
/* presentation context */
/* Factories for presentation contexts. */
object Context
{
  // Presentation disabled.
  val none: Context = new Context { def enabled: Boolean = false }
  // Presentation enabled, default output directory of the store.
  val standard: Context = new Context { def enabled: Boolean = true }
  // Presentation enabled, output redirected to an explicit directory.
  def dir(path: Path): Context =
    new Context {
      def enabled: Boolean = true
      override def dir(store: Sessions.Store): Path = path
    }
  // ":" selects the standard location; anything else is an explicit path.
  def make(s: String): Context =
    s match {
      case ":" => standard
      case _ => dir(Path.explode(s))
    }
}
/* Presentation context: whether HTML presentation is enabled and where
   output goes.  Construct only via the Context companion factories. */
abstract class Context private
{
def enabled: Boolean
// A session is presented if globally enabled or its options request browser_info.
def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info
def dir(store: Sessions.Store): Path = store.presentation_dir
// Per-session output directory: base dir plus "chapter/session".
def dir(store: Sessions.Store, info: Sessions.Info): Path =
dir(store) + Path.explode(info.chapter_session)
}
/* maintain chapter index */
// Name of the per-chapter session index file.
private val sessions_path = Path.basic(".sessions")
/* Read the (session name, description) pairs recorded in dir/.sessions;
   a missing file yields Nil.  May throw XML.Error on malformed content
   (callers catch it, see update_chapter). */
private def read_sessions(dir: Path): List[(String, String)] =
{
val path = dir + sessions_path
if (path.is_file) {
import XML.Decode._
list(pair(string, string))(Symbol.decode_yxml(File.read(path)))
}
else Nil
}
/* Merge new_sessions into the chapter's .sessions index (sorted by name,
   new entries override old ones) and regenerate the chapter index.html. */
def update_chapter(
presentation_dir: Path, chapter: String, new_sessions: List[(String, String)]): Unit =
{
val dir = Isabelle_System.make_directory(presentation_dir + Path.basic(chapter))
// A corrupt index is treated as empty rather than failing the update.
val sessions0 =
try { read_sessions(dir) }
catch { case _: XML.Error => Nil }
val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList
File.write(dir + sessions_path,
{
import XML.Encode._
YXML.string_of_body(list(pair(string, string))(sessions))
})
val title = "Isabelle/" + chapter + " sessions"
// Chapter index: one linked entry per session, with its trimmed description.
HTML.write_document(dir, "index.html",
List(HTML.title(title + Isabelle_System.isabelle_heading())),
HTML.chapter(title) ::
(if (sessions.isEmpty) Nil
else
List(HTML.div("sessions",
List(HTML.description(
sessions.map({ case (name, description) =>
val descr = Symbol.trim_blank_lines(description)
(List(HTML.link(name + "/index.html", HTML.text(name))),
if (descr == "") Nil
else HTML.break ::: List(HTML.pre(HTML.text(descr)))) })))))),
base = Some(presentation_dir))
}
/* Initialize the top-level presentation directory: fonts, logo, and the
   library index page assembled from a static template. */
def update_root(presentation_dir: Path): Unit =
{
Isabelle_System.make_directory(presentation_dir)
HTML.init_fonts(presentation_dir)
Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"),
presentation_dir + Path.explode("isabelle.gif"))
val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library"
// NOTE(review): the inline document below appears garbled -- the HTML tags
// between these string fragments look stripped (e.g. no <html>/<head>/<body>
// markers); confirm against the original source before relying on its output.
File.write(presentation_dir + Path.explode("index.html"),
HTML.header +
"""
""" + HTML.head_meta + """
""" + title + """
""" + File.read(Path.explode("~~/lib/html/library_index_content.template")) +
"""
""" + HTML.footer)
}
/* present session */
// Standard artifact names within a session's presentation directory.
val session_graph_path = Path.explode("session_graph.pdf")
val readme_path = Path.explode("README.html")
val files_path = Path.explode("files")
// HTML file name for a theory page, derived from its base name.
def html_name(name: Document.Node.Name): String = Path.explode(name.theory_base_name).html.implode
// HTML file name for an auxiliary source file, squashed under files/.
def html_path(path: Path): String = (files_path + path.squash.html).implode
/* Relative URL path from session0's directory to session1's, provided both
   sessions exist in the structure. */
def session_relative(deps: Sessions.Deps, session0: String, session1: String): Option[String] =
  deps.sessions_structure.get(session0).flatMap(info0 =>
    deps.sessions_structure.get(session1).map(info1 => info0.relative_path(info1)))
/* Relative URL path from session0 to the session qualifying node_name. */
def node_relative(
  deps: Sessions.Deps,
  session0: String,
  node_name: Document.Node.Name): Option[String] =
  session_relative(deps, session0, deps(session0).theory_qualifier(node_name))
/* Hyperlink to a theory's HTML page, relative to session0; None if either
   session is unknown in the structure.  An anchor, if given, is appended
   as a "#anchor" URL fragment.
   Idiom fix: replaces isDefined/.get tests on Options with Option.fold and
   a for-comprehension; behavior and interface unchanged. */
def theory_link(deps: Sessions.Deps, session0: String,
  name: Document.Node.Name, body: XML.Body, anchor: Option[String] = None): Option[XML.Tree] =
{
  val session1 = deps(session0).theory_qualifier(name)
  val fragment = anchor.fold("")("#" + _)
  for {
    info0 <- deps.sessions_structure.get(session0)
    info1 <- deps.sessions_structure.get(session1)
  } yield HTML.link(info0.relative_path(info1) + html_name(name) + fragment, body)
}
def session_html(
resources: Resources,
session: String,
deps: Sessions.Deps,
db_context: Sessions.Database_Context,
progress: Progress = new Progress,
verbose: Boolean = false,
html_context: HTML_Context,
session_elements: Elements,
presentation: Context): Unit =
{
val hierarchy = deps.sessions_structure.hierarchy(session)
val info = deps.sessions_structure(session)
val options = info.options
val base = deps(session)
def make_session_dir(name: String): Path =
Isabelle_System.make_directory(
presentation.dir(db_context.store, deps.sessions_structure(name)))
val session_dir = make_session_dir(session)
val presentation_dir = presentation.dir(db_context.store)
Bytes.write(session_dir + session_graph_path,
graphview.Graph_File.make_pdf(options, base.session_graph_display))
val documents =
for {
doc <- info.document_variants
document <- db_context.input_database(session)(Document_Build.read_document(_, _, doc.name))
} yield {
if (verbose) progress.echo("Presenting document " + session + "/" + doc.name)
Bytes.write(session_dir + doc.path.pdf, document.pdf)
doc
}
val view_links =
{
val deps_link =
HTML.link(session_graph_path, HTML.text("theory dependencies"))
val readme_links =
if ((info.dir + readme_path).is_file) {
Isabelle_System.copy_file(info.dir + readme_path, session_dir + readme_path)
List(HTML.link(readme_path, HTML.text("README")))
}
else Nil
val document_links =
documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
Library.separate(HTML.break ::: HTML.nl,
(deps_link :: readme_links ::: document_links).
map(link => HTML.text("View ") ::: List(link))).flatten
}
val all_used_theories = hierarchy.reverse.flatMap(a => deps(a).used_theories.map(_._1))
val present_theories = html_context.register_presented(all_used_theories)
val theory_exports: Map[String, Export_Theory.Theory] =
(for (node <- all_used_theories.iterator) yield {
val thy_name = node.theory
val theory =
if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
else {
html_context.cache_theory(thy_name,
{
val provider = Export.Provider.database_context(db_context, hierarchy, thy_name)
if (Export_Theory.read_theory_parents(provider, thy_name).isDefined) {
Export_Theory.read_theory(
- provider, session, thy_name, cache = html_context.term_cache)
+ provider, session, thy_name, cache = html_context.cache)
}
else Export_Theory.no_theory
})
}
thy_name -> theory
}).toMap
val theories: List[XML.Body] =
{
sealed case class Seen_File(
src_path: Path, file_name: String, thy_session: String, thy_name: Document.Node.Name)
{
def check(src_path1: Path, file_name1: String, thy_session1: String): Boolean =
(src_path == src_path1 || file_name == file_name1) && thy_session == thy_session1
}
var seen_files = List.empty[Seen_File]
def node_file(node: Document.Node.Name): String =
if (node.node.endsWith(".thy")) html_name(node) else html_path(Path.explode(node.node))
def entity_anchor(
context: Entity_Context, range: Symbol.Range, body: XML.Body): Option[XML.Elem] =
{
body match {
case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
case _ =>
Some {
val entities =
theory_exports.get(context.node.theory).flatMap(_.entity_by_range.get(range))
.getOrElse(Nil)
val body1 =
if (context.seen_ranges.contains(range)) {
HTML.entity_def(HTML.span(HTML.id(html_context.physical_ref(range)), body))
}
else HTML.span(body)
entities.map(_.kname).foldLeft(body1) {
case (elem, id) => HTML.entity_def(HTML.span(HTML.id(id), List(elem)))
}
}
}
}
def logical_ref(thy_name: String, kind: String, name: String): Option[String] =
for {
thy <- theory_exports.get(thy_name)
entity <- thy.entity_by_kind_name.get((kind, name))
} yield entity.kname
def physical_ref(
context: Entity_Context, thy_name: String, props: Properties.T): Option[String] =
{
for {
range <- Position.Def_Range.unapply(props)
if thy_name == context.node.theory
} yield {
context.seen_ranges += range
html_context.physical_ref(range)
}
}
def entity_link(
context: Entity_Context, props: Properties.T, body: XML.Body): Option[XML.Elem] =
{
(props, props, props, props, props) match {
case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file), _, _) =>
val node_name = resources.file_node(Path.explode(thy_file), theory = theory)
node_relative(deps, session, node_name).map(html_dir =>
HTML.link(html_dir + html_name(node_name), body))
case (Markup.Ref(_), Position.Def_File(thy_file), Position.Def_Theory(def_theory),
Markup.Kind(kind), Markup.Name(name)) =>
val thy_name = if (def_theory.nonEmpty) def_theory else context.node.theory
val node_name = resources.file_node(Path.explode(thy_file), theory = thy_name)
for {
html_dir <- node_relative(deps, session, node_name)
html_file = node_file(node_name)
html_ref <-
logical_ref(thy_name, kind, name) orElse physical_ref(context, thy_name, props)
} yield {
HTML.entity_ref(HTML.link(html_dir + html_file + "#" + html_ref, body))
}
case _ => None
}
}
sealed case class Theory(
name: Document.Node.Name,
command: Command,
files_html: List[(Path, XML.Tree)],
html: XML.Tree)
def read_theory(name: Document.Node.Name): Option[Theory] =
{
progress.expose_interrupt()
for (command <- Build_Job.read_theory(db_context, resources, hierarchy, name.theory))
yield {
if (verbose) progress.echo("Presenting theory " + name)
val snapshot = Document.State.init.snippet(command)
val thy_elements =
session_elements.copy(entity =
theory_exports(name.theory).others.keySet.foldLeft(session_elements.entity)(_ + _))
val files_html =
for {
(src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
if xml.nonEmpty
}
yield {
progress.expose_interrupt()
if (verbose) progress.echo("Presenting file " + src_path)
(src_path, html_context.source(
make_html(name, thy_elements, entity_anchor, entity_link, xml)))
}
val html =
html_context.source(
make_html(name, thy_elements, entity_anchor, entity_link,
snapshot.xml_markup(elements = thy_elements.html)))
Theory(name, command, files_html, html)
}
}
(for (thy <- Par_List.map(read_theory, present_theories).flatten) yield {
val thy_session = base.theory_qualifier(thy.name)
val thy_dir = make_session_dir(thy_session)
val files =
for { (src_path, file_html) <- thy.files_html }
yield {
val file_name = html_path(src_path)
seen_files.find(_.check(src_path, file_name, thy_session)) match {
case None => seen_files ::= Seen_File(src_path, file_name, thy_session, thy.name)
case Some(seen_file) =>
error("Incoherent use of file name " + src_path + " as " + quote(file_name) +
" in theory " + seen_file.thy_name + " vs. " + thy.name)
}
val file_path = thy_dir + Path.explode(file_name)
val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
HTML.write_document(file_path.dir, file_path.file_name,
List(HTML.title(file_title)), List(html_context.head(file_title), file_html),
base = Some(presentation_dir))
List(HTML.link(file_name, HTML.text(file_title)))
}
val thy_title = "Theory " + thy.name.theory_base_name
HTML.write_document(thy_dir, html_name(thy.name),
List(HTML.title(thy_title)), List(html_context.head(thy_title), thy.html),
base = Some(presentation_dir))
if (thy_session == session) {
Some(
List(HTML.link(html_name(thy.name),
HTML.text(thy.name.theory_base_name) :::
(if (files.isEmpty) Nil else List(HTML.itemize(files))))))
}
else None
}).flatten
}
val title = "Session " + session
HTML.write_document(session_dir, "index.html",
List(HTML.title(title + Isabelle_System.isabelle_heading())),
html_context.head(title, List(HTML.par(view_links))) ::
html_context.contents("Theories", theories),
base = Some(presentation_dir))
}
}
diff --git a/src/Pure/Thy/sessions.scala b/src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala
+++ b/src/Pure/Thy/sessions.scala
@@ -1,1520 +1,1520 @@
/* Title: Pure/Thy/sessions.scala
Author: Makarius
Cumulative session information.
*/
package isabelle
import java.io.{File => JFile}
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.StandardOpenOption
import scala.collection.immutable.{SortedSet, SortedMap}
import scala.collection.mutable
object Sessions
{
/* session and theory names */
// Catalog files that define session hierarchies.
val ROOTS: Path = Path.explode("ROOTS")
val ROOT: Path = Path.explode("ROOT")
val roots_name: String = "ROOTS"
val root_name: String = "ROOT"
val theory_name: String = "Pure.Sessions"
// Pseudo-chapter names for unsorted and draft sessions.
val UNSORTED = "Unsorted"
val DRAFT = "Draft"
// Pure is the bootstrap session.
def is_pure(name: String): Boolean = Thy_Header.PURE == name
// Session names that never denote a real session.
def exclude_session(name: String): Boolean = name.isEmpty || name == DRAFT
// Theory base names reserved for non-theory files.
def exclude_theory(name: String): Boolean =
  Set(root_name, "README", "index", "bib").contains(name)
/* ROOTS file format */
/* File format for ROOTS catalog files: each one is wrapped into a
   pseudo-theory so it participates in the document model. */
class File_Format extends isabelle.File_Format
{
  val format_name: String = roots_name
  val file_ext = ""

  // A file is a ROOTS file iff its base name is exactly "ROOTS".
  override def detect(name: String): Boolean =
    Thy_Header.split_file_name(name).exists({ case (_, file_name) => file_name == roots_name })

  override def theory_suffix: String = "ROOTS_file"
  override def theory_content(name: String): String =
    """theory "ROOTS" imports Pure begin ROOTS_file """ +
      Outer_Syntax.quote_string(name) + """ end"""
}
/* base info and source dependencies */
/* Accumulated per-session base information: theory/file dependencies,
   syntax, sources with digests, and accumulated errors. */
sealed case class Base(
pos: Position.T = Position.none,
session_directories: Map[JFile, String] = Map.empty,
global_theories: Map[String, String] = Map.empty,
session_theories: List[Document.Node.Name] = Nil,
document_theories: List[Document.Node.Name] = Nil,
loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
used_theories: List[(Document.Node.Name, Options)] = Nil,
load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty,
known_theories: Map[String, Document.Node.Entry] = Map.empty,
known_loaded_files: Map[String, List[Path]] = Map.empty,
overall_syntax: Outer_Syntax = Outer_Syntax.empty,
imported_sources: List[(Path, SHA1.Digest)] = Nil,
sources: List[(Path, SHA1.Digest)] = Nil,
session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
errors: List[String] = Nil)
{
override def toString: String =
"Sessions.Base(loaded_theories = " + loaded_theories.size +
", used_theories = " + used_theories.length + ")"
// Session that a theory belongs to: explicit global mapping, else its long-name qualifier.
def theory_qualifier(name: String): String =
global_theories.getOrElse(name, Long_Name.qualifier(name))
def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)
def loaded_theory(name: String): Boolean = loaded_theories.defined(name)
def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory)
// Syntax of an already-loaded theory, if any.
def loaded_theory_syntax(name: String): Option[Outer_Syntax] =
if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None
def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] =
loaded_theory_syntax(name.theory)
// Best-known syntax: loaded theory syntax, falling back to the overall syntax.
def theory_syntax(name: Document.Node.Name): Outer_Syntax =
loaded_theory_syntax(name) getOrElse overall_syntax
// Node-specific syntax takes precedence over loaded/overall syntax.
def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax =
nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax
}
/* Dependency analysis result: the session structure plus one Base per session. */
sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base])
{
override def toString: String = "Sessions.Deps(" + sessions_structure + ")"
def is_empty: Boolean = session_bases.isEmpty
// Base of a session; apply throws on unknown names, get is the safe variant.
def apply(name: String): Base = session_bases(name)
def get(name: String): Option[Base] = session_bases.get(name)
def imported_sources(name: String): List[SHA1.Digest] =
session_bases(name).imported_sources.map(_._2)
def sources(name: String): List[SHA1.Digest] =
session_bases(name).sources.map(_._2)
// All per-session errors, each annotated with its session name and position.
def errors: List[String] =
(for {
(name, base) <- session_bases.iterator
if base.errors.nonEmpty
} yield cat_lines(base.errors) +
"\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos)
).toList
// Return this unchanged, or fail loudly with all accumulated errors.
def check_errors: Deps =
errors match {
case Nil => this
case errs => error(cat_lines(errs))
}
}
def deps(sessions_structure: Structure,
progress: Progress = new Progress,
inlined_files: Boolean = false,
verbose: Boolean = false,
list_files: Boolean = false,
check_keywords: Set[String] = Set.empty): Deps =
{
var cache_sources = Map.empty[JFile, SHA1.Digest]
def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] =
{
for {
path <- paths
file = path.file
if cache_sources.isDefinedAt(file) || file.isFile
}
yield {
cache_sources.get(file) match {
case Some(digest) => (path, digest)
case None =>
val digest = SHA1.digest(file)
cache_sources = cache_sources + (file -> digest)
(path, digest)
}
}
}
val session_bases =
sessions_structure.imports_topological_order.foldLeft(Map("" -> sessions_structure.bootstrap)) {
case (session_bases, session_name) =>
progress.expose_interrupt()
val info = sessions_structure(session_name)
try {
val deps_base = info.deps_base(session_bases)
val resources = new Resources(sessions_structure, deps_base)
if (verbose || list_files) {
val groups =
if (info.groups.isEmpty) ""
else info.groups.mkString(" (", " ", ")")
progress.echo("Session " + info.chapter_session + groups)
}
val dependencies = resources.session_dependencies(info)
val overall_syntax = dependencies.overall_syntax
val session_theories =
dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name)
val theory_files = dependencies.theories.map(_.path)
dependencies.load_commands
val (load_commands, load_commands_errors) =
try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
catch { case ERROR(msg) => (Nil, List(msg)) }
val loaded_files =
load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) })
val session_files =
(theory_files ::: loaded_files.flatMap(_._2) :::
info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)
val imported_files = if (inlined_files) dependencies.imported_files else Nil
if (list_files)
progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))
if (check_keywords.nonEmpty) {
Check_Keywords.check_keywords(
progress, overall_syntax.keywords, check_keywords, theory_files)
}
val session_graph_display: Graph_Display.Graph =
{
def session_node(name: String): Graph_Display.Node =
Graph_Display.Node("[" + name + "]", "session." + name)
def node(name: Document.Node.Name): Graph_Display.Node =
{
val qualifier = deps_base.theory_qualifier(name)
if (qualifier == info.name)
Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
else session_node(qualifier)
}
val required_sessions =
dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
.map(theory => deps_base.theory_qualifier(theory))
.filter(name => name != info.name && sessions_structure.defined(name))
val required_subgraph =
sessions_structure.imports_graph
.restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
.transitive_closure
.restrict(required_sessions.toSet)
.transitive_reduction_acyclic
val graph0 =
required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
case (g, session) =>
val a = session_node(session)
val bs = required_subgraph.imm_preds(session).toList.map(session_node)
bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
}
dependencies.entries.foldLeft(graph0) {
case (g, entry) =>
val a = node(entry.name)
val bs = entry.header.imports.map(node).filterNot(_ == a)
bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
}
}
val known_theories =
dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
foldLeft(deps_base.known_theories)(_ + _)
val known_loaded_files = deps_base.known_loaded_files ++ loaded_files
val import_errors =
{
val known_sessions =
sessions_structure.imports_requirements(List(session_name)).toSet
for {
name <- dependencies.theories
qualifier = deps_base.theory_qualifier(name)
if !known_sessions(qualifier)
} yield "Bad import of theory " + quote(name.toString) +
": need to include sessions " + quote(qualifier) + " in ROOT"
}
val document_errors =
info.document_theories.flatMap(
{
case (thy, pos) =>
val parent_sessions =
if (sessions_structure.build_graph.defined(session_name)) {
sessions_structure.build_requirements(List(session_name))
}
else Nil
def err(msg: String): Option[String] =
Some(msg + " " + quote(thy) + Position.here(pos))
known_theories.get(thy).map(_.name) match {
case None => err("Unknown document theory")
case Some(name) =>
val qualifier = deps_base.theory_qualifier(name)
if (session_theories.contains(name)) {
err("Redundant document theory from this session:")
}
else if (parent_sessions.contains(qualifier)) None
else if (dependencies.theories.contains(name)) None
else err("Document theory from other session not imported properly:")
}
})
val document_theories =
info.document_theories.map({ case (thy, _) => known_theories(thy).name })
val dir_errors =
{
val ok = info.dirs.map(_.canonical_file).toSet
val bad =
(for {
name <- session_theories.iterator
path = name.master_dir_path
if !ok(path.canonical_file)
path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
} yield (path1, name)).toList
val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted
val errs1 =
for { (path1, name) <- bad }
yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
val errs2 =
if (bad_dirs.isEmpty) Nil
else List("Implicit use of session directories: " + commas(bad_dirs))
val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
val errs4 =
(for {
name <- session_theories.iterator
name1 <- resources.find_theory_node(name.theory)
if name.node != name1.node
} yield "Incoherent theory file import:\n " + name.path + " vs. \n " + name1.path)
.toList
errs1 ::: errs2 ::: errs3 ::: errs4
}
val sources_errors =
for (p <- session_files if !p.is_file) yield "No such file: " + p
val path_errors =
try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
catch { case ERROR(msg) => List(msg) }
val bibtex_errors =
try { info.bibtex_entries; Nil }
catch { case ERROR(msg) => List(msg) }
val base =
Base(
pos = info.pos,
session_directories = sessions_structure.session_directories,
global_theories = sessions_structure.global_theories,
session_theories = session_theories,
document_theories = document_theories,
loaded_theories = dependencies.loaded_theories,
used_theories = dependencies.theories_adjunct,
load_commands = load_commands.toMap,
known_theories = known_theories,
known_loaded_files = known_loaded_files,
overall_syntax = overall_syntax,
imported_sources = check_sources(imported_files),
sources = check_sources(session_files),
session_graph_display = session_graph_display,
errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
bibtex_errors)
session_bases + (info.name -> base)
}
catch {
case ERROR(msg) =>
cat_error(msg, "The error(s) above occurred in session " +
quote(info.name) + Position.here(info.pos))
}
}
Deps(sessions_structure, session_bases)
}
/* base info */
/* Result of base_info: the (possibly adjusted) session name together with
   its structure, dependency errors, base, and any synthesized infos. */
sealed case class Base_Info(
  session: String,
  sessions_structure: Structure,
  errors: List[String],
  base: Base,
  infos: List[Info])
{
  // Fail loudly if dependency analysis produced any errors.
  def check: Base_Info =
    errors match {
      case Nil => this
      case errs => error(cat_lines(errs))
    }
}
def base_info(options: Options,
session: String,
progress: Progress = new Progress,
dirs: List[Path] = Nil,
include_sessions: List[String] = Nil,
session_ancestor: Option[String] = None,
session_requirements: Boolean = false): Base_Info =
{
val full_sessions = load_structure(options, dirs = dirs)
val selected_sessions =
full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
val info = selected_sessions(session)
val ancestor = session_ancestor orElse info.parent
val (session1, infos1) =
if (session_requirements && ancestor.isDefined) {
val deps = Sessions.deps(selected_sessions, progress = progress)
val base = deps(session)
val ancestor_loaded =
deps.get(ancestor.get) match {
case Some(ancestor_base)
if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
ancestor_base.loaded_theories.defined _
case _ =>
error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
}
val required_theories =
for {
thy <- base.loaded_theories.keys
if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session
}
yield thy
if (required_theories.isEmpty) (ancestor.get, Nil)
else {
val other_name = info.name + "_requirements(" + ancestor.get + ")"
Isabelle_System.isabelle_tmp_prefix()
(other_name,
List(
make_info(info.options,
dir_selected = false,
dir = Path.explode("$ISABELLE_TMP_PREFIX"),
chapter = info.chapter,
Session_Entry(
pos = info.pos,
name = other_name,
groups = info.groups,
path = ".",
parent = ancestor,
description = "Required theory imports from other sessions",
options = Nil,
imports = info.deps,
directories = Nil,
theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
document_theories = Nil,
document_files = Nil,
export_files = Nil))))
}
}
else (session, Nil)
val full_sessions1 =
if (infos1.isEmpty) full_sessions
else load_structure(options, dirs = dirs, infos = infos1)
val selected_sessions1 =
full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))
val deps1 = Sessions.deps(selected_sessions1, progress = progress)
Base_Info(session1, full_sessions1, deps1.errors, deps1(session1), infos1)
}
/* cumulative session info */
/**
 * Cumulative information about one session, assembled from its ROOT entry
 * plus context (chapter, selection status, resolved directories, options).
 * The `meta_digest` identifies the entry's content for build invalidation.
 */
sealed case class Info(
  name: String,
  chapter: String,
  dir_selected: Boolean,       // session came from a -D (select) directory
  pos: Position.T,
  groups: List[String],
  dir: Path,
  parent: Option[String],
  description: String,
  directories: List[Path],
  options: Options,
  imports: List[String],
  theories: List[(Options, List[(String, Position.T)])],
  global_theories: List[String],
  document_theories: List[(String, Position.T)],
  document_files: List[(Path, Path)],
  export_files: List[(Path, Int, List[String])],
  meta_digest: SHA1.Digest)
{
  // canonical "chapter/name" label, e.g. for browser-info paths
  def chapter_session: String = chapter + "/" + name

  // relative URL/path from this session's directory to info1's directory
  def relative_path(info1: Info): String =
    if (name == info1.name) ""
    else if (chapter == info1.chapter) "../" + info1.name + "/"
    else "../../" + info1.chapter_session + "/"

  // all direct dependencies: the parent (if any) followed by explicit imports
  def deps: List[String] = parent.toList ::: imports

  /**
   * Base for this session: the parent's base enriched with the known
   * theories and loaded files of every imported session's base.
   */
  def deps_base(session_bases: String => Base): Base =
  {
    val parent_base = session_bases(parent.getOrElse(""))
    val imports_bases = imports.map(session_bases)
    parent_base.copy(
      known_theories =
        (for {
          base <- imports_bases.iterator
          (_, entry) <- base.known_theories.iterator
        } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _),
      known_loaded_files =
        imports_bases.iterator.map(_.known_loaded_files).
          foldLeft(parent_base.known_loaded_files)(_ ++ _))
  }

  // main session directory first, then the declared extra directories
  def dirs: List[Path] = dir :: directories

  // timeout is ignored when disabled via option or effectively zero
  def timeout_ignored: Boolean =
    !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1)

  def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))

  // interpret the "document" option; anything other than the known values is an error
  def document_enabled: Boolean =
    options.string("document") match {
      case "" | "false" => false
      case "pdf" | "true" => true
      case doc => error("Bad document specification " + quote(doc))
    }

  /** Parsed document variants from the "document_variants" option; names must be unique. */
  def document_variants: List[Document_Build.Document_Variant] =
  {
    val variants =
      Library.space_explode(':', options.string("document_variants")).
        map(Document_Build.Document_Variant.parse)

    val dups = Library.duplicates(variants.map(_.name))
    if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups))

    variants
  }

  // variants that will actually be built: requires document enabled and some document files
  def documents: List[Document_Build.Document_Variant] =
  {
    val variants = document_variants
    if (!document_enabled || document_files.isEmpty) Nil else variants
  }

  def document_output: Option[Path] =
    options.string("document_output") match {
      case "" => None
      case s => Some(dir + Path.explode(s))
    }

  def browser_info: Boolean = options.bool("browser_info")

  // bibtex entries scanned from .bib document files (computed once on demand)
  lazy val bibtex_entries: List[Text.Info[String]] =
    (for {
      (document_dir, file) <- document_files.iterator
      if Bibtex.is_bibtex(file.file_name)
      info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator
    } yield info).toList

  def record_proofs: Boolean = options.int("record_proofs") >= 2

  def is_afp: Boolean = chapter == AFP.chapter
  def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains)
}
/**
 * Turn a parsed ROOT `Session_Entry` into a validated `Info`.
 * Checks session/theory naming constraints, resolves paths relative to
 * `dir`, merges entry options into `options`, and computes the meta digest.
 * Any error is re-raised with the session entry's name and position attached.
 */
def make_info(options: Options, dir_selected: Boolean, dir: Path, chapter: String,
  entry: Session_Entry): Info =
{
  try {
    val name = entry.name

    // naming constraints: reserved names, and Pure is the only parent-less session
    if (exclude_session(name)) error("Bad session name")
    if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
    if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")

    val session_path = dir + Path.explode(entry.path)
    val directories = entry.directories.map(dir => session_path + Path.explode(dir))

    val session_options = options ++ entry.options

    // per-group theory options, with theory names checked against exclusions
    val theories =
      entry.theories.map({ case (opts, thys) =>
        (session_options ++ opts,
          thys.map({ case ((thy, pos), _) =>
            if (exclude_theory(thy))
              error("Bad theory name " + quote(thy) + Position.here(pos))
            else (thy, pos) })) })

    // theories marked "global" must have unqualified base names
    val global_theories =
      for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
      yield {
        val thy_name = Path.explode(thy).file_name
        if (Long_Name.is_qualified(thy_name))
          error("Bad qualified name for global theory " +
            quote(thy_name) + Position.here(pos))
        else thy_name
      }

    // environment conditions from the "condition" option, with their current status
    val conditions =
      theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted.
        map(x => (x, Isabelle_System.getenv(x) != ""))

    val document_files =
      entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })

    val export_files =
      entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })

    // digest over everything that influences the build result of this entry
    val meta_digest =
      SHA1.digest(
        (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
          entry.theories_no_position, conditions, entry.document_theories_no_position,
          entry.document_files)
        .toString)

    Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path,
      entry.parent, entry.description, directories, session_options,
      entry.imports, theories, global_theories, entry.document_theories, document_files,
      export_files, meta_digest)
  }
  catch {
    case ERROR(msg) =>
      error(msg + "\nThe error(s) above occurred in session entry " +
        quote(entry.name) + Position.here(entry.pos))
  }
}
/** Common selection values and constructors. */
object Selection
{
  /* empty selection: nothing selected */
  val empty: Selection = Selection()

  /* universal selection: every session */
  val all: Selection = Selection(all_sessions = true)

  /* selection of exactly one named session */
  def session(session: String): Selection = Selection(sessions = session :: Nil)
}
/**
 * Session selection criteria, as accumulated from command-line options.
 * Combined pointwise via `++`: booleans are OR-ed, lists are merged.
 */
sealed case class Selection(
  requirements: Boolean = false,
  all_sessions: Boolean = false,
  base_sessions: List[String] = Nil,
  exclude_session_groups: List[String] = Nil,
  exclude_sessions: List[String] = Nil,
  session_groups: List[String] = Nil,
  sessions: List[String] = Nil)
{
  /** Union of two selections: flags OR-ed, name lists merged without duplicates. */
  def ++ (other: Selection): Selection =
    copy(
      requirements = requirements || other.requirements,
      all_sessions = all_sessions || other.all_sessions,
      base_sessions = Library.merge(base_sessions, other.base_sessions),
      exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups),
      exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions),
      session_groups = Library.merge(session_groups, other.session_groups),
      sessions = Library.merge(sessions, other.sessions))
}
object Structure
{
  val empty: Structure = make(Nil)

  /**
   * Build a session `Structure` from the given infos: constructs the
   * acyclic build (parent) and imports graphs, and checks for duplicate
   * sessions, duplicate session directories, and duplicate global theories.
   */
  def make(infos: List[Info]): Structure =
  {
    // add edges of the given kind (parent or imports), rejecting unknown
    // targets and dependency cycles with positions attached to the error
    def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Iterable[String])
      : Graph[String, Info] =
    {
      def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) =
      {
        if (!g.defined(parent))
          error("Bad " + kind + " session " + quote(parent) + " for " +
            quote(name) + Position.here(pos))

        try { g.add_edge_acyclic(parent, name) }
        catch {
          case exn: Graph.Cycles[_] =>
            error(cat_lines(exn.cycles.map(cycle =>
              "Cyclic session dependency of " +
                cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
        }
      }
      graph.iterator.foldLeft(graph) {
        case (g, (name, (info, _))) =>
          edges(info).foldLeft(g)(add_edge(info.pos, name, _, _))
      }
    }

    // node per session; duplicate names are an error (both positions reported)
    val info_graph =
      infos.foldLeft(Graph.string[Info]) {
        case (graph, info) =>
          if (graph.defined(info.name))
            error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
              Position.here(graph.get_node(info.name).pos))
          else graph.new_node(info.name, info)
      }
    val build_graph = add_edges(info_graph, "parent", _.parent)
    val imports_graph = add_edges(build_graph, "imports", _.imports)

    val session_positions: List[(String, Position.T)] =
      (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList

    // each canonical directory may belong to at most one session
    val session_directories: Map[JFile, String] =
      (for {
        session <- imports_graph.topological_order.iterator
        info = info_graph.get_node(session)
        dir <- info.dirs.iterator
      } yield (info, dir)).foldLeft(Map.empty[JFile, String]) {
          case (dirs, (info, dir)) =>
            val session = info.name
            val canonical_dir = dir.canonical_file
            dirs.get(canonical_dir) match {
              case Some(session1) =>
                val info1 = info_graph.get_node(session1)
                error("Duplicate use of directory " + dir +
                  "\n for session " + quote(session1) + Position.here(info1.pos) +
                  "\n vs. session " + quote(session) + Position.here(info.pos))
              case None => dirs + (canonical_dir -> session)
            }
        }

    // global theory name -> qualifying session; conflicts across sessions are errors
    val global_theories: Map[String, String] =
      (for {
        session <- imports_graph.topological_order.iterator
        info = info_graph.get_node(session)
        thy <- info.global_theories.iterator }
       yield (info, thy)).foldLeft(Thy_Header.bootstrap_global_theories.toMap) {
          case (global, (info, thy)) =>
            val qualifier = info.name
            global.get(thy) match {
              case Some(qualifier1) if qualifier != qualifier1 =>
                error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
              case _ => global + (thy -> qualifier)
            }
        }

    new Structure(
      session_positions, session_directories, global_theories, build_graph, imports_graph)
  }
}
/**
 * Immutable structure of all known sessions: positions, directory map,
 * global theories, and two DAGs — the build graph (parent edges) and the
 * imports graph (parent + imports edges).  Construct via `Structure.make`.
 */
final class Structure private[Sessions](
  val session_positions: List[(String, Position.T)],
  val session_directories: Map[JFile, String],
  val global_theories: Map[String, String],
  val build_graph: Graph[String, Info],
  val imports_graph: Graph[String, Info])
{
  sessions_structure =>

  // minimal base used before any session is loaded
  def bootstrap: Base =
    Base(
      session_directories = session_directories,
      global_theories = global_theories,
      overall_syntax = Thy_Header.bootstrap_syntax)

  // directory map as (standard path, session) pairs, e.g. for export
  def dest_session_directories: List[(String, String)] =
    for ((file, session) <- session_directories.toList)
      yield (File.standard_path(file), session)

  // sessions grouped by chapter (note: per-chapter lists are in reverse
  // iteration order, since each info is prepended)
  lazy val chapters: SortedMap[String, List[Info]] =
    build_graph.iterator.foldLeft(SortedMap.empty[String, List[Info]]) {
      case (chs, (_, (info, _))) =>
        chs + (info.chapter -> (info :: chs.getOrElse(info.chapter, Nil)))
    }

  def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph)
  def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph)

  // lookup is relative to the imports graph
  def defined(name: String): Boolean = imports_graph.defined(name)
  def apply(name: String): Info = imports_graph.get_node(name)
  def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None

  // session qualifier of a theory: global theories map to their declaring session
  def theory_qualifier(name: String): String =
    global_theories.getOrElse(name, Long_Name.qualifier(name))

  /** Fail with all undefined session names (sorted) if any are unknown. */
  def check_sessions(names: List[String]): Unit =
  {
    val bad_sessions = SortedSet(names.filterNot(defined): _*).toList
    if (bad_sessions.nonEmpty)
      error("Undefined session(s): " + commas_quote(bad_sessions))
  }

  def check_sessions(sel: Selection): Unit =
    check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions)

  /**
   * Session names selected by `sel` within `graph`; with `sel.requirements`
   * the result is the ancestors of the selection minus the selection itself.
   */
  private def selected(graph: Graph[String, Info], sel: Selection): List[String] =
  {
    check_sessions(sel)

    val select_group = sel.session_groups.toSet
    val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions)

    val selected0 =
      if (sel.all_sessions) graph.keys
      else {
        (for {
          (name, (info, _)) <- graph.iterator
          if info.dir_selected || select_session(name) ||
            graph.get_node(name).groups.exists(select_group)
        } yield name).toList
      }

    if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList
    else selected0
  }

  /**
   * Restrict this structure to the selected sessions (plus their
   * predecessors), after removing excluded sessions and their descendants.
   */
  def selection(sel: Selection): Structure =
  {
    check_sessions(sel)

    val excluded =
    {
      val exclude_group = sel.exclude_session_groups.toSet
      val exclude_group_sessions =
        (for {
          (name, (info, _)) <- imports_graph.iterator
          if imports_graph.get_node(name).groups.exists(exclude_group)
        } yield name).toList
      imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet
    }

    def restrict(graph: Graph[String, Info]): Graph[String, Info] =
    {
      val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded)
      graph.restrict(graph.all_preds(sessions).toSet)
    }

    new Structure(
      session_positions, session_directories, global_theories,
      restrict(build_graph), restrict(imports_graph))
  }

  def selection(session: String): Structure = selection(Selection.session(session))

  /** Selection combined with full dependency analysis (Sessions.deps). */
  def selection_deps(
    selection: Selection,
    progress: Progress = new Progress,
    loading_sessions: Boolean = false,
    inlined_files: Boolean = false,
    verbose: Boolean = false): Deps =
  {
    val deps =
      Sessions.deps(sessions_structure.selection(selection),
        progress = progress, inlined_files = inlined_files, verbose = verbose)

    if (loading_sessions) {
      val selection_size = deps.sessions_structure.build_graph.size
      if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...")
    }

    deps
  }

  // ancestors along the build (parent) graph, including the session itself
  def hierarchy(session: String): List[String] = build_graph.all_preds(List(session))

  // traversals over the two graphs
  def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
  def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
  def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
  def build_topological_order: List[String] = build_graph.topological_order

  def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
  def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
  def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
  def imports_topological_order: List[String] = imports_graph.topological_order

  // bibtex entry names per session, omitting sessions without entries
  def bibtex_entries: List[(String, List[String])] =
    build_topological_order.flatMap(name =>
      apply(name).bibtex_entries match {
        case Nil => None
        case entries => Some(name -> entries.map(_.info))
      })

  override def toString: String =
    imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")")
}
/* parser */

// keywords of the ROOT file language
private val CHAPTER = "chapter"
private val SESSION = "session"
private val IN = "in"
private val DESCRIPTION = "description"
private val DIRECTORIES = "directories"
private val OPTIONS = "options"
private val SESSIONS = "sessions"
private val THEORIES = "theories"
private val GLOBAL = "global"
private val DOCUMENT_THEORIES = "document_theories"
private val DOCUMENT_FILES = "document_files"
private val EXPORT_FILES = "export_files"

// outer syntax for ROOT files: punctuation, minor keywords, and the
// command/quasi-command keywords declared above
val root_syntax: Outer_Syntax =
  Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" +
    GLOBAL + IN +
    (CHAPTER, Keyword.THY_DECL) +
    (SESSION, Keyword.THY_DECL) +
    (DESCRIPTION, Keyword.QUASI_COMMAND) +
    (DIRECTORIES, Keyword.QUASI_COMMAND) +
    (OPTIONS, Keyword.QUASI_COMMAND) +
    (SESSIONS, Keyword.QUASI_COMMAND) +
    (THEORIES, Keyword.QUASI_COMMAND) +
    (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
    (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
    (EXPORT_FILES, Keyword.QUASI_COMMAND)
// parsed ROOT file entries: either a chapter declaration or a session entry
abstract class Entry
sealed case class Chapter(name: String) extends Entry

/** One "session ... = ..." entry of a ROOT file, as parsed (paths still as strings). */
sealed case class Session_Entry(
  pos: Position.T,
  name: String,
  groups: List[String],
  path: String,
  parent: Option[String],
  description: String,
  options: List[Options.Spec],
  imports: List[String],
  directories: List[String],
  theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
  document_theories: List[(String, Position.T)],
  document_files: List[(String, String)],
  export_files: List[(String, Int, List[String])]) extends Entry
{
  // position-free projections, used for the stable meta digest in make_info
  def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
    theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
  def document_theories_no_position: List[String] =
    document_theories.map(_._1)
}
/** Combinator parser for ROOT files, producing Chapter and Session_Entry values. */
private object Parser extends Options.Parser
{
  // "chapter NAME"
  private val chapter: Parser[Chapter] =
  {
    val chapter_name = atom("chapter name", _.is_name)

    command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) }
  }

  // full "session ..." entry; see ROOT syntax in the Isabelle system manual
  private val session_entry: Parser[Session_Entry] =
  {
    // "name" or "name = value" option specification
    val option =
      option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^
        { case x ~ y => (x, y) }
    val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")

    // theory name with optional "(global)" marker
    val theory_entry =
      position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) }

    // "theories [options] thy1 thy2 ..."
    val theories =
      $$$(THEORIES) ~!
        ((options | success(Nil)) ~ rep1(theory_entry)) ^^
        { case _ ~ (x ~ y) => (x, y) }

    // "(in PATH)" qualifier
    val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x }

    val document_theories =
      $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }

    // "document_files [(in PATH)] file ..." — default directory "document"
    val document_files =
      $$$(DOCUMENT_FILES) ~!
        ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }

    // "[N]" prune depth, default 0
    val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0)

    // "export_files [(in PATH)] [N] pattern ..." — default directory "export"
    val export_files =
      $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ prune ~ rep1(embedded)) ^^
        { case _ ~ (x ~ y ~ z) => (x, y, z) }

    command(SESSION) ~!
      (position(session_name) ~
        (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
        (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
        ($$$("=") ~!
          (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
            (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
            (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
            (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
            (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
            rep(theories) ~
            (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
            (rep(document_files) ^^ (x => x.flatten)) ~
            rep(export_files)))) ^^
      { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l))) =>
          Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l) }
  }

  /** Tokenize and parse a complete ROOT file into chapters and session entries. */
  def parse_root(path: Path): List[Entry] =
  {
    val toks = Token.explode(root_syntax.keywords, File.read(path))
    val start = Token.Pos.file(path.implode)

    parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
      case Success(result, _) => result
      case bad => error(bad.toString)
    }
  }
}
/** Parse a ROOT file into chapter and session entries. */
def parse_root(path: Path): List[Entry] = Parser.parse_root(path)

/**
 * Parse a ROOT file and keep only the session entries (chapter
 * declarations are dropped).  Uses `collect` with a typed pattern
 * instead of isInstanceOf/asInstanceOf.
 */
def parse_root_entries(path: Path): List[Session_Entry] =
  Parser.parse_root(path).collect({ case entry: Session_Entry => entry })
/**
 * Read a ROOT file and produce one `Info` per session entry; the chapter
 * name is threaded through the entries (initially UNSORTED) and updated
 * by each chapter declaration.
 */
def read_root(options: Options, select: Boolean, path: Path): List[Info] =
{
  val (_, rev_infos) =
    parse_root(path).foldLeft((UNSORTED, List.empty[Info])) {
      case ((chapter, acc), Chapter(name)) => (name, acc)
      case ((chapter, acc), entry: Session_Entry) =>
        (chapter, make_info(options, select, path.dir, chapter, entry) :: acc)
    }
  rev_infos.reverse
}
/**
 * Read a ROOTS catalog file: one directory entry per line, skipping
 * empty lines and "#" comment lines.
 */
def parse_roots(roots: Path): List[String] =
  split_lines(File.read(roots)).filterNot(line => line == "" || line.startsWith("#"))
/* load sessions from certain directories */
/** A session directory contains a ROOT file or a ROOTS catalog. */
def is_session_dir(dir: Path): Boolean =
  List(ROOT, ROOTS).exists(entry => (dir + entry).is_file)

/** Check and normalize a session directory, or fail with a clear message. */
private def check_session_dir(dir: Path): Path =
  if (!is_session_dir(dir))
    error("Bad session root directory (missing ROOT or ROOTS): " + dir.expand.toString)
  else File.pwd() + dir.expand
/**
 * All session directories to consider, tagged with their selection status:
 * component defaults and `-d` dirs are unselected, `-D` dirs are selected.
 * Paths are made canonical.
 */
def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] =
{
  val default_dirs = Components.directories().filter(is_session_dir)
  val unselected = (default_dirs ::: dirs).map((false, _))
  val selected = select_dirs.map((true, _))
  (unselected ::: selected).map({ case (select, dir) => (select, dir.canonical) })
}
/**
 * Load the full session structure from the given directories: recursively
 * follows ROOTS catalogs, de-duplicates ROOT files (merging selection
 * status), reads all infos and combines them with explicitly given infos.
 */
def load_structure(options: Options,
  dirs: List[Path] = Nil,
  select_dirs: List[Path] = Nil,
  infos: List[Info] = Nil): Structure =
{
  // a directory contributes its own ROOT plus everything reachable via ROOTS
  def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] =
    load_root(select, dir) ::: load_roots(select, dir)

  def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] =
  {
    val root = dir + ROOT
    if (root.is_file) List((select, root)) else Nil
  }

  // follow a ROOTS catalog; bad entries report the catalog file as context
  def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] =
  {
    val roots = dir + ROOTS
    if (roots.is_file) {
      for {
        entry <- parse_roots(roots)
        dir1 =
          try { check_session_dir(dir + Path.explode(entry)) }
          catch {
            case ERROR(msg) =>
              error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
          }
        res <- load_dir(select, dir1)
      } yield res
    }
    else Nil
  }

  val roots =
    for {
      (select, dir) <- directories(dirs, select_dirs)
      res <- load_dir(select, check_session_dir(dir))
    } yield res

  // de-duplicate ROOT files by canonical path; a file is "selected" if any
  // occurrence was selected
  val unique_roots =
    roots.foldLeft(Map.empty[JFile, (Boolean, Path)]) {
      case (m, (select, path)) =>
        val file = path.canonical_file
        m.get(file) match {
          case None => m + (file -> (select, path))
          case Some((select1, path1)) => m + (file -> (select1 || select, path1))
        }
    }.toList.map(_._2)

  Structure.make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos)
}
/* Isabelle tool wrapper */

/**
 * Command-line tool "isabelle sessions": load the session structure for
 * the given selection and print the selected session names in topological
 * order of the imports graph.
 */
val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions",
  Scala_Project.here, args =>
{
  // accumulated command-line state (mutable by Getopts convention)
  var base_sessions: List[String] = Nil
  var select_dirs: List[Path] = Nil
  var requirements = false
  var exclude_session_groups: List[String] = Nil
  var all_sessions = false
  var dirs: List[Path] = Nil
  var session_groups: List[String] = Nil
  var exclude_sessions: List[String] = Nil

  val getopts = Getopts("""
Usage: isabelle sessions [OPTIONS] [SESSIONS ...]
Options are:
-B NAME include session NAME and all descendants
-D DIR include session directory and select its sessions
-R refer to requirements of selected sessions
-X NAME exclude sessions from group NAME and all descendants
-a select all sessions
-d DIR include session directory
-g NAME select session group NAME
-x NAME exclude session NAME and all descendants
Explore the structure of Isabelle sessions and print result names in
topological order (on stdout).
""",
    "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
    "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
    "R" -> (_ => requirements = true),
    "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
    "a" -> (_ => all_sessions = true),
    "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
    "g:" -> (arg => session_groups = session_groups ::: List(arg)),
    "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))

  // remaining arguments are session names
  val sessions = getopts(args)

  val options = Options.init()

  val selection =
    Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions,
      exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions,
      session_groups = session_groups, sessions = sessions)

  val sessions_structure =
    load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection)

  for (name <- sessions_structure.imports_topological_order) {
    Output.writeln(name, stdout = true)
  }
})
/** heap file with SHA1 digest **/
// marker written before the trailing SHA1 digest of a heap file
private val sha1_prefix = "SHA1:"

/**
 * Read the SHA1 digest stored at the very end of a heap file, if present:
 * the file is expected to end with "SHA1:" followed by the hex digest.
 * Returns None for missing files, short files, or a missing marker.
 */
def read_heap_digest(heap: Path): Option[String] =
{
  if (heap.is_file) {
    using(FileChannel.open(heap.java_path, StandardOpenOption.READ))(file =>
    {
      val len = file.size
      val n = sha1_prefix.length + SHA1.digest_length
      if (len >= n) {
        // position at the candidate trailer and read exactly n bytes
        // (looping, since FileChannel.read may return short counts)
        file.position(len - n)

        val buf = ByteBuffer.allocate(n)
        var i = 0
        var m = 0
        do {
          m = file.read(buf)
          if (m != -1) i += m
        }
        while (m != -1 && n > i)

        if (i == n) {
          val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset)
          val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset)
          if (prefix == sha1_prefix) Some(s) else None
        }
        else None
      }
      else None
    })
  }
  else None
}
/**
 * Ensure the heap file carries its SHA1 digest trailer: return the existing
 * digest if present, otherwise compute it, append "SHA1:<digest>" to the
 * file, and return it.
 */
def write_heap_digest(heap: Path): String =
  read_heap_digest(heap) getOrElse {
    val digest = SHA1.digest(heap).rep
    File.append(heap, sha1_prefix + digest)
    digest
  }
/** persistent store **/
/** SQL schema of the per-session build-info table. */
object Session_Info
{
  val session_name = SQL.Column.string("session_name").make_primary_key

  // Build_Log.Session_Info: timing and statistics blobs (compressed properties)
  val session_timing = SQL.Column.bytes("session_timing")
  val command_timings = SQL.Column.bytes("command_timings")
  val theory_timings = SQL.Column.bytes("theory_timings")
  val ml_statistics = SQL.Column.bytes("ml_statistics")
  val task_statistics = SQL.Column.bytes("task_statistics")
  val errors = SQL.Column.bytes("errors")
  val build_log_columns =
    List(session_name, session_timing, command_timings, theory_timings,
      ml_statistics, task_statistics, errors)

  // Build.Session_Info: source digest, heap lineage, and build result
  val sources = SQL.Column.string("sources")
  val input_heaps = SQL.Column.string("input_heaps")
  val output_heap = SQL.Column.string("output_heap")
  val return_code = SQL.Column.int("return_code")
  val build_columns = List(sources, input_heaps, output_heap, return_code)

  val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
}
/**
 * Uniform access to session databases: either a single database server
 * (PostgreSQL) or per-session SQLite files managed by the store.
 * Note: leftover patch markers around `cache` removed; the current
 * signature uses Term.Cache (which subsumes the XML cache).
 */
class Database_Context private[Sessions](
  val store: Sessions.Store,
  database_server: Option[SQL.Database]) extends AutoCloseable
{
  // value cache of the underlying store
  def cache: Term.Cache = store.cache

  def close(): Unit = database_server.foreach(_.close())

  /** Run `f` on the output database for `session` (server or fresh SQLite). */
  def output_database[A](session: String)(f: SQL.Database => A): A =
    database_server match {
      case Some(db) => f(db)
      case None => using(store.open_database(session, output = true))(f)
    }

  /** Run `f` on an existing input database for `session`, if available. */
  def input_database[A](session: String)(f: (SQL.Database, String) => Option[A]): Option[A] =
    database_server match {
      case Some(db) => f(db, session)
      case None =>
        store.try_open_database(session) match {
          case Some(db) => using(db)(f(_, session))
          case None => None
        }
    }

  /**
   * Look up an export entry in the given sessions (in order), returning the
   * first hit; session databases are opened lazily via a view.
   */
  def read_export(
    sessions: List[String], theory_name: String, name: String): Option[Export.Entry] =
  {
    val attempts =
      database_server match {
        case Some(db) =>
          sessions.view.map(session_name =>
            Export.read_entry(db, store.cache, session_name, theory_name, name))
        case None =>
          sessions.view.map(session_name =>
            store.try_open_database(session_name) match {
              case Some(db) =>
                using(db)(Export.read_entry(_, store.cache, session_name, theory_name, name))
              case None => None
            })
      }
    attempts.collectFirst({ case Some(entry) => entry })
  }

  /** Like read_export, but fall back to an empty entry when nothing is found. */
  def get_export(
    session_hierarchy: List[String], theory_name: String, name: String): Export.Entry =
    read_export(session_hierarchy, theory_name, name) getOrElse
      Export.empty_entry(theory_name, name)

  override def toString: String =
  {
    val s =
      database_server match {
        case Some(db) => db.toString
        case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ")
      }
    "Database_Context(" + s + ")"
  }
}
/**
 * Create a persistent store.  (Leftover patch markers removed; the current
 * signature uses Term.Cache, the successor of the former XML.Cache.)
 */
def store(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
  new Store(options, cache)
- class Store private[Sessions](val options: Options, val cache: XML.Cache)
+ class Store private[Sessions](val options: Options, val cache: Term.Cache)
{
store =>
override def toString: String = "Store(output_dir = " + output_dir.absolute + ")"
/* directories */
val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER")
val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER")
def system_heaps: Boolean = options.bool("system_heaps")
val output_dir: Path =
if (system_heaps) system_output_dir else user_output_dir
val input_dirs: List[Path] =
if (system_heaps) List(system_output_dir)
else List(user_output_dir, system_output_dir)
def presentation_dir: Path =
if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
else Path.explode("$ISABELLE_BROWSER_INFO")
/* file names */
def heap(name: String): Path = Path.basic(name)
def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db")
def log(name: String): Path = Path.basic("log") + Path.basic(name)
def log_gz(name: String): Path = log(name).ext("gz")
def output_heap(name: String): Path = output_dir + heap(name)
def output_database(name: String): Path = output_dir + database(name)
def output_log(name: String): Path = output_dir + log(name)
def output_log_gz(name: String): Path = output_dir + log_gz(name)
def prepare_output_dir(): Unit = Isabelle_System.make_directory(output_dir + Path.basic("log"))
/* heap */
def find_heap(name: String): Option[Path] =
input_dirs.map(_ + heap(name)).find(_.is_file)
def find_heap_digest(name: String): Option[String] =
find_heap(name).flatMap(read_heap_digest)
def the_heap(name: String): Path =
find_heap(name) getOrElse
error("Missing heap image for session " + quote(name) + " -- expected in:\n" +
cat_lines(input_dirs.map(dir => " " + dir.expand.implode)))
/* database */
def database_server: Boolean = options.bool("build_database_server")
def open_database_server(): SQL.Database =
PostgreSQL.open_database(
user = options.string("build_database_user"),
password = options.string("build_database_password"),
database = options.string("build_database_name"),
host = options.string("build_database_host"),
port = options.int("build_database_port"),
ssh =
options.proper_string("build_database_ssh_host").map(ssh_host =>
SSH.open_session(options,
host = ssh_host,
user = options.string("build_database_ssh_user"),
port = options.int("build_database_ssh_port"))),
ssh_close = true)
def open_database_context(): Database_Context =
new Database_Context(store, if (database_server) Some(open_database_server()) else None)
def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] =
{
def check(db: SQL.Database): Option[SQL.Database] =
if (output || session_info_exists(db)) Some(db) else { db.close(); None }
if (database_server) check(open_database_server())
else if (output) Some(SQLite.open_database(output_database(name)))
else {
(for {
dir <- input_dirs.view
path = dir + database(name) if path.is_file
db <- check(SQLite.open_database(path))
} yield db).headOption
}
}
def open_database(name: String, output: Boolean = false): SQL.Database =
try_open_database(name, output = output) getOrElse
error("Missing build database for session " + quote(name))
def clean_output(name: String): (Boolean, Boolean) =
{
val relevant_db =
database_server &&
{
try_open_database(name) match {
case Some(db) =>
try {
db.transaction {
val relevant_db = session_info_defined(db, name)
init_session_info(db, name)
relevant_db
}
} finally { db.close() }
case None => false
}
}
val del =
for {
dir <-
(if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir))
file <- List(heap(name), database(name), log(name), log_gz(name))
path = dir + file if path.is_file
} yield path.file.delete
val relevant = relevant_db || del.nonEmpty
val ok = del.forall(b => b)
(relevant, ok)
}
/* SQL database content */
def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
db.using_statement(Session_Info.table.select(List(column),
Session_Info.session_name.where_equal(name)))(stmt =>
{
val res = stmt.execute_query()
if (!res.next()) Bytes.empty else res.bytes(column)
})
def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] =
Properties.uncompress(read_bytes(db, name, column), cache = cache)
/* session info */
def init_session_info(db: SQL.Database, name: String): Unit =
{
db.transaction {
db.create_table(Session_Info.table)
db.using_statement(
Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute())
db.create_table(Export.Data.table)
db.using_statement(
Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute())
db.create_table(Document_Build.Data.table)
db.using_statement(
Document_Build.Data.table.delete(
Document_Build.Data.session_name.where_equal(name)))(_.execute())
}
}
def session_info_exists(db: SQL.Database): Boolean =
{
val tables = db.tables
tables.contains(Session_Info.table.name) &&
tables.contains(Export.Data.table.name)
}
def session_info_defined(db: SQL.Database, name: String): Boolean =
db.transaction {
session_info_exists(db) &&
{
db.using_statement(
Session_Info.table.select(List(Session_Info.session_name),
Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next())
}
}
def write_session_info(
db: SQL.Database,
name: String,
build_log: Build_Log.Session_Info,
build: Build.Session_Info): Unit =
{
db.transaction {
db.using_statement(Session_Info.table.insert())(stmt =>
{
stmt.string(1) = name
stmt.bytes(2) = Properties.encode(build_log.session_timing)
stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache.xz)
stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache.xz)
stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = cache.xz)
stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = cache.xz)
stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = cache.xz)
stmt.string(8) = build.sources
stmt.string(9) = cat_lines(build.input_heaps)
stmt.string(10) = build.output_heap getOrElse ""
stmt.int(11) = build.return_code
stmt.execute()
})
}
}
def read_session_timing(db: SQL.Database, name: String): Properties.T =
Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache)
def read_command_timings(db: SQL.Database, name: String): List[Properties.T] =
read_properties(db, name, Session_Info.command_timings)
def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] =
read_properties(db, name, Session_Info.theory_timings)
def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] =
read_properties(db, name, Session_Info.ml_statistics)
def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] =
read_properties(db, name, Session_Info.task_statistics)
def read_theories(db: SQL.Database, name: String): List[String] =
read_theory_timings(db, name).flatMap(Markup.Name.unapply)
def read_errors(db: SQL.Database, name: String): List[String] =
Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache)
/* Retrieve the persistent build record of a session, or None when the
   session_info table or the session's row does not exist. */
def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] =
{
  if (!db.tables.contains(Session_Info.table.name)) None
  else {
    db.using_statement(
      Session_Info.table.select(Session_Info.build_columns,
        Session_Info.session_name.where_equal(name)))(stmt =>
    {
      val res = stmt.execute_query()
      if (res.next()) {
        val sources = res.string(Session_Info.sources)
        // input heaps were stored as a newline-separated string (see write_session_info)
        val input_heaps = split_lines(res.string(Session_Info.input_heaps))
        // empty string encodes an absent output heap
        val output_heap =
          res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) }
        val rc = res.int(Session_Info.return_code)
        Some(Build.Session_Info(sources, input_heaps, output_heap, rc))
      }
      else None
    })
  }
}
}
}
diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala
+++ b/src/Pure/Tools/build.scala
@@ -1,685 +1,685 @@
/* Title: Pure/Tools/build.scala
Author: Makarius
Options: :folding=explicit:
Build and manage Isabelle sessions.
*/
package isabelle
import scala.collection.immutable.SortedSet
import scala.annotation.tailrec
object Build
{
/** auxiliary **/
/* persistent build info */
// Record of a finished build as persisted in the session database:
// digest over the session sources, digests of the imported (input) heaps,
// digest of the produced heap image (None if no image was stored),
// and the build process return code.
sealed case class Session_Info(
sources: String,
input_heaps: List[String],
output_heap: Option[String],
return_code: Int)
{
// a build counts as successful iff the process exited with return code 0
def ok: Boolean = return_code == 0
}
/* queue with scheduling information */
private object Queue
{
// recorded command timings of a session, plus its overall elapsed time
type Timings = (List[Properties.T], Double)
// Load timings of a previous build of this session from its database;
// any database problem degrades gracefully to "no timings".
def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings =
{
val no_timings: Timings = (Nil, 0.0)
store.try_open_database(session_name) match {
case None => no_timings
case Some(db) =>
def ignore_error(msg: String) =
{
progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg))
no_timings
}
try {
val command_timings = store.read_command_timings(db, session_name)
val session_timing =
store.read_session_timing(db, session_name) match {
case Markup.Elapsed(t) => t
case _ => 0.0
}
(command_timings, session_timing)
}
catch {
// database content may be stale/corrupt: warn and fall back to no_timings
case ERROR(msg) => ignore_error(msg)
case exn: java.lang.Error => ignore_error(Exn.message(exn))
case _: XML.Error => ignore_error("")
}
finally { db.close() }
}
}
// Estimate a scheduling weight per session: for non-maximal sessions, the
// maximum cumulative timing over the descendant chains that are fully timed
// (incomplete chains are ignored via the None case).
def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double])
: Map[String, Double] =
{
val maximals = sessions_structure.build_graph.maximals.toSet
def desc_timing(session_name: String): Double =
{
if (maximals.contains(session_name)) timing(session_name)
else {
val descendants = sessions_structure.build_descendants(List(session_name)).toSet
val g = sessions_structure.build_graph.restrict(descendants)
(0.0 :: g.maximals.flatMap(desc => {
val ps = g.all_preds(List(desc))
if (ps.exists(p => !timing.isDefinedAt(p))) None
else Some(ps.map(timing(_)).sum)
})).max
}
}
timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0)
}
// Build the scheduling queue: sessions ordered by descending estimated
// timing, then descending timeout, then name (as a deterministic tie-break).
def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store)
: Queue =
{
val graph = sessions_structure.build_graph
val names = graph.keys
val timings = names.map(name => (name, load_timings(progress, store, name)))
val command_timings =
timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil)
val session_timing =
make_session_timing(sessions_structure,
timings.map({ case (name, (_, t)) => (name, t) }).toMap)
object Ordering extends scala.math.Ordering[String]
{
// note the swapped arguments: larger timing/timeout sorts first
def compare(name1: String, name2: String): Int =
session_timing(name2) compare session_timing(name1) match {
case 0 =>
sessions_structure(name2).timeout compare sessions_structure(name1).timeout match {
case 0 => name1 compare name2
case ord => ord
}
case ord => ord
}
}
new Queue(graph, SortedSet(names: _*)(Ordering), command_timings)
}
}
/* Scheduling queue: the build graph restricted to pending sessions, the
   priority order of session names, and recorded command timings. */
private class Queue(
  graph: Graph[String, Sessions.Info],
  order: SortedSet[String],
  val command_timings: String => List[Properties.T])
{
  // "inner" sessions (non-maximal in the build graph) have dependants
  def is_inner(name: String): Boolean = !graph.is_maximal(name)

  def is_empty: Boolean = graph.is_empty

  // remove a completed session from the queue
  def - (name: String): Queue =
    new Queue(graph.del_node(name), order - name, command_timings)

  // next ready job: highest-priority session that is not skipped and has no
  // pending prerequisites (i.e. is minimal in the remaining graph)
  def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] =
    order.iterator
      .find(name => !skip(name) && graph.is_minimal(name))
      .map(name => (name, graph.get_node(name)))
}
/** build with results **/
/* Overall build outcome: per-session process result (None = cancelled)
   together with the session info. */
class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)])
{
  def sessions: Set[String] = results.keySet

  def infos: List[Sessions.Info] = results.valuesIterator.map(_._2).toList

  def cancelled(name: String): Boolean = results(name)._1.isEmpty

  // a cancelled session is reported as generic failure (rc = 1)
  def apply(name: String): Process_Result =
    results(name)._1 getOrElse Process_Result(1)

  def get_info(name: String): Option[Sessions.Info] = results.get(name).map(_._2)

  def info(name: String): Sessions.Info = get_info(name).get

  // overall return code: maximum over all sessions, counting cancelled as 1
  val rc: Int =
    results.valuesIterator
      .map({ case (Some(r), _) => r.rc case (None, _) => 1 })
      .foldLeft(Process_Result.RC.ok)(_ max _)

  def ok: Boolean = rc == Process_Result.RC.ok

  override def toString: String = rc.toString
}
// Progress message for a successfully finished session, with resource timing.
def session_finished(session_name: String, process_result: Process_Result): String =
{
  val timing_msg = process_result.timing.message_resources
  "Finished " + session_name + " (" + timing_msg + ")"
}
// Progress message with the session's reported timing and thread count.
def session_timing(session_name: String, build_log: Build_Log.Session_Info): String =
{
  val timing_props = build_log.session_timing
  // default to a single thread when the property is absent
  val threads = Markup.Session_Timing.Threads.unapply(timing_props).getOrElse(1)
  val timing = Markup.Timing_Properties.parse(timing_props)
  "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")"
}
/* Build the selected sessions (with dependencies): schedule jobs on up to
   max_jobs workers, reuse up-to-date results, persist logs/databases, and
   optionally export files and produce HTML/PDF presentation.
   Returns the per-session Results. */
def build(
options: Options,
selection: Sessions.Selection = Sessions.Selection.empty,
presentation: Presentation.Context = Presentation.Context.none,
progress: Progress = new Progress,
check_unknown_files: Boolean = false,
build_heap: Boolean = false,
clean_build: Boolean = false,
dirs: List[Path] = Nil,
select_dirs: List[Path] = Nil,
infos: List[Sessions.Info] = Nil,
numa_shuffling: Boolean = false,
max_jobs: Int = 1,
list_files: Boolean = false,
check_keywords: Set[String] = Set.empty,
fresh_build: Boolean = false,
no_build: Boolean = false,
soft_build: Boolean = false,
verbose: Boolean = false,
export_files: Boolean = false,
session_setup: (String, Session) => Unit = (_, _) => ()): Results =
{
// enforce batch-mode options on top of the given ones
val build_options =
options +
"completion_limit=0" +
"editor_tracing_messages=0" +
"kodkod_scala=false" +
("pide_reports=" + options.bool("build_pide_reports"))
val store = Sessions.store(build_options)
Isabelle_Fonts.init()
/* session selection and dependencies */
val full_sessions =
Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos)
val full_sessions_selection = full_sessions.imports_selection(selection)
// combined digest over session metadata, sources, and imported sources
def sources_stamp(deps: Sessions.Deps, session_name: String): String =
{
val digests =
full_sessions(session_name).meta_digest ::
deps.sources(session_name) :::
deps.imported_sources(session_name)
SHA1.digest_set(digests).toString
}
// soft build: restrict the selection to sessions whose stored sources
// digest no longer matches (heap images are not taken into account)
val deps =
{
val deps0 =
Sessions.deps(full_sessions.selection(selection),
progress = progress, inlined_files = true, verbose = verbose,
list_files = list_files, check_keywords = check_keywords).check_errors
if (soft_build && !fresh_build) {
val outdated =
deps0.sessions_structure.build_topological_order.flatMap(name =>
store.try_open_database(name) match {
case Some(db) =>
using(db)(store.read_build(_, name)) match {
case Some(build)
if build.ok && build.sources == sources_stamp(deps0, name) => None
case _ => Some(name)
}
case None => Some(name)
})
Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)),
progress = progress, inlined_files = true).check_errors
}
else deps0
}
/* check unknown files */
// warn about source files that are not tracked by Mercurial
if (check_unknown_files) {
val source_files =
(for {
(_, base) <- deps.session_bases.iterator
(path, _) <- base.sources.iterator
} yield path).toList
val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file)
val unknown_files =
Mercurial.check_files(source_files)._2.
filterNot(path => exclude_files.contains(path.canonical_file))
if (unknown_files.nonEmpty) {
progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" +
unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", ""))
}
}
/* main build process */
val queue = Queue(progress, deps.sessions_structure, store)
store.prepare_output_dir()
if (clean_build) {
for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
val (relevant, ok) = store.clean_output(name)
if (relevant) {
if (ok) progress.echo("Cleaned " + name)
else progress.echo(name + " FAILED to clean")
}
}
}
// scheduler loop
// per-session scheduling outcome: current = reused without rebuilding,
// process = None means the job was cancelled
case class Result(
current: Boolean,
heap_digest: Option[String],
process: Option[Process_Result],
info: Sessions.Info)
{
def ok: Boolean =
process match {
case None => false
case Some(res) => res.ok
}
}
def sleep(): Unit =
Isabelle_Thread.interrupt_handler(_ => progress.stop()) { Time.seconds(0.5).sleep() }
val log =
build_options.string("system_log") match {
case "" => No_Logger
case "true" => Logger.make(progress)
case log_file => Logger.make(Some(Path.explode(log_file)))
}
val numa_nodes = new NUMA.Nodes(numa_shuffling)
// main scheduler: repeatedly finish completed jobs and start new ones,
// polling via sleep() while nothing changes
@tailrec def loop(
pending: Queue,
running: Map[String, (List[String], Build_Job)],
results: Map[String, Result]): Map[String, Result] =
{
def used_node(i: Int): Boolean =
running.iterator.exists(
{ case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i })
if (pending.is_empty) results
else {
if (progress.stopped) {
for ((_, (_, job)) <- running) job.terminate()
}
running.find({ case (_, (_, job)) => job.is_finished }) match {
case Some((session_name, (input_heaps, job))) =>
//{{{ finish job
val (process_result, heap_digest) = job.join
val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
// truncate echoed output to the configured tail, pointing at the full log file
val process_result_tail =
{
val tail = job.info.options.int("process_output_tail")
process_result.copy(
out_lines =
"(see also " + store.output_log(session_name).file.toString + ")" ::
(if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
}
val build_log =
Build_Log.Log_File(session_name, process_result.out_lines).
parse_session_info(
command_timings = true,
theory_timings = true,
ml_statistics = true,
task_statistics = true)
// write log file
if (process_result.ok) {
File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
}
else File.write(store.output_log(session_name), terminate_lines(log_lines))
// write database
using(store.open_database(session_name, output = true))(db =>
store.write_session_info(db, session_name,
build_log =
if (process_result.timeout) build_log.error("Timeout") else build_log,
build =
Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest,
process_result.rc)))
// messages
process_result.err_lines.foreach(progress.echo)
if (process_result.ok) {
if (verbose) progress.echo(session_timing(session_name, build_log))
progress.echo(session_finished(session_name, process_result))
}
else {
progress.echo(session_name + " FAILED")
if (!process_result.interrupted) progress.echo(process_result_tail.out)
}
loop(pending - session_name, running - session_name,
results +
(session_name -> Result(false, heap_digest, Some(process_result_tail), job.info)))
//}}}
case None if running.size < (max_jobs max 1) =>
//{{{ check/start next job
pending.dequeue(running.isDefinedAt) match {
case Some((session_name, info)) =>
val ancestor_results =
deps.sessions_structure.build_requirements(List(session_name)).
filterNot(_ == session_name).map(results(_))
val ancestor_heaps = ancestor_results.flatMap(_.heap_digest)
val do_store =
build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name)
// check the stored build record against sources and ancestor heaps
val (current, heap_digest) =
{
store.try_open_database(session_name) match {
case Some(db) =>
using(db)(store.read_build(_, session_name)) match {
case Some(build) =>
val heap_digest = store.find_heap_digest(session_name)
val current =
!fresh_build &&
build.ok &&
build.sources == sources_stamp(deps, session_name) &&
build.input_heaps == ancestor_heaps &&
build.output_heap == heap_digest &&
!(do_store && heap_digest.isEmpty)
(current, heap_digest)
case None => (false, None)
}
case None => (false, None)
}
}
val all_current = current && ancestor_results.forall(_.current)
if (all_current)
loop(pending - session_name, running,
results +
(session_name -> Result(true, heap_digest, Some(Process_Result(0)), info)))
else if (no_build) {
progress.echo_if(verbose, "Skipping " + session_name + " ...")
loop(pending - session_name, running,
results +
(session_name -> Result(false, heap_digest, Some(Process_Result(1)), info)))
}
else if (ancestor_results.forall(_.ok) && !progress.stopped) {
progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...")
store.clean_output(session_name)
using(store.open_database(session_name, output = true))(
store.init_session_info(_, session_name))
val numa_node = numa_nodes.next(used_node)
val job =
new Build_Job(progress, session_name, info, deps, store, do_store,
log, session_setup, numa_node, queue.command_timings(session_name))
loop(pending, running + (session_name -> (ancestor_heaps, job)), results)
}
else {
// some prerequisite failed or the build was stopped
progress.echo(session_name + " CANCELLED")
loop(pending - session_name, running,
results + (session_name -> Result(false, heap_digest, None, info)))
}
case None => sleep(); loop(pending, running, results)
}
//}}}
case None => sleep(); loop(pending, running, results)
}
}
}
/* build results */
val results =
{
val results0 =
if (deps.is_empty) {
progress.echo_warning("Nothing to build")
Map.empty[String, Result]
}
else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) }
new Results(
(for ((name, result) <- results0.iterator)
yield (name, (result.process, result.info))).toMap)
}
if (export_files) {
for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) {
val info = results.info(name)
if (info.export_files.nonEmpty) {
progress.echo("Exporting " + info.name + " ...")
for ((dir, prune, pats) <- info.export_files) {
Export.export_files(store, name, info.dir + dir,
progress = if (verbose) progress else new Progress,
export_prune = prune,
export_patterns = pats)
}
}
}
}
if (results.rc != Process_Result.RC.ok && (verbose || !no_build)) {
val unfinished =
(for {
name <- results.sessions.iterator
if !results(name).ok
} yield name).toList.sorted
progress.echo("Unfinished session(s): " + commas(unfinished))
}
/* PDF/HTML presentation */
if (!no_build && !progress.stopped && results.ok) {
val selected = full_sessions_selection.toSet
val presentation_sessions =
(for {
session_name <- deps.sessions_structure.imports_topological_order.iterator
info <- results.get_info(session_name)
if selected(session_name) && presentation.enabled(info) && results(session_name).ok }
yield info).toList
if (presentation_sessions.nonEmpty) {
val presentation_dir = presentation.dir(store)
progress.echo("Presentation in " + presentation_dir.absolute)
Presentation.update_root(presentation_dir)
for ((chapter, infos) <- presentation_sessions.groupBy(_.chapter).iterator) {
val entries = infos.map(info => (info.name, info.description))
Presentation.update_chapter(presentation_dir, chapter, entries)
}
val resources = Resources.empty
- val html_context = Presentation.html_context()
+ val html_context = Presentation.html_context(cache = store.cache)
using(store.open_database_context())(db_context =>
for (info <- presentation_sessions) {
progress.expose_interrupt()
progress.echo("Presenting " + info.name + " ...")
Presentation.session_html(
resources, info.name, deps, db_context, progress = progress,
verbose = verbose, html_context = html_context,
Presentation.elements1, presentation = presentation)
})
}
}
results
}
/* Isabelle tool wrapper */
// Command-line wrapper: parse "isabelle build" options, run build(), report
// overall timing, and exit with the build's return code.
val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions",
Scala_Project.here, args =>
{
val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS"))
var base_sessions: List[String] = Nil
var select_dirs: List[Path] = Nil
var numa_shuffling = false
var presentation = Presentation.Context.none
var requirements = false
var soft_build = false
var exclude_session_groups: List[String] = Nil
var all_sessions = false
var build_heap = false
var clean_build = false
var dirs: List[Path] = Nil
var export_files = false
var fresh_build = false
var session_groups: List[String] = Nil
var max_jobs = 1
var check_keywords: Set[String] = Set.empty
var list_files = false
var no_build = false
var options = Options.init(opts = build_options)
var verbose = false
var exclude_sessions: List[String] = Nil
val getopts = Getopts("""
Usage: isabelle build [OPTIONS] [SESSIONS ...]
Options are:
-B NAME include session NAME and all descendants
-D DIR include session directory and select its sessions
-N cyclic shuffling of NUMA CPU nodes (performance tuning)
-P DIR enable HTML/PDF presentation in directory (":" for default)
-R refer to requirements of selected sessions
-S soft build: only observe changes of sources, not heap images
-X NAME exclude sessions from group NAME and all descendants
-a select all sessions
-b build heap images
-c clean build
-d DIR include session directory
-e export files from session specification into file-system
-f fresh build
-g NAME select session group NAME
-j INT maximum number of parallel jobs (default 1)
-k KEYWORD check theory sources for conflicts with proposed keywords
-l list session source files
-n no build -- test dependencies only
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-v verbose
-x NAME exclude session NAME and all descendants
Build and manage Isabelle sessions, depending on implicit settings:
""" + Library.indent_lines(2, Build_Log.Settings.show()) + "\n",
"B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
"D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
"N" -> (_ => numa_shuffling = true),
"P:" -> (arg => presentation = Presentation.Context.make(arg)),
"R" -> (_ => requirements = true),
"S" -> (_ => soft_build = true),
"X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
"a" -> (_ => all_sessions = true),
"b" -> (_ => build_heap = true),
"c" -> (_ => clean_build = true),
"d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
"e" -> (_ => export_files = true),
"f" -> (_ => fresh_build = true),
"g:" -> (arg => session_groups = session_groups ::: List(arg)),
"j:" -> (arg => max_jobs = Value.Int.parse(arg)),
"k:" -> (arg => check_keywords = check_keywords + arg),
"l" -> (_ => list_files = true),
"n" -> (_ => no_build = true),
"o:" -> (arg => options = options + arg),
"v" -> (_ => verbose = true),
"x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))
val sessions = getopts(args)
val progress = new Console_Progress(verbose = verbose)
val start_date = Date.now()
if (verbose) {
progress.echo(
"Started at " + Build_Log.print_date(start_date) +
" (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")")
progress.echo(Build_Log.Settings.show() + "\n")
}
// run the build with interrupt handling attached to the console progress
val results =
progress.interrupt_handler {
build(options,
selection = Sessions.Selection(
requirements = requirements,
all_sessions = all_sessions,
base_sessions = base_sessions,
exclude_session_groups = exclude_session_groups,
exclude_sessions = exclude_sessions,
session_groups = session_groups,
sessions = sessions),
presentation = presentation,
progress = progress,
check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME),
build_heap = build_heap,
clean_build = clean_build,
dirs = dirs,
select_dirs = select_dirs,
numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling),
max_jobs = max_jobs,
list_files = list_files,
check_keywords = check_keywords,
fresh_build = fresh_build,
no_build = no_build,
soft_build = soft_build,
verbose = verbose,
export_files = export_files)
}
val end_date = Date.now()
val elapsed_time = end_date.time - start_date.time
if (verbose) {
progress.echo("\nFinished at " + Build_Log.print_date(end_date))
}
// sum of per-session timings, with elapsed replaced by wall-clock time
val total_timing =
results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _).
copy(elapsed = elapsed_time)
progress.echo(total_timing.message_resources)
sys.exit(results.rc)
})
/* build logic image */
// Ensure that a heap image for the given logic session exists and is
// up-to-date; returns the build return code (or fails when strict).
def build_logic(options: Options, logic: String,
  progress: Progress = new Progress,
  build_heap: Boolean = false,
  dirs: List[Path] = Nil,
  fresh: Boolean = false,
  strict: Boolean = false): Int =
{
  val selection = Sessions.Selection.session(logic)
  // a dry run (no_build = true) detects whether the image is already current;
  // skipped entirely when a fresh build is requested
  val already_current =
    !fresh &&
      build(options, selection = selection,
        build_heap = build_heap, no_build = true, dirs = dirs).ok
  val rc =
    if (already_current) Process_Result.RC.ok
    else {
      progress.echo("Build started for Isabelle/" + logic + " ...")
      Build.build(options, selection = selection, progress = progress,
        build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc
    }
  if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic)
  else rc
}
}
diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala
--- a/src/Pure/Tools/build_job.scala
+++ b/src/Pure/Tools/build_job.scala
@@ -1,555 +1,555 @@
/* Title: Pure/Tools/build_job.scala
Author: Makarius
Build job running prover process, with rudimentary PIDE session.
*/
package isabelle
import java.util.HashMap
import scala.collection.mutable
object Build_Job
{
/* theory markup/messages from database */
/* Reconstruct a theory as a single unparsed Command from the exports stored
   in the session database: document id, source files, markup trees, and
   result messages. Returns None when the stored exports are incomplete. */
def read_theory(
db_context: Sessions.Database_Context,
resources: Resources,
session_hierarchy: List[String],
theory: String,
unicode_symbols: Boolean = false): Option[Command] =
{
def read(name: String): Export.Entry =
db_context.get_export(session_hierarchy, theory, name)
def read_xml(name: String): XML.Body =
YXML.parse_body(
Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).uncompressed)),
cache = db_context.cache)
// FILES lists the theory file first, followed by its blobs
(read(Export.DOCUMENT_ID).text, split_lines(read(Export.FILES).text)) match {
case (Value.Long(id), thy_file :: blobs_files) =>
val node_name = resources.file_node(Path.explode(thy_file), theory = theory)
// messages keyed by their serial number
val results =
Command.Results.make(
for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
yield i -> elem)
val blobs =
blobs_files.map(file =>
{
val path = Path.explode(file)
val name = resources.file_node(path)
val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path)
Command.Blob(name, src_path, None)
})
// blob markup is exported as MARKUP1, MARKUP2, ... (1-based)
val blobs_xml =
for (i <- (1 to blobs.length).toList)
yield read_xml(Export.MARKUP + i)
val blobs_info =
Command.Blobs_Info(
for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml }
yield {
val text = XML.content(xml)
val chunk = Symbol.Text_Chunk(text)
val digest = SHA1.digest(Symbol.encode(text))
Exn.Res(Command.Blob(name, src_path, Some((digest, chunk))))
})
val thy_xml = read_xml(Export.MARKUP)
val thy_source = XML.content(thy_xml)
// markup index for the theory itself plus one per blob, in the same order
val markups_index =
Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob)
val markups =
Command.Markups.make(
for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
yield index -> Markup_Tree.from_XML(xml))
val command =
Command.unparsed(thy_source, theory = true, id = id, node_name = node_name,
blobs_info = blobs_info, results = results, markups = markups)
Some(command)
case _ => None
}
}
/* print messages */
/* Print messages from the build database of a session: per-theory markup
   messages (restricted to the given theories, if any), stored build errors,
   and a non-zero return code. Fails when the build database is missing. */
def print_log(
options: Options,
session_name: String,
theories: List[String] = Nil,
verbose: Boolean = false,
progress: Progress = new Progress,
margin: Double = Pretty.default_margin,
breakgain: Double = Pretty.default_breakgain,
metric: Pretty.Metric = Symbol.Metric,
unicode_symbols: Boolean = false): Unit =
{
val store = Sessions.store(options)
val resources = Resources.empty
val session = new Session(options, resources)
using(store.open_database_context())(db_context =>
{
val result =
db_context.input_database(session_name)((db, _) =>
{
val theories = store.read_theories(db, session_name)
val errors = store.read_errors(db, session_name)
store.read_build(db, session_name).map(info => (theories, errors, info.return_code))
})
result match {
case None => error("Missing build database for session " + quote(session_name))
case Some((used_theories, errors, rc)) =>
// reject requested theories that the session does not contain
theories.filterNot(used_theories.toSet) match {
case Nil =>
case bad => error("Unknown theories " + commas_quote(bad))
}
val print_theories =
if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet)
for (thy <- print_theories) {
val thy_heading = "\nTheory " + quote(thy) + ":"
read_theory(db_context, resources, List(session_name), thy,
unicode_symbols = unicode_symbols)
match {
case None => progress.echo(thy_heading + " MISSING")
case Some(command) =>
// render the reconstructed command and echo its messages with positions
val snapshot = Document.State.init.snippet(command)
val rendering = new Rendering(snapshot, options, session)
val messages =
rendering.text_messages(Text.Range.full)
.filter(message => verbose || Protocol.is_exported(message.info))
if (messages.nonEmpty) {
val line_document = Line.Document(command.source)
progress.echo(thy_heading)
for (Text.Info(range, elem) <- messages) {
val line = line_document.position(range.start).line1
val pos = Position.Line_File(line, command.node_name.node)
progress.echo(
Protocol.message_text(elem, heading = true, pos = pos,
margin = margin, breakgain = breakgain, metric = metric))
}
}
}
}
if (errors.nonEmpty) {
val msg = Symbol.output(unicode_symbols, cat_lines(errors))
progress.echo("\nBuild errors:\n" + Output.error_message_text(msg))
}
if (rc != Process_Result.RC.ok) {
progress.echo("\n" + Process_Result.RC.print_long(rc))
}
}
})
}
/* Isabelle tool wrapper */
// Command-line wrapper: parse "isabelle log" options and delegate to
// print_log for the single given session.
val isabelle_tool = Isabelle_Tool("log", "print messages from build database",
Scala_Project.here, args =>
{
/* arguments */
var unicode_symbols = false
var theories: List[String] = Nil
var margin = Pretty.default_margin
var options = Options.init()
var verbose = false
val getopts = Getopts("""
Usage: isabelle log [OPTIONS] SESSION
Options are:
-T NAME restrict to given theories (multiple options possible)
-U output Unicode symbols
-m MARGIN margin for pretty printing (default: """ + margin + """)
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-v print all messages, including information etc.
Print messages from the build database of the given session, without any
checks against current sources: results from a failed build can be
printed as well.
""",
"T:" -> (arg => theories = theories ::: List(arg)),
"U" -> (_ => unicode_symbols = true),
"m:" -> (arg => margin = Value.Double.parse(arg)),
"o:" -> (arg => options = options + arg),
"v" -> (_ => verbose = true))
val more_args = getopts(args)
// exactly one session name is required
val session_name =
more_args match {
case List(session_name) => session_name
case _ => getopts.usage()
}
val progress = new Console_Progress()
print_log(options, session_name, theories = theories, verbose = verbose, margin = margin,
progress = progress, unicode_symbols = unicode_symbols)
})
}
class Build_Job(progress: Progress,
session_name: String,
val info: Sessions.Info,
deps: Sessions.Deps,
store: Sessions.Store,
do_store: Boolean,
log: Logger,
session_setup: (String, Session) => Unit,
val numa_node: Option[Int],
command_timings0: List[Properties.T])
{
val options: Options = NUMA.policy_options(info.options, numa_node)
private val sessions_structure = deps.sessions_structure
private val future_result: Future[Process_Result] =
Future.thread("build", uninterruptible = true) {
val parent = info.parent.getOrElse("")
val base = deps(parent)
val result_base = deps(session_name)
val env = new HashMap(Isabelle_System.settings())
env.put("ISABELLE_ML_DEBUGGER", options.bool("ML_debugger").toString)
val is_pure = Sessions.is_pure(session_name)
val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil
val eval_store =
if (do_store) {
(if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) :::
List("ML_Heap.save_child " +
ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name))))
}
else Nil
val resources =
new Resources(sessions_structure, base, log = log, command_timings = command_timings0)
val session =
new Session(options, resources) {
- override val cache: XML.Cache = store.cache
+ override val cache: Term.Cache = store.cache
override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
{
result_base.load_commands.get(name.expand) match {
case Some(spans) =>
val syntax = result_base.theory_syntax(name)
Command.build_blobs_info(syntax, name, spans)
case None => Command.Blobs_Info.none
}
}
}
object Build_Session_Errors
{
private val promise: Promise[List[String]] = Future.promise
def result: Exn.Result[List[String]] = promise.join_result
def cancel(): Unit = promise.cancel()
def apply(errs: List[String]): Unit =
{
try { promise.fulfill(errs) }
catch { case _: IllegalStateException => }
}
}
val export_consumer =
Export.consumer(store.open_database(session_name, output = true), store.cache,
progress = progress)
val stdout = new StringBuilder(1000)
val stderr = new StringBuilder(1000)
val command_timings = new mutable.ListBuffer[Properties.T]
val theory_timings = new mutable.ListBuffer[Properties.T]
val session_timings = new mutable.ListBuffer[Properties.T]
val runtime_statistics = new mutable.ListBuffer[Properties.T]
val task_statistics = new mutable.ListBuffer[Properties.T]
def fun(
name: String,
acc: mutable.ListBuffer[Properties.T],
unapply: Properties.T => Option[Properties.T]): (String, Session.Protocol_Function) =
{
name -> ((msg: Prover.Protocol_Output) =>
unapply(msg.properties) match {
case Some(props) => acc += props; true
case _ => false
})
}
session.init_protocol_handler(new Session.Protocol_Handler
{
override def exit(): Unit = Build_Session_Errors.cancel()
private def build_session_finished(msg: Prover.Protocol_Output): Boolean =
{
val (rc, errors) =
try {
val (rc, errs) =
{
import XML.Decode._
pair(int, list(x => x))(Symbol.decode_yxml(msg.text))
}
val errors =
for (err <- errs) yield {
val prt = Protocol_Message.expose_no_reports(err)
Pretty.string_of(prt, metric = Symbol.Metric)
}
(rc, errors)
}
catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) }
session.protocol_command("Prover.stop", rc.toString)
Build_Session_Errors(errors)
true
}
private def loading_theory(msg: Prover.Protocol_Output): Boolean =
msg.properties match {
case Markup.Loading_Theory(Markup.Name(name)) =>
progress.theory(Progress.Theory(name, session = session_name))
false
case _ => false
}
private def export_(msg: Prover.Protocol_Output): Boolean =
msg.properties match {
case Protocol.Export(args) =>
export_consumer(session_name, args, msg.chunk)
true
case _ => false
}
override val functions =
List(
Markup.Build_Session_Finished.name -> build_session_finished,
Markup.Loading_Theory.name -> loading_theory,
Markup.EXPORT -> export_,
fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply),
fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply),
fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply))
})
// Collect command timings that exceed the configured threshold.
session.command_timings += Session.Consumer("command_timings")
{
  case Session.Command_Timing(props) =>
    for {
      elapsed <- Markup.Elapsed.unapply(props)
      elapsed_time = Time.seconds(elapsed)
      if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold")
    } command_timings += props.filter(Markup.command_timing_property)
}

// Collect ML runtime statistics verbatim.
session.runtime_statistics += Session.Consumer("ML_statistics")
{
  case Session.Runtime_Statistics(props) => runtime_statistics += props
}

// For each finished theory, export snapshot artifacts (document id, file
// list, markup, messages, citations) — unless the build was stopped.
session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories")
{
  case snapshot =>
    if (!progress.stopped) {
      val rendering = new Rendering(snapshot, options, session)

      // Export an XML body under the given name (optionally compressed);
      // empty bodies are suppressed.
      def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit =
      {
        if (!progress.stopped) {
          val theory_name = snapshot.node_name.theory
          val args =
            Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress)
          val bytes = Bytes(Symbol.encode(YXML.string_of_body(xml)))
          if (!bytes.is_empty) export_consumer(session_name, args, bytes)
        }
      }
      def export_text(name: String, text: String, compress: Boolean = true): Unit =
        export_(name, List(XML.Text(text)), compress = compress)

      for (command <- snapshot.snippet_command) {
        export_text(Export.DOCUMENT_ID, command.id.toString, compress = false)
      }

      export_text(Export.FILES,
        cat_lines(snapshot.node_files.map(_.symbolic.node)), compress = false)

      // Markup of auxiliary file blobs (1-based suffix), then of the theory itself.
      for (((_, xml), i) <- snapshot.xml_markup_blobs().zipWithIndex) {
        export_(Export.MARKUP + (i + 1), xml)
      }
      export_(Export.MARKUP, snapshot.xml_markup())
      export_(Export.MESSAGES, snapshot.messages.map(_._1))

      val citations = Library.distinct(rendering.citations(Text.Range.full).map(_.info))
      export_text(Export.DOCUMENT_CITATIONS, cat_lines(citations))
    }
}

// Route raw prover output: log system messages, accumulate stdout/stderr,
// and record abnormal process termination as a build error.
session.all_messages += Session.Consumer[Any]("build_session_output")
{
  case msg: Prover.Output =>
    val message = msg.message
    if (msg.is_system) resources.log(Protocol.message_text(message))

    if (msg.is_stdout) {
      stdout ++= Symbol.encode(XML.content(message))
    }
    else if (msg.is_stderr) {
      stderr ++= Symbol.encode(XML.content(message))
    }
    else if (msg.is_exit) {
      val err =
        "Prover terminated" +
          (msg.properties match {
            case Markup.Process_Result(result) => ": " + result.print_rc
            case _ => ""
          })
      Build_Session_Errors(List(err))
    }

  case _ =>
}
session_setup(session_name, session)

// Start the prover process with the build initialization entry point.
val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store)

val process =
  Isabelle_Process.start(session, options, sessions_structure, store,
    logic = parent, raw_ml_system = is_pure,
    use_prelude = use_prelude, eval_main = eval_main,
    cwd = info.dir.file, env = env)

// Initiate the build via the protocol and await the reported errors;
// a startup failure becomes a single-error result.
val build_errors =
  Isabelle_Thread.interrupt_handler(_ => process.terminate()) {
    Exn.capture { process.await_startup() } match {
      case Exn.Res(_) =>
        val resources_yxml = resources.init_session_yxml
        val encode_options: XML.Encode.T[Options] =
          options => session.prover_options(options).encode
        val args_yxml =
          YXML.string_of_body(
            {
              import XML.Encode._
              pair(string, list(pair(encode_options, list(pair(string, properties)))))(
                (session_name, info.theories))
            })
        session.protocol_command("build_session", resources_yxml, args_yxml)
        Build_Session_Errors.result
      case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn)))
    }
  }

val process_result =
  Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() }

session.stop()

val export_errors =
  export_consumer.shutdown(close = true).map(Output.error_message_text)

// Build session documents only after a fully successful prover run.
val (document_output, document_errors) =
  try {
    if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty) {
      using(store.open_database_context())(db_context =>
        {
          val documents =
            Document_Build.build_documents(
              Document_Build.context(session_name, deps, db_context, progress = progress),
              output_sources = info.document_output,
              output_pdf = info.document_output)
          db_context.output_database(session_name)(db =>
            documents.foreach(_.write(db, session_name)))
          (documents.flatMap(_.log_lines), Nil)
        })
    }
    else (Nil, Nil)
  }
  catch {
    case exn: Document_Build.Build_Error => (exn.log_lines, List(exn.message))
    case Exn.Interrupt.ERROR(msg) => (Nil, List(msg))
  }

// Assemble the overall result: stdout, timing/statistics marker lines and
// document log lines, plus stderr and accumulated error return codes.
val result =
{
  val theory_timing =
    theory_timings.iterator.map(
      { case props @ Markup.Name(name) => name -> props }).toMap
  val used_theory_timings =
    for { (name, _) <- deps(session_name).used_theories }
      yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory))
  val more_output =
    Library.trim_line(stdout.toString) ::
      command_timings.toList.map(Protocol.Command_Timing_Marker.apply) :::
      used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) :::
      session_timings.toList.map(Protocol.Session_Timing_Marker.apply) :::
      runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) :::
      task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) :::
      document_output

  process_result.output(more_output)
    .error(Library.trim_line(stderr.toString))
    .errors_rc(export_errors ::: document_errors)
}

// Final adjustment: attach build errors, or downgrade an apparently
// successful result to "interrupt" if the build was stopped/interrupted.
build_errors match {
  case Exn.Res(build_errs) =>
    val errs = build_errs ::: document_errors
    if (errs.nonEmpty) {
      result.error_rc.output(
        errs.flatMap(s => split_lines(Output.error_message_text(s))) :::
          errs.map(Protocol.Error_Message_Marker.apply))
    }
    else if (progress.stopped && result.ok) result.copy(rc = Process_Result.RC.interrupt)
    else result
  case Exn.Exn(Exn.Interrupt()) =>
    if (result.ok) result.copy(rc = Process_Result.RC.interrupt)
    else result
  case Exn.Exn(exn) => throw exn
}
}
// Request cancellation of the main build thread.
def terminate(): Unit = future_result.cancel()
def is_finished: Boolean = future_result.is_finished

// Optional watchdog: terminate the session once the configured timeout
// elapses; no request is made when timeouts are ignored for this session.
private val timeout_request: Option[Event_Timer.Request] =
{
  if (info.timeout_ignored) None
  else Some(Event_Timer.request(Time.now() + info.timeout) { terminate() })
}
/* Wait for the build to finish.  Returns the final process result, adjusted
   for timeout or interruption, together with the digest of the stored heap
   image when one was produced. */
def join: (Process_Result, Option[String]) =
{
  val base_result = future_result.join

  // A timeout request that can no longer be cancelled has already fired.
  val timed_out = timeout_request.exists(request => !request.cancel())

  val final_result =
    if (base_result.ok) base_result
    else if (timed_out) base_result.error(Output.error_message_text("Timeout")).timeout_rc
    else if (base_result.interrupted) base_result.error(Output.error_message_text("Interrupt"))
    else base_result

  val heap_digest =
    if (final_result.ok && do_store && store.output_heap(session_name).is_file)
      Some(Sessions.write_heap_digest(store.output_heap(session_name)))
    else None

  (final_result, heap_digest)
}
}
diff --git a/src/Pure/term.scala b/src/Pure/term.scala
--- a/src/Pure/term.scala
+++ b/src/Pure/term.scala
@@ -1,306 +1,307 @@
/* Title: Pure/term.scala
Author: Makarius
Lambda terms, types, sorts.
Note: Isabelle/ML is the primary environment for logical operations.
*/
package isabelle
object Term
{
/* types and terms */
sealed case class Indexname(name: String, index: Int = 0)
{
  // Cache the hash code once computed (these values are used as map keys).
  private lazy val hash: Int = (name, index).hashCode()
  override def hashCode(): Int = hash

  override def toString: String =
    if (index == -1) name
    else {
      // If the base name ends in a plain digit the index must be separated by
      // a dot to stay unambiguous; a subscripted digit does not need one.
      val needs_dot =
        Symbol.explode(name).reverse match {
          case _ :: s :: _ if s == Symbol.sub_decoded || s == Symbol.sub => false
          case c :: _ => Symbol.is_digit(c)
          case _ => true
        }
      if (!needs_dot && index == 0) "?" + name
      else if (!needs_dot) "?" + name + index
      else "?" + name + "." + index
    }
}
// Type classes are identified by name; a sort is a list (intersection) of classes.
type Class = String
type Sort = List[Class]

// Isabelle types: constructor application, free variable, schematic variable.
sealed abstract class Typ
case class Type(name: String, args: List[Typ] = Nil) extends Typ
{
  // Cache the hash code once computed.
  private lazy val hash: Int = ("Type", name, args).hashCode()
  override def hashCode(): Int = hash

  // The dummy type prints as "_"; argument list is omitted when empty.
  override def toString: String =
    if (this == dummyT) "_"
    else {
      val args_text = if (args.isEmpty) "" else "," + args
      "Type(" + name + args_text + ")"
    }
}
case class TFree(name: String, sort: Sort = Nil) extends Typ
{
  // Cache the hash code once computed.
  private lazy val hash: Int = ("TFree", name, sort).hashCode()
  override def hashCode(): Int = hash

  override def toString: String =
  {
    val sort_text = if (sort.isEmpty) "" else "," + sort
    "TFree(" + name + sort_text + ")"
  }
}

case class TVar(name: Indexname, sort: Sort = Nil) extends Typ
{
  // Cache the hash code once computed.
  private lazy val hash: Int = ("TVar", name, sort).hashCode()
  override def hashCode(): Int = hash

  override def toString: String =
  {
    val sort_text = if (sort.isEmpty) "" else "," + sort
    "TVar(" + name + sort_text + ")"
  }
}
// The distinguished pseudo-type for omitted/unknown type information.
val dummyT: Type = Type("dummy")
// Lambda terms; `head` strips iterated application down to the operator.
sealed abstract class Term
{
  def head: Term =
    this match {
      case App(fun, _) => fun.head
      case t => t
    }
}
// Atomic terms; each caches its hash code once computed, and elides default
// type information when printing.
case class Const(name: String, typargs: List[Typ] = Nil) extends Term
{
  private lazy val hash: Int = ("Const", name, typargs).hashCode()
  override def hashCode(): Int = hash

  override def toString: String =
    if (this == dummy) "_"
    else {
      val typargs_text = if (typargs.isEmpty) "" else "," + typargs
      "Const(" + name + typargs_text + ")"
    }
}

case class Free(name: String, typ: Typ = dummyT) extends Term
{
  private lazy val hash: Int = ("Free", name, typ).hashCode()
  override def hashCode(): Int = hash

  override def toString: String =
  {
    val typ_text = if (typ == dummyT) "" else "," + typ
    "Free(" + name + typ_text + ")"
  }
}

case class Var(name: Indexname, typ: Typ = dummyT) extends Term
{
  private lazy val hash: Int = ("Var", name, typ).hashCode()
  override def hashCode(): Int = hash

  override def toString: String =
  {
    val typ_text = if (typ == dummyT) "" else "," + typ
    "Var(" + name + typ_text + ")"
  }
}
// Bound variable — the Int index suggests de-Bruijn numbering (confirm
// against Isabelle/ML term representation).
case class Bound(index: Int) extends Term

// Lambda abstraction over a typed variable.
case class Abs(name: String, typ: Typ, body: Term) extends Term
{
  // Cache the hash code once computed.
  private lazy val hash: Int = ("Abs", name, typ, body).hashCode()
  override def hashCode(): Int = hash
}
case class App(fun: Term, arg: Term) extends Term
{
  // Cache the hash code once computed.
  private lazy val hash: Int = ("App", fun, arg).hashCode()
  override def hashCode(): Int = hash

  // Print type-class membership terms via the OFCLASS abbreviation.
  override def toString: String =
    OFCLASS.unapply(this) match {
      case Some((ty, c)) => "OFCLASS(" + ty + "," + c + ")"
      case None => "App(" + fun + "," + arg + ")"
    }
}
// Placeholder term for omitted parts of a term.
def dummy_pattern(ty: Typ): Term = Const("Pure.dummy_pattern", List(ty))
val dummy: Term = dummy_pattern(dummyT)

// Proof terms; MinProof stands for an omitted/minimal proof.
sealed abstract class Proof
case object MinProof extends Proof
case class PBound(index: Int) extends Proof
// Proof term constructors; each caches its hash code once computed, in the
// same style as the Term constructors above.
case class Abst(name: String, typ: Typ, body: Proof) extends Proof
{
  private lazy val hash: Int = ("Abst", name, typ, body).hashCode()
  override def hashCode(): Int = hash
}
case class AbsP(name: String, hyp: Term, body: Proof) extends Proof
{
  private lazy val hash: Int = ("AbsP", name, hyp, body).hashCode()
  override def hashCode(): Int = hash
}
case class Appt(fun: Proof, arg: Term) extends Proof
{
  private lazy val hash: Int = ("Appt", fun, arg).hashCode()
  override def hashCode(): Int = hash
}
case class AppP(fun: Proof, arg: Proof) extends Proof
{
  private lazy val hash: Int = ("AppP", fun, arg).hashCode()
  override def hashCode(): Int = hash
}
case class Hyp(hyp: Term) extends Proof
{
  private lazy val hash: Int = ("Hyp", hyp).hashCode()
  override def hashCode(): Int = hash
}
case class PAxm(name: String, types: List[Typ]) extends Proof
{
  private lazy val hash: Int = ("PAxm", name, types).hashCode()
  override def hashCode(): Int = hash
}
case class PClass(typ: Typ, cls: Class) extends Proof
{
  private lazy val hash: Int = ("PClass", typ, cls).hashCode()
  override def hashCode(): Int = hash
}
case class Oracle(name: String, prop: Term, types: List[Typ]) extends Proof
{
  private lazy val hash: Int = ("Oracle", name, prop, types).hashCode()
  override def hashCode(): Int = hash
}
case class PThm(serial: Long, theory_name: String, name: String, types: List[Typ]) extends Proof
{
  private lazy val hash: Int = ("PThm", serial, theory_name, name, types).hashCode()
  override def hashCode(): Int = hash
}
/* type classes within the logic */

object Class_Const
{
  // Constants named "c_class" denote class membership of class `c`.
  val suffix = "_class"
  def apply(c: Class): String = c + suffix
  def unapply(s: String): Option[Class] =
    if (s.endsWith(suffix)) Some(s.dropRight(suffix.length)) else None
}
object OFCLASS
{
  // Build terms expressing "ty is an instance of class c" (one per class of sort s).
  def apply(ty: Typ, s: Sort): List[Term] = s.map(c => apply(ty, c))
  def apply(ty: Typ, c: Class): Term =
    App(Const(Class_Const(c), List(ty)), Const(Pure_Thy.TYPE, List(ty)))
  // Recognize such terms; both type arguments must agree.
  def unapply(t: Term): Option[(Typ, String)] =
    t match {
      case App(Const(Class_Const(c), List(ty)), Const(Pure_Thy.TYPE, List(ty1)))
        if ty == ty1 => Some((ty, c))
      case _ => None
    }
}
/** cache **/
object Cache
{
def make(
    xz: XZ.Cache = XZ.Cache.make(),
    max_string: Int = isabelle.Cache.default_max_string,
    initial_size: Int = isabelle.Cache.default_initial_size): Cache =
  new Cache(xz, initial_size, max_string)  // NOTE(review): argument order (initial_size, max_string) disagrees with the Cache constructor (max_string, initial_size) — looks swapped; confirm upstream

val none: Cache = make(max_string = 0)
}

class Cache(xz: XZ.Cache, max_string: Int, initial_size: Int)
  extends XML.Cache(xz, max_string, initial_size)
{
// Intern an indexname: reuse a previously stored equal value when possible.
protected def cache_indexname(x: Indexname): Indexname =
  lookup(x) getOrElse store(Indexname(cache_string(x.name), x.index))

// Intern a sort; the empty sort is shared as Nil without a table entry.
protected def cache_sort(x: Sort): Sort =
  if (x.isEmpty) Nil
  else lookup(x) getOrElse store(x.map(cache_string))
// Intern a type bottom-up; the distinguished dummyT is shared directly.
protected def cache_typ(x: Typ): Typ =
{
  if (x == dummyT) dummyT
  else
    lookup(x) match {
      case Some(y) => y
      case None =>
        x match {
          case Type(name, args) => store(Type(cache_string(name), cache_typs(args)))
          case TFree(name, sort) => store(TFree(cache_string(name), cache_sort(sort)))
          case TVar(name, sort) => store(TVar(cache_indexname(name), cache_sort(sort)))
        }
    }
}
// Intern a list of types; the empty list is shared as Nil.
protected def cache_typs(x: List[Typ]): List[Typ] =
  if (x.isEmpty) Nil
  else lookup(x) getOrElse store(x.map(cache_typ))
// Intern a term bottom-up; Bound is stored as-is (it carries only an index).
protected def cache_term(x: Term): Term =
{
  lookup(x) match {
    case Some(y) => y
    case None =>
      x match {
        case Const(name, typargs) => store(Const(cache_string(name), cache_typs(typargs)))
        case Free(name, typ) => store(Free(cache_string(name), cache_typ(typ)))
        case Var(name, typ) => store(Var(cache_indexname(name), cache_typ(typ)))
        case Bound(_) => store(x)
        case Abs(name, typ, body) =>
          store(Abs(cache_string(name), cache_typ(typ), cache_term(body)))
        case App(fun, arg) => store(App(cache_term(fun), cache_term(arg)))
      }
  }
}
// Intern a proof term; the MinProof constant is shared directly.
protected def cache_proof(x: Proof): Proof =
{
  if (x == MinProof) MinProof
  else {
    lookup(x) match {
      case Some(y) => y
      case None =>
        // @unchecked: MinProof was already handled above, so the remaining
        // constructors are exhaustive here.
        (x: @unchecked) match {
          case PBound(_) => store(x)
          case Abst(name, typ, body) =>
            store(Abst(cache_string(name), cache_typ(typ), cache_proof(body)))
          case AbsP(name, hyp, body) =>
            store(AbsP(cache_string(name), cache_term(hyp), cache_proof(body)))
          case Appt(fun, arg) => store(Appt(cache_proof(fun), cache_term(arg)))
          case AppP(fun, arg) => store(AppP(cache_proof(fun), cache_proof(arg)))
          case Hyp(hyp) => store(Hyp(cache_term(hyp)))
          case PAxm(name, types) => store(PAxm(cache_string(name), cache_typs(types)))
          case PClass(typ, cls) => store(PClass(cache_typ(typ), cache_string(cls)))
          case Oracle(name, prop, types) =>
            store(Oracle(cache_string(name), cache_term(prop), cache_typs(types)))
          case PThm(serial, theory_name, name, types) =>
            store(PThm(serial, cache_string(theory_name), cache_string(name), cache_typs(types)))
        }
    }
  }
}
// main methods

// Common wrapper: bypass interning entirely for the no-op cache, otherwise
// serialize access to the underlying table while interning.
private def cached[A](x: A)(intern: A => A): A =
  if (no_cache) x else synchronized { intern(x) }

def indexname(x: Indexname): Indexname = cached(x)(cache_indexname)
def sort(x: Sort): Sort = cached(x)(cache_sort)
def typ(x: Typ): Typ = cached(x)(cache_typ)
def term(x: Term): Term = cached(x)(cache_term)
def proof(x: Proof): Proof = cached(x)(cache_proof)
def position(x: Position.T): Position.T =
  cached(x)(_.map({ case (a, b) => (cache_string(a), cache_string(b)) }))
}
}