diff --git a/src/Pure/PIDE/resources.scala b/src/Pure/PIDE/resources.scala
--- a/src/Pure/PIDE/resources.scala
+++ b/src/Pure/PIDE/resources.scala
@@ -1,451 +1,454 @@
/* Title: Pure/PIDE/resources.scala
Author: Makarius
Resources for theories and auxiliary files.
*/
package isabelle
import scala.util.parsing.input.Reader
import java.io.{File => JFile}
object Resources
{
def empty: Resources =
new Resources(Sessions.Structure.empty, Sessions.Structure.empty.bootstrap)
+
+ def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name =
+ empty.file_node(file, dir = dir, theory = theory)
}
class Resources(
val sessions_structure: Sessions.Structure,
val session_base: Sessions.Base,
val log: Logger = No_Logger,
command_timings: List[Properties.T] = Nil)
{
resources =>
/* init session */
/* Encode the static session environment as a YXML string.
   The pair/list nesting of the encoder mirrors, positionally, the nested
   tuple constructed below — presumably decoded symmetrically on the
   consumer side (ML), so keep the nesting order stable (TODO confirm). */
def init_session_yxml: String =
{
  import XML.Encode._
  YXML.string_of_body(
    // Encoder shape: each pair(...) corresponds to one tuple component below.
    pair(list(pair(string, properties)),
    pair(list(pair(string, string)),
    pair(list(pair(string, list(string))),
    pair(list(properties),
    pair(list(pair(string, properties)),
    pair(list(pair(string, pair(bool, properties))),
    pair(list(pair(string, string)), list(string))))))))(
  (sessions_structure.session_positions,
  (sessions_structure.dest_session_directories,
  (sessions_structure.bibtex_entries,
  (command_timings,
  (Command_Span.load_commands.map(cmd => (cmd.name, cmd.position)),
  (Scala.functions.map(fun => (fun.name, (fun.multi, fun.position))),
  (session_base.global_theories.toList,
   session_base.loaded_theories.keys)))))))))
}
/* file formats */
def make_theory_name(name: Document.Node.Name): Option[Document.Node.Name] =
File_Format.registry.get(name).flatMap(_.make_theory_name(resources, name))
def make_theory_content(thy_name: Document.Node.Name): Option[String] =
File_Format.registry.get_theory(thy_name).flatMap(_.make_theory_content(resources, thy_name))
/* A node is hidden from presentation unless it is a proper theory:
   non-theory nodes, the ROOT name, and file-format pseudo-theories
   all count as hidden. */
def is_hidden(name: Document.Node.Name): Boolean =
{
  val visible =
    name.is_theory &&
    name.theory != Sessions.root_name &&
    !File_Format.registry.is_theory(name)
  !visible
}
def html_document(snapshot: Document.Snapshot): Option[Presentation.HTML_Document] =
File_Format.registry.get(snapshot.node_name).flatMap(_.html_document(snapshot))
/* file-system operations */
/* Resolve source_path against the given directory string and yield the
   fully expanded, imploded result. */
def append(dir: String, source_path: Path): String =
{
  val full_path = Path.explode(dir) + source_path
  full_path.expand.implode
}
def append(node_name: Document.Node.Name, source_path: Path): String =
append(node_name.master_dir, source_path)
/* Make a document node name for a file: the node is the file resolved
   against dir, and the master directory is the file's parent directory
   resolved the same way. */
def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name =
{
  val node = append(dir, file)
  val master_dir = append(dir, file.dir)
  Document.Node.Name(node, master_dir, theory)
}
def loaded_theory_node(theory: String): Document.Node.Name =
Document.Node.Name(theory, "", theory)
/* source files of Isabelle/ML bootstrap */
/* Locate a source file of the Isabelle/ML bootstrap.

   Names that are not well-formed paths are passed through unchanged;
   well-formed but invalid names yield None. Relative paths are tried
   under ~~/src/Pure first, then under $ML_SOURCES (if that variable is
   set); if no candidate file exists, the explored path itself is used. */
def source_file(raw_name: String): Option[String] =
{
  if (Path.is_wellformed(raw_name)) {
    if (Path.is_valid(raw_name)) {
      def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None

      val path = Path.explode(raw_name)
      val path1 =
        if (path.is_absolute || path.is_current) check(path)
        else {
          check(Path.explode("~~/src/Pure") + path) orElse
          (if (Isabelle_System.getenv("ML_SOURCES") == "") None
           else check(Path.explode("$ML_SOURCES") + path))
        }
      // Fall back to the original path when no candidate file exists.
      Some(File.platform_path(path1 getOrElse path))
    }
    else None
  }
  else Some(raw_name)  // not a path at all: pass through unchanged
}
/* theory files */
/* Prepare lazy parsing of load commands in a theory file.

   The file is read eagerly (raw text plus UTF-8 flag), but decoding and
   span parsing happen only when the returned closure is invoked, and only
   if the raw text mentions a load command at all (cheap pre-check). */
def load_commands(syntax: Outer_Syntax, name: Document.Node.Name)
  : () => List[Command_Span.Span] =
{
  val (is_utf8, raw_text) =
    with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
  () =>
    {
      if (syntax.has_load_commands(raw_text)) {
        val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
        syntax.parse_spans(text).filter(_.is_load_command(syntax))
      }
      else Nil
    }
}
/* Files referenced by the load commands of the given spans, resolved
   against the master directory of the theory node and expanded. */
def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name, spans: List[Command_Span.Span])
  : List[Path] =
{
  val dir = name.master_dir_path
  spans.flatMap(span =>
    span.loaded_files(syntax).files.map(file => (dir + Path.explode(file)).expand))
}
/* All files loaded by the Isabelle/Pure ML root theories
   (Thy_Header.ml_roots), resolved under ~~/src/Pure. */
def pure_files(syntax: Outer_Syntax): List[Path] =
{
  val pure_dir = Path.explode("~~/src/Pure")
  for {
    (name, theory) <- Thy_Header.ml_roots
    path = (pure_dir + Path.explode(name)).expand
    node_name = Document.Node.Name(path.implode, path.dir.implode, theory)
    file <- loaded_files(syntax, node_name, load_commands(syntax, node_name)())
  } yield file
}
/* Qualify a theory name by the session qualifier, unless it is already
   long-qualified or registered as a global theory. */
def theory_name(qualifier: String, theory: String): String =
{
  val keep_unqualified =
    Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory)
  if (keep_unqualified) theory else Long_Name.qualify(qualifier, theory)
}
/* Locate the .thy file for a theory name within the directories of its
   qualifying session, if any such file exists on disk. */
def find_theory_node(theory: String): Option[Document.Node.Name] =
{
  val thy_file = Path.basic(Long_Name.base_name(theory)).thy
  val session = session_base.theory_qualifier(theory)
  val dirs =
    sessions_structure.get(session) match {
      case Some(info) => info.dirs
      case None => Nil
    }
  // The first session directory that actually contains the file wins.
  dirs.collectFirst({
    case dir if (dir + thy_file).is_file => file_node(dir + thy_file, theory = theory) })
}
/* Resolve an import string s (as written in a theory header) to a
   document node name, relative to the given session qualifier and
   directory.

   Resolution order: non-base names are always file references; otherwise
   already-loaded theories win, then theories found via session
   directories; a qualified but unresolved name falls back to a loaded
   theory node, an unqualified one to a local file node. */
def import_name(qualifier: String, dir: String, s: String): Document.Node.Name =
{
  val theory = theory_name(qualifier, Thy_Header.import_name(s))
  def theory_node = file_node(Path.explode(s).thy, dir = dir, theory = theory)

  if (!Thy_Header.is_base_name(s)) theory_node
  else if (session_base.loaded_theory(theory)) loaded_theory_node(theory)
  else {
    find_theory_node(theory) match {
      case Some(node_name) => node_name
      case None => if (Long_Name.is_qualified(s)) loaded_theory_node(theory) else theory_node
    }
  }
}
def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
import_name(session_base.theory_qualifier(name), name.master_dir, s)
def import_name(info: Sessions.Info, s: String): Document.Node.Name =
import_name(info.name, info.dir.implode, s)
/* Find the theory node corresponding to a physical file, via the
   registered session directories; the candidate node is accepted only
   if it refers to the very same file. */
def find_theory(file: JFile): Option[Document.Node.Name] =
{
  for {
    qualifier <- session_base.session_directories.get(File.canonical(file).getParentFile)
    theory_base <- proper_string(Thy_Header.theory_name(file.getName))
    theory = theory_name(qualifier, theory_base)
    theory_node <- find_theory_node(theory)
    if File.eq(theory_node.path.file, file)
  } yield theory_node
}
/* Completion of theory import names: scan session directories (the
   context node's own directory for the context session) for theory
   headers matching prefix s; results from other sessions are qualified
   unless the theory is global. */
def complete_import_name(context_name: Document.Node.Name, s: String): List[String] =
{
  val context_session = session_base.theory_qualifier(context_name)
  val context_dir =
    try { Some(context_name.master_dir_path) }
    catch { case ERROR(_) => None }  // malformed master dir: just skip it
  (for {
    (session, (info, _)) <- sessions_structure.imports_graph.iterator
    dir <- (if (session == context_session) context_dir.toList else info.dirs).iterator
    theory <- Thy_Header.try_read_dir(dir).iterator
    if Completion.completed(s)(theory)
  } yield {
    if (session == context_session || session_base.global_theories.isDefinedAt(theory)) theory
    else Long_Name.qualify(session, theory)
  }).toList.sorted
}
/* Apply f to a byte reader over the theory file (closed afterwards via
   using); raise an error if the file does not exist. */
def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A =
{
  val path = name.path
  if (path.is_file) using(Scan.byte_reader(path.file))(f)
  else {
    val msg =
      if (name.node == name.theory) "Cannot load theory " + quote(name.theory)
      else "Cannot load theory file " + path
    error(msg)
  }
}
/* Read and check the header of a theory file.

   Imports are resolved via import_name and rejected if their base name
   is excluded. Non-theory nodes and empty sources yield no_header. */
def check_thy(node_name: Document.Node.Name, reader: Reader[Char],
  command: Boolean = true, strict: Boolean = true): Document.Node.Header =
{
  if (node_name.is_theory && reader.source.length > 0) {
    try {
      val header = Thy_Header.read(node_name, reader, command = command, strict = strict)
      val imports =
        header.imports.map({ case (s, pos) =>
          val name = import_name(node_name, s)
          if (Sessions.exclude_theory(name.theory_base_name))
            error("Bad theory name " + quote(name.theory_base_name) + Position.here(pos))
          (name, pos)
        })
      Document.Node.Header(imports, header.keywords, header.abbrevs)
    }
    // NOTE(review): deliberately catches Throwable so that *any* failure
    // becomes a bad header for this node instead of aborting the caller.
    catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
  }
  else Document.Node.no_header
}
/* special header */
/* Synthetic headers for special theory names: ROOT imports the sessions
   theory, ML roots import ML_BOOTSTRAP, bootstrap theories import Pure;
   everything else gets None. */
def special_header(name: Document.Node.Name): Option[Document.Node.Header] =
{
  val imports =
    if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name))
    else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP))
    else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE))
    else Nil
  if (imports.isEmpty) None
  else Some(Document.Node.Header(imports.map((_, Position.none))))
}
/* blobs */
/* Blobs referenced by load commands that have no defined content yet,
   skipping nodes of already-loaded theories. */
def undefined_blobs(nodes: Document.Nodes): List[Document.Node.Name] =
  (for {
    (node_name, node) <- nodes.iterator
    if !session_base.loaded_theory(node_name)
    cmd <- node.load_commands.iterator
    name <- cmd.blobs_undefined.iterator
  } yield name).toList
/* document changes */
def parse_change(
reparse_limit: Int,
previous: Document.Version,
doc_blobs: Document.Blobs,
edits: List[Document.Edit_Text],
consolidate: List[Document.Node.Name]): Session.Change =
Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate)
def commit(change: Session.Change): Unit = {}
/* theory and file dependencies */
def dependencies(
thys: List[(Document.Node.Name, Position.T)],
progress: Progress = new Progress): Dependencies[Unit] =
Dependencies.empty[Unit].require_thys((), thys, progress = progress)
def session_dependencies(info: Sessions.Info, progress: Progress = new Progress)
: Dependencies[Options] =
{
info.theories.foldLeft(Dependencies.empty[Options]) {
case (dependencies, (options, thys)) =>
dependencies.require_thys(options,
for { (thy, pos) <- thys } yield (import_name(info, thy), pos),
progress = progress)
}
}
/* Companion of Dependencies: empty value and error-message helpers. */
object Dependencies
{
  def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty)

  // Render a dependency chain as quoted theory names joined by " via ".
  private def show_path(names: List[Document.Node.Name]): String =
    names.iterator.map(name => quote(name.theory)).mkString(" via ")

  private def cycle_msg(names: List[Document.Node.Name]): String =
    "Cyclic dependency of " + show_path(names)

  // Suffix naming the (reversed) chain of requiring theories, if any.
  private def required_by(initiators: List[Document.Node.Name]): String =
    initiators match {
      case Nil => ""
      case _ => "\n(required by " + show_path(initiators.reverse) + ")"
    }
}
final class Dependencies[A] private(
rev_entries: List[Document.Node.Entry],
seen: Map[Document.Node.Name, A])
{
private def cons(entry: Document.Node.Entry): Dependencies[A] =
new Dependencies[A](entry :: rev_entries, seen)
/* Require a single theory: mark it as seen, then (unless it is already
   a loaded theory) read and check its header and recursively require
   its imports. Cycles and header failures are recorded as bad-header
   entries rather than propagated. */
def require_thy(adjunct: A,
  thy: (Document.Node.Name, Position.T),
  initiators: List[Document.Node.Name] = Nil,
  progress: Progress = new Progress): Dependencies[A] =
{
  val (name, pos) = thy

  // Context message appended to any errors raised for this theory.
  def message: String =
    "The error(s) above occurred for theory " + quote(name.theory) +
      Dependencies.required_by(initiators) + Position.here(pos)

  if (seen.isDefinedAt(name)) this
  else {
    // Register the theory up front, so re-visits short-circuit above.
    val dependencies1 = new Dependencies[A](rev_entries, seen + (name -> adjunct))
    if (session_base.loaded_theory(name)) dependencies1
    else {
      try {
        if (initiators.contains(name)) error(Dependencies.cycle_msg(initiators))
        progress.expose_interrupt()
        val header =
          try {
            with_thy_reader(name, check_thy(name, _, command = false)).cat_errors(message)
          }
          catch { case ERROR(msg) => cat_error(msg, message) }
        val entry = Document.Node.Entry(name, header)
        // Require imports first, then push this entry on top.
        dependencies1.require_thys(adjunct, header.imports_pos,
          initiators = name :: initiators, progress = progress).cons(entry)
      }
      catch {
        // Any failure yields a bad-header entry instead of aborting.
        case e: Throwable =>
          dependencies1.cons(Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))))
      }
    }
  }
}
def require_thys(adjunct: A,
thys: List[(Document.Node.Name, Position.T)],
progress: Progress = new Progress,
initiators: List[Document.Node.Name] = Nil): Dependencies[A] =
thys.foldLeft(this)(_.require_thy(adjunct, _, progress = progress, initiators = initiators))
def entries: List[Document.Node.Entry] = rev_entries.reverse
def theories: List[Document.Node.Name] = entries.map(_.name)
def theories_adjunct: List[(Document.Node.Name, A)] = theories.map(name => (name, seen(name)))
def errors: List[String] = entries.flatMap(_.header.errors)
def check_errors: Dependencies[A] =
errors match {
case Nil => this
case errs => error(cat_lines(errs))
}
/* Graph over all theory node names: regular entries plus "irregular"
   imports that have no entry of their own (added as isolated nodes so
   the graph is closed under imports). */
lazy val theory_graph: Document.Node.Name.Graph[Unit] =
{
  val regular = theories.toSet
  val irregular =
    (for {
      entry <- entries.iterator
      imp <- entry.header.imports
      if !regular(imp)
    } yield imp).toSet
  Document.Node.Name.make_graph(
    irregular.toList.map(name => ((name, ()), Nil)) :::
    entries.map(entry => ((entry.name, ()), entry.header.imports)))
}
/* Extend the session's graph of loaded theories with all entries,
   computing the merged outer syntax stored at each theory node. */
lazy val loaded_theories: Graph[String, Outer_Syntax] =
  entries.foldLeft(session_base.loaded_theories) {
    case (graph, entry) =>
      val name = entry.name.theory
      val imports = entry.header.imports.map(_.theory)

      // Ensure all nodes exist, then add edges imports -> name.
      val graph1 = (name :: imports).foldLeft(graph)(_.default_node(_, Outer_Syntax.empty))
      val graph2 = imports.foldLeft(graph1)(_.add_edge(_, name))

      // Pure additionally starts from the bootstrap syntax; every node
      // merges its own and its immediate predecessors' syntax, plus the
      // keywords/abbrevs of this entry's header.
      val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil
      val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node)
      val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header
      graph2.map_node(name, _ => syntax)
  }
def get_syntax(name: Document.Node.Name): Outer_Syntax =
loaded_theories.get_node(name.theory)
def load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
theories.zip(
Par_List.map((e: () => List[Command_Span.Span]) => e(),
theories.map(name => resources.load_commands(get_syntax(name), name))))
.filter(p => p._2.nonEmpty)
def loaded_files(name: Document.Node.Name, spans: List[Command_Span.Span])
: (String, List[Path]) =
{
val theory = name.theory
val syntax = get_syntax(name)
val files1 = resources.loaded_files(syntax, name, spans)
val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil
(theory, files1 ::: files2)
}
def loaded_files: List[Path] =
for {
(name, spans) <- load_commands
file <- loaded_files(name, spans)._2
} yield file
/* Source files of all predecessor theories that belong to the already
   loaded session base, including their known loaded files. */
def imported_files: List[Path] =
{
  val base_theories =
    loaded_theories.all_preds(theories.map(_.theory)).
      filter(session_base.loaded_theories.defined)
  base_theories.map(theory => session_base.known_theories(theory).name.path) :::
    base_theories.flatMap(session_base.known_loaded_files.withDefaultValue(Nil))
}
lazy val overall_syntax: Outer_Syntax =
Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node))
override def toString: String = entries.toString
}
}
diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala
--- a/src/Pure/Thy/presentation.scala
+++ b/src/Pure/Thy/presentation.scala
@@ -1,639 +1,637 @@
/* Title: Pure/Thy/presentation.scala
Author: Makarius
HTML presentation of PIDE document content.
*/
package isabelle
import scala.annotation.tailrec
import scala.collection.immutable.SortedMap
import scala.collection.mutable
object Presentation
{
/** HTML documents **/
/* HTML context */
sealed case class HTML_Document(title: String, content: String)
def html_context(cache: Term.Cache = Term.Cache.make()): HTML_Context =
new HTML_Context(cache)
final class HTML_Context private[Presentation](val cache: Term.Cache)
{
private val already_presented = Synchronized(Set.empty[String])
def register_presented(nodes: List[Document.Node.Name]): List[Document.Node.Name] =
already_presented.change_result(presented =>
(nodes.filterNot(name => presented.contains(name.theory)),
presented ++ nodes.iterator.map(_.theory)))
private val theory_cache = Synchronized(Map.empty[String, Export_Theory.Theory])
/* Memoize export theories by name: make_thy (by-name) is evaluated only
   when thy_name is not yet cached, inside the synchronized update. */
def cache_theory(thy_name: String, make_thy: => Export_Theory.Theory): Export_Theory.Theory =
{
  theory_cache.change_result(thys =>
    {
      thys.get(thy_name) match {
        case Some(thy) => (thy, thys)
        case None =>
          val thy = make_thy
          (thy, thys + (thy_name -> thy))
      }
    })
}
def head(title: String, rest: XML.Body = Nil): XML.Tree =
HTML.div("head", HTML.chapter(title) :: rest)
def source(body: XML.Body): XML.Tree = HTML.pre("source", body)
def contents(heading: String, items: List[XML.Body], css_class: String = "contents")
: List[XML.Elem] =
{
if (items.isEmpty) Nil
else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items))))
}
def html_document(title: String, body: XML.Body, fonts_css: String): HTML_Document =
{
val content =
HTML.output_document(
List(
HTML.style(fonts_css + "\n\n" + File.read(HTML.isabelle_css)),
HTML.title(title)),
List(HTML.source(body)), css = "", structural = false)
HTML_Document(title, content)
}
}
/* presentation elements */
sealed case class Elements(
html: Markup.Elements = Markup.Elements.empty,
entity: Markup.Elements = Markup.Elements.empty,
language: Markup.Elements = Markup.Elements.empty)
val elements1: Elements =
Elements(
html = Rendering.foreground_elements ++ Rendering.text_color_elements +
Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE,
entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT,
Markup.CLASS, Markup.LOCALE, Markup.FREE))
val elements2: Elements =
Elements(
html = elements1.html ++ Rendering.markdown_elements,
language = Markup.Elements(Markup.Language.DOCUMENT))
/* formal entities */
type Entity = Export_Theory.Entity[Export_Theory.No_Content]
object Entity_Context
{
val empty: Entity_Context = new Entity_Context
def make(
- resources: Resources,
session: String,
deps: Sessions.Deps,
node: Document.Node.Name,
theory_exports: Map[String, Export_Theory.Theory]): Entity_Context =
new Entity_Context {
private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] =
{
body match {
case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
case _ =>
Some {
val entities =
theory_exports.get(node.theory).flatMap(_.entity_by_range.get(range))
.getOrElse(Nil)
val body1 =
if (seen_ranges.contains(range)) {
HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body))
}
else HTML.span(body)
entities.map(_.kname).foldLeft(body1) {
case (elem, id) => HTML.entity_def(HTML.span(HTML.id(id), List(elem)))
}
}
}
}
private def offset_id(range: Text.Range): String =
"offset_" + range.start + ".." + range.stop
private def physical_ref(thy_name: String, props: Properties.T): Option[String] =
{
for {
range <- Position.Def_Range.unapply(props)
if thy_name == node.theory
} yield {
seen_ranges += range
offset_id(range)
}
}
private def logical_ref(thy_name: String, kind: String, name: String): Option[String] =
for {
thy <- theory_exports.get(thy_name)
entity <- thy.entity_by_kind_name.get((kind, name))
} yield entity.kname
override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] =
{
(props, props, props, props, props) match {
case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file), _, _) =>
- val node_name = resources.file_node(Path.explode(thy_file), theory = theory)
+ val node_name = Resources.file_node(Path.explode(thy_file), theory = theory)
node_relative(deps, session, node_name).map(html_dir =>
HTML.link(html_dir + html_name(node_name), body))
case (Markup.Ref(_), Position.Def_File(def_file), Position.Def_Theory(def_theory),
Markup.Kind(kind), Markup.Name(name)) =>
val file_path = Path.explode(def_file)
val proper_thy_name =
proper_string(def_theory) orElse
(if (File.eq(node.path, file_path)) Some(node.theory) else None)
for {
thy_name <- proper_thy_name
- node_name = resources.file_node(file_path, theory = thy_name)
+ node_name = Resources.file_node(file_path, theory = thy_name)
html_dir <- node_relative(deps, session, node_name)
html_file = node_file(node_name)
html_ref <-
logical_ref(thy_name, kind, name) orElse physical_ref(thy_name, props)
} yield {
HTML.entity_ref(HTML.link(html_dir + html_file + "#" + html_ref, body))
}
case _ => None
}
}
}
}
class Entity_Context
{
def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = None
def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = None
}
/* HTML output */
private val div_elements =
Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name,
HTML.descr.name)
def make_html(
entity_context: Entity_Context,
elements: Elements,
xml: XML.Body): XML.Body =
{
def html_div(html: XML.Body): Boolean =
html exists {
case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
case XML.Text(_) => false
}
def html_class(c: String, html: XML.Body): XML.Body =
if (c == "") html
else if (html_div(html)) List(HTML.div(c, html))
else List(HTML.span(c, html))
def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
val (res1, offset) = html_body_single(tree, end_offset1)
(res1 ++ res, offset)
}
@tailrec
def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
xml_tree match {
case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
val (body1, offset) = html_body(body, end_offset)
if (elements.entity(kind)) {
entity_context.make_ref(props, body1) match {
case Some(link) => (List(link), offset)
case None => (body1, offset)
}
}
else (body1, offset)
case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
val (body1, offset) = html_body(body, end_offset)
(html_class(if (elements.language(name)) name else "", body1), offset)
case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
val (body1, offset) = html_body(body, end_offset)
(List(HTML.par(body1)), offset)
case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
val (body1, offset) = html_body(body, end_offset)
(List(HTML.item(body1)), offset)
case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
(Nil, end_offset - XML.symbol_length(text))
case XML.Elem(Markup.Markdown_List(kind), body) =>
val (body1, offset) = html_body(body, end_offset)
if (kind == Markup.ENUMERATE) (List(HTML.enum(body1)), offset)
else (List(HTML.list(body1)), offset)
case XML.Elem(markup, body) =>
val name = markup.name
val (body1, offset) = html_body(body, end_offset)
val html =
markup.properties match {
case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
html_class(kind, body1)
case _ =>
body1
}
Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
case Some(c) => (html_class(c.toString, html), offset)
case None => (html_class(name, html), offset)
}
case XML.Text(text) =>
val offset = end_offset - Symbol.length(text)
val body = HTML.text(Symbol.decode(text))
entity_context.make_def(Text.Range(offset, end_offset), body) match {
case Some(body1) => (List(body1), offset)
case None => (body, offset)
}
}
html_body(xml, XML.symbol_length(xml) + 1)._1
}
/* PIDE HTML document */
/* Produce an HTML document for an up-to-date document snapshot: either
   a plain-text rendering, a file-format specific document (if the
   registered file format provides one), or generic markup-based HTML. */
def html_document(
  resources: Resources,
  snapshot: Document.Snapshot,
  html_context: HTML_Context,
  elements: Elements,
  plain_text: Boolean = false,
  fonts_css: String = HTML.fonts_css()): HTML_Document =
{
  require(!snapshot.is_outdated, "document snapshot outdated")

  val name = snapshot.node_name
  if (plain_text) {
    val title = "File " + Symbol.cartouche_decoded(name.path.file_name)
    val body = HTML.text(snapshot.node.source)
    html_context.html_document(title, body, fonts_css)
  }
  else {
    // File formats may supply their own rendering; otherwise fall back
    // to generic markup-based HTML via make_html.
    resources.html_document(snapshot) getOrElse {
      val title =
        if (name.is_theory) "Theory " + quote(name.theory_base_name)
        else "File " + Symbol.cartouche_decoded(name.path.file_name)
      val xml = snapshot.xml_markup(elements = elements.html)
      val body = make_html(Entity_Context.empty, elements, xml)
      html_context.html_document(title, body, fonts_css)
    }
  }
}
/** HTML presentation **/
/* presentation context */
object Context
{
val none: Context = new Context { def enabled: Boolean = false }
val standard: Context = new Context { def enabled: Boolean = true }
def dir(path: Path): Context =
new Context {
def enabled: Boolean = true
override def dir(store: Sessions.Store): Path = path
}
def make(s: String): Context =
if (s == ":") standard else dir(Path.explode(s))
}
abstract class Context private
{
def enabled: Boolean
def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info
def dir(store: Sessions.Store): Path = store.presentation_dir
def dir(store: Sessions.Store, info: Sessions.Info): Path =
dir(store) + Path.explode(info.chapter_session)
}
/* maintain chapter index */
private val sessions_path = Path.basic(".sessions")
/* Read the chapter's session index file as (name, description) pairs;
   an absent file yields the empty list. */
private def read_sessions(dir: Path): List[(String, String)] =
{
  val path = dir + sessions_path
  if (!path.is_file) Nil
  else {
    import XML.Decode._
    list(pair(string, string))(Symbol.decode_yxml(File.read(path)))
  }
}
def update_chapter(
presentation_dir: Path, chapter: String, new_sessions: List[(String, String)]): Unit =
{
val dir = Isabelle_System.make_directory(presentation_dir + Path.basic(chapter))
val sessions0 =
try { read_sessions(dir) }
catch { case _: XML.Error => Nil }
val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList
File.write(dir + sessions_path,
{
import XML.Encode._
YXML.string_of_body(list(pair(string, string))(sessions))
})
val title = "Isabelle/" + chapter + " sessions"
HTML.write_document(dir, "index.html",
List(HTML.title(title + Isabelle_System.isabelle_heading())),
HTML.chapter(title) ::
(if (sessions.isEmpty) Nil
else
List(HTML.div("sessions",
List(HTML.description(
sessions.map({ case (name, description) =>
val descr = Symbol.trim_blank_lines(description)
(List(HTML.link(name + "/index.html", HTML.text(name))),
if (descr == "") Nil
else HTML.break ::: List(HTML.pre(HTML.text(descr)))) })))))),
base = Some(presentation_dir))
}
def update_root(presentation_dir: Path): Unit =
{
Isabelle_System.make_directory(presentation_dir)
HTML.init_fonts(presentation_dir)
Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"),
presentation_dir + Path.explode("isabelle.gif"))
val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library"
File.write(presentation_dir + Path.explode("index.html"),
HTML.header +
"""
""" + HTML.head_meta + """
""" + title + """
""" + File.read(Path.explode("~~/lib/html/library_index_content.template")) +
"""
""" + HTML.footer)
}
/* present session */
val session_graph_path = Path.explode("session_graph.pdf")
val readme_path = Path.explode("README.html")
val files_path = Path.explode("files")
def html_name(name: Document.Node.Name): String = Path.explode(name.theory_base_name).html.implode
def html_path(path: Path): String = (files_path + path.squash.html).implode
private def node_file(node: Document.Node.Name): String =
if (node.node.endsWith(".thy")) html_name(node) else html_path(Path.explode(node.node))
/* Relative path from session0's presentation directory to session1's,
   if both sessions are known to the structure. */
private def session_relative(deps: Sessions.Deps, session0: String, session1: String): Option[String] =
  deps.sessions_structure.get(session0).flatMap(info0 =>
    deps.sessions_structure.get(session1).map(info1 => info0.relative_path(info1)))
def node_relative(
deps: Sessions.Deps,
session0: String,
node_name: Document.Node.Name): Option[String] =
{
val session1 = deps(session0).theory_qualifier(node_name)
session_relative(deps, session0, session1)
}
/* HTML link to a theory in its qualifying session's directory, relative
   to session0, with an optional "#anchor" fragment; None if either
   session is unknown. */
def theory_link(deps: Sessions.Deps, session0: String,
  name: Document.Node.Name, body: XML.Body, anchor: Option[String] = None): Option[XML.Tree] =
{
  val session1 = deps(session0).theory_qualifier(name)
  val info0 = deps.sessions_structure.get(session0)
  val info1 = deps.sessions_structure.get(session1)
  val fragment = if (anchor.isDefined) "#" + anchor.get else ""
  if (info0.isDefined && info1.isDefined) {
    Some(HTML.link(info0.get.relative_path(info1.get) + html_name(name) + fragment, body))
  }
  else None
}
def session_html(
- resources: Resources,
session: String,
deps: Sessions.Deps,
db_context: Sessions.Database_Context,
progress: Progress = new Progress,
verbose: Boolean = false,
html_context: HTML_Context,
session_elements: Elements,
presentation: Context): Unit =
{
val hierarchy = deps.sessions_structure.hierarchy(session)
val info = deps.sessions_structure(session)
val options = info.options
val base = deps(session)
def make_session_dir(name: String): Path =
Isabelle_System.make_directory(
presentation.dir(db_context.store, deps.sessions_structure(name)))
val session_dir = make_session_dir(session)
val presentation_dir = presentation.dir(db_context.store)
Bytes.write(session_dir + session_graph_path,
graphview.Graph_File.make_pdf(options, base.session_graph_display))
val documents =
for {
doc <- info.document_variants
document <- db_context.input_database(session)(Document_Build.read_document(_, _, doc.name))
} yield {
val doc_path = (session_dir + doc.path.pdf).expand
if (verbose) progress.echo("Presenting document " + session + "/" + doc.name)
if (options.bool("document_echo")) progress.echo("Document at " + doc_path)
Bytes.write(doc_path, document.pdf)
doc
}
val view_links =
{
val deps_link =
HTML.link(session_graph_path, HTML.text("theory dependencies"))
val readme_links =
if ((info.dir + readme_path).is_file) {
Isabelle_System.copy_file(info.dir + readme_path, session_dir + readme_path)
List(HTML.link(readme_path, HTML.text("README")))
}
else Nil
val document_links =
documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
Library.separate(HTML.break ::: HTML.nl,
(deps_link :: readme_links ::: document_links).
map(link => HTML.text("View ") ::: List(link))).flatten
}
val all_used_theories = hierarchy.reverse.flatMap(a => deps(a).used_theories.map(_._1))
val present_theories = html_context.register_presented(all_used_theories)
val theory_exports: Map[String, Export_Theory.Theory] =
(for (node <- all_used_theories.iterator) yield {
val thy_name = node.theory
val theory =
if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
else {
html_context.cache_theory(thy_name,
{
val provider = Export.Provider.database_context(db_context, hierarchy, thy_name)
if (Export_Theory.read_theory_parents(provider, thy_name).isDefined) {
Export_Theory.read_theory(
provider, session, thy_name, cache = html_context.cache)
}
else Export_Theory.no_theory
})
}
thy_name -> theory
}).toMap
def entity_context(name: Document.Node.Name): Entity_Context =
- Entity_Context.make(resources, session, deps, name, theory_exports)
+ Entity_Context.make(session, deps, name, theory_exports)
val theories: List[XML.Body] =
{
sealed case class Seen_File(
src_path: Path, file_name: String, thy_session: String, thy_name: Document.Node.Name)
{
def check(src_path1: Path, file_name1: String, thy_session1: String): Boolean =
(src_path == src_path1 || file_name == file_name1) && thy_session == thy_session1
}
var seen_files = List.empty[Seen_File]
sealed case class Theory(
name: Document.Node.Name,
command: Command,
files_html: List[(Path, XML.Tree)],
html: XML.Tree)
def read_theory(name: Document.Node.Name): Option[Theory] =
{
progress.expose_interrupt()
- for (command <- Build_Job.read_theory(db_context, resources, hierarchy, name.theory))
+ for (command <- Build_Job.read_theory(db_context, hierarchy, name.theory))
yield {
if (verbose) progress.echo("Presenting theory " + name)
val snapshot = Document.State.init.snippet(command)
val thy_elements =
session_elements.copy(entity =
theory_exports(name.theory).others.keySet.foldLeft(session_elements.entity)(_ + _))
val files_html =
for {
(src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
if xml.nonEmpty
}
yield {
progress.expose_interrupt()
if (verbose) progress.echo("Presenting file " + src_path)
(src_path, html_context.source(
make_html(entity_context(name), thy_elements, xml)))
}
val html =
html_context.source(
make_html(entity_context(name), thy_elements,
snapshot.xml_markup(elements = thy_elements.html)))
Theory(name, command, files_html, html)
}
}
(for (thy <- Par_List.map(read_theory, present_theories).flatten) yield {
val thy_session = base.theory_qualifier(thy.name)
val thy_dir = make_session_dir(thy_session)
val files =
for { (src_path, file_html) <- thy.files_html }
yield {
val file_name = html_path(src_path)
seen_files.find(_.check(src_path, file_name, thy_session)) match {
case None => seen_files ::= Seen_File(src_path, file_name, thy_session, thy.name)
case Some(seen_file) =>
error("Incoherent use of file name " + src_path + " as " + quote(file_name) +
" in theory " + seen_file.thy_name + " vs. " + thy.name)
}
val file_path = thy_dir + Path.explode(file_name)
val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
HTML.write_document(file_path.dir, file_path.file_name,
List(HTML.title(file_title)), List(html_context.head(file_title), file_html),
base = Some(presentation_dir))
List(HTML.link(file_name, HTML.text(file_title)))
}
val thy_title = "Theory " + thy.name.theory_base_name
HTML.write_document(thy_dir, html_name(thy.name),
List(HTML.title(thy_title)), List(html_context.head(thy_title), thy.html),
base = Some(presentation_dir))
if (thy_session == session) {
Some(
List(HTML.link(html_name(thy.name),
HTML.text(thy.name.theory_base_name) :::
(if (files.isEmpty) Nil else List(HTML.itemize(files))))))
}
else None
}).flatten
}
val title = "Session " + session
HTML.write_document(session_dir, "index.html",
List(HTML.title(title + Isabelle_System.isabelle_heading())),
html_context.head(title, List(HTML.par(view_links))) ::
html_context.contents("Theories", theories),
base = Some(presentation_dir))
}
}
diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala
+++ b/src/Pure/Tools/build.scala
@@ -1,685 +1,684 @@
/* Title: Pure/Tools/build.scala
Author: Makarius
Options: :folding=explicit:
Build and manage Isabelle sessions.
*/
package isabelle
import scala.collection.immutable.SortedSet
import scala.annotation.tailrec
object Build
{
/** auxiliary **/
/* persistent build info */
/* Persistent record of one finished build, as stored in the session database:
   digest of the session sources, heap digests of the imported (input) sessions,
   optional digest of the produced heap image, and the process return code. */
sealed case class Session_Info(
sources: String,
input_heaps: List[String],
output_heap: Option[String],
return_code: Int)
{
// a stored build counts as successful iff the prover process returned rc 0
def ok: Boolean = return_code == 0
}
/* queue with scheduling information */
private object Queue
{
/* past timings of a session: (per-command timing properties, total elapsed seconds) */
type Timings = (List[Properties.T], Double)

/* Read recorded timings from the session database, if present.
   Any database-level failure is reported as a warning and degrades to
   "no timings" rather than aborting the build. */
def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings =
{
val no_timings: Timings = (Nil, 0.0)
store.try_open_database(session_name) match {
case None => no_timings
case Some(db) =>
def ignore_error(msg: String) =
{
progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg))
no_timings
}
try {
val command_timings = store.read_command_timings(db, session_name)
val session_timing =
store.read_session_timing(db, session_name) match {
case Markup.Elapsed(t) => t
case _ => 0.0
}
(command_timings, session_timing)
}
catch {
// bad content, corrupted store, or malformed XML all degrade to no_timings
case ERROR(msg) => ignore_error(msg)
case exn: java.lang.Error => ignore_error(Exn.message(exn))
case _: XML.Error => ignore_error("")
}
finally { db.close() }
}
}

/* Estimate a scheduling weight per session: for non-maximal sessions this is
   the heaviest complete descendant chain (only chains whose timings are all
   known contribute); missing sessions default to 0.0. */
def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double])
: Map[String, Double] =
{
val maximals = sessions_structure.build_graph.maximals.toSet
def desc_timing(session_name: String): Double =
{
if (maximals.contains(session_name)) timing(session_name)
else {
val descendants = sessions_structure.build_descendants(List(session_name)).toSet
val g = sessions_structure.build_graph.restrict(descendants)
(0.0 :: g.maximals.flatMap(desc => {
val ps = g.all_preds(List(desc))
if (ps.exists(p => !timing.isDefinedAt(p))) None
else Some(ps.map(timing(_)).sum)
})).max
}
}
timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0)
}

/* Build the scheduling queue: sessions ordered by descending estimated
   timing, then descending timeout, then name (for a stable total order). */
def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store)
: Queue =
{
val graph = sessions_structure.build_graph
val names = graph.keys
val timings = names.map(name => (name, load_timings(progress, store, name)))
val command_timings =
timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil)
val session_timing =
make_session_timing(sessions_structure,
timings.map({ case (name, (_, t)) => (name, t) }).toMap)
object Ordering extends scala.math.Ordering[String]
{
// note reversed argument order: larger timing/timeout sorts first
def compare(name1: String, name2: String): Int =
session_timing(name2) compare session_timing(name1) match {
case 0 =>
sessions_structure(name2).timeout compare sessions_structure(name1).timeout match {
case 0 => name1 compare name2
case ord => ord
}
case ord => ord
}
}
new Queue(graph, SortedSet(names: _*)(Ordering), command_timings)
}
}
/* Scheduling queue over the build graph: `order` ranks sessions by estimated
   cost (see Queue.apply); `command_timings` provides recorded per-command
   timings for each session. */
private class Queue(
graph: Graph[String, Sessions.Info],
order: SortedSet[String],
val command_timings: String => List[Properties.T])
{
  /* a session is "inner" if some other session still builds on top of it */
  def is_inner(name: String): Boolean = !graph.is_maximal(name)

  def is_empty: Boolean = graph.is_empty

  /* remove a session from both the dependency graph and the ranking */
  def - (name: String): Queue =
    new Queue(graph.del_node(name), order - name, command_timings)

  /* Highest-ranked session that is ready (all dependencies done, i.e. minimal
     in the remaining graph) and not excluded by `skip`. */
  def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] =
    order.iterator
      .find(name => !skip(name) && graph.is_minimal(name))
      .map(name => (name, graph.get_node(name)))
}
/** build with results **/
/** Overall build results: for each session an optional process result
  * (None means the job was cancelled) together with its session info. */
class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)])
{
  def sessions: Set[String] = results.keySet
  def infos: List[Sessions.Info] = results.valuesIterator.map(_._2).toList

  // a session with no process result was cancelled before completion
  def cancelled(name: String): Boolean = results(name)._1.isEmpty

  def apply(name: String): Process_Result =
    results(name)._1 match {
      case Some(res) => res
      case None => Process_Result(1)
    }

  def get_info(name: String): Option[Sessions.Info] = results.get(name).map(_._2)
  def info(name: String): Sessions.Info = get_info(name).get

  /* overall return code: worst (max) rc over all sessions; a cancelled
     session counts as failure (rc 1) */
  val rc: Int =
    results.valuesIterator.
      map({ case (Some(r), _) => r.rc case (None, _) => 1 }).
      foldLeft(Process_Result.RC.ok)(_ max _)

  def ok: Boolean = rc == Process_Result.RC.ok

  override def toString: String = rc.toString
}
/* One-line progress message for a finished session, with resource timing. */
def session_finished(session_name: String, process_result: Process_Result): String =
  s"Finished $session_name (${process_result.timing.message_resources})"
/* One-line timing message for a session, taken from the parsed build log;
   thread count defaults to 1 when the timing properties do not record it. */
def session_timing(session_name: String, build_log: Build_Log.Session_Info): String =
{
  val props = build_log.session_timing
  val threads = Markup.Session_Timing.Threads.unapply(props).getOrElse(1)
  val factor = Markup.Timing_Properties.parse(props).message_factor
  s"Timing $session_name ($threads threads, $factor)"
}
/* Build the selected sessions (with dependencies), running up to `max_jobs`
   prover processes in parallel; returns the per-session Results.  Side
   effects: writes heaps/logs/databases to the store, optionally cleans
   previous output, exports files, and produces HTML/PDF presentation. */
def build(
options: Options,
selection: Sessions.Selection = Sessions.Selection.empty,
presentation: Presentation.Context = Presentation.Context.none,
progress: Progress = new Progress,
check_unknown_files: Boolean = false,
build_heap: Boolean = false,
clean_build: Boolean = false,
dirs: List[Path] = Nil,
select_dirs: List[Path] = Nil,
infos: List[Sessions.Info] = Nil,
numa_shuffling: Boolean = false,
max_jobs: Int = 1,
list_files: Boolean = false,
check_keywords: Set[String] = Set.empty,
fresh_build: Boolean = false,
no_build: Boolean = false,
soft_build: Boolean = false,
verbose: Boolean = false,
export_files: Boolean = false,
session_setup: (String, Session) => Unit = (_, _) => ()): Results =
{
// batch-mode option adjustments on top of the caller-supplied options
val build_options =
options +
"completion_limit=0" +
"editor_tracing_messages=0" +
"kodkod_scala=false" +
("pide_reports=" + options.bool("build_pide_reports"))
val store = Sessions.store(build_options)
Isabelle_Fonts.init()
/* session selection and dependencies */
val full_sessions =
Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos)
val full_sessions_selection = full_sessions.imports_selection(selection)
// combined digest of meta info + own sources + imported sources,
// used to decide whether a stored build is still current
def sources_stamp(deps: Sessions.Deps, session_name: String): String =
{
val digests =
full_sessions(session_name).meta_digest ::
deps.sources(session_name) :::
deps.imported_sources(session_name)
SHA1.digest_set(digests).toString
}
val deps =
{
val deps0 =
Sessions.deps(full_sessions.selection(selection),
progress = progress, inlined_files = true, verbose = verbose,
list_files = list_files, check_keywords = check_keywords).check_errors
// soft build: restrict the selection to sessions whose stored sources digest
// no longer matches (i.e. only observe source changes, not heap images)
if (soft_build && !fresh_build) {
val outdated =
deps0.sessions_structure.build_topological_order.flatMap(name =>
store.try_open_database(name) match {
case Some(db) =>
using(db)(store.read_build(_, name)) match {
case Some(build)
if build.ok && build.sources == sources_stamp(deps0, name) => None
case _ => Some(name)
}
case None => Some(name)
})
Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)),
progress = progress, inlined_files = true).check_errors
}
else deps0
}
/* check unknown files */
if (check_unknown_files) {
val source_files =
(for {
(_, base) <- deps.session_bases.iterator
(path, _) <- base.sources.iterator
} yield path).toList
val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file)
val unknown_files =
Mercurial.check_files(source_files)._2.
filterNot(path => exclude_files.contains(path.canonical_file))
if (unknown_files.nonEmpty) {
progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" +
unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", ""))
}
}
/* main build process */
val queue = Queue(progress, deps.sessions_structure, store)
store.prepare_output_dir()
if (clean_build) {
for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
val (relevant, ok) = store.clean_output(name)
if (relevant) {
if (ok) progress.echo("Cleaned " + name)
else progress.echo(name + " FAILED to clean")
}
}
}
// scheduler loop
// per-session outcome: `current` = stored build was reused; `process` = None
// means the job was cancelled
case class Result(
current: Boolean,
heap_digest: Option[String],
process: Option[Process_Result],
info: Sessions.Info)
{
def ok: Boolean =
process match {
case None => false
case Some(res) => res.ok
}
}
def sleep(): Unit =
Isabelle_Thread.interrupt_handler(_ => progress.stop()) { Time.seconds(0.5).sleep() }
val log =
build_options.string("system_log") match {
case "" => No_Logger
case "true" => Logger.make(progress)
case log_file => Logger.make(Some(Path.explode(log_file)))
}
val numa_nodes = new NUMA.Nodes(numa_shuffling)
// main scheduler: polls running jobs, finishes completed ones, starts new
// ones up to max_jobs; terminates when the pending queue is empty
@tailrec def loop(
pending: Queue,
running: Map[String, (List[String], Build_Job)],
results: Map[String, Result]): Map[String, Result] =
{
def used_node(i: Int): Boolean =
running.iterator.exists(
{ case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i })
if (pending.is_empty) results
else {
if (progress.stopped) {
for ((_, (_, job)) <- running) job.terminate()
}
running.find({ case (_, (_, job)) => job.is_finished }) match {
case Some((session_name, (input_heaps, job))) =>
//{{{ finish job
val (process_result, heap_digest) = job.join
val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
val process_result_tail =
{
val tail = job.info.options.int("process_output_tail")
process_result.copy(
out_lines =
"(see also " + store.output_log(session_name).file.toString + ")" ::
(if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
}
val build_log =
Build_Log.Log_File(session_name, process_result.out_lines).
parse_session_info(
command_timings = true,
theory_timings = true,
ml_statistics = true,
task_statistics = true)
// write log file
if (process_result.ok) {
File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
}
else File.write(store.output_log(session_name), terminate_lines(log_lines))
// write database
using(store.open_database(session_name, output = true))(db =>
store.write_session_info(db, session_name,
build_log =
if (process_result.timeout) build_log.error("Timeout") else build_log,
build =
Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest,
process_result.rc)))
// messages
process_result.err_lines.foreach(progress.echo)
if (process_result.ok) {
if (verbose) progress.echo(session_timing(session_name, build_log))
progress.echo(session_finished(session_name, process_result))
}
else {
progress.echo(session_name + " FAILED")
if (!process_result.interrupted) progress.echo(process_result_tail.out)
}
loop(pending - session_name, running - session_name,
results +
(session_name -> Result(false, heap_digest, Some(process_result_tail), job.info)))
//}}}
case None if running.size < (max_jobs max 1) =>
//{{{ check/start next job
pending.dequeue(running.isDefinedAt) match {
case Some((session_name, info)) =>
val ancestor_results =
deps.sessions_structure.build_requirements(List(session_name)).
filterNot(_ == session_name).map(results(_))
val ancestor_heaps = ancestor_results.flatMap(_.heap_digest)
val do_store =
build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name)
// reuse the stored build only if sources, input heaps and output heap
// all still match (and no fresh build was requested)
val (current, heap_digest) =
{
store.try_open_database(session_name) match {
case Some(db) =>
using(db)(store.read_build(_, session_name)) match {
case Some(build) =>
val heap_digest = store.find_heap_digest(session_name)
val current =
!fresh_build &&
build.ok &&
build.sources == sources_stamp(deps, session_name) &&
build.input_heaps == ancestor_heaps &&
build.output_heap == heap_digest &&
!(do_store && heap_digest.isEmpty)
(current, heap_digest)
case None => (false, None)
}
case None => (false, None)
}
}
val all_current = current && ancestor_results.forall(_.current)
if (all_current)
loop(pending - session_name, running,
results +
(session_name -> Result(true, heap_digest, Some(Process_Result(0)), info)))
else if (no_build) {
progress.echo_if(verbose, "Skipping " + session_name + " ...")
loop(pending - session_name, running,
results +
(session_name -> Result(false, heap_digest, Some(Process_Result(1)), info)))
}
else if (ancestor_results.forall(_.ok) && !progress.stopped) {
progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...")
store.clean_output(session_name)
using(store.open_database(session_name, output = true))(
store.init_session_info(_, session_name))
val numa_node = numa_nodes.next(used_node)
val job =
new Build_Job(progress, session_name, info, deps, store, do_store,
log, session_setup, numa_node, queue.command_timings(session_name))
loop(pending, running + (session_name -> (ancestor_heaps, job)), results)
}
else {
// some ancestor failed or the build was stopped
progress.echo(session_name + " CANCELLED")
loop(pending - session_name, running,
results + (session_name -> Result(false, heap_digest, None, info)))
}
case None => sleep(); loop(pending, running, results)
}
///}}}
case None => sleep(); loop(pending, running, results)
}
}
}
/* build results */
val results =
{
val results0 =
if (deps.is_empty) {
progress.echo_warning("Nothing to build")
Map.empty[String, Result]
}
else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) }
new Results(
(for ((name, result) <- results0.iterator)
yield (name, (result.process, result.info))).toMap)
}
if (export_files) {
for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) {
val info = results.info(name)
if (info.export_files.nonEmpty) {
progress.echo("Exporting " + info.name + " ...")
for ((dir, prune, pats) <- info.export_files) {
Export.export_files(store, name, info.dir + dir,
progress = if (verbose) progress else new Progress,
export_prune = prune,
export_patterns = pats)
}
}
}
}
if (results.rc != Process_Result.RC.ok && (verbose || !no_build)) {
val unfinished =
(for {
name <- results.sessions.iterator
if !results(name).ok
} yield name).toList.sorted
progress.echo("Unfinished session(s): " + commas(unfinished))
}
/* PDF/HTML presentation */
if (!no_build && !progress.stopped && results.ok) {
val selected = full_sessions_selection.toSet
val presentation_sessions =
(for {
session_name <- deps.sessions_structure.imports_topological_order.iterator
info <- results.get_info(session_name)
if selected(session_name) && presentation.enabled(info) && results(session_name).ok }
yield info).toList
if (presentation_sessions.nonEmpty) {
val presentation_dir = presentation.dir(store)
progress.echo("Presentation in " + presentation_dir.absolute)
Presentation.update_root(presentation_dir)
for ((chapter, infos) <- presentation_sessions.groupBy(_.chapter).iterator) {
val entries = infos.map(info => (info.name, info.description))
Presentation.update_chapter(presentation_dir, chapter, entries)
}
- val resources = Resources.empty
val html_context = Presentation.html_context(cache = store.cache)
using(store.open_database_context())(db_context =>
for (info <- presentation_sessions) {
progress.expose_interrupt()
progress.echo("Presenting " + info.name + " ...")
Presentation.session_html(
- resources, info.name, deps, db_context, progress = progress,
+ info.name, deps, db_context, progress = progress,
verbose = verbose, html_context = html_context,
Presentation.elements1, presentation = presentation)
})
}
}
results
}
/* Isabelle tool wrapper */
/* Command-line wrapper "isabelle build": parses options/arguments, delegates
   to build(), prints total timing, and exits with the overall return code. */
val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions",
Scala_Project.here, args =>
{
val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS"))
var base_sessions: List[String] = Nil
var select_dirs: List[Path] = Nil
var numa_shuffling = false
var presentation = Presentation.Context.none
var requirements = false
var soft_build = false
var exclude_session_groups: List[String] = Nil
var all_sessions = false
var build_heap = false
var clean_build = false
var dirs: List[Path] = Nil
var export_files = false
var fresh_build = false
var session_groups: List[String] = Nil
var max_jobs = 1
var check_keywords: Set[String] = Set.empty
var list_files = false
var no_build = false
var options = Options.init(opts = build_options)
var verbose = false
var exclude_sessions: List[String] = Nil
val getopts = Getopts("""
Usage: isabelle build [OPTIONS] [SESSIONS ...]
Options are:
-B NAME include session NAME and all descendants
-D DIR include session directory and select its sessions
-N cyclic shuffling of NUMA CPU nodes (performance tuning)
-P DIR enable HTML/PDF presentation in directory (":" for default)
-R refer to requirements of selected sessions
-S soft build: only observe changes of sources, not heap images
-X NAME exclude sessions from group NAME and all descendants
-a select all sessions
-b build heap images
-c clean build
-d DIR include session directory
-e export files from session specification into file-system
-f fresh build
-g NAME select session group NAME
-j INT maximum number of parallel jobs (default 1)
-k KEYWORD check theory sources for conflicts with proposed keywords
-l list session source files
-n no build -- test dependencies only
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-v verbose
-x NAME exclude session NAME and all descendants
Build and manage Isabelle sessions, depending on implicit settings:
""" + Library.indent_lines(2, Build_Log.Settings.show()) + "\n",
"B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
"D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
"N" -> (_ => numa_shuffling = true),
"P:" -> (arg => presentation = Presentation.Context.make(arg)),
"R" -> (_ => requirements = true),
"S" -> (_ => soft_build = true),
"X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
"a" -> (_ => all_sessions = true),
"b" -> (_ => build_heap = true),
"c" -> (_ => clean_build = true),
"d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
"e" -> (_ => export_files = true),
"f" -> (_ => fresh_build = true),
"g:" -> (arg => session_groups = session_groups ::: List(arg)),
"j:" -> (arg => max_jobs = Value.Int.parse(arg)),
"k:" -> (arg => check_keywords = check_keywords + arg),
"l" -> (_ => list_files = true),
"n" -> (_ => no_build = true),
"o:" -> (arg => options = options + arg),
"v" -> (_ => verbose = true),
"x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))
val sessions = getopts(args)
val progress = new Console_Progress(verbose = verbose)
val start_date = Date.now()
if (verbose) {
progress.echo(
"Started at " + Build_Log.print_date(start_date) +
" (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")")
progress.echo(Build_Log.Settings.show() + "\n")
}
// run the build with interrupts routed to progress.stop()
val results =
progress.interrupt_handler {
build(options,
selection = Sessions.Selection(
requirements = requirements,
all_sessions = all_sessions,
base_sessions = base_sessions,
exclude_session_groups = exclude_session_groups,
exclude_sessions = exclude_sessions,
session_groups = session_groups,
sessions = sessions),
presentation = presentation,
progress = progress,
check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME),
build_heap = build_heap,
clean_build = clean_build,
dirs = dirs,
select_dirs = select_dirs,
numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling),
max_jobs = max_jobs,
list_files = list_files,
check_keywords = check_keywords,
fresh_build = fresh_build,
no_build = no_build,
soft_build = soft_build,
verbose = verbose,
export_files = export_files)
}
val end_date = Date.now()
val elapsed_time = end_date.time - start_date.time
if (verbose) {
progress.echo("\nFinished at " + Build_Log.print_date(end_date))
}
// sum per-session timings, but report wall-clock elapsed time overall
val total_timing =
results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _).
copy(elapsed = elapsed_time)
progress.echo(total_timing.message_resources)
sys.exit(results.rc)
})
/* build logic image */
/* Ensure a logic image (e.g. "HOL") is built; returns the build return code.
   With strict = true a failed build raises an error instead. */
def build_logic(options: Options, logic: String,
progress: Progress = new Progress,
build_heap: Boolean = false,
dirs: List[Path] = Nil,
fresh: Boolean = false,
strict: Boolean = false): Int =
{
  val selection = Sessions.Selection.session(logic)
  // fast path: a dependency-only run (no_build) detects an up-to-date image
  val up_to_date =
    !fresh &&
      build(options, selection = selection, build_heap = build_heap,
        no_build = true, dirs = dirs).ok
  val rc =
    if (up_to_date) Process_Result.RC.ok
    else {
      progress.echo("Build started for Isabelle/" + logic + " ...")
      Build.build(options, selection = selection, progress = progress,
        build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc
    }
  if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic)
  else rc
}
}
diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala
--- a/src/Pure/Tools/build_job.scala
+++ b/src/Pure/Tools/build_job.scala
@@ -1,555 +1,552 @@
/* Title: Pure/Tools/build_job.scala
Author: Makarius
Build job running prover process, with rudimentary PIDE session.
*/
package isabelle
import java.util.HashMap
import scala.collection.mutable
object Build_Job
{
/* theory markup/messages from database */
/* Reconstruct a theory's command (source, markup, messages, blobs) from the
   exports stored in the session database; returns None if the stored
   document id/files are missing or malformed. */
def read_theory(
db_context: Sessions.Database_Context,
- resources: Resources,
session_hierarchy: List[String],
theory: String,
unicode_symbols: Boolean = false): Option[Command] =
{
// raw export entry for this theory
def read(name: String): Export.Entry =
db_context.get_export(session_hierarchy, theory, name)
// export entry decoded as YXML body (optionally with unicode symbol output)
def read_xml(name: String): XML.Body =
YXML.parse_body(
Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).uncompressed)),
cache = db_context.cache)
(read(Export.DOCUMENT_ID).text, split_lines(read(Export.FILES).text)) match {
case (Value.Long(id), thy_file :: blobs_files) =>
- val node_name = resources.file_node(Path.explode(thy_file), theory = theory)
+ val node_name = Resources.file_node(Path.explode(thy_file), theory = theory)
// prover messages, keyed by serial number
val results =
Command.Results.make(
for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
yield i -> elem)
val blobs =
blobs_files.map(file =>
{
val path = Path.explode(file)
- val name = resources.file_node(path)
+ val name = Resources.file_node(path)
val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path)
Command.Blob(name, src_path, None)
})
// blob markup exports are numbered starting at 1 (0 is the theory itself)
val blobs_xml =
for (i <- (1 to blobs.length).toList)
yield read_xml(Export.MARKUP + i)
val blobs_info =
Command.Blobs_Info(
for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml }
yield {
val text = XML.content(xml)
val chunk = Symbol.Text_Chunk(text)
val digest = SHA1.digest(Symbol.encode(text))
Exn.Res(Command.Blob(name, src_path, Some((digest, chunk))))
})
val thy_xml = read_xml(Export.MARKUP)
val thy_source = XML.content(thy_xml)
val markups_index =
Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob)
val markups =
Command.Markups.make(
for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
yield index -> Markup_Tree.from_XML(xml))
val command =
Command.unparsed(thy_source, theory = true, id = id, node_name = node_name,
blobs_info = blobs_info, results = results, markups = markups)
Some(command)
case _ => None
}
}
/* print messages */
/* Print prover messages recorded in the build database of a session,
   optionally restricted to the given theories; also prints stored build
   errors and a non-ok return code.  Raises an error if the database is
   missing or unknown theories are requested. */
def print_log(
options: Options,
session_name: String,
theories: List[String] = Nil,
verbose: Boolean = false,
progress: Progress = new Progress,
margin: Double = Pretty.default_margin,
breakgain: Double = Pretty.default_breakgain,
metric: Pretty.Metric = Symbol.Metric,
unicode_symbols: Boolean = false): Unit =
{
val store = Sessions.store(options)
- val resources = Resources.empty
- val session = new Session(options, resources)
+ val session = new Session(options, Resources.empty)
using(store.open_database_context())(db_context =>
{
// (used theories, build errors, return code) from the session database
val result =
db_context.input_database(session_name)((db, _) =>
{
val theories = store.read_theories(db, session_name)
val errors = store.read_errors(db, session_name)
store.read_build(db, session_name).map(info => (theories, errors, info.return_code))
})
result match {
case None => error("Missing build database for session " + quote(session_name))
case Some((used_theories, errors, rc)) =>
// reject requested theories that the session never processed
theories.filterNot(used_theories.toSet) match {
case Nil =>
case bad => error("Unknown theories " + commas_quote(bad))
}
val print_theories =
if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet)
for (thy <- print_theories) {
val thy_heading = "\nTheory " + quote(thy) + ":"
- read_theory(db_context, resources, List(session_name), thy,
- unicode_symbols = unicode_symbols)
+ read_theory(db_context, List(session_name), thy, unicode_symbols = unicode_symbols)
match {
case None => progress.echo(thy_heading + " MISSING")
case Some(command) =>
val snapshot = Document.State.init.snippet(command)
val rendering = new Rendering(snapshot, options, session)
// without -v, only print messages marked as exported
val messages =
rendering.text_messages(Text.Range.full)
.filter(message => verbose || Protocol.is_exported(message.info))
if (messages.nonEmpty) {
val line_document = Line.Document(command.source)
progress.echo(thy_heading)
for (Text.Info(range, elem) <- messages) {
// report positions as line numbers within the theory file
val line = line_document.position(range.start).line1
val pos = Position.Line_File(line, command.node_name.node)
progress.echo(
Protocol.message_text(elem, heading = true, pos = pos,
margin = margin, breakgain = breakgain, metric = metric))
}
}
}
}
if (errors.nonEmpty) {
val msg = Symbol.output(unicode_symbols, cat_lines(errors))
progress.echo("\nBuild errors:\n" + Output.error_message_text(msg))
}
if (rc != Process_Result.RC.ok) {
progress.echo("\n" + Process_Result.RC.print_long(rc))
}
}
})
}
/* Isabelle tool wrapper */
/* Command-line wrapper "isabelle log": parses options and delegates to
   print_log for exactly one session argument. */
val isabelle_tool = Isabelle_Tool("log", "print messages from build database",
Scala_Project.here, args =>
{
/* arguments */
var unicode_symbols = false
var theories: List[String] = Nil
var margin = Pretty.default_margin
var options = Options.init()
var verbose = false
val getopts = Getopts("""
Usage: isabelle log [OPTIONS] SESSION
Options are:
-T NAME restrict to given theories (multiple options possible)
-U output Unicode symbols
-m MARGIN margin for pretty printing (default: """ + margin + """)
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-v print all messages, including information etc.
Print messages from the build database of the given session, without any
checks against current sources: results from a failed build can be
printed as well.
""",
"T:" -> (arg => theories = theories ::: List(arg)),
"U" -> (_ => unicode_symbols = true),
"m:" -> (arg => margin = Value.Double.parse(arg)),
"o:" -> (arg => options = options + arg),
"v" -> (_ => verbose = true))
val more_args = getopts(args)
// exactly one SESSION argument is required
val session_name =
more_args match {
case List(session_name) => session_name
case _ => getopts.usage()
}
val progress = new Console_Progress()
print_log(options, session_name, theories = theories, verbose = verbose, margin = margin,
progress = progress, unicode_symbols = unicode_symbols)
})
}
class Build_Job(progress: Progress,
session_name: String,
val info: Sessions.Info,
deps: Sessions.Deps,
store: Sessions.Store,
do_store: Boolean,
log: Logger,
session_setup: (String, Session) => Unit,
val numa_node: Option[Int],
command_timings0: List[Properties.T])
{
val options: Options = NUMA.policy_options(info.options, numa_node)
private val sessions_structure = deps.sessions_structure
private val future_result: Future[Process_Result] =
Future.thread("build", uninterruptible = true) {
val parent = info.parent.getOrElse("")
val base = deps(parent)
val result_base = deps(session_name)
val env = new HashMap(Isabelle_System.settings())
env.put("ISABELLE_ML_DEBUGGER", options.bool("ML_debugger").toString)
val is_pure = Sessions.is_pure(session_name)
val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil
val eval_store =
if (do_store) {
(if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) :::
List("ML_Heap.save_child " +
ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name))))
}
else Nil
val resources =
new Resources(sessions_structure, base, log = log, command_timings = command_timings0)
val session =
new Session(options, resources) {
override val cache: Term.Cache = store.cache
override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
{
result_base.load_commands.get(name.expand) match {
case Some(spans) =>
val syntax = result_base.theory_syntax(name)
Command.build_blobs_info(syntax, name, spans)
case None => Command.Blobs_Info.none
}
}
}
object Build_Session_Errors
{
private val promise: Promise[List[String]] = Future.promise
def result: Exn.Result[List[String]] = promise.join_result
def cancel(): Unit = promise.cancel()
def apply(errs: List[String]): Unit =
{
try { promise.fulfill(errs) }
catch { case _: IllegalStateException => }
}
}
val export_consumer =
Export.consumer(store.open_database(session_name, output = true), store.cache,
progress = progress)
val stdout = new StringBuilder(1000)
val stderr = new StringBuilder(1000)
val command_timings = new mutable.ListBuffer[Properties.T]
val theory_timings = new mutable.ListBuffer[Properties.T]
val session_timings = new mutable.ListBuffer[Properties.T]
val runtime_statistics = new mutable.ListBuffer[Properties.T]
val task_statistics = new mutable.ListBuffer[Properties.T]
def fun(
name: String,
acc: mutable.ListBuffer[Properties.T],
unapply: Properties.T => Option[Properties.T]): (String, Session.Protocol_Function) =
{
name -> ((msg: Prover.Protocol_Output) =>
unapply(msg.properties) match {
case Some(props) => acc += props; true
case _ => false
})
}
session.init_protocol_handler(new Session.Protocol_Handler
{
override def exit(): Unit = Build_Session_Errors.cancel()
private def build_session_finished(msg: Prover.Protocol_Output): Boolean =
{
val (rc, errors) =
try {
val (rc, errs) =
{
import XML.Decode._
pair(int, list(x => x))(Symbol.decode_yxml(msg.text))
}
val errors =
for (err <- errs) yield {
val prt = Protocol_Message.expose_no_reports(err)
Pretty.string_of(prt, metric = Symbol.Metric)
}
(rc, errors)
}
catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) }
session.protocol_command("Prover.stop", rc.toString)
Build_Session_Errors(errors)
true
}
private def loading_theory(msg: Prover.Protocol_Output): Boolean =
msg.properties match {
case Markup.Loading_Theory(Markup.Name(name)) =>
progress.theory(Progress.Theory(name, session = session_name))
false
case _ => false
}
private def export_(msg: Prover.Protocol_Output): Boolean =
msg.properties match {
case Protocol.Export(args) =>
export_consumer(session_name, args, msg.chunk)
true
case _ => false
}
override val functions =
List(
Markup.Build_Session_Finished.name -> build_session_finished,
Markup.Loading_Theory.name -> loading_theory,
Markup.EXPORT -> export_,
fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply),
fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply),
fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply))
})
session.command_timings += Session.Consumer("command_timings")
{
case Session.Command_Timing(props) =>
for {
elapsed <- Markup.Elapsed.unapply(props)
elapsed_time = Time.seconds(elapsed)
if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold")
} command_timings += props.filter(Markup.command_timing_property)
}
session.runtime_statistics += Session.Consumer("ML_statistics")
{
case Session.Runtime_Statistics(props) => runtime_statistics += props
}
/* For each finished theory (unless the build was stopped): export the
   document snapshot's derived artifacts — document id, source files,
   markup (per blob and for the theory itself), messages, and citations.
   Both the outer and the per-export stopped-checks make this best-effort
   under interruption. */
session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories")
{
case snapshot =>
if (!progress.stopped) {
val rendering = new Rendering(snapshot, options, session)
/* Export an XML body under the snapshot's theory name; empty bodies
   are silently dropped. */
def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit =
{
if (!progress.stopped) {
val theory_name = snapshot.node_name.theory
val args =
Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress)
val bytes = Bytes(Symbol.encode(YXML.string_of_body(xml)))
if (!bytes.is_empty) export_consumer(session_name, args, bytes)
}
}
/* Convenience wrapper for plain-text exports. */
def export_text(name: String, text: String, compress: Boolean = true): Unit =
export_(name, List(XML.Text(text)), compress = compress)
for (command <- snapshot.snippet_command) {
export_text(Export.DOCUMENT_ID, command.id.toString, compress = false)
}
export_text(Export.FILES,
cat_lines(snapshot.node_files.map(_.symbolic.node)), compress = false)
/* Markup blobs are numbered from 1; index 0 (Export.MARKUP without
   suffix) is reserved for the theory itself below. */
for (((_, xml), i) <- snapshot.xml_markup_blobs().zipWithIndex) {
export_(Export.MARKUP + (i + 1), xml)
}
export_(Export.MARKUP, snapshot.xml_markup())
export_(Export.MESSAGES, snapshot.messages.map(_._1))
val citations = Library.distinct(rendering.citations(Text.Range.full).map(_.info))
export_text(Export.DOCUMENT_CITATIONS, cat_lines(citations))
}
}
/* Route prover output: system messages to the log, stdout/stderr into the
   corresponding buffers, and process exit into the build-error channel. */
session.all_messages += Session.Consumer[Any]("build_session_output")
{
case msg: Prover.Output =>
val message = msg.message
if (msg.is_system) resources.log(Protocol.message_text(message))
if (msg.is_stdout) {
stdout ++= Symbol.encode(XML.content(message))
}
else if (msg.is_stderr) {
stderr ++= Symbol.encode(XML.content(message))
}
else if (msg.is_exit) {
/* Unexpected termination: record an error with the process return
   code, if available in the message properties. */
val err =
"Prover terminated" +
(msg.properties match {
case Markup.Process_Result(result) => ": " + result.print_rc
case _ => ""
})
Build_Session_Errors(List(err))
}
case _ =>
}
/* Caller-provided hook for additional session setup. */
session_setup(session_name, session)

/* Start the underlying Isabelle/ML process for this session build. */
val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store)
val process =
Isabelle_Process.start(session, options, sessions_structure, store,
logic = parent, raw_ml_system = is_pure,
use_prelude = use_prelude, eval_main = eval_main,
cwd = info.dir.file, env = env)

/* Initiate the build via the protocol and wait for its errors; an
   interrupt terminates the process, a startup failure is turned into a
   single-message error result. */
val build_errors =
Isabelle_Thread.interrupt_handler(_ => process.terminate()) {
Exn.capture { process.await_startup() } match {
case Exn.Res(_) =>
val resources_yxml = resources.init_session_yxml
val encode_options: XML.Encode.T[Options] =
options => session.prover_options(options).encode
/* Encode (session_name, theories) as YXML for the protocol command. */
val args_yxml =
YXML.string_of_body(
{
import XML.Encode._
pair(string, list(pair(encode_options, list(pair(string, properties)))))(
(session_name, info.theories))
})
session.protocol_command("build_session", resources_yxml, args_yxml)
Build_Session_Errors.result
case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn)))
}
}
/* Wait for process shutdown (terminating on interrupt), then stop the
   session and flush the export consumer. */
val process_result =
Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() }
session.stop()
val export_errors =
export_consumer.shutdown(close = true).map(Output.error_message_text)

/* Build session documents, but only after a successful build
   (no build errors, process ok) and if any documents are configured.
   Document build failures are captured as (log_lines, error messages). */
val (document_output, document_errors) =
try {
if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty) {
using(store.open_database_context())(db_context =>
{
val documents =
Document_Build.build_documents(
Document_Build.context(session_name, deps, db_context, progress = progress),
output_sources = info.document_output,
output_pdf = info.document_output)
db_context.output_database(session_name)(db =>
documents.foreach(_.write(db, session_name)))
(documents.flatMap(_.log_lines), Nil)
})
}
else (Nil, Nil)
}
catch {
case exn: Document_Build.Build_Error => (exn.log_lines, List(exn.message))
case Exn.Interrupt.ERROR(msg) => (Nil, List(msg))
}
/* Assemble the final process result: augment the raw process output with
   timing/statistics markers and document output, and fold export/document
   errors into the return code. */
val result =
{
/* Index reported theory timings by theory name. */
val theory_timing =
theory_timings.iterator.map(
{ case props @ Markup.Name(name) => name -> props }).toMap
/* One timing entry per used theory; theories without a reported timing
   get a bare name-only entry. */
val used_theory_timings =
for { (name, _) <- deps(session_name).used_theories }
yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory))
val more_output =
Library.trim_line(stdout.toString) ::
command_timings.toList.map(Protocol.Command_Timing_Marker.apply) :::
used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) :::
session_timings.toList.map(Protocol.Session_Timing_Marker.apply) :::
runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) :::
task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) :::
document_output
process_result.output(more_output)
.error(Library.trim_line(stderr.toString))
.errors_rc(export_errors ::: document_errors)
}

/* Final outcome: build errors (plus document errors) force an error rc
   with the messages in the output; otherwise a stopped-but-ok or
   interrupted build is downgraded to an interrupt rc.
   NOTE(review): document_errors already contributed to result.errors_rc
   above and are appended again here — looks intentional (rc vs. printed
   messages), but worth confirming. */
build_errors match {
case Exn.Res(build_errs) =>
val errs = build_errs ::: document_errors
if (errs.nonEmpty) {
result.error_rc.output(
errs.flatMap(s => split_lines(Output.error_message_text(s))) :::
errs.map(Protocol.Error_Message_Marker.apply))
}
else if (progress.stopped && result.ok) result.copy(rc = Process_Result.RC.interrupt)
else result
case Exn.Exn(Exn.Interrupt()) =>
if (result.ok) result.copy(rc = Process_Result.RC.interrupt)
else result
case Exn.Exn(exn) => throw exn
}
}
/* Cancel the running build job. */
def terminate(): Unit = future_result.cancel()
def is_finished: Boolean = future_result.is_finished

/* Schedule termination after the session's timeout, unless timeouts are
   ignored for this session. */
private val timeout_request: Option[Event_Timer.Request] =
{
if (info.timeout_ignored) None
else Some(Event_Timer.request(Time.now() + info.timeout) { terminate() })
}
/* Wait for the build job and return its (possibly amended) process result
   together with the digest of the stored heap image, if any. */
def join: (Process_Result, Option[String]) =
{
  val joined = future_result.join

  /* A timeout request that can no longer be cancelled has already fired,
     i.e. the job was terminated due to timeout. */
  val was_timeout = timeout_request.exists(request => !request.cancel())

  /* Amend a failed result with the reason for failure. */
  val final_result =
    if (joined.ok) joined
    else if (was_timeout) joined.error(Output.error_message_text("Timeout")).timeout_rc
    else if (joined.interrupted) joined.error(Output.error_message_text("Interrupt"))
    else joined

  val heap_digest =
    if (final_result.ok && do_store && store.output_heap(session_name).is_file) {
      Some(Sessions.write_heap_digest(store.output_heap(session_name)))
    }
    else None

  (final_result, heap_digest)
}
}
diff --git a/src/Pure/Tools/profiling_report.scala b/src/Pure/Tools/profiling_report.scala
--- a/src/Pure/Tools/profiling_report.scala
+++ b/src/Pure/Tools/profiling_report.scala
@@ -1,86 +1,85 @@
/* Title: Pure/Tools/profiling_report.scala
Author: Makarius
Report Poly/ML profiling information from session build database.
*/
package isabelle
object Profiling_Report
{
/* Report Poly/ML profiling information from the build database of the
   given session, optionally restricted to specific theories and with
   optionally cleaned (normalized) function names. */
def profiling_report(
options: Options,
session: String,
theories: List[String] = Nil,
clean_name: Boolean = false,
progress: Progress = new Progress): Unit =
{
val store = Sessions.store(options)
- val resources = Resources.empty
using(store.open_database_context())(db_context =>
{
val result =
db_context.input_database(session)((db, name) => Some(store.read_theories(db, name)))
result match {
case None => error("Missing build database for session " + quote(session))
case Some(used_theories) =>
/* Reject requested theories that are not part of the session. */
theories.filterNot(used_theories.toSet) match {
case Nil =>
case bad => error("Unknown theories " + commas_quote(bad))
}
/* Collect ML profiling reports from the stored theory snapshots. */
val reports =
(for {
thy <- used_theories.iterator
if theories.isEmpty || theories.contains(thy)
- command <- Build_Job.read_theory(db_context, resources, List(session), thy).iterator
+ command <- Build_Job.read_theory(db_context, List(session), thy).iterator
snapshot = Document.State.init.snippet(command)
(Protocol.ML_Profiling(report), _) <- snapshot.messages.iterator
} yield if (clean_name) report.clean_name else report).toList
for (report <- ML_Profiling.account(reports)) progress.echo(report.print)
}
})
}
/* Isabelle tool wrapper */
/* Isabelle tool wrapper: command-line interface for profiling_report. */
val isabelle_tool =
Isabelle_Tool("profiling_report", "report Poly/ML profiling information from log files",
Scala_Project.here, args =>
{
var theories: List[String] = Nil
var clean_name = false
var options = Options.init()
val getopts =
Getopts("""
Usage: isabelle profiling_report [OPTIONS] SESSION
Options are:
-T NAME restrict to given theories (multiple options possible)
-c clean function names
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
Report Poly/ML profiling from the build database of the given session
(without up-to-date check of sources).
""",
"T:" -> (arg => theories = theories ::: List(arg)),
"c" -> (_ => clean_name = true),
"o:" -> (arg => options = options + arg))
val more_args = getopts(args)
/* Exactly one session name is required. */
val session_name =
more_args match {
case List(session_name) => session_name
case _ => getopts.usage()
}
val progress = new Console_Progress()
profiling_report(options, session_name, theories = theories,
clean_name = clean_name, progress = progress)
})
}