diff --git a/src/Pure/Thy/export.scala b/src/Pure/Thy/export.scala --- a/src/Pure/Thy/export.scala +++ b/src/Pure/Thy/export.scala @@ -1,522 +1,530 @@ /* Title: Pure/Thy/export.scala Author: Makarius Manage theory exports: compressed blobs. */ package isabelle import scala.annotation.tailrec import scala.util.matching.Regex object Export { /* artefact names */ val DOCUMENT_ID = "PIDE/document_id" val FILES = "PIDE/files" val MARKUP = "PIDE/markup" val MESSAGES = "PIDE/messages" val DOCUMENT_PREFIX = "document/" val DOCUMENT_LATEX = DOCUMENT_PREFIX + "latex" val DOCUMENT_CITATIONS = DOCUMENT_PREFIX + "citations" val THEORY_PREFIX: String = "theory/" val PROOFS_PREFIX: String = "proofs/" val THEORY_PARENTS: String = THEORY_PREFIX + "parents" def explode_name(s: String): List[String] = space_explode('/', s) def implode_name(elems: Iterable[String]): String = elems.mkString("/") /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val theory_name = SQL.Column.string("theory_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val executable = SQL.Column.bool("executable") val compressed = SQL.Column.bool("compressed") val body = SQL.Column.bytes("body") val table = SQL.Table("isabelle_exports", List(session_name, theory_name, name, executable, compressed, body)) def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (theory_name == "") "" else " AND " + Data.theory_name.equal(theory_name)) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def compound_name(a: String, b: String): String = if (a.isEmpty) b else a + ":" + b sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") { val compound_name: String = Export.compound_name(theory, name) def make_path(prune: Int = 0): Path = { val elems = theory :: space_explode('/', name) if (elems.length < prune + 1) { error("Cannot prune path by " + prune + " element(s): " + Path.make(elems)) } else Path.make(elems.drop(prune)) } def readable(db: SQL.Database): Boolean = { val select = Data.table.select(List(Data.name), Data.where_equal(session, theory, name)) db.using_statement(select)(stmt => stmt.execute_query().next()) } def read(db: SQL.Database, cache: XML.Cache): Option[Entry] = { val select = Data.table.select(List(Data.executable, Data.compressed, Data.body), Data.where_equal(session, theory, name)) db.using_statement(select) { stmt => val res = stmt.execute_query() if (res.next()) { val executable = res.bool(Data.executable) val compressed = res.bool(Data.compressed) val bytes = res.bytes(Data.body) val body = Future.value(compressed, bytes) Some(Entry(this, executable, body, cache)) } else None } } } def read_theory_names(db: SQL.Database, session_name: String): List[String] = { val select = Data.table.select(List(Data.theory_name), Data.where_equal(session_name, name = THEORY_PARENTS)) + " ORDER BY " + Data.theory_name db.using_statement(select)(stmt => stmt.execute_query().iterator(_.string(Data.theory_name)).toList) } def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] = { val select = Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) + " ORDER BY " + Data.theory_name + ", " + Data.name db.using_statement(select)(stmt => stmt.execute_query().iterator(res => Entry_Name(session = session_name, theory = res.string(Data.theory_name), name = 
res.string(Data.name))).toList) } def message(msg: String, theory_name: String, name: String): String = msg + " " + quote(name) + " for theory " + quote(theory_name) def empty_entry(theory_name: String, name: String): Entry = Entry(Entry_Name(theory = theory_name, name = name), false, Future.value(false, Bytes.empty), XML.Cache.none) sealed case class Entry( entry_name: Entry_Name, executable: Boolean, body: Future[(Boolean, Bytes)], cache: XML.Cache ) { def session_name: String = entry_name.session def theory_name: String = entry_name.theory def name: String = entry_name.name override def toString: String = name def compound_name: String = entry_name.compound_name def name_has_prefix(s: String): Boolean = name.startsWith(s) val name_elems: List[String] = explode_name(name) def name_extends(elems: List[String]): Boolean = name_elems.startsWith(elems) && name_elems != elems def text: String = uncompressed.text def uncompressed: Bytes = { val (compressed, bytes) = body.join if (compressed) bytes.uncompress(cache = cache.xz) else bytes } def uncompressed_yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(uncompressed), cache = cache) def write(db: SQL.Database): Unit = { val (compressed, bytes) = body.join db.using_statement(Data.table.insert()) { stmt => stmt.string(1) = session_name stmt.string(2) = theory_name stmt.string(3) = name stmt.bool(4) = executable stmt.bool(5) = compressed stmt.bytes(6) = bytes stmt.execute() } } } def make_regex(pattern: String): Regex = { @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex = chs match { case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest) case '*' :: rest => make("[^:/]*" :: result, depth, rest) case '?' :: rest => make("[^:/]" :: result, depth, rest) case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest) case '{' :: rest => make("(" :: result, depth + 1, rest) case ',' :: rest if depth > 0 => make("|" :: result, depth, rest) case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest) case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest) case c :: rest => make(c.toString :: result, depth, rest) case Nil => result.reverse.mkString.r } make(Nil, 0, pattern.toList) } def make_matcher(pats: List[String]): Entry_Name => Boolean = { val regs = pats.map(make_regex) (entry_name: Entry_Name) => regs.exists(_.pattern.matcher(entry_name.compound_name).matches) } def make_entry( session_name: String, args: Protocol.Export.Args, bytes: Bytes, cache: XML.Cache ): Entry = { val body = if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.xz)) else Future.value((false, bytes)) val entry_name = Entry_Name(session = session_name, theory = args.theory_name, name = args.name) Entry(entry_name, args.executable, body, cache) } - /* specific entries */ - - def read_document_id(read: String => Entry): Option[Long] = - read(DOCUMENT_ID).text match { - case Value.Long(id) => Some(id) - case _ => None - } - - def read_files(read: String => Entry): Option[(String, List[String])] = - split_lines(read(FILES).text) match { - case thy_file :: blobs_files => Some((thy_file, blobs_files)) - case Nil => None - } - - /* database consumer thread */ def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer = new Consumer(db, cache, progress) class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) { private val errors = Synchronized[List[String]](Nil) private val consumer = Consumer_Thread.fork_bulk[(Entry, 
Boolean)](name = "export")(
        bulk = { case (entry, _) => entry.body.is_finished },
        consume =
          { (args: List[(Entry, Boolean)]) =>
            val results =
              db.transaction {
                for ((entry, strict) <- args)
                yield {
                  if (progress.stopped) {
                    entry.body.cancel()
                    Exn.Res(())
                  }
                  else if (entry.entry_name.readable(db)) {
                    if (strict) {
                      val msg = message("Duplicate export", entry.theory_name, entry.name)
                      errors.change(msg :: _)
                    }
                    Exn.Res(())
                  }
                  else Exn.capture { entry.write(db) }
                }
              }
            (results, true)
          })

    def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
      if (!progress.stopped && !body.is_empty) {
        consumer.send(Export.make_entry(session_name, args, body, cache) -> args.strict)
      }
    }

    def shutdown(close: Boolean = false): List[String] = {
      consumer.shutdown()
      if (close) db.close()
      errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil)
    }
  }


  /* context for retrieval */

  def context(db_context: Sessions.Database_Context): Context = new Context(db_context)

  def open_context(store: Sessions.Store): Context =
    new Context(store.open_database_context()) { override def close(): Unit = db_context.close() }

  def open_session_context(
    store: Sessions.Store,
    session_base_info: Sessions.Base_Info,
    document_snapshot: Option[Document.Snapshot] = None
  ): Session_Context = {
    open_context(store).open_session(
      session_base_info, document_snapshot = document_snapshot, close_context = true)
  }

  class Session_Database private[Export](val session: String, val db: SQL.Database) {
    def close(): Unit = ()

    lazy val theory_names: List[String] = read_theory_names(db, session)
    lazy val entry_names: List[Entry_Name] = read_entry_names(db, session)
  }

  class Context private[Export](val db_context: Sessions.Database_Context)
  extends AutoCloseable {
    context =>

    override def toString: String = db_context.toString

    def close(): Unit = ()

    def open_session(
      session_base_info: Sessions.Base_Info,
      document_snapshot: Option[Document.Snapshot] = None,
      close_context: Boolean = false
    ): Session_Context = {
-      val session_base = session_base_info.check.base
-      val session_hierarchy =
-        session_base_info.sessions_structure.build_hierarchy(session_base.session_name)
+      val session_name = session_base_info.check.base.session_name
+      val session_hierarchy = session_base_info.sessions_structure.build_hierarchy(session_name)
      val session_databases =
        db_context.database_server match {
          case Some(db) => session_hierarchy.map(name => new Session_Database(name, db))
          case None =>
            val store = db_context.store
            val attempts =
              session_hierarchy.map(name =>
                name -> store.try_open_database(name, server = false))
            attempts.collectFirst({ case (name, None) => name }) match {
              case Some(bad) =>
                for ((_, Some(db)) <- attempts) db.close()
                store.bad_database(bad)
              case None =>
                for ((name, Some(db)) <- attempts) yield {
                  new Session_Database(name, db) { override def close(): Unit = this.db.close() }
                }
            }
        }
-      new Session_Context(db_context.cache, session_base, session_databases, document_snapshot) {
+      new Session_Context(context, session_base_info, session_databases, document_snapshot) {
        override def close(): Unit = {
          session_databases.foreach(_.close())
          if (close_context) context.close()
        }
      }
    }
  }

  class Session_Context private[Export](
-    val cache: Term.Cache,
-    session_base: Sessions.Base,
+    val export_context: Context,
+    session_base_info: Sessions.Base_Info,
    db_hierarchy: List[Session_Database],
    document_snapshot: Option[Document.Snapshot]
  ) extends AutoCloseable {
    session_context =>

    def close(): Unit = ()

+    def cache: Term.Cache =
+      export_context.db_context.cache
+
+    def sessions_structure: Sessions.Structure = session_base_info.sessions_structure
+
+    def session_base: Sessions.Base = session_base_info.base
+
    def session_name: String =
      if (document_snapshot.isDefined) Sessions.DRAFT else session_base.session_name

    def session_stack: List[String] =
      ((if (document_snapshot.isDefined) List(session_name) else Nil) :::
        db_hierarchy.map(_.session)).reverse

    private def select[A](
      session: String,
      select1: Entry_Name => Option[A],
      select2: Session_Database => List[A]
    ): List[A] = {
      def sel(name: String): List[A] =
        if (name == Sessions.DRAFT) {
          (for {
            snapshot <- document_snapshot.iterator
            entry_name <- snapshot.all_exports.keysIterator
            res <- select1(entry_name).iterator
          } yield entry_name -> res).toList.sortBy(_._1.compound_name).map(_._2)
        }
        else {
          db_hierarchy.find(_.session == name).map(select2).getOrElse(Nil)
        }

      if (session.nonEmpty) sel(session) else session_stack.flatMap(sel)
    }

    def entry_names(session: String = session_name): List[Entry_Name] =
      select(session, Some(_), _.entry_names)

    def theory_names(session: String = session_name): List[String] =
      select(session, a => if(a.name == THEORY_PARENTS) Some(a.theory) else None, _.theory_names)

    def get(theory: String, name: String): Option[Entry] = {
      def snapshot_entry =
        for {
          snapshot <- document_snapshot
          entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name)
          entry <- snapshot.all_exports.get(entry_name)
        } yield entry
      def db_entry =
        db_hierarchy.view.map(session_db =>
          Export.Entry_Name(session = session_db.session, theory = theory, name = name)
            .read(session_db.db, cache))
          .collectFirst({ case Some(entry) => entry })
      snapshot_entry orElse db_entry
    }

    def apply(theory: String, name: String, permissive: Boolean = false): Entry =
      get(theory, name) match {
        case None if permissive => empty_entry(theory, name)
        case None => error("Missing export entry " + quote(compound_name(theory, name)))
        case Some(entry) => entry
      }

    def theory(theory: String): Theory_Context =
      new Theory_Context(session_context, theory)

+    def read_document(session: String, name: String): Option[Document_Build.Document_Output] =
+      db_hierarchy.find(_.session == session).flatMap(session_db =>
+        Document_Build.read_document(session_db.db, session_db.session, name))
+
    override def toString: String =
      "Export.Session_Context(" + commas_quote(session_stack) + ")"
  }

  class Theory_Context private[Export](
    val session_context: Session_Context,
    val theory: String
  ) {
+    def cache: Term.Cache = session_context.cache
+
    def get(name: String): Option[Entry] = session_context.get(theory, name)

    def apply(name: String, permissive: Boolean = false): Entry =
      session_context.apply(theory, name, permissive = permissive)

    def uncompressed_yxml(name: String): XML.Body =
      get(name) match {
        case Some(entry) => entry.uncompressed_yxml
        case None => Nil
      }

+    def document_id(): Option[Long] =
+      apply(DOCUMENT_ID, permissive = true).text match {
+        case Value.Long(id) => Some(id)
+        case _ => None
+      }
+
+    def files(): Option[(String, List[String])] =
+      split_lines(apply(FILES, permissive = true).text) match {
+        case Nil => None
+        case thy_file :: blobs_files => Some((thy_file, blobs_files))
+      }
+
    override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
  }


  /* export to file-system */

  def export_files(
    store: Sessions.Store,
    session_name: String,
    export_dir: Path,
    progress: Progress = new Progress,
    export_prune: Int = 0,
    export_list: Boolean = false,
    export_patterns: List[String] = Nil
  ): Unit = {
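    // Usage sketch (assuming a previously built session "HOL" and store = Sessions.store(options)):
    // export all generated LaTeX sources, pruning the leading theory-name path element.
    //
    //   Export.export_files(store, "HOL", Path.explode("export"),
    //     export_patterns = List("*:document/latex"), export_prune = 1)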
using(store.open_database(session_name)) { db => val entry_names = read_entry_names(db, session_name) // list if (export_list) { for (entry_name <- entry_names) progress.echo(entry_name.compound_name) } // export if (export_patterns.nonEmpty) { val matcher = make_matcher(export_patterns) for { entry_name <- entry_names if matcher(entry_name) entry <- entry_name.read(db, store.cache) } { val path = export_dir + entry_name.make_path(prune = export_prune) progress.echo("export " + path + (if (entry.executable) " (executable)" else "")) Isabelle_System.make_directory(path.dir) val bytes = entry.uncompressed if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes) File.set_executable(path, entry.executable) } } } } /* Isabelle tool wrapper */ val default_export_dir: Path = Path.explode("export") val isabelle_tool = Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here, { args => /* arguments */ var export_dir = default_export_dir var dirs: List[Path] = Nil var export_list = false var no_build = false var options = Options.init() var export_prune = 0 var export_patterns: List[String] = Nil val getopts = Getopts(""" Usage: isabelle export [OPTIONS] SESSION Options are: -O DIR output directory for exported files (default: """ + default_export_dir + """) -d DIR include session directory -l list exports -n no build of session -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -p NUM prune path of exported files by NUM elements -x PATTERN extract files matching pattern (e.g. "*:**" for all) List or export theory exports for SESSION: named blobs produced by isabelle build. Option -l or -x is required; option -x may be repeated. The PATTERN language resembles glob patterns in the shell, with ? and * (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc], and variants {pattern1,pattern2,pattern3}. """, "O:" -> (arg => export_dir = Path.explode(arg)), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "l" -> (_ => export_list = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "p:" -> (arg => export_prune = Value.Int.parse(arg)), "x:" -> (arg => export_patterns ::= arg)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) if export_list || export_patterns.nonEmpty => session_name case _ => getopts.usage() } val progress = new Console_Progress() /* build */ if (!no_build) { val rc = progress.interrupt_handler { Build.build_logic(options, session_name, progress = progress, dirs = dirs) } if (rc != Process_Result.RC.ok) sys.exit(rc) } /* export files */ val store = Sessions.store(options) export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune, export_list = export_list, export_patterns = export_patterns) }) } diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala --- a/src/Pure/Thy/presentation.scala +++ b/src/Pure/Thy/presentation.scala @@ -1,670 +1,668 @@ /* Title: Pure/Thy/presentation.scala Author: Makarius HTML presentation of PIDE document content. 
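Theory markup, blobs and PDF documents are retrieved through the Export.Session_Context API
introduced in Pure/Thy/export.scala. A minimal retrieval sketch (assuming a built session
"HOL" with theory "HOL.Nat", and store/deps as set up in Pure/Tools/build.scala):

  using(Export.open_session_context(store, deps.base_info("HOL"))) { session_context =>
    val theory_context = session_context.theory("HOL.Nat")
    val latex = theory_context.uncompressed_yxml(Export.DOCUMENT_LATEX)
    val thy_files = theory_context.files()
  }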
*/ package isabelle import scala.annotation.tailrec import scala.collection.immutable.SortedMap import scala.collection.mutable object Presentation { /** HTML documents **/ /* HTML context */ sealed case class HTML_Document(title: String, content: String) abstract class HTML_Context { /* directory structure and resources */ def nodes: Nodes def root_dir: Path def theory_session(name: Document.Node.Name): Sessions.Info def session_dir(info: Sessions.Info): Path = root_dir + Path.explode(info.chapter_session) def theory_dir(name: Document.Node.Name): Path = session_dir(theory_session(name)) def files_path(name: Document.Node.Name, path: Path): Path = theory_dir(name) + Path.explode("files") + path.squash.html /* HTML content */ def head(title: String, rest: XML.Body = Nil): XML.Tree = HTML.div("head", HTML.chapter(title) :: rest) def source(body: XML.Body): XML.Tree = HTML.pre("source", body) def contents( heading: String, items: List[XML.Body], css_class: String = "contents" ) : List[XML.Elem] = { if (items.isEmpty) Nil else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items)))) } val isabelle_css: String = File.read(HTML.isabelle_css) def html_document(title: String, body: XML.Body, fonts_css: String): HTML_Document = { val content = HTML.output_document( List( HTML.style(fonts_css + "\n\n" + isabelle_css), HTML.title(title)), List(HTML.source(body)), css = "", structural = false) HTML_Document(title, content) } } /* presentation elements */ sealed case class Elements( html: Markup.Elements = Markup.Elements.empty, entity: Markup.Elements = Markup.Elements.empty, language: Markup.Elements = Markup.Elements.empty) val elements1: Elements = Elements( html = Rendering.foreground_elements ++ Rendering.text_color_elements + Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE, entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT, Markup.CLASS, Markup.LOCALE, Markup.FREE)) val elements2: Elements = Elements( html = elements1.html ++ Rendering.markdown_elements, language = Markup.Elements(Markup.Language.DOCUMENT)) /* per-session node info */ sealed case class File_Info(theory: String, is_theory: Boolean = false) object Node_Info { val empty: Node_Info = new Node_Info(Map.empty, Map.empty, Nil) def make(theory: Export_Theory.Theory): Node_Info = { val by_range = theory.entity_iterator.toList.groupBy(_.range) val by_kind_name = theory.entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap val others = theory.others.keySet.toList new Node_Info(by_range, by_kind_name, others) } } class Node_Info private( by_range: Map[Symbol.Range, List[Export_Theory.Entity0]], by_kind_name: Map[(String, String), Export_Theory.Entity0], val others: List[String]) { def for_range(range: Symbol.Range): List[Export_Theory.Entity0] = by_range.getOrElse(range, Nil) def get_kind_name(kind: String, name: String): Option[String] = by_kind_name.get((kind, name)).map(_.kname) } object Nodes { val empty: Nodes = new Nodes(Map.empty, Map.empty) def read( presentation_sessions: List[String], deps: Sessions.Deps, db_context: Sessions.Database_Context ): Nodes = { val export_context = Export.context(db_context) type Batch = (String, List[String]) val batches = presentation_sessions.foldLeft((Set.empty[String], List.empty[Batch]))( { case ((seen, batches), session) => val thys = deps(session).loaded_theories.keys.filterNot(seen) (seen ++ thys, (session, thys) :: batches) })._2 val theory_node_info = Par_List.map[Batch, List[(String, Node_Info)]]( 
{ case (session, thys) => using(export_context.open_session(deps.base_info(session))) { session_context => for (thy_name <- thys) yield { val theory_context = session_context.theory(thy_name) val theory = Export_Theory.read_theory(theory_context, permissive = true, cache = db_context.cache) thy_name -> Node_Info.make(theory) } } }, batches).flatten.toMap val files_info = deps.sessions_structure.build_requirements(presentation_sessions).flatMap(session => using(export_context.open_session(deps.base_info(session))) { session_context => session_context.theory_names().flatMap { theory => val theory_context = session_context.theory(theory) - Export.read_files(theory_context(_, permissive = true)) match { + theory_context.files() match { case None => Nil - case Some((thy, other)) => + case Some((thy, blobs)) => val thy_file_info = File_Info(theory, is_theory = true) - (thy -> thy_file_info) :: other.map(_ -> File_Info(theory)) + (thy -> thy_file_info) :: blobs.map(_ -> File_Info(theory)) } } }).toMap new Nodes(theory_node_info, files_info) } } class Nodes private( theory_node_info: Map[String, Node_Info], val files_info: Map[String, File_Info] ) { def apply(name: String): Node_Info = theory_node_info.getOrElse(name, Node_Info.empty) def get(name: String): Option[Node_Info] = theory_node_info.get(name) } /* formal entities */ type Entity = Export_Theory.Entity[Export_Theory.No_Content] object Entity_Context { object Theory_Ref { def unapply(props: Properties.T): Option[Document.Node.Name] = (props, props, props) match { case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file)) => Some(Resources.file_node(Path.explode(thy_file), theory = theory)) case _ => None } } object Entity_Ref { def unapply(props: Properties.T): Option[(Path, Option[String], String, String)] = (props, props, props, props) match { case (Markup.Entity.Ref.Prop(_), Position.Def_File(def_file), Markup.Kind(kind), Markup.Name(name)) => val def_theory = Position.Def_Theory.unapply(props) Some((Path.explode(def_file), def_theory, kind, name)) case _ => None } } val empty: Entity_Context = new Entity_Context def make( session: String, deps: Sessions.Deps, node: Document.Node.Name, html_context: HTML_Context): Entity_Context = new Entity_Context { private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = { body match { case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None case _ => Some { val entities = html_context.nodes(node.theory).for_range(range) val body1 = if (seen_ranges.contains(range)) { HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body)) } else HTML.span(body) entities.map(_.kname).foldLeft(body1) { case (elem, id) => HTML.entity_def(HTML.span(HTML.id(id), List(elem))) } } } } private def offset_id(range: Text.Range): String = "offset_" + range.start + ".." 
+ range.stop private def physical_ref(thy_name: String, props: Properties.T): Option[String] = { for { range <- Position.Def_Range.unapply(props) if thy_name == node.theory } yield { seen_ranges += range offset_id(range) } } private def logical_ref(thy_name: String, kind: String, name: String): Option[String] = html_context.nodes.get(thy_name).flatMap(_.get_kind_name(kind, name)) override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = { props match { case Theory_Ref(node_name) => node_relative(deps, session, node_name).map(html_dir => HTML.link(html_dir + html_name(node_name), body)) case Entity_Ref(file_path, def_theory, kind, name) if file_path.get_ext == "thy" => for { thy_name <- def_theory orElse (if (File.eq(node.path, file_path)) Some(node.theory) else None) node_name = Resources.file_node(file_path, theory = thy_name) html_dir <- node_relative(deps, session, node_name) html_file = node_file(node_name) html_ref <- logical_ref(thy_name, kind, name) orElse physical_ref(thy_name, props) } yield { HTML.entity_ref(HTML.link(html_dir + html_file + "#" + html_ref, body)) } case _ => None } } } } class Entity_Context { def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = None def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = None } /* HTML output */ private val div_elements = Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.`enum`.name, HTML.descr.name) def make_html( entity_context: Entity_Context, elements: Elements, xml: XML.Body ): XML.Body = { def html_div(html: XML.Body): Boolean = html exists { case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body) case XML.Text(_) => false } def html_class(c: String, html: XML.Body): XML.Body = if (c == "") html else if (html_div(html)) List(HTML.div(c, html)) else List(HTML.span(c, html)) def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) = xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) => val (res1, offset) = html_body_single(tree, end_offset1) (res1 ++ res, offset) } @tailrec def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) = xml_tree match { case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset) case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) => val (body1, offset) = html_body(body, end_offset) if (elements.entity(kind)) { entity_context.make_ref(props, body1) match { case Some(link) => (List(link), offset) case None => (body1, offset) } } else (body1, offset) case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) => val (body1, offset) = html_body(body, end_offset) (html_class(if (elements.language(name)) name else "", body1), offset) case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => val (body1, offset) = html_body(body, end_offset) (List(HTML.par(body1)), offset) case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => val (body1, offset) = html_body(body, end_offset) (List(HTML.item(body1)), offset) case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) => (Nil, end_offset - XML.symbol_length(text)) case XML.Elem(Markup.Markdown_List(kind), body) => val (body1, offset) = html_body(body, end_offset) if (kind == Markup.ENUMERATE) (List(HTML.`enum`(body1)), offset) else (List(HTML.list(body1)), offset) case XML.Elem(markup, body) => val name = markup.name val (body1, offset) = html_body(body, end_offset) val html = 
markup.properties match { case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD => html_class(kind, body1) case _ => body1 } Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match { case Some(c) => (html_class(c.toString, html), offset) case None => (html_class(name, html), offset) } case XML.Text(text) => val offset = end_offset - Symbol.length(text) val body = HTML.text(Symbol.decode(text)) entity_context.make_def(Text.Range(offset, end_offset), body) match { case Some(body1) => (List(body1), offset) case None => (body, offset) } } html_body(xml, XML.symbol_length(xml) + 1)._1 } /* PIDE HTML document */ def html_document( snapshot: Document.Snapshot, html_context: HTML_Context, elements: Elements, plain_text: Boolean = false, fonts_css: String = HTML.fonts_css() ): HTML_Document = { require(!snapshot.is_outdated, "document snapshot outdated") val name = snapshot.node_name if (plain_text) { val title = "File " + Symbol.cartouche_decoded(name.path.file_name) val body = HTML.text(snapshot.node.source) html_context.html_document(title, body, fonts_css) } else { Resources.html_document(snapshot) getOrElse { val title = if (name.is_theory) "Theory " + quote(name.theory_base_name) else "File " + Symbol.cartouche_decoded(name.path.file_name) val xml = snapshot.xml_markup(elements = elements.html) val body = make_html(Entity_Context.empty, elements, xml) html_context.html_document(title, body, fonts_css) } } } /** HTML presentation **/ /* presentation context */ object Context { val none: Context = new Context { def enabled: Boolean = false } val standard: Context = new Context { def enabled: Boolean = true } def dir(path: Path): Context = new Context { def enabled: Boolean = true override def dir(store: Sessions.Store): Path = path } def make(s: String): Context = if (s == ":") standard else dir(Path.explode(s)) } abstract class Context private { def enabled: Boolean def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info def dir(store: Sessions.Store): Path = store.presentation_dir def dir(store: Sessions.Store, info: Sessions.Info): Path = dir(store) + Path.explode(info.chapter_session) } /* maintain chapter index */ private val sessions_path = Path.basic(".sessions") private def read_sessions(dir: Path): List[(String, String)] = { val path = dir + sessions_path if (path.is_file) { import XML.Decode._ list(pair(string, string))(Symbol.decode_yxml(File.read(path))) } else Nil } def update_chapter( presentation_dir: Path, chapter: String, new_sessions: List[(String, String)] ): Unit = { val dir = Isabelle_System.make_directory(presentation_dir + Path.basic(chapter)) val sessions0 = try { read_sessions(dir) } catch { case _: XML.Error => Nil } val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList File.write(dir + sessions_path, { import XML.Encode._ YXML.string_of_body(list(pair(string, string))(sessions)) }) val title = "Isabelle/" + chapter + " sessions" HTML.write_document(dir, "index.html", List(HTML.title(title + Isabelle_System.isabelle_heading())), HTML.chapter(title) :: (if (sessions.isEmpty) Nil else List(HTML.div("sessions", List(HTML.description( sessions.map({ case (name, description) => val descr = Symbol.trim_blank_lines(description) (List(HTML.link(name + "/index.html", HTML.text(name))), if (descr == "") Nil else HTML.break ::: List(HTML.pre(HTML.text(descr)))) })))))), base = Some(presentation_dir)) } def update_root(presentation_dir: Path): Unit = { 
Isabelle_System.make_directory(presentation_dir) HTML.init_fonts(presentation_dir) Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"), presentation_dir + Path.explode("isabelle.gif")) val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library" File.write(presentation_dir + Path.explode("index.html"), HTML.header + """ """ + HTML.head_meta + """ """ + title + """
[Isabelle]
""" + title + """

""" + File.read(Path.explode("~~/lib/html/library_index_content.template")) + """ """ + HTML.footer) } /* present session */ val session_graph_path = Path.explode("session_graph.pdf") val readme_path = Path.explode("README.html") def html_name(name: Document.Node.Name): String = Path.explode(name.theory_base_name).html.implode def files_path(src_path: Path): String = (Path.explode("files") + src_path.squash.html).implode private def node_file(name: Document.Node.Name): String = if (name.node.endsWith(".thy")) html_name(name) else files_path(name.path) private def session_relative( deps: Sessions.Deps, session0: String, session1: String ): Option[String] = { for { info0 <- deps.sessions_structure.get(session0) info1 <- deps.sessions_structure.get(session1) } yield info0.relative_path(info1) } def node_relative( deps: Sessions.Deps, session0: String, node_name: Document.Node.Name ): Option[String] = { val session1 = deps(session0).theory_qualifier(node_name) session_relative(deps, session0, session1) } def session_html( - session: String, + session_context: Export.Session_Context, deps: Sessions.Deps, - db_context: Sessions.Database_Context, progress: Progress = new Progress, verbose: Boolean = false, html_context: HTML_Context, session_elements: Elements ): Unit = { - val info = deps.sessions_structure(session) + val session = session_context.session_name + val info = session_context.sessions_structure(session) val options = info.options - val base = deps(session) + val base = session_context.session_base - val session_hierarchy = deps.sessions_structure.build_hierarchy(session) val session_dir = Isabelle_System.make_directory(html_context.session_dir(info)) Bytes.write(session_dir + session_graph_path, graphview.Graph_File.make_pdf(options, base.session_graph_display)) val documents = for { doc <- info.document_variants - document <- db_context.database(session)(db => - Document_Build.read_document(db, session, doc.name)) + document <- session_context.read_document(session, doc.name) } yield { val doc_path = (session_dir + doc.path.pdf).expand if (verbose) progress.echo("Presenting document " + session + "/" + doc.name) if (options.bool("document_echo")) progress.echo("Document at " + doc_path) Bytes.write(doc_path, document.pdf) doc } val view_links = { val deps_link = HTML.link(session_graph_path, HTML.text("theory dependencies")) val readme_links = if ((info.dir + readme_path).is_file) { Isabelle_System.copy_file(info.dir + readme_path, session_dir + readme_path) List(HTML.link(readme_path, HTML.text("README"))) } else Nil val document_links = documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name))) Library.separate(HTML.break ::: HTML.nl, (deps_link :: readme_links ::: document_links). 
map(link => HTML.text("View ") ::: List(link))).flatten } def entity_context(name: Document.Node.Name): Entity_Context = Entity_Context.make(session, deps, name, html_context) sealed case class Seen_File( src_path: Path, thy_name: Document.Node.Name, thy_session: String ) { val files_path: Path = html_context.files_path(thy_name, src_path) def check(src_path1: Path, thy_name1: Document.Node.Name, thy_session1: String): Boolean = { val files_path1 = html_context.files_path(thy_name1, src_path1) (src_path == src_path1 || files_path == files_path1) && thy_session == thy_session1 } } var seen_files = List.empty[Seen_File] def present_theory(name: Document.Node.Name): Option[XML.Body] = { progress.expose_interrupt() - Build_Job.read_theory(db_context, session_hierarchy, name.theory).flatMap { command => + Build_Job.read_theory(session_context.theory(name.theory)).flatMap { command => if (verbose) progress.echo("Presenting theory " + name) val snapshot = Document.State.init.snippet(command) val thy_elements = session_elements.copy(entity = html_context.nodes(name.theory).others.foldLeft(session_elements.entity)(_ + _)) val files_html = for { (src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html) if xml.nonEmpty } yield { progress.expose_interrupt() if (verbose) progress.echo("Presenting file " + src_path) (src_path, html_context.source( make_html(Entity_Context.empty, thy_elements, xml))) } val thy_html = html_context.source( make_html(entity_context(name), thy_elements, snapshot.xml_markup(elements = thy_elements.html))) val thy_session = html_context.theory_session(name).name val thy_dir = Isabelle_System.make_directory(html_context.theory_dir(name)) val files = for { (src_path, file_html) <- files_html } yield { seen_files.find(_.check(src_path, name, thy_session)) match { case None => seen_files ::= Seen_File(src_path, name, thy_session) case Some(seen_file) => error("Incoherent use of file name " + src_path + " as " + files_path(src_path) + " in theory " + seen_file.thy_name + " vs. " + name) } val file_path = html_context.files_path(name, src_path) val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short) HTML.write_document(file_path.dir, file_path.file_name, List(HTML.title(file_title)), List(html_context.head(file_title), file_html), base = Some(html_context.root_dir)) List(HTML.link(files_path(src_path), HTML.text(file_title))) } val thy_title = "Theory " + name.theory_base_name HTML.write_document(thy_dir, html_name(name), List(HTML.title(thy_title)), List(html_context.head(thy_title), thy_html), base = Some(html_context.root_dir)) if (thy_session == session) { Some( List(HTML.link(html_name(name), HTML.text(name.theory_base_name) ::: (if (files.isEmpty) Nil else List(HTML.itemize(files)))))) } else None } } val theories = base.proper_session_theories.flatMap(present_theory) val title = "Session " + session HTML.write_document(session_dir, "index.html", List(HTML.title(title + Isabelle_System.isabelle_heading())), html_context.head(title, List(HTML.par(view_links))) :: html_context.contents("Theories", theories), base = Some(html_context.root_dir)) } } diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,682 +1,689 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Build and manage Isabelle sessions. 
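For example (assuming the bundled HOL session): "isabelle build -v -P: HOL" builds HOL and
writes its HTML presentation to the default browser_info directory, cf. option -P in the
usage text below.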
*/ package isabelle import scala.collection.immutable.SortedSet import scala.annotation.tailrec object Build { /** auxiliary **/ /* persistent build info */ sealed case class Session_Info( sources: String, input_heaps: List[String], output_heap: Option[String], return_code: Int ) { def ok: Boolean = return_code == 0 } /* queue with scheduling information */ private object Queue { type Timings = (List[Properties.T], Double) def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings = { val no_timings: Timings = (Nil, 0.0) store.try_open_database(session_name) match { case None => no_timings case Some(db) => def ignore_error(msg: String) = { progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg)) no_timings } try { val command_timings = store.read_command_timings(db, session_name) val session_timing = store.read_session_timing(db, session_name) match { case Markup.Elapsed(t) => t case _ => 0.0 } (command_timings, session_timing) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("") } finally { db.close() } } } def make_session_timing( sessions_structure: Sessions.Structure, timing: Map[String, Double] ) : Map[String, Double] = { val maximals = sessions_structure.build_graph.maximals.toSet def desc_timing(session_name: String): Double = { if (maximals.contains(session_name)) timing(session_name) else { val descendants = sessions_structure.build_descendants(List(session_name)).toSet val g = sessions_structure.build_graph.restrict(descendants) (0.0 :: g.maximals.flatMap { desc => val ps = g.all_preds(List(desc)) if (ps.exists(p => !timing.isDefinedAt(p))) None else Some(ps.map(timing(_)).sum) }).max } } timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0) } def apply( progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store ) : Queue = { val graph = sessions_structure.build_graph val names = graph.keys val timings = names.map(name => (name, load_timings(progress, store, name))) val command_timings = timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil) val session_timing = make_session_timing(sessions_structure, timings.map({ case (name, (_, t)) => (name, t) }).toMap) object Ordering extends scala.math.Ordering[String] { def compare(name1: String, name2: String): Int = session_timing(name2) compare session_timing(name1) match { case 0 => sessions_structure(name2).timeout compare sessions_structure(name1).timeout match { case 0 => name1 compare name2 case ord => ord } case ord => ord } } new Queue(graph, SortedSet(names: _*)(Ordering), command_timings) } } private class Queue( graph: Graph[String, Sessions.Info], order: SortedSet[String], val command_timings: String => List[Properties.T] ) { def is_inner(name: String): Boolean = !graph.is_maximal(name) def is_empty: Boolean = graph.is_empty def - (name: String): Queue = new Queue(graph.del_node(name), order - name, command_timings) def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] = { val it = order.iterator.dropWhile(name => skip(name) || !graph.is_minimal(name)) if (it.hasNext) { val name = it.next(); Some((name, graph.get_node(name))) } else None } } /** build with results **/ class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)]) { def sessions: Set[String] = results.keySet def infos: List[Sessions.Info] = results.values.map(_._2).toList def 
cancelled(name: String): Boolean = results(name)._1.isEmpty def apply(name: String): Process_Result = results(name)._1.getOrElse(Process_Result(1)) def get_info(name: String): Option[Sessions.Info] = results.get(name).map(_._2) def info(name: String): Sessions.Info = get_info(name).get val rc: Int = results.iterator.map({ case (_, (Some(r), _)) => r.rc case (_, (None, _)) => 1 }). foldLeft(Process_Result.RC.ok)(_ max _) def ok: Boolean = rc == Process_Result.RC.ok override def toString: String = rc.toString } def session_finished(session_name: String, process_result: Process_Result): String = "Finished " + session_name + " (" + process_result.timing.message_resources + ")" def session_timing(session_name: String, build_log: Build_Log.Session_Info): String = { val props = build_log.session_timing val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1 val timing = Markup.Timing_Properties.get(props) "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")" } def build( options: Options, selection: Sessions.Selection = Sessions.Selection.empty, presentation: Presentation.Context = Presentation.Context.none, progress: Progress = new Progress, check_unknown_files: Boolean = false, build_heap: Boolean = false, clean_build: Boolean = false, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Sessions.Info] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, list_files: Boolean = false, check_keywords: Set[String] = Set.empty, fresh_build: Boolean = false, no_build: Boolean = false, soft_build: Boolean = false, verbose: Boolean = false, export_files: Boolean = false, session_setup: (String, Session) => Unit = (_, _) => () ): Results = { val build_options = options + "completion_limit=0" + "editor_tracing_messages=0" + ("pide_reports=" + options.bool("build_pide_reports")) val store = Sessions.store(build_options) Isabelle_Fonts.init() /* session selection and dependencies */ val full_sessions = Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos) val full_sessions_selection = full_sessions.imports_selection(selection) val full_sessions_selected = full_sessions_selection.toSet def sources_stamp(deps: Sessions.Deps, session_name: String): String = { val digests = full_sessions(session_name).meta_digest :: deps.session_sources(session_name) ::: deps.imported_sources(session_name) SHA1.digest_set(digests).toString } val deps = { val deps0 = Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true, verbose = verbose, list_files = list_files, check_keywords = check_keywords).check_errors if (soft_build && !fresh_build) { val outdated = deps0.sessions_structure.build_topological_order.flatMap(name => store.try_open_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) if build.ok && build.sources == sources_stamp(deps0, name) => None case _ => Some(name) } case None => Some(name) }) Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)), progress = progress, inlined_files = true).check_errors } else deps0 } val presentation_sessions = (for { session_name <- deps.sessions_structure.build_topological_order.iterator info <- deps.sessions_structure.get(session_name) if full_sessions_selected(session_name) && presentation.enabled(info) } yield info).toList /* check unknown files */ if (check_unknown_files) { val source_files = (for { (_, base) <- deps.session_bases.iterator (path, _) <- 
base.session_sources.iterator } yield path).toList val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file) val unknown_files = Mercurial.check_files(source_files)._2. filterNot(path => exclude_files.contains(path.canonical_file)) if (unknown_files.nonEmpty) { progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" + unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", "")) } } /* main build process */ val queue = Queue(progress, deps.sessions_structure, store) store.prepare_output_dir() if (clean_build) { for (name <- full_sessions.imports_descendants(full_sessions_selection)) { val (relevant, ok) = store.clean_output(name) if (relevant) { if (ok) progress.echo("Cleaned " + name) else progress.echo(name + " FAILED to clean") } } } // scheduler loop case class Result( current: Boolean, heap_digest: Option[String], process: Option[Process_Result], info: Sessions.Info ) { def ok: Boolean = process match { case None => false case Some(res) => res.ok } } def sleep(): Unit = Isabelle_Thread.interrupt_handler(_ => progress.stop()) { Time.seconds(0.5).sleep() } val log = build_options.string("system_log") match { case "" => No_Logger case "-" => Logger.make(progress) case log_file => Logger.make(Some(Path.explode(log_file))) } val numa_nodes = new NUMA.Nodes(numa_shuffling) @tailrec def loop( pending: Queue, running: Map[String, (List[String], Build_Job)], results: Map[String, Result] ): Map[String, Result] = { def used_node(i: Int): Boolean = running.iterator.exists( { case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i }) if (pending.is_empty) results else { if (progress.stopped) { for ((_, (_, job)) <- running) job.terminate() } running.find({ case (_, (_, job)) => job.is_finished }) match { case Some((session_name, (input_heaps, job))) => //{{{ finish job val (process_result, heap_digest) = job.join val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test) val process_result_tail = { val tail = job.info.options.int("process_output_tail") process_result.copy( out_lines = "(see also " + store.output_log(session_name).file.toString + ")" :: (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0))) } val build_log = Build_Log.Log_File(session_name, process_result.out_lines). 
parse_session_info( command_timings = true, theory_timings = true, ml_statistics = true, task_statistics = true) // write log file if (process_result.ok) { File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines)) } else File.write(store.output_log(session_name), terminate_lines(log_lines)) // write database using(store.open_database(session_name, output = true))(db => store.write_session_info(db, session_name, build_log = if (process_result.timeout) build_log.error("Timeout") else build_log, build = Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest, process_result.rc))) // messages process_result.err_lines.foreach(progress.echo) if (process_result.ok) { if (verbose) progress.echo(session_timing(session_name, build_log)) progress.echo(session_finished(session_name, process_result)) } else { progress.echo(session_name + " FAILED") if (!process_result.interrupted) progress.echo(process_result_tail.out) } loop(pending - session_name, running - session_name, results + (session_name -> Result(false, heap_digest, Some(process_result_tail), job.info))) //}}} case None if running.size < (max_jobs max 1) => //{{{ check/start next job pending.dequeue(running.isDefinedAt) match { case Some((session_name, info)) => val ancestor_results = deps.sessions_structure.build_requirements(List(session_name)). filterNot(_ == session_name).map(results(_)) val ancestor_heaps = ancestor_results.flatMap(_.heap_digest) val do_store = build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name) val (current, heap_digest) = { store.try_open_database(session_name) match { case Some(db) => using(db)(store.read_build(_, session_name)) match { case Some(build) => val heap_digest = store.find_heap_digest(session_name) val current = !fresh_build && build.ok && build.sources == sources_stamp(deps, session_name) && build.input_heaps == ancestor_heaps && build.output_heap == heap_digest && !(do_store && heap_digest.isEmpty) (current, heap_digest) case None => (false, None) } case None => (false, None) } } val all_current = current && ancestor_results.forall(_.current) if (all_current) loop(pending - session_name, running, results + (session_name -> Result(true, heap_digest, Some(Process_Result(0)), info))) else if (no_build) { progress.echo_if(verbose, "Skipping " + session_name + " ...") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, Some(Process_Result(1)), info))) } else if (ancestor_results.forall(_.ok) && !progress.stopped) { progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...") store.clean_output(session_name) using(store.open_database(session_name, output = true))( store.init_session_info(_, session_name)) val numa_node = numa_nodes.next(used_node) val job = new Build_Job(progress, session_name, info, deps, store, do_store, log, session_setup, numa_node, queue.command_timings(session_name)) loop(pending, running + (session_name -> (ancestor_heaps, job)), results) } else { progress.echo(session_name + " CANCELLED") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, None, info))) } case None => sleep(); loop(pending, running, results) } ///}}} case None => sleep(); loop(pending, running, results) } } } /* build results */ val results = { val results0 = if (deps.is_empty) { progress.echo_warning("Nothing to build") Map.empty[String, Result] } else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) } new Results( (for ((name, result) 
<- results0.iterator) yield (name, (result.process, result.info))).toMap) } if (export_files) { for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) { val info = results.info(name) if (info.export_files.nonEmpty) { progress.echo("Exporting " + info.name + " ...") for ((dir, prune, pats) <- info.export_files) { Export.export_files(store, name, info.dir + dir, progress = if (verbose) progress else new Progress, export_prune = prune, export_patterns = pats) } } } } if (results.rc != Process_Result.RC.ok && (verbose || !no_build)) { val unfinished = (for { name <- results.sessions.iterator if !results(name).ok } yield name).toList.sorted progress.echo("Unfinished session(s): " + commas(unfinished)) } /* PDF/HTML presentation */ if (!no_build && !progress.stopped && results.ok) { if (presentation_sessions.nonEmpty) { val presentation_dir = presentation.dir(store) progress.echo("Presentation in " + presentation_dir.absolute) Presentation.update_root(presentation_dir) for ((chapter, infos) <- presentation_sessions.groupBy(_.chapter).iterator) { val entries = infos.map(info => (info.name, info.description)) Presentation.update_chapter(presentation_dir, chapter, entries) } - using(store.open_database_context()) { db_context => + using(Export.open_context(store)) { export_context => + val db_context = export_context.db_context + val presentation_nodes = Presentation.Nodes.read(presentation_sessions.map(_.name), deps, db_context) Par_List.map({ (session: String) => progress.expose_interrupt() progress.echo("Presenting " + session + " ...") val html_context = new Presentation.HTML_Context { override def nodes: Presentation.Nodes = presentation_nodes override def root_dir: Path = presentation_dir override def theory_session(name: Document.Node.Name): Sessions.Info = deps.sessions_structure(deps(session).theory_qualifier(name)) } - Presentation.session_html( - session, deps, db_context, progress = progress, - verbose = verbose, html_context = html_context, - Presentation.elements1) + + val session_base_info = deps.base_info(session) + using(export_context.open_session(session_base_info)) { session_context => + Presentation.session_html(session_context, deps, + progress = progress, + verbose = verbose, + html_context = html_context, + Presentation.elements1) + } }, presentation_sessions.map(_.name)) } } } results } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions", Scala_Project.here, { args => val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS")) var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var numa_shuffling = false var presentation = Presentation.Context.none var requirements = false var soft_build = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_heap = false var clean_build = false var dirs: List[Path] = Nil var export_files = false var fresh_build = false var session_groups: List[String] = Nil var max_jobs = 1 var check_keywords: Set[String] = Set.empty var list_files = false var no_build = false var options = Options.init(opts = build_options) var verbose = false var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle build [OPTIONS] [SESSIONS ...] 
Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -N cyclic shuffling of NUMA CPU nodes (performance tuning) -P DIR enable HTML/PDF presentation in directory (":" for default) -R refer to requirements of selected sessions -S soft build: only observe changes of sources, not heap images -X NAME exclude sessions from group NAME and all descendants -a select all sessions -b build heap images -c clean build -d DIR include session directory -e export files from session specification into file-system -f fresh build -g NAME select session group NAME -j INT maximum number of parallel jobs (default 1) -k KEYWORD check theory sources for conflicts with proposed keywords -l list session source files -n no build -- test dependencies only -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose -x NAME exclude session NAME and all descendants Build and manage Isabelle sessions, depending on implicit settings: """ + Library.indent_lines(2, Build_Log.Settings.show()) + "\n", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "N" -> (_ => numa_shuffling = true), "P:" -> (arg => presentation = Presentation.Context.make(arg)), "R" -> (_ => requirements = true), "S" -> (_ => soft_build = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "b" -> (_ => build_heap = true), "c" -> (_ => clean_build = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "e" -> (_ => export_files = true), "f" -> (_ => fresh_build = true), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "k:" -> (arg => check_keywords = check_keywords + arg), "l" -> (_ => list_files = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) val start_date = Date.now() if (verbose) { progress.echo( "Started at " + Build_Log.print_date(start_date) + " (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")") progress.echo(Build_Log.Settings.show() + "\n") } val results = progress.interrupt_handler { build(options, selection = Sessions.Selection( requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions), presentation = presentation, progress = progress, check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME), build_heap = build_heap, clean_build = clean_build, dirs = dirs, select_dirs = select_dirs, numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling), max_jobs = max_jobs, list_files = list_files, check_keywords = check_keywords, fresh_build = fresh_build, no_build = no_build, soft_build = soft_build, verbose = verbose, export_files = export_files) } val end_date = Date.now() val elapsed_time = end_date.time - start_date.time if (verbose) { progress.echo("\nFinished at " + Build_Log.print_date(end_date)) } val total_timing = results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _). 
copy(elapsed = elapsed_time) progress.echo(total_timing.message_resources) sys.exit(results.rc) }) /* build logic image */ def build_logic(options: Options, logic: String, progress: Progress = new Progress, build_heap: Boolean = false, dirs: List[Path] = Nil, fresh: Boolean = false, strict: Boolean = false ): Int = { val selection = Sessions.Selection.session(logic) val rc = if (!fresh && build(options, selection = selection, build_heap = build_heap, no_build = true, dirs = dirs).ok) Process_Result.RC.ok else { progress.echo("Build started for Isabelle/" + logic + " ...") Build.build(options, selection = selection, progress = progress, build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc } if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic) else rc } } diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala --- a/src/Pure/Tools/build_job.scala +++ b/src/Pure/Tools/build_job.scala @@ -1,535 +1,547 @@ /* Title: Pure/Tools/build_job.scala Author: Makarius Build job running prover process, with rudimentary PIDE session. */ package isabelle import scala.collection.mutable object Build_Job { - /* theory markup/messages from database */ + /* theory markup/messages from session database */ - def read_theory( + def read_session_theory( db_context: Sessions.Database_Context, - session_hierarchy: List[String], + session: String, theory: String, unicode_symbols: Boolean = false ): Option[Command] = { - def read(name: String): Export.Entry = - db_context.get_export(session_hierarchy, theory, name) + val export_context = Export.context(db_context) + val session_base_info = Sessions.base_info_empty(session) + using(export_context.open_session(session_base_info)) { session_context => + Build_Job.read_theory(session_context.theory(theory), unicode_symbols = unicode_symbols) + } + } + + def read_theory( + theory_context: Export.Theory_Context, + unicode_symbols: Boolean = false + ): Option[Command] = { + def read(name: String): Export.Entry = theory_context(name, permissive = true) def read_xml(name: String): XML.Body = YXML.parse_body( Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).uncompressed)), - cache = db_context.cache) + cache = theory_context.cache) for { - id <- Export.read_document_id(read) - (thy_file, blobs_files) <- Export.read_files(read) + id <- theory_context.document_id() + (thy_file, blobs_files) <- theory_context.files() } yield { - val node_name = Resources.file_node(Path.explode(thy_file), theory = theory) + val node_name = Resources.file_node(Path.explode(thy_file), theory = theory_context.theory) val results = Command.Results.make( for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES)) yield i -> elem) val blobs = blobs_files.map { file => val path = Path.explode(file) val name = Resources.file_node(path) val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path) Command.Blob(name, src_path, None) } val blobs_xml = for (i <- (1 to blobs.length).toList) yield read_xml(Export.MARKUP + i) val blobs_info = Command.Blobs_Info( for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml } yield { val text = XML.content(xml) val chunk = Symbol.Text_Chunk(text) val digest = SHA1.digest(Symbol.encode(text)) Exn.Res(Command.Blob(name, src_path, Some((digest, chunk)))) }) val thy_xml = read_xml(Export.MARKUP) val thy_source = XML.content(thy_xml) val markups_index = Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob) val markups = 
Command.Markups.make( for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml)) yield index -> Markup_Tree.from_XML(xml)) Command.unparsed(thy_source, theory = true, id = id, node_name = node_name, blobs_info = blobs_info, results = results, markups = markups) } } /* print messages */ def print_log( options: Options, session_name: String, theories: List[String] = Nil, verbose: Boolean = false, progress: Progress = new Progress, margin: Double = Pretty.default_margin, breakgain: Double = Pretty.default_breakgain, metric: Pretty.Metric = Symbol.Metric, unicode_symbols: Boolean = false ): Unit = { val store = Sessions.store(options) val session = new Session(options, Resources.empty) using(store.open_database_context()) { db_context => val result = db_context.database(session_name) { db => val theories = store.read_theories(db, session_name) val errors = store.read_errors(db, session_name) store.read_build(db, session_name).map(info => (theories, errors, info.return_code)) } result match { case None => error("Missing build database for session " + quote(session_name)) case Some((used_theories, errors, rc)) => theories.filterNot(used_theories.toSet) match { case Nil => case bad => error("Unknown theories " + commas_quote(bad)) } val print_theories = if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet) + for (thy <- print_theories) { val thy_heading = "\nTheory " + quote(thy) + ":" - read_theory(db_context, List(session_name), thy, unicode_symbols = unicode_symbols) + + read_session_theory(db_context, session_name, thy, unicode_symbols = unicode_symbols) match { case None => progress.echo(thy_heading + " MISSING") case Some(command) => val snapshot = Document.State.init.snippet(command) val rendering = new Rendering(snapshot, options, session) val messages = rendering.text_messages(Text.Range.full) .filter(message => verbose || Protocol.is_exported(message.info)) if (messages.nonEmpty) { val line_document = Line.Document(command.source) progress.echo(thy_heading) for (Text.Info(range, elem) <- messages) { val line = line_document.position(range.start).line1 val pos = Position.Line_File(line, command.node_name.node) progress.echo( Protocol.message_text(elem, heading = true, pos = pos, margin = margin, breakgain = breakgain, metric = metric)) } } } } if (errors.nonEmpty) { val msg = Symbol.output(unicode_symbols, cat_lines(errors)) progress.echo("\nBuild errors:\n" + Output.error_message_text(msg)) } if (rc != Process_Result.RC.ok) { progress.echo("\n" + Process_Result.RC.print_long(rc)) } } } } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("log", "print messages from build database", Scala_Project.here, { args => /* arguments */ var unicode_symbols = false var theories: List[String] = Nil var margin = Pretty.default_margin var options = Options.init() var verbose = false val getopts = Getopts(""" Usage: isabelle log [OPTIONS] SESSION Options are: -T NAME restrict to given theories (multiple options possible) -U output Unicode symbols -m MARGIN margin for pretty printing (default: """ + margin + """) -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v print all messages, including information etc. Print messages from the build database of the given session, without any checks against current sources: results from a failed build can be printed as well. 
""", "T:" -> (arg => theories = theories ::: List(arg)), "U" -> (_ => unicode_symbols = true), "m:" -> (arg => margin = Value.Double.parse(arg)), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) => session_name case _ => getopts.usage() } val progress = new Console_Progress() print_log(options, session_name, theories = theories, verbose = verbose, margin = margin, progress = progress, unicode_symbols = unicode_symbols) }) } class Build_Job(progress: Progress, session_name: String, val info: Sessions.Info, deps: Sessions.Deps, store: Sessions.Store, do_store: Boolean, log: Logger, session_setup: (String, Session) => Unit, val numa_node: Option[Int], command_timings0: List[Properties.T] ) { val options: Options = NUMA.policy_options(info.options, numa_node) private val sessions_structure = deps.sessions_structure private val future_result: Future[Process_Result] = Future.thread("build", uninterruptible = true) { val parent = info.parent.getOrElse("") val base = deps(parent) val result_base = deps(session_name) val env = Isabelle_System.settings( List("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString)) val is_pure = Sessions.is_pure(session_name) val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil val eval_store = if (do_store) { (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: List("ML_Heap.save_child " + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) } else Nil val resources = new Resources(sessions_structure, base, log = log, command_timings = command_timings0) val session = new Session(options, resources) { override val cache: Term.Cache = store.cache override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = { result_base.load_commands.get(name.expand) match { case Some(spans) => val syntax = result_base.theory_syntax(name) Command.build_blobs_info(syntax, name, spans) case None => Command.Blobs_Info.none } } } object Build_Session_Errors { private val promise: Promise[List[String]] = Future.promise def result: Exn.Result[List[String]] = promise.join_result def cancel(): Unit = promise.cancel() def apply(errs: List[String]): Unit = { try { promise.fulfill(errs) } catch { case _: IllegalStateException => } } } val export_consumer = Export.consumer(store.open_database(session_name, output = true), store.cache, progress = progress) val stdout = new StringBuilder(1000) val stderr = new StringBuilder(1000) val command_timings = new mutable.ListBuffer[Properties.T] val theory_timings = new mutable.ListBuffer[Properties.T] val session_timings = new mutable.ListBuffer[Properties.T] val runtime_statistics = new mutable.ListBuffer[Properties.T] val task_statistics = new mutable.ListBuffer[Properties.T] def fun( name: String, acc: mutable.ListBuffer[Properties.T], unapply: Properties.T => Option[Properties.T] ): (String, Session.Protocol_Function) = { name -> ((msg: Prover.Protocol_Output) => unapply(msg.properties) match { case Some(props) => acc += props; true case _ => false }) } session.init_protocol_handler(new Session.Protocol_Handler { override def exit(): Unit = Build_Session_Errors.cancel() private def build_session_finished(msg: Prover.Protocol_Output): Boolean = { val (rc, errors) = try { val (rc, errs) = { import XML.Decode._ pair(int, list(x => x))(Symbol.decode_yxml(msg.text)) } val errors = for (err <- errs) yield { val prt = 
Protocol_Message.expose_no_reports(err) Pretty.string_of(prt, metric = Symbol.Metric) } (rc, errors) } catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) } session.protocol_command("Prover.stop", rc.toString) Build_Session_Errors(errors) true } private def loading_theory(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Markup.Loading_Theory(Markup.Name(name)) => progress.theory(Progress.Theory(name, session = session_name)) false case _ => false } private def export_(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Protocol.Export(args) => export_consumer.make_entry(session_name, args, msg.chunk) true case _ => false } override val functions: Session.Protocol_Functions = List( Markup.Build_Session_Finished.name -> build_session_finished, Markup.Loading_Theory.name -> loading_theory, Markup.EXPORT -> export_, fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply), fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply), fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply)) }) session.command_timings += Session.Consumer("command_timings") { case Session.Command_Timing(props) => for { elapsed <- Markup.Elapsed.unapply(props) elapsed_time = Time.seconds(elapsed) if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold") } command_timings += props.filter(Markup.command_timing_property) } session.runtime_statistics += Session.Consumer("ML_statistics") { case Session.Runtime_Statistics(props) => runtime_statistics += props } session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories") { case snapshot => if (!progress.stopped) { val rendering = new Rendering(snapshot, options, session) def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit = { if (!progress.stopped) { val theory_name = snapshot.node_name.theory val args = Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress) val body = Bytes(Symbol.encode(YXML.string_of_body(xml))) export_consumer.make_entry(session_name, args, body) } } def export_text(name: String, text: String, compress: Boolean = true): Unit = export_(name, List(XML.Text(text)), compress = compress) for (command <- snapshot.snippet_command) { export_text(Export.DOCUMENT_ID, command.id.toString, compress = false) } export_text(Export.FILES, cat_lines(snapshot.node_files.map(_.symbolic.node)), compress = false) for (((_, xml), i) <- snapshot.xml_markup_blobs().zipWithIndex) { export_(Export.MARKUP + (i + 1), xml) } export_(Export.MARKUP, snapshot.xml_markup()) export_(Export.MESSAGES, snapshot.messages.map(_._1)) val citations = Library.distinct(rendering.citations(Text.Range.full).map(_.info)) export_text(Export.DOCUMENT_CITATIONS, cat_lines(citations)) } } session.all_messages += Session.Consumer[Any]("build_session_output") { case msg: Prover.Output => val message = msg.message if (msg.is_system) resources.log(Protocol.message_text(message)) if (msg.is_stdout) { stdout ++= Symbol.encode(XML.content(message)) } else if (msg.is_stderr) { stderr ++= Symbol.encode(XML.content(message)) } else if (msg.is_exit) { val err = "Prover terminated" + (msg.properties match { case Markup.Process_Result(result) => ": " + result.print_rc case _ => "" }) Build_Session_Errors(List(err)) } case _ => } session_setup(session_name, session) val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store) val process = Isabelle_Process.start(session, 
options, sessions_structure, store, logic = parent, raw_ml_system = is_pure, use_prelude = use_prelude, eval_main = eval_main, cwd = info.dir.file, env = env) val build_errors = Isabelle_Thread.interrupt_handler(_ => process.terminate()) { Exn.capture { process.await_startup() } match { case Exn.Res(_) => val resources_yxml = resources.init_session_yxml val encode_options: XML.Encode.T[Options] = options => session.prover_options(options).encode val args_yxml = YXML.string_of_body( { import XML.Encode._ pair(string, list(pair(encode_options, list(pair(string, properties)))))( (session_name, info.theories)) }) session.protocol_command("build_session", resources_yxml, args_yxml) Build_Session_Errors.result case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn))) } } val process_result = Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() } session.stop() val export_errors = export_consumer.shutdown(close = true).map(Output.error_message_text) val (document_output, document_errors) = try { if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty) { using(store.open_database_context()) { db_context => val documents = Document_Build.build_documents( Document_Build.context(session_name, deps, db_context, progress = progress), output_sources = info.document_output, output_pdf = info.document_output) db_context.database_output(session_name)(db => documents.foreach(_.write(db, session_name))) (documents.flatMap(_.log_lines), Nil) } } else (Nil, Nil) } catch { case exn: Document_Build.Build_Error => (exn.log_lines, List(exn.message)) case Exn.Interrupt.ERROR(msg) => (Nil, List(msg)) } val result = { val theory_timing = theory_timings.iterator.flatMap( { case props @ Markup.Name(name) => Some(name -> props) case _ => None }).toMap val used_theory_timings = for { (name, _) <- deps(session_name).used_theories } yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory)) val more_output = Library.trim_line(stdout.toString) :: command_timings.toList.map(Protocol.Command_Timing_Marker.apply) ::: used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) ::: session_timings.toList.map(Protocol.Session_Timing_Marker.apply) ::: runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) ::: task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) ::: document_output process_result.output(more_output) .error(Library.trim_line(stderr.toString)) .errors_rc(export_errors ::: document_errors) } build_errors match { case Exn.Res(build_errs) => val errs = build_errs ::: document_errors if (errs.nonEmpty) { result.error_rc.output( errs.flatMap(s => split_lines(Output.error_message_text(s))) ::: errs.map(Protocol.Error_Message_Marker.apply)) } else if (progress.stopped && result.ok) result.copy(rc = Process_Result.RC.interrupt) else result case Exn.Exn(Exn.Interrupt()) => if (result.ok) result.copy(rc = Process_Result.RC.interrupt) else result case Exn.Exn(exn) => throw exn } } def terminate(): Unit = future_result.cancel() def is_finished: Boolean = future_result.is_finished private val timeout_request: Option[Event_Timer.Request] = { if (info.timeout_ignored) None else Some(Event_Timer.request(Time.now() + info.timeout) { terminate() }) } def join: (Process_Result, Option[String]) = { val result1 = future_result.join val was_timeout = timeout_request match { case None => false case Some(request) => !request.cancel() } val result2 = if (result1.ok) result1 else if (was_timeout) 
result1.error(Output.error_message_text("Timeout")).timeout_rc else if (result1.interrupted) result1.error(Output.error_message_text("Interrupt")) else result1 val heap_digest = if (result2.ok && do_store && store.output_heap(session_name).is_file) Some(Sessions.write_heap_digest(store.output_heap(session_name))) else None (result2, heap_digest) } } diff --git a/src/Pure/Tools/profiling_report.scala b/src/Pure/Tools/profiling_report.scala --- a/src/Pure/Tools/profiling_report.scala +++ b/src/Pure/Tools/profiling_report.scala @@ -1,81 +1,80 @@ /* Title: Pure/Tools/profiling_report.scala Author: Makarius Report Poly/ML profiling information from session build database. */ package isabelle object Profiling_Report { def profiling_report( options: Options, session: String, theories: List[String] = Nil, clean_name: Boolean = false, progress: Progress = new Progress ): Unit = { val store = Sessions.store(options) using(store.open_database_context()) { db_context => - val result = db_context.database(session)(db => Some(store.read_theories(db, session))) - result match { + db_context.database(session)(db => Some(store.read_theories(db, session))) match { case None => error("Missing build database for session " + quote(session)) case Some(used_theories) => theories.filterNot(used_theories.toSet) match { case Nil => case bad => error("Unknown theories " + commas_quote(bad)) } val reports = (for { thy <- used_theories.iterator if theories.isEmpty || theories.contains(thy) - command <- Build_Job.read_theory(db_context, List(session), thy).iterator + command <- Build_Job.read_session_theory(db_context, session, thy).iterator snapshot = Document.State.init.snippet(command) (Protocol.ML_Profiling(report), _) <- snapshot.messages.iterator } yield if (clean_name) report.clean_name else report).toList for (report <- ML_Profiling.account(reports)) progress.echo(report.print) } } } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("profiling_report", "report Poly/ML profiling information from session build database", Scala_Project.here, { args => var theories: List[String] = Nil var clean_name = false var options = Options.init() val getopts = Getopts(""" Usage: isabelle profiling_report [OPTIONS] SESSION Options are: -T NAME restrict to given theories (multiple options possible) -c clean function names -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) Report Poly/ML profiling information from the build database of the given session (without up-to-date check of sources). """, "T:" -> (arg => theories = theories ::: List(arg)), "c" -> (_ => clean_name = true), "o:" -> (arg => options = options + arg)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) => session_name case _ => getopts.usage() } val progress = new Console_Progress() profiling_report(options, session_name, theories = theories, clean_name = clean_name, progress = progress) }) }
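
The hunks above replace direct db_context.get_export / read_theory(db_context, List(session), ...) calls with the new Export session/theory contexts (Export.open_context, open_session, Theory_Context). The following standalone sketch is not part of this changeset; it only illustrates how a client could read one theory's exports through that API, assuming the signatures exactly as they appear in the patch; the object name Print_Theory_Exports is hypothetical.

package isabelle

/* usage sketch only (not part of this changeset): read exports of a single
   theory via the new Export contexts, with signatures as shown in the patch */
object Print_Theory_Exports {
  def apply(
    options: Options,
    session: String,
    theory: String,
    progress: Progress = new Progress
  ): Unit = {
    val store = Sessions.store(options)
    using(Export.open_context(store)) { export_context =>
      // following read_session_theory above: an empty base info is enough for reading exports
      val session_base_info = Sessions.base_info_empty(session)
      using(export_context.open_session(session_base_info)) { session_context =>
        val theory_context = session_context.theory(theory)
        // permissive = true is assumed to tolerate a missing export instead of failing
        val markup = theory_context(Export.MARKUP, permissive = true)
        progress.echo("theory file: " +
          theory_context.files().map(_._1).getOrElse("(no " + Export.FILES + " export)"))
        progress.echo("document id: " +
          theory_context.document_id().map(_.toString).getOrElse("(none)"))
        progress.echo(Export.MARKUP + ": " + markup.uncompressed.length + " bytes")
      }
    }
  }
}

As in the build.scala hunk above (where export_context.db_context is reused for presentation), the nesting means one export context, and hence one open database context, can serve any number of per-session and per-theory reads before it is closed.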