diff --git a/src/Pure/Thy/document_build.scala b/src/Pure/Thy/document_build.scala --- a/src/Pure/Thy/document_build.scala +++ b/src/Pure/Thy/document_build.scala @@ -1,549 +1,548 @@ /* Title: Pure/Thy/document_build.scala Author: Makarius Build theory document (PDF) from session database. */ package isabelle object Document_Build { /* document variants */ object Content { def apply(path: Path, content: Bytes): Content = new Content_Bytes(path, content) def apply(path: Path, content: String): Content = new Content_String(path, content) } trait Content { def write(dir: Path): Unit } final class Content_Bytes private[Document_Build](path: Path, content: Bytes) extends Content { def write(dir: Path): Unit = Bytes.write(dir + path, content) } final class Content_String private[Document_Build](path: Path, content: String) extends Content { def write(dir: Path): Unit = File.write(dir + path, content) } abstract class Document_Name { def name: String def path: Path = Path.basic(name) override def toString: String = name } object Document_Variant { def parse(name: String, tags: String): Document_Variant = Document_Variant(name, Library.space_explode(',', tags)) def parse(opt: String): Document_Variant = Library.space_explode('=', opt) match { case List(name) => Document_Variant(name, Nil) case List(name, tags) => parse(name, tags) case _ => error("Malformed document variant: " + quote(opt)) } } sealed case class Document_Variant(name: String, tags: List[String]) extends Document_Name { def print_tags: String = tags.mkString(",") def print: String = if (tags.isEmpty) name else name + "=" + print_tags def isabelletags: Content = { val path = Path.explode("isabelletags.sty") val content = Library.terminate_lines( tags.map(tag => tag.toList match { case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}" case '-' :: cs => "\\isadroptag{" + cs.mkString + "}" case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}" case cs => "\\isakeeptag{" + cs.mkString + "}" })) Content(path, content) } } sealed case class Document_Input(name: String, sources: SHA1.Digest) extends Document_Name sealed case class Document_Output(name: String, sources: SHA1.Digest, log_xz: Bytes, pdf: Bytes) extends Document_Name { def log: String = log_xz.uncompress().text def log_lines: List[String] = split_lines(log) def write(db: SQL.Database, session_name: String): Unit = write_document(db, session_name, this) def write(dir: Path): Path = { val path = dir + Path.basic(name).pdf Isabelle_System.make_directory(path.expand.dir) Bytes.write(path, pdf) path } } /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val sources = SQL.Column.string("sources") val log_xz = SQL.Column.bytes("log_xz") val pdf = SQL.Column.bytes("pdf") val table = SQL.Table("isabelle_documents", List(session_name, name, sources, log_xz, pdf)) def where_equal(session_name: String, name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_documents(db: SQL.Database, session_name: String): List[Document_Input] = { val select = Data.table.select(List(Data.name, Data.sources), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => { val name = res.string(Data.name) val sources = res.string(Data.sources) Document_Input(name, SHA1.fake(sources)) }).toList) } def read_document(db: SQL.Database, session_name: String, name: 
String): Option[Document_Output] = { val select = Data.table.select(sql = Data.where_equal(session_name, name)) db.using_statement(select)(stmt => { val res = stmt.execute_query() if (res.next()) { val name = res.string(Data.name) val sources = res.string(Data.sources) val log_xz = res.bytes(Data.log_xz) val pdf = res.bytes(Data.pdf) Some(Document_Output(name, SHA1.fake(sources), log_xz, pdf)) } else None }) } def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit = { db.using_statement(Data.table.insert())(stmt => { stmt.string(1) = session_name stmt.string(2) = doc.name stmt.string(3) = doc.sources.toString stmt.bytes(4) = doc.log_xz stmt.bytes(5) = doc.pdf stmt.execute() }) } /* context */ val isabelle_styles: List[Path] = List("comment.sty", "isabelle.sty", "isabellesym.sty", "pdfsetup.sty", "railsetup.sty"). map(name => Path.explode("~~/lib/texinputs") + Path.basic(name)) def context( session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress): Context = { val info = deps.sessions_structure(session) val base = deps(session) val hierarchy = deps.sessions_structure.hierarchy(session) new Context(info, base, hierarchy, db_context, progress) } final class Context private[Document_Build]( info: Sessions.Info, base: Sessions.Base, hierarchy: List[String], db_context: Sessions.Database_Context, val progress: Progress = new Progress) { /* session info */ def session: String = info.name def options: Options = info.options def document_bibliography: Boolean = options.bool("document_bibliography") def document_preprocessor: Option[String] = proper_string(options.string("document_preprocessor")) def document_logo: Option[String] = options.string("document_logo") match { case "" => None case "_" => Some("") case name => Some(name) } def document_build: String = options.string("document_build") def get_engine(): Engine = { val name = document_build engines.find(_.name == name).getOrElse(error("Bad document_build engine " + quote(name))) } def get_export(theory: String, name: String): Export.Entry = db_context.get_export(hierarchy, theory, name) /* document content */ def documents: List[Document_Variant] = info.documents def session_theories: List[Document.Node.Name] = base.session_theories def document_theories: List[Document.Node.Name] = session_theories ::: base.document_theories lazy val tex_files: List[Content] = for (name <- document_theories) yield { val path = Path.basic(tex_name(name)) - val xml = YXML.parse_body(get_export(name.theory, document_tex_name(name)).text) + val xml = YXML.parse_body(get_export(name.theory, Export.DOCUMENT_LATEX).text) val content = Latex.output(xml, file_pos = name.path.implode_symbolic) Content(path, content) } lazy val session_graph: Content = { val path = Presentation.session_graph_path val content = graphview.Graph_File.make_pdf(options, base.session_graph_display) Content(path, content) } lazy val session_tex: Content = { val path = Path.basic("session.tex") val content = Library.terminate_lines( base.session_theories.map(name => "\\input{" + tex_name(name) + "}")) Content(path, content) } lazy val isabelle_logo: Option[Content] = { document_logo.map(logo_name => Isabelle_System.with_tmp_file("logo", ext = "pdf")(tmp_path => { Logo.create_logo(logo_name, output_file = tmp_path, quiet = true) val path = Path.basic("isabelle_logo.pdf") val content = Bytes.read(tmp_path) Content(path, content) })) } /* document directory */ def prepare_directory(dir: Path, doc: 
Document_Variant): Directory = { val doc_dir = dir + Path.basic(doc.name) Isabelle_System.make_directory(doc_dir) /* sources */ isabelle_styles.foreach(Isabelle_System.copy_file(_, doc_dir)) doc.isabelletags.write(doc_dir) for ((base_dir, src) <- info.document_files) { Isabelle_System.copy_file_base(info.dir + base_dir, src, doc_dir) } session_tex.write(doc_dir) tex_files.foreach(_.write(doc_dir)) val root_name1 = "root_" + doc.name val root_name = if ((doc_dir + Path.explode(root_name1).tex).is_file) root_name1 else "root" for (name <- document_preprocessor) { def message(s: String): String = s + " for document_preprocessor=" + quote(name) val path = doc_dir + Path.explode(name) if (path.is_file) { try { Isabelle_System.bash(File.bash_path(path), cwd = doc_dir.file).check } catch { case ERROR(msg) => cat_error(msg, message("The error(s) above occurred")) } } else error(message("Missing executable")) } val digests1 = List(doc.print, document_logo.toString, document_build).map(SHA1.digest) val digests2 = File.find_files(doc_dir.file, follow_links = true).map(SHA1.digest) val sources = SHA1.digest_set(digests1 ::: digests2) /* derived material (without SHA1 digest) */ isabelle_logo.foreach(_.write(doc_dir)) session_graph.write(doc_dir) Directory(doc_dir, doc, root_name, sources) } def old_document(directory: Directory): Option[Document_Output] = for { old_doc <- db_context.input_database(session)(read_document(_, _, directory.doc.name)) if old_doc.sources == directory.sources } yield old_doc } sealed case class Directory( doc_dir: Path, doc: Document_Variant, root_name: String, sources: SHA1.Digest) { def root_name_script(ext: String = ""): String = Bash.string(if (ext.isEmpty) root_name else root_name + "." + ext) def conditional_script(ext: String, exe: String, after: String = ""): String = "if [ -f " + root_name_script(ext) + " ]\n" + "then\n" + " " + exe + " " + root_name_script() + "\n" + (if (after.isEmpty) "" else " " + after) + "fi\n" def log_errors(): List[String] = Latex.latex_errors(doc_dir, root_name) ::: Bibtex.bibtex_errors(doc_dir, root_name) def make_document(log: List[String], errors: List[String]): Document_Output = { val root_pdf = Path.basic(root_name).pdf val result_pdf = doc_dir + root_pdf if (errors.nonEmpty) { val errors1 = errors ::: List("Failed to build document " + quote(doc.name)) throw new Build_Error(log, Exn.cat_message(errors1: _*)) } else if (!result_pdf.is_file) { val message = "Bad document result: expected to find " + root_pdf throw new Build_Error(log, message) } else { val log_xz = Bytes(cat_lines(log)).compress() val pdf = Bytes.read(result_pdf) Document_Output(doc.name, sources, log_xz, pdf) } } } /* build engines */ lazy val engines: List[Engine] = Isabelle_System.make_services(classOf[Engine]) abstract class Engine(val name: String) extends Isabelle_System.Service { override def toString: String = name def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory def build_document(context: Context, directory: Directory, verbose: Boolean): Document_Output } abstract class Bash_Engine(name: String) extends Engine(name) { def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory = context.prepare_directory(dir, doc) def use_pdflatex: Boolean = false def latex_script(context: Context, directory: Directory): String = (if (use_pdflatex) "$ISABELLE_PDFLATEX" else "$ISABELLE_LUALATEX") + " " + directory.root_name_script() + "\n" def bibtex_script(context: Context, directory: Directory, latex: Boolean = 
false): String = { val ext = if (context.document_bibliography) "aux" else "bib" directory.conditional_script(ext, "$ISABELLE_BIBTEX", after = if (latex) latex_script(context, directory) else "") } def makeindex_script(context: Context, directory: Directory, latex: Boolean = false): String = directory.conditional_script("idx", "$ISABELLE_MAKEINDEX", after = if (latex) latex_script(context, directory) else "") def use_build_script: Boolean = false def build_script(context: Context, directory: Directory): String = { val has_build_script = (directory.doc_dir + Path.explode("build")).is_file if (!use_build_script && has_build_script) { error("Unexpected document build script for option document_build=" + quote(context.document_build)) } else if (use_build_script && !has_build_script) error("Missing document build script") else if (has_build_script) "./build pdf " + Bash.string(directory.doc.name) else { "set -e\n" + latex_script(context, directory) + bibtex_script(context, directory, latex = true) + makeindex_script(context, directory) + latex_script(context, directory) + makeindex_script(context, directory, latex = true) } } def build_document(context: Context, directory: Directory, verbose: Boolean): Document_Output = { val result = context.progress.bash( build_script(context, directory), cwd = directory.doc_dir.file, echo = verbose, watchdog = Time.seconds(0.5)) val log = result.out_lines ::: result.err_lines val errors = (if (result.ok) Nil else List(result.err)) ::: directory.log_errors() directory.make_document(log, errors) } } class LuaLaTeX_Engine extends Bash_Engine("lualatex") class PDFLaTeX_Engine extends Bash_Engine("pdflatex") { override def use_pdflatex: Boolean = true } class Build_Engine extends Bash_Engine("build") { override def use_build_script: Boolean = true } /* build documents */ def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" - def document_tex_name(name: Document.Node.Name): String = Export.DOCUMENT_PREFIX + tex_name(name) class Build_Error(val log_lines: List[String], val message: String) extends Exn.User_Error(message) def build_documents( context: Context, output_sources: Option[Path] = None, output_pdf: Option[Path] = None, verbose: Boolean = false): List[Document_Output] = { val progress = context.progress val engine = context.get_engine() val documents = for (doc <- context.documents) yield { Isabelle_System.with_tmp_dir("document")(tmp_dir => { progress.echo("Preparing " + context.session + "/" + doc.name + " ...") val start = Time.now() output_sources.foreach(engine.prepare_directory(context, _, doc)) val directory = engine.prepare_directory(context, tmp_dir, doc) val document = context.old_document(directory) getOrElse engine.build_document(context, directory, verbose) val stop = Time.now() val timing = stop - start progress.echo("Finished " + context.session + "/" + doc.name + " (" + timing.message_hms + " elapsed time)") document }) } for (dir <- output_pdf; doc <- documents) { val path = doc.write(dir) progress.echo("Document at " + path.absolute) } documents } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", Scala_Project.here, args => { var output_sources: Option[Path] = None var output_pdf: Option[Path] = None var verbose_latex = false var dirs: List[Path] = Nil var options = Options.init() var verbose_build = false val getopts = Getopts( """ Usage: isabelle document [OPTIONS] SESSION Options are: -O DIR output directory for LaTeX sources and resulting PDF -P DIR 
output directory for resulting PDF -S DIR output directory for LaTeX sources -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. """, "O:" -> (arg => { val dir = Path.explode(arg) output_sources = Some(dir) output_pdf = Some(dir) }), "P:" -> (arg => { output_pdf = Some(Path.explode(arg)) }), "S:" -> (arg => { output_sources = Some(Path.explode(arg)) }), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) if (output_sources.isEmpty && output_pdf.isEmpty) { progress.echo_warning("No output directory") } using(store.open_database_context())(db_context => { build_documents(context(session, deps, db_context, progress = progress), output_sources = output_sources, output_pdf = output_pdf, verbose = verbose_latex) }) } }) } diff --git a/src/Pure/Thy/export.scala b/src/Pure/Thy/export.scala --- a/src/Pure/Thy/export.scala +++ b/src/Pure/Thy/export.scala @@ -1,458 +1,459 @@ /* Title: Pure/Thy/export.scala Author: Makarius Manage theory exports: compressed blobs. */ package isabelle import scala.annotation.tailrec import scala.util.matching.Regex object Export { /* artefact names */ val DOCUMENT_ID = "PIDE/document_id" val FILES = "PIDE/files" val MARKUP = "PIDE/markup" val MESSAGES = "PIDE/messages" val DOCUMENT_PREFIX = "document/" - val CITATIONS = DOCUMENT_PREFIX + "citations" + val DOCUMENT_LATEX = DOCUMENT_PREFIX + "latex" + val DOCUMENT_CITATIONS = DOCUMENT_PREFIX + "citations" val THEORY_PREFIX: String = "theory/" val PROOFS_PREFIX: String = "proofs/" def explode_name(s: String): List[String] = space_explode('/', s) def implode_name(elems: Iterable[String]): String = elems.mkString("/") /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val theory_name = SQL.Column.string("theory_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val executable = SQL.Column.bool("executable") val compressed = SQL.Column.bool("compressed") val body = SQL.Column.bytes("body") val table = SQL.Table("isabelle_exports", List(session_name, theory_name, name, executable, compressed, body)) def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (theory_name == "") "" else " AND " + Data.theory_name.equal(theory_name)) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_name(db: SQL.Database, session_name: String, theory_name: String, name: String): Boolean = { val select = Data.table.select(List(Data.name), Data.where_equal(session_name, theory_name, name)) db.using_statement(select)(stmt => stmt.execute_query().next()) } def read_names(db: SQL.Database, session_name: String, theory_name: String): List[String] = { val select = 
Data.table.select(List(Data.name), Data.where_equal(session_name, theory_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => res.string(Data.name)).toList) } def read_theory_names(db: SQL.Database, session_name: String): List[String] = { val select = Data.table.select(List(Data.theory_name), Data.where_equal(session_name), distinct = true) db.using_statement(select)(stmt => stmt.execute_query().iterator(_.string(Data.theory_name)).toList) } def read_theory_exports(db: SQL.Database, session_name: String): List[(String, String)] = { val select = Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => (res.string(Data.theory_name), res.string(Data.name))).toList) } def message(msg: String, theory_name: String, name: String): String = msg + " " + quote(name) + " for theory " + quote(theory_name) def compound_name(a: String, b: String): String = if (a.isEmpty) b else a + ":" + b def empty_entry(theory_name: String, name: String): Entry = Entry("", theory_name, name, false, Future.value(false, Bytes.empty), XML.Cache.none) sealed case class Entry( session_name: String, theory_name: String, name: String, executable: Boolean, body: Future[(Boolean, Bytes)], cache: XML.Cache) { override def toString: String = name def compound_name: String = Export.compound_name(theory_name, name) def name_has_prefix(s: String): Boolean = name.startsWith(s) val name_elems: List[String] = explode_name(name) def name_extends(elems: List[String]): Boolean = name_elems.startsWith(elems) && name_elems != elems def text: String = uncompressed.text def uncompressed: Bytes = { val (compressed, bytes) = body.join if (compressed) bytes.uncompress(cache = cache.xz) else bytes } def uncompressed_yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(uncompressed), cache = cache) def write(db: SQL.Database): Unit = { val (compressed, bytes) = body.join db.using_statement(Data.table.insert())(stmt => { stmt.string(1) = session_name stmt.string(2) = theory_name stmt.string(3) = name stmt.bool(4) = executable stmt.bool(5) = compressed stmt.bytes(6) = bytes stmt.execute() }) } } def make_regex(pattern: String): Regex = { @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex = chs match { case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest) case '*' :: rest => make("[^:/]*" :: result, depth, rest) case '?' 
:: rest => make("[^:/]" :: result, depth, rest) case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest) case '{' :: rest => make("(" :: result, depth + 1, rest) case ',' :: rest if depth > 0 => make("|" :: result, depth, rest) case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest) case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest) case c :: rest => make(c.toString :: result, depth, rest) case Nil => result.reverse.mkString.r } make(Nil, 0, pattern.toList) } def make_matcher(pattern: String): (String, String) => Boolean = { val regex = make_regex(pattern) (theory_name: String, name: String) => regex.pattern.matcher(compound_name(theory_name, name)).matches } def make_entry( session_name: String, args: Protocol.Export.Args, bytes: Bytes, cache: XML.Cache): Entry = { val body = if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.xz)) else Future.value((false, bytes)) Entry(session_name, args.theory_name, args.name, args.executable, body, cache) } def read_entry(db: SQL.Database, cache: XML.Cache, session_name: String, theory_name: String, name: String): Option[Entry] = { val select = Data.table.select(List(Data.executable, Data.compressed, Data.body), Data.where_equal(session_name, theory_name, name)) db.using_statement(select)(stmt => { val res = stmt.execute_query() if (res.next()) { val executable = res.bool(Data.executable) val compressed = res.bool(Data.compressed) val bytes = res.bytes(Data.body) val body = Future.value(compressed, bytes) Some(Entry(session_name, theory_name, name, executable, body, cache)) } else None }) } def read_entry(dir: Path, cache: XML.Cache, session_name: String, theory_name: String, name: String): Option[Entry] = { val path = dir + Path.basic(theory_name) + Path.explode(name) if (path.is_file) { val executable = File.is_executable(path) val uncompressed = Bytes.read(path) val body = Future.value((false, uncompressed)) Some(Entry(session_name, theory_name, name, executable, body, cache)) } else None } /* database consumer thread */ def consumer(db: SQL.Database, cache: XML.Cache): Consumer = new Consumer(db, cache) class Consumer private[Export](db: SQL.Database, cache: XML.Cache) { private val errors = Synchronized[List[String]](Nil) private val consumer = Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")( bulk = { case (entry, _) => entry.body.is_finished }, consume = (args: List[(Entry, Boolean)]) => { val results = db.transaction { for ((entry, strict) <- args) yield { if (read_name(db, entry.session_name, entry.theory_name, entry.name)) { if (strict) { val msg = message("Duplicate export", entry.theory_name, entry.name) errors.change(msg :: _) } Exn.Res(()) } else Exn.capture { entry.write(db) } } } (results, true) }) def apply(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = consumer.send(make_entry(session_name, args, body, cache) -> args.strict) def shutdown(close: Boolean = false): List[String] = { consumer.shutdown() if (close) db.close() errors.value.reverse } } /* abstract provider */ object Provider { def none: Provider = new Provider { def apply(export_name: String): Option[Entry] = None def focus(other_theory: String): Provider = this override def toString: String = "none" } def database_context( context: Sessions.Database_Context, sessions: List[String], theory_name: String): Provider = new Provider { def apply(export_name: String): Option[Entry] = context.read_export(sessions, theory_name, export_name) def focus(other_theory: String): Provider = 
this override def toString: String = context.toString } def database(db: SQL.Database, cache: XML.Cache, session_name: String, theory_name: String) : Provider = { new Provider { def apply(export_name: String): Option[Entry] = read_entry(db, cache, session_name, theory_name, export_name) def focus(other_theory: String): Provider = if (other_theory == theory_name) this else Provider.database(db, cache, session_name, other_theory) override def toString: String = db.toString } } def snapshot(snapshot: Document.Snapshot): Provider = new Provider { def apply(export_name: String): Option[Entry] = snapshot.exports_map.get(export_name) def focus(other_theory: String): Provider = if (other_theory == snapshot.node_name.theory) this else { val node_name = snapshot.version.nodes.theory_name(other_theory) getOrElse error("Bad theory " + quote(other_theory)) Provider.snapshot(snapshot.state.snapshot(node_name)) } override def toString: String = snapshot.toString } def directory(dir: Path, cache: XML.Cache, session_name: String, theory_name: String) : Provider = { new Provider { def apply(export_name: String): Option[Entry] = read_entry(dir, cache, session_name, theory_name, export_name) def focus(other_theory: String): Provider = if (other_theory == theory_name) this else Provider.directory(dir, cache, session_name, other_theory) override def toString: String = dir.toString } } } trait Provider { def apply(export_name: String): Option[Entry] def uncompressed_yxml(export_name: String): XML.Body = apply(export_name) match { case Some(entry) => entry.uncompressed_yxml case None => Nil } def focus(other_theory: String): Provider } /* export to file-system */ def export_files( store: Sessions.Store, session_name: String, export_dir: Path, progress: Progress = new Progress, export_prune: Int = 0, export_list: Boolean = false, export_patterns: List[String] = Nil): Unit = { using(store.open_database(session_name))(db => { db.transaction { val export_names = read_theory_exports(db, session_name) // list if (export_list) { (for ((theory_name, name) <- export_names) yield compound_name(theory_name, name)). 
sorted.foreach(progress.echo) } // export if (export_patterns.nonEmpty) { val exports = (for { export_pattern <- export_patterns.iterator matcher = make_matcher(export_pattern) (theory_name, name) <- export_names if matcher(theory_name, name) } yield (theory_name, name)).toSet for { (theory_name, group) <- exports.toList.groupBy(_._1).toList.sortBy(_._1) name <- group.map(_._2).sorted entry <- read_entry(db, store.cache, session_name, theory_name, name) } { val elems = theory_name :: space_explode('/', name) val path = if (elems.length < export_prune + 1) { error("Cannot prune path by " + export_prune + " element(s): " + Path.make(elems)) } else export_dir + Path.make(elems.drop(export_prune)) progress.echo("export " + path + (if (entry.executable) " (executable)" else "")) Isabelle_System.make_directory(path.dir) Bytes.write(path, entry.uncompressed) File.set_executable(path, entry.executable) } } } }) } /* Isabelle tool wrapper */ val default_export_dir: Path = Path.explode("export") val isabelle_tool = Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here, args => { /* arguments */ var export_dir = default_export_dir var dirs: List[Path] = Nil var export_list = false var no_build = false var options = Options.init() var export_prune = 0 var export_patterns: List[String] = Nil val getopts = Getopts(""" Usage: isabelle export [OPTIONS] SESSION Options are: -O DIR output directory for exported files (default: """ + default_export_dir + """) -d DIR include session directory -l list exports -n no build of session -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -p NUM prune path of exported files by NUM elements -x PATTERN extract files matching pattern (e.g. "*:**" for all) List or export theory exports for SESSION: named blobs produced by isabelle build. Option -l or -x is required; option -x may be repeated. The PATTERN language resembles glob patterns in the shell, with ? and * (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc], and variants {pattern1,pattern2,pattern3}. """, "O:" -> (arg => export_dir = Path.explode(arg)), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "l" -> (_ => export_list = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "p:" -> (arg => export_prune = Value.Int.parse(arg)), "x:" -> (arg => export_patterns ::= arg)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) if export_list || export_patterns.nonEmpty => session_name case _ => getopts.usage() } val progress = new Console_Progress() /* build */ if (!no_build) { val rc = progress.interrupt_handler { Build.build_logic(options, session_name, progress = progress, dirs = dirs) } if (rc != 0) sys.exit(rc) } /* export files */ val store = Sessions.store(options) export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune, export_list = export_list, export_patterns = export_patterns) }) } diff --git a/src/Pure/Thy/thy_info.ML b/src/Pure/Thy/thy_info.ML --- a/src/Pure/Thy/thy_info.ML +++ b/src/Pure/Thy/thy_info.ML @@ -1,481 +1,472 @@ (* Title: Pure/Thy/thy_info.ML Author: Makarius Global theory info database, with auto-loading according to theory and file dependencies, and presentation of theory documents. 
*) signature THY_INFO = sig type presentation_context = {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T, segments: Document_Output.segment list} val adjust_pos_properties: presentation_context -> Position.T -> Properties.T val apply_presentation: presentation_context -> theory -> unit val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory val get_names: unit -> string list val lookup_theory: string -> theory option val get_theory: string -> theory val master_directory: string -> Path.T val remove_thy: string -> unit val use_theories: Options.T -> string -> (string * Position.T) list -> unit val use_thy: string -> unit val script_thy: Position.T -> string -> theory -> theory val register_thy: theory -> unit val finish: unit -> unit end; structure Thy_Info: THY_INFO = struct (** theory presentation **) -(* artefact names *) - -fun document_name ext thy = - Path.explode_binding0 ("document/" ^ Context.theory_name thy ^ "." ^ ext); - -val document_tex_name = document_name "tex"; - - (* hook for consolidated theory *) type presentation_context = {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T, segments: Document_Output.segment list}; fun adjust_pos_properties (context: presentation_context) pos = Position.offset_properties_of (#adjust_pos context pos) @ Position.id_properties_of pos; structure Presentation = Theory_Data ( type T = ((presentation_context -> theory -> unit) * stamp) list; val empty = []; val extend = I; fun merge data : T = Library.merge (eq_snd op =) data; ); fun sequential_presentation options = not (Options.bool options \<^system_option>\parallel_presentation\); fun apply_presentation (context: presentation_context) thy = Par_List.map' {name = "apply_presentation", sequential = sequential_presentation (#options context)} (fn (f, _) => f context thy) (Presentation.get thy) |> ignore; fun add_presentation f = Presentation.map (cons (f, stamp ())); val _ = Theory.setup (add_presentation (fn {options, file_pos, segments, ...} => fn thy => if exists (Toplevel.is_skipped_proof o #state) segments then () else let val body = Document_Output.present_thy options thy segments; in if Options.string options "document" = "false" then () else let val thy_name = Context.theory_name thy; - val document_tex_name = document_tex_name thy; val latex = Latex.isabelle_body thy_name body; - in Export.export thy document_tex_name latex end + in Export.export thy \<^path_binding>\document/latex\ latex end end)); (** thy database **) (* messages *) val show_path = space_implode " via " o map quote; fun cycle_msg names = "Cyclic dependency of " ^ show_path names; (* derived graph operations *) fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess)); fun new_entry name parents entry = String_Graph.new_node (name, entry) #> add_deps name parents; (* global thys *) type deps = {master: (Path.T * SHA1.digest), (*master dependencies for thy file*) imports: (string * Position.T) list}; (*source specification of imports (partially qualified)*) fun make_deps master imports : deps = {master = master, imports = imports}; fun master_dir_deps (d: deps option) = the_default Path.current (Option.map (Path.dir o #1 o #master) d); local val global_thys = Synchronized.var "Thy_Info.thys" (String_Graph.empty: (deps option * theory option) String_Graph.T); in fun get_thys () = Synchronized.value global_thys; fun change_thys f = 
Synchronized.change global_thys f; end; fun get_names () = String_Graph.topological_order (get_thys ()); (* access thy *) fun lookup thys name = try (String_Graph.get_node thys) name; fun lookup_thy name = lookup (get_thys ()) name; fun get thys name = (case lookup thys name of SOME thy => thy | NONE => error ("Theory loader: nothing known about theory " ^ quote name)); fun get_thy name = get (get_thys ()) name; (* access deps *) val lookup_deps = Option.map #1 o lookup_thy; val master_directory = master_dir_deps o #1 o get_thy; (* access theory *) fun lookup_theory name = (case lookup_thy name of SOME (_, SOME theory) => SOME theory | _ => NONE); fun get_theory name = (case lookup_theory name of SOME theory => theory | _ => error ("Theory loader: undefined entry for theory " ^ quote name)); val get_imports = Resources.imports_of o get_theory; (** thy operations **) (* remove *) fun remove name thys = (case lookup thys name of NONE => thys | SOME (NONE, _) => error ("Cannot update finished theory " ^ quote name) | SOME _ => let val succs = String_Graph.all_succs thys [name]; val _ = writeln ("Theory loader: removing " ^ commas_quote succs); in fold String_Graph.del_node succs thys end); val remove_thy = change_thys o remove; (* update *) fun update deps theory thys = let val name = Context.theory_long_name theory; val parents = map Context.theory_long_name (Theory.parents_of theory); val thys' = remove name thys; val _ = map (get thys') parents; in new_entry name parents (SOME deps, SOME theory) thys' end; fun update_thy deps theory = change_thys (update deps theory); (* scheduling loader tasks *) datatype result = Result of {theory: theory, exec_id: Document_ID.exec, present: unit -> unit, commit: unit -> unit, weight: int}; fun theory_result theory = Result {theory = theory, exec_id = Document_ID.none, present = I, commit = I, weight = 0}; fun result_theory (Result {theory, ...}) = theory; fun result_present (Result {present, ...}) = present; fun result_commit (Result {commit, ...}) = commit; fun result_ord (Result {weight = i, ...}, Result {weight = j, ...}) = int_ord (j, i); fun join_theory (Result {theory, exec_id, ...}) = let val _ = Execution.join [exec_id]; val res = Exn.capture Thm.consolidate_theory theory; val exns = maps Task_Queue.group_status (Execution.peek exec_id); in res :: map Exn.Exn exns end; datatype task = Task of string list * (theory list -> result) | Finished of theory; fun task_finished (Task _) = false | task_finished (Finished _) = true; fun task_parents deps (parents: string list) = map (the o AList.lookup (op =) deps) parents; val schedule_seq = String_Graph.schedule (fn deps => fn (_, task) => (case task of Task (parents, body) => let val result = body (task_parents deps parents); val _ = Par_Exn.release_all (join_theory result); val _ = result_present result (); val _ = result_commit result (); in result_theory result end | Finished thy => thy)) #> ignore; val schedule_futures = Thread_Attributes.uninterruptible (fn _ => fn tasks => let val futures = tasks |> String_Graph.schedule (fn deps => fn (name, task) => (case task of Task (parents, body) => (singleton o Future.forks) {name = "theory:" ^ name, group = NONE, deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true} (fn () => (case filter (not o can Future.join o #2) deps of [] => body (map (result_theory o Future.join) (task_parents deps parents)) | bad => error ("Failed to load theory " ^ quote name ^ " (unresolved " ^ commas_quote (map #1 bad) ^ ")"))) | Finished theory => Future.value 
(theory_result theory))); val results1 = futures |> maps (fn future => (case Future.join_result future of Exn.Res result => join_theory result | Exn.Exn exn => [Exn.Exn exn])); val results2 = futures |> map_filter (Exn.get_res o Future.join_result) |> sort result_ord |> Par_List.map' {name = "present", sequential = sequential_presentation (Options.default ())} (fn result => Exn.capture (result_present result) ()); (* FIXME more precise commit order (!?) *) val results3 = futures |> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ()); (* FIXME avoid global Execution.reset (!??) *) val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ())); val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4); in () end); (* eval theory *) fun eval_thy options master_dir header text_pos text parents = let val (name, _) = #name header; val keywords = fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents (Keyword.add_keywords (#keywords header) Keyword.empty_keywords); val spans = Outer_Syntax.parse_spans (Token.explode keywords text_pos text); val elements = Thy_Element.parse_elements keywords spans; val text_id = Position.copy_id text_pos Position.none; fun init () = Resources.begin_theory master_dir header parents; fun excursion () = let fun element_result span_elem (st, _) = let fun prepare span = let val tr = Position.setmp_thread_data text_id (fn () => Command.read_span keywords st master_dir init span) (); in Toplevel.timing (Resources.last_timing tr) tr end; val elem = Thy_Element.map_element prepare span_elem; val (results, st') = Toplevel.element_result keywords elem st; val pos' = Toplevel.pos_of (Thy_Element.last_element elem); in (results, (st', pos')) end; val (results, (end_state, end_pos)) = fold_map element_result elements (Toplevel.init_toplevel (), Position.none); val thy = Toplevel.end_theory end_pos end_state; in (results, thy) end; val (results, thy) = cond_timeit true ("theory " ^ quote name) excursion; fun present () = let val segments = (spans ~~ maps Toplevel.join_results results) |> map (fn (span, (st, tr, st')) => {span = span, prev_state = st, command = tr, state = st'}); val context: presentation_context = {options = options, file_pos = text_pos, adjust_pos = I, segments = segments}; in apply_presentation context thy end; in (thy, present, size text) end; (* require_thy -- checking database entries wrt. 
the file-system *) local fun required_by _ [] = "" | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")"; fun load_thy options initiators deps text (name, header_pos) keywords parents = let val {master = (thy_path, _), imports} = deps; val dir = Path.dir thy_path; val exec_id = Document_ID.make (); val id = Document_ID.print exec_id; val put_id = Position.put_id id; val _ = Execution.running Document_ID.none exec_id [] orelse raise Fail ("Failed to register execution: " ^ id); val text_pos = put_id (Path.position thy_path); val text_props = Position.properties_of text_pos; val _ = remove_thy name; val _ = writeln ("Loading theory " ^ quote name ^ required_by " " initiators); val _ = Output.try_protocol_message (Markup.loading_theory name @ text_props) [XML.blob [text]]; val _ = Position.setmp_thread_data (Position.id_only id) (fn () => (parents, map #2 imports) |> ListPair.app (fn (thy, pos) => Context_Position.reports_global thy [(put_id pos, Theory.get_markup thy)])) (); val timing_start = Timing.start (); val header = Thy_Header.make (name, put_id header_pos) imports keywords; val (theory, present, weight) = eval_thy options dir header text_pos text (if name = Context.PureN then [Context.the_global_context ()] else parents); val timing_result = Timing.result timing_start; val timing_props = [Markup.theory_timing, (Markup.nameN, name)]; val _ = Output.try_protocol_message (timing_props @ Markup.timing_properties timing_result) [] fun commit () = update_thy deps theory; in Result {theory = theory, exec_id = exec_id, present = present, commit = commit, weight = weight} end; fun check_thy_deps dir name = (case lookup_deps name of SOME NONE => (true, NONE, Position.none, get_imports name, []) | NONE => let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end | SOME (SOME {master, ...}) => let val {master = master', text = text', theory_pos = theory_pos', imports = imports', keywords = keywords'} = Resources.check_thy dir name; val deps' = SOME (make_deps master' imports', text'); val current = #2 master = #2 master' andalso (case lookup_theory name of NONE => false | SOME theory => Resources.loaded_files_current theory); in (current, deps', theory_pos', imports', keywords') end); in fun require_thys options initiators qualifier dir strs tasks = fold_map (require_thy options initiators qualifier dir) strs tasks |>> forall I and require_thy options initiators qualifier dir (s, require_pos) tasks = let val {master_dir, theory_name, ...} = Resources.import_name qualifier dir s; in (case try (String_Graph.get_node tasks) theory_name of SOME task => (task_finished task, tasks) | NONE => let val _ = member (op =) initiators theory_name andalso error (cycle_msg initiators); val (current, deps, theory_pos, imports, keywords) = check_thy_deps master_dir theory_name handle ERROR msg => cat_error msg ("The error(s) above occurred for theory " ^ quote theory_name ^ Position.here require_pos ^ required_by "\n" initiators); val qualifier' = Resources.theory_qualifier theory_name; val dir' = dir + master_dir_deps (Option.map #1 deps); val parents = map (#theory_name o Resources.import_name qualifier' dir' o #1) imports; val (parents_current, tasks') = require_thys options (theory_name :: initiators) qualifier' dir' imports tasks; val all_current = current andalso parents_current; val task = if all_current then Finished (get_theory theory_name) else (case deps of NONE 
=> raise Fail "Malformed deps" | SOME (dep, text) => Task (parents, load_thy options initiators dep text (theory_name, theory_pos) keywords)); val tasks'' = new_entry theory_name parents task tasks'; in (all_current, tasks'') end) end; end; (* use theories *) fun use_theories options qualifier imports = let val (_, tasks) = require_thys options [] qualifier Path.current imports String_Graph.empty in if Multithreading.max_threads () > 1 then schedule_futures tasks else schedule_seq tasks end; fun use_thy name = use_theories (Options.default ()) Resources.default_qualifier [(name, Position.none)]; (* toplevel scripting -- without maintaining database *) fun script_thy pos txt thy = let val trs = Outer_Syntax.parse_text thy (K thy) pos txt; val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs); val end_state = fold (Toplevel.command_exception true) trs (Toplevel.init_toplevel ()); in Toplevel.end_theory end_pos end_state end; (* register theory *) fun register_thy theory = let val name = Context.theory_long_name theory; val {master, ...} = Resources.check_thy (Resources.master_directory theory) name; val imports = Resources.imports_of theory; in change_thys (fn thys => let val thys' = remove name thys; val _ = writeln ("Registering theory " ^ quote name); in update (make_deps master imports) theory thys' end) end; (* finish all theories *) fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry))); end; fun use_thy name = Runtime.toplevel_program (fn () => Thy_Info.use_thy name); diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala --- a/src/Pure/Tools/build_job.scala +++ b/src/Pure/Tools/build_job.scala @@ -1,540 +1,540 @@ /* Title: Pure/Tools/build_job.scala Author: Makarius Build job running prover process, with rudimentary PIDE session. 
*/ package isabelle import scala.collection.mutable object Build_Job { /* theory markup/messages from database */ def read_theory( db_context: Sessions.Database_Context, resources: Resources, session: String, theory: String, unicode_symbols: Boolean = false): Option[Command] = { def read(name: String): Export.Entry = db_context.get_export(List(session), theory, name) def read_xml(name: String): XML.Body = YXML.parse_body( Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).uncompressed)), cache = db_context.cache) (read(Export.DOCUMENT_ID).text, split_lines(read(Export.FILES).text)) match { case (Value.Long(id), thy_file :: blobs_files) => val node_name = resources.file_node(Path.explode(thy_file), theory = theory) val results = Command.Results.make( for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES)) yield i -> elem) val blobs = blobs_files.map(file => { val path = Path.explode(file) val name = resources.file_node(path) val src_path = File.relative_path(node_name.master_dir_path, path).getOrElse(path) Command.Blob(name, src_path, None) }) val blobs_xml = for (i <- (1 to blobs.length).toList) yield read_xml(Export.MARKUP + i) val blobs_info = Command.Blobs_Info( for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml } yield { val text = XML.content(xml) val chunk = Symbol.Text_Chunk(text) val digest = SHA1.digest(Symbol.encode(text)) Exn.Res(Command.Blob(name, src_path, Some((digest, chunk)))) }) val thy_xml = read_xml(Export.MARKUP) val thy_source = XML.content(thy_xml) val markups_index = Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob) val markups = Command.Markups.make( for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml)) yield index -> Markup_Tree.from_XML(xml)) val command = Command.unparsed(thy_source, theory = true, id = id, node_name = node_name, blobs_info = blobs_info, results = results, markups = markups) Some(command) case _ => None } } /* print messages */ def print_log( options: Options, session_name: String, theories: List[String] = Nil, verbose: Boolean = false, progress: Progress = new Progress, margin: Double = Pretty.default_margin, breakgain: Double = Pretty.default_breakgain, metric: Pretty.Metric = Symbol.Metric, unicode_symbols: Boolean = false): Unit = { val store = Sessions.store(options) val resources = Resources.empty val session = new Session(options, resources) using(store.open_database_context())(db_context => { val result = db_context.input_database(session_name)((db, _) => { val theories = store.read_theories(db, session_name) val errors = store.read_errors(db, session_name) store.read_build(db, session_name).map(info => (theories, errors, info.return_code)) }) result match { case None => error("Missing build database for session " + quote(session_name)) case Some((used_theories, errors, rc)) => val bad_theories = theories.filterNot(used_theories.toSet) if (bad_theories.nonEmpty) error("Unknown theories " + commas_quote(bad_theories)) val print_theories = if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet) for (thy <- print_theories) { val thy_heading = "\nTheory " + quote(thy) + ":" read_theory(db_context, resources, session_name, thy, unicode_symbols = unicode_symbols) match { case None => progress.echo(thy_heading + " MISSING") case Some(command) => val snapshot = Document.State.init.snippet(command) val rendering = new Rendering(snapshot, options, session) val messages = rendering.text_messages(Text.Range.full) .filter(message => verbose || 
Protocol.is_exported(message.info)) if (messages.nonEmpty) { val line_document = Line.Document(command.source) progress.echo(thy_heading) for (Text.Info(range, elem) <- messages) { val line = line_document.position(range.start).line1 val pos = Position.Line_File(line, command.node_name.node) progress.echo( Protocol.message_text(elem, heading = true, pos = pos, margin = margin, breakgain = breakgain, metric = metric)) } } } } if (errors.nonEmpty) { val msg = Symbol.output(unicode_symbols, cat_lines(errors)) progress.echo("\nBuild errors:\n" + Output.error_message_text(msg)) } if (rc != 0) progress.echo("\n" + Process_Result.print_return_code(rc)) } }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("log", "print messages from build database", Scala_Project.here, args => { /* arguments */ var unicode_symbols = false var theories: List[String] = Nil var margin = Pretty.default_margin var options = Options.init() var verbose = false val getopts = Getopts(""" Usage: isabelle log [OPTIONS] SESSION Options are: -T NAME restrict to given theories (multiple options possible) -U output Unicode symbols -m MARGIN margin for pretty printing (default: """ + margin + """) -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v print all messages, including information, tracing etc. Print messages from the build database of the given session, without any checks against current sources: results from a failed build can be printed as well. """, "T:" -> (arg => theories = theories ::: List(arg)), "U" -> (_ => unicode_symbols = true), "m:" -> (arg => margin = Value.Double.parse(arg)), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) => session_name case _ => getopts.usage() } val progress = new Console_Progress() print_log(options, session_name, theories = theories, verbose = verbose, margin = margin, progress = progress, unicode_symbols = unicode_symbols) }) } class Build_Job(progress: Progress, session_name: String, val info: Sessions.Info, deps: Sessions.Deps, store: Sessions.Store, do_store: Boolean, verbose: Boolean, log: Logger, val numa_node: Option[Int], command_timings0: List[Properties.T]) { val options: Options = NUMA.policy_options(info.options, numa_node) private val sessions_structure = deps.sessions_structure private val future_result: Future[Process_Result] = Future.thread("build", uninterruptible = true) { val parent = info.parent.getOrElse("") val base = deps(parent) val result_base = deps(session_name) val env = Isabelle_System.settings() + ("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString) val is_pure = Sessions.is_pure(session_name) val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil val eval_store = if (do_store) { (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: List("ML_Heap.save_child " + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) } else Nil val resources = new Resources(sessions_structure, base, log = log, command_timings = command_timings0) val session = new Session(options, resources) { override val cache: XML.Cache = store.cache override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = { result_base.load_commands.get(name.expand) match { case Some(spans) => val syntax = result_base.theory_syntax(name) Command.build_blobs_info(syntax, name, spans) case None => Command.Blobs_Info.none } } } object Build_Session_Errors { 
private val promise: Promise[List[String]] = Future.promise def result: Exn.Result[List[String]] = promise.join_result def cancel(): Unit = promise.cancel() def apply(errs: List[String]): Unit = { try { promise.fulfill(errs) } catch { case _: IllegalStateException => } } } val export_consumer = Export.consumer(store.open_database(session_name, output = true), store.cache) val stdout = new StringBuilder(1000) val stderr = new StringBuilder(1000) val command_timings = new mutable.ListBuffer[Properties.T] val theory_timings = new mutable.ListBuffer[Properties.T] val session_timings = new mutable.ListBuffer[Properties.T] val runtime_statistics = new mutable.ListBuffer[Properties.T] val task_statistics = new mutable.ListBuffer[Properties.T] def fun( name: String, acc: mutable.ListBuffer[Properties.T], unapply: Properties.T => Option[Properties.T]): (String, Session.Protocol_Function) = { name -> ((msg: Prover.Protocol_Output) => unapply(msg.properties) match { case Some(props) => acc += props; true case _ => false }) } session.init_protocol_handler(new Session.Protocol_Handler { override def exit(): Unit = Build_Session_Errors.cancel() private def build_session_finished(msg: Prover.Protocol_Output): Boolean = { val (rc, errors) = try { val (rc, errs) = { import XML.Decode._ pair(int, list(x => x))(Symbol.decode_yxml(msg.text)) } val errors = for (err <- errs) yield { val prt = Protocol_Message.expose_no_reports(err) Pretty.string_of(prt, metric = Symbol.Metric) } (rc, errors) } catch { case ERROR(err) => (2, List(err)) } session.protocol_command("Prover.stop", rc.toString) Build_Session_Errors(errors) true } private def loading_theory(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Markup.Loading_Theory(Markup.Name(name)) => progress.theory(Progress.Theory(name, session = session_name)) false case _ => false } private def export(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Protocol.Export(args) => export_consumer(session_name, args, msg.chunk) true case _ => false } override val functions = List( Markup.Build_Session_Finished.name -> build_session_finished, Markup.Loading_Theory.name -> loading_theory, Markup.EXPORT -> export, fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply), fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply), fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply)) }) session.command_timings += Session.Consumer("command_timings") { case Session.Command_Timing(props) => for { elapsed <- Markup.Elapsed.unapply(props) elapsed_time = Time.seconds(elapsed) if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold") } command_timings += props.filter(Markup.command_timing_property) } session.runtime_statistics += Session.Consumer("ML_statistics") { case Session.Runtime_Statistics(props) => runtime_statistics += props } session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories") { case snapshot => val rendering = new Rendering(snapshot, options, session) def export(name: String, xml: XML.Body, compress: Boolean = true): Unit = { val theory_name = snapshot.node_name.theory val args = Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress) val bytes = Bytes(Symbol.encode(YXML.string_of_body(xml))) if (!bytes.is_empty) export_consumer(session_name, args, bytes) } def export_text(name: String, text: String, compress: Boolean = true): Unit = export(name, List(XML.Text(text)), compress 
= compress) for (command <- snapshot.snippet_command) { export_text(Export.DOCUMENT_ID, command.id.toString, compress = false) } export_text(Export.FILES, cat_lines(snapshot.node_files.map(_.symbolic.node)), compress = false) for (((_, xml), i) <- snapshot.xml_markup_blobs().zipWithIndex) { export(Export.MARKUP + (i + 1), xml) } export(Export.MARKUP, snapshot.xml_markup()) export(Export.MESSAGES, snapshot.messages.map(_._1)) val citations = Library.distinct(rendering.citations(Text.Range.full).map(_.info)) - export_text(Export.CITATIONS, cat_lines(citations)) + export_text(Export.DOCUMENT_CITATIONS, cat_lines(citations)) } session.all_messages += Session.Consumer[Any]("build_session_output") { case msg: Prover.Output => val message = msg.message if (msg.is_system) resources.log(Protocol.message_text(message)) if (msg.is_stdout) { stdout ++= Symbol.encode(XML.content(message)) } else if (msg.is_stderr) { stderr ++= Symbol.encode(XML.content(message)) } else if (msg.is_exit) { val err = "Prover terminated" + (msg.properties match { case Markup.Process_Result(result) => ": " + result.print_rc case _ => "" }) Build_Session_Errors(List(err)) } case _ => } val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store) val process = Isabelle_Process(session, options, sessions_structure, store, logic = parent, raw_ml_system = is_pure, use_prelude = use_prelude, eval_main = eval_main, cwd = info.dir.file, env = env) val build_errors = Isabelle_Thread.interrupt_handler(_ => process.terminate()) { Exn.capture { process.await_startup() } match { case Exn.Res(_) => val resources_yxml = resources.init_session_yxml val args_yxml = YXML.string_of_body( { import XML.Encode._ pair(string, list(pair(Options.encode, list(pair(string, properties)))))( (session_name, info.theories)) }) session.protocol_command("build_session", resources_yxml, args_yxml) Build_Session_Errors.result case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn))) } } val process_result = Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() } session.stop() val export_errors = export_consumer.shutdown(close = true).map(Output.error_message_text) val (document_output, document_errors) = try { if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty) { using(store.open_database_context())(db_context => { val documents = Document_Build.build_documents( Document_Build.context(session_name, deps, db_context, progress = progress), output_sources = info.document_output, output_pdf = info.document_output) db_context.output_database(session_name)(db => documents.foreach(_.write(db, session_name))) (documents.flatMap(_.log_lines), Nil) }) } else (Nil, Nil) } catch { case exn: Document_Build.Build_Error => (exn.log_lines, List(exn.message)) case Exn.Interrupt.ERROR(msg) => (Nil, List(msg)) } val result = { val theory_timing = theory_timings.iterator.map( { case props @ Markup.Name(name) => name -> props }).toMap val used_theory_timings = for { (name, _) <- deps(session_name).used_theories } yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory)) val more_output = Library.trim_line(stdout.toString) :: command_timings.toList.map(Protocol.Command_Timing_Marker.apply) ::: used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) ::: session_timings.toList.map(Protocol.Session_Timing_Marker.apply) ::: runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) ::: task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) ::: document_output 
process_result.output(more_output) .error(Library.trim_line(stderr.toString)) .errors_rc(export_errors ::: document_errors) } build_errors match { case Exn.Res(build_errs) => val errs = build_errs ::: document_errors if (errs.isEmpty) result else { result.error_rc.output( errs.flatMap(s => split_lines(Output.error_message_text(s))) ::: errs.map(Protocol.Error_Message_Marker.apply)) } case Exn.Exn(Exn.Interrupt()) => if (result.ok) result.copy(rc = Exn.Interrupt.return_code) else result case Exn.Exn(exn) => throw exn } } def terminate(): Unit = future_result.cancel() def is_finished: Boolean = future_result.is_finished private val timeout_request: Option[Event_Timer.Request] = { if (info.timeout_ignored) None else Some(Event_Timer.request(Time.now() + info.timeout) { terminate() }) } def join: (Process_Result, Option[String]) = { val result1 = future_result.join val was_timeout = timeout_request match { case None => false case Some(request) => !request.cancel() } val result2 = if (result1.ok) result1 else if (was_timeout) result1.error(Output.error_message_text("Timeout")).timeout_rc else if (result1.interrupted) result1.error(Output.error_message_text("Interrupt")) else result1 val heap_digest = if (result2.ok && do_store && store.output_heap(session_name).is_file) Some(Sessions.write_heap_digest(store.output_heap(session_name))) else None (result2, heap_digest) } }
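
A few notes on the changes above, each followed by a small illustrative sketch.

The substantive change is the switch to fixed artefact names for document output: each theory's LaTeX source is now exported as "document/latex" (new constant Export.DOCUMENT_LATEX), replacing the theory-specific "document/<theory>.tex" name produced by the removed document_tex_name helpers on both the Scala side (document_build.scala) and the ML side (thy_info.ML, which now exports via the path binding document/latex). Likewise "document/citations" gets the matching constant Export.DOCUMENT_CITATIONS. The sketch below reads the new export back through the Export.Provider interface shown above; it is only a sketch, assuming the Isabelle/Scala classpath is available (for instance inside the isabelle scala REPL), and the session/theory arguments are whatever the caller supplies.

  import isabelle._

  // Sketch only: read the per-theory "document/latex" export from a finished
  // session database; returns Nil if the theory has no such export.
  object Read_Document_Latex {
    def latex_of_theory(options: Options, session: String, theory: String): XML.Body = {
      val store = Sessions.store(options)
      using(store.open_database(session))(db => {
        val provider = Export.Provider.database(db, store.cache, session, theory)
        provider.uncompressed_yxml(Export.DOCUMENT_LATEX)
      })
    }
  }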
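
The document variant syntax handled by Document_Variant.parse is unchanged: a variant is NAME or NAME=TAGS, and each tag is folded ('/'), dropped ('-') or kept ('+' or no prefix) in the generated isabelletags.sty. The standalone sketch below approximates that mapping with no Isabelle dependencies; the variant name and tags in main are invented.

  // Approximation of Document_Variant.parse / isabelletags (illustration only).
  object Tag_Sketch {
    def parse_variant(opt: String): (String, List[String]) =
      opt.split("=", 2) match {
        case Array(name) => (name, Nil)
        case Array(name, tags) => (name, tags.split(",").toList.filter(_.nonEmpty))
      }

    // '/' folds a tag, '-' drops it, '+' (or no prefix) keeps it
    def tag_command(tag: String): String =
      tag.toList match {
        case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}"
        case '-' :: cs => "\\isadroptag{" + cs.mkString + "}"
        case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}"
        case cs => "\\isakeeptag{" + cs.mkString + "}"
      }

    def main(args: Array[String]): Unit = {
      val (name, tags) = parse_variant("outline=/proof,/ML,-unimportant")
      println(name)                               // outline
      tags.map(tag_command).foreach(println)      // \isafoldtag{proof} ...
    }
  }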
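
build_script for the default engines chains lualatex (or pdflatex), bibtex and makeindex, re-running LaTeX where needed. The sketch below assembles approximately the script the lualatex engine emits when no ./build script is present and document_bibliography is false; whitespace and the Bash.string quoting of the root name are simplified, so treat it as a reading aid rather than the literal output.

  // Approximate shape of the generated document build script (illustration only).
  object Build_Script_Sketch {
    val root = "root"

    def conditional(ext: String, exe: String, after: String = ""): String =
      "if [ -f " + root + "." + ext + " ]\n" +
      "then\n" +
      "  " + exe + " " + root + "\n" +
      (if (after.isEmpty) "" else "  " + after) +
      "fi\n"

    val latex: String = "$ISABELLE_LUALATEX " + root + "\n"
    def bibtex(then_latex: Boolean): String =
      conditional("bib", "$ISABELLE_BIBTEX", after = if (then_latex) latex else "")
    def makeindex(then_latex: Boolean): String =
      conditional("idx", "$ISABELLE_MAKEINDEX", after = if (then_latex) latex else "")

    def main(args: Array[String]): Unit =
      print("set -e\n" + latex + bibtex(then_latex = true) +
        makeindex(then_latex = false) + latex + makeindex(then_latex = true))
  }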
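
The isabelle document tool wrapper at the end of document_build.scala shows the full pipeline: build the session, load the session structure with document=pdf, open a database context and call build_documents. A trimmed programmatic sketch of the same steps, assuming the session has already been built and the Isabelle/Scala classpath is available; the output directory is a placeholder.

  import isabelle._

  // Sketch only: build the PDF documents of an already-built session.
  object Build_Session_Documents {
    def build(session: String, options: Options,
        progress: Progress = new Progress): List[Document_Build.Document_Output] = {
      val deps =
        Sessions.load_structure(options + "document=pdf", dirs = Nil)
          .selection_deps(Sessions.Selection.session(session))
      val store = Sessions.store(options)
      using(store.open_database_context())(db_context =>
        Document_Build.build_documents(
          Document_Build.context(session, deps, db_context, progress = progress),
          output_pdf = Some(Path.explode("output"))))   // placeholder directory
    }
  }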
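
Built documents are cached in the isabelle_documents table (write_document/read_document above), keyed by a SHA1 digest of the prepared sources (see prepare_directory/old_document), so unchanged documents are reused instead of rebuilt. A sketch of pulling a stored PDF back out of the session database; classpath assumption as above, and the session/document names are up to the caller.

  import isabelle._

  // Sketch only: fetch a previously built PDF from the session database.
  object Fetch_Stored_Pdf {
    def fetch(options: Options, session: String, doc_name: String, out_dir: Path): Option[Path] = {
      val store = Sessions.store(options)
      using(store.open_database(session))(db =>
        Document_Build.read_document(db, session, doc_name).map(_.write(out_dir)))
    }
  }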
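
The -x PATTERN language of isabelle export (see the usage text above) is implemented by make_regex/make_matcher over compound names of the form theory:name. The standalone restatement below copies those translation rules so the matching behaviour can be tried without Isabelle; the patterns and names in main are arbitrary examples.

  import scala.annotation.tailrec
  import scala.util.matching.Regex

  // Restatement of Export.make_regex for experimentation (illustration only).
  object Export_Pattern_Sketch {
    def make_regex(pattern: String): Regex = {
      @tailrec
      def make(result: List[String], depth: Int, chs: List[Char]): Regex =
        chs match {
          case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest)
          case '*' :: rest => make("[^:/]*" :: result, depth, rest)
          case '?' :: rest => make("[^:/]" :: result, depth, rest)
          case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest)
          case '{' :: rest => make("(" :: result, depth + 1, rest)
          case ',' :: rest if depth > 0 => make("|" :: result, depth, rest)
          case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest)
          case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest)
          case c :: rest => make(c.toString :: result, depth, rest)
          case Nil => result.reverse.mkString.r
        }
      make(Nil, 0, pattern.toList)
    }

    def matches(pattern: String, theory: String, name: String): Boolean =
      make_regex(pattern).pattern.matcher(theory + ":" + name).matches

    def main(args: Array[String]): Unit = {
      println(matches("*:**", "HOL.Nat", "document/latex"))    // true: any export of any theory
      println(matches("**", "HOL.Nat", "document/latex"))      // false: '**' never crosses ':'
      println(matches("*:document/{latex,citations}", "HOL.Nat", "document/citations"))  // true
    }
  }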
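
Finally, read_theory_exports (unchanged above) lists the (theory, name) pairs stored for a session, which is a convenient way to confirm the renamed artefacts after a rebuild. A last sketch, with the classpath assumption as before and a placeholder session name.

  import isabelle._

  // Sketch only: list all theory exports recorded for a session.
  object List_Session_Exports {
    def main(args: Array[String]): Unit = {
      val options = Options.init()
      val store = Sessions.store(options)
      val session = "HOL-Examples"   // placeholder session name
      using(store.open_database(session))(db =>
        for ((theory, name) <- Export.read_theory_exports(db, session).sorted)
          println(theory + ":" + name))   // expect names such as "document/latex"
    }
  }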