diff --git a/src/Pure/Thy/export.scala b/src/Pure/Thy/export.scala
--- a/src/Pure/Thy/export.scala
+++ b/src/Pure/Thy/export.scala
@@ -1,621 +1,620 @@
/*  Title:      Pure/Thy/export.scala
    Author:     Makarius

Manage theory exports: compressed blobs.
*/

package isabelle


import scala.annotation.tailrec
import scala.util.matching.Regex


object Export {
  /* artefact names */

  val DOCUMENT_ID = "PIDE/document_id"
  val FILES = "PIDE/files"
  val MARKUP = "PIDE/markup"
  val MESSAGES = "PIDE/messages"
  val DOCUMENT_PREFIX = "document/"
  val DOCUMENT_LATEX = DOCUMENT_PREFIX + "latex"
-  val DOCUMENT_CITATIONS = DOCUMENT_PREFIX + "citations"
  val THEORY_PREFIX: String = "theory/"
  val PROOFS_PREFIX: String = "proofs/"

  def explode_name(s: String): List[String] = space_explode('/', s)
  def implode_name(elems: Iterable[String]): String = elems.mkString("/")


  /* SQL data model */

  object Data {
    val session_name = SQL.Column.string("session_name").make_primary_key
    val theory_name = SQL.Column.string("theory_name").make_primary_key
    val name = SQL.Column.string("name").make_primary_key
    val executable = SQL.Column.bool("executable")
    val compressed = SQL.Column.bool("compressed")
    val body = SQL.Column.bytes("body")

    val table =
      SQL.Table("isabelle_exports",
        List(session_name, theory_name, name, executable, compressed, body))

    def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source =
      "WHERE " + Data.session_name.equal(session_name) +
        (if (theory_name == "") "" else " AND " + Data.theory_name.equal(theory_name)) +
        (if (name == "") "" else " AND " + Data.name.equal(name))
  }

  def compound_name(a: String, b: String): String =
    if (a.isEmpty) b else a + ":" + b

  sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") {
    val compound_name: String = Export.compound_name(theory, name)

    def make_path(prune: Int = 0): Path = {
      val elems = theory :: space_explode('/', name)
      if (elems.length < prune + 1) {
        error("Cannot prune path by " + prune + " element(s): " + Path.make(elems))
      }
      else Path.make(elems.drop(prune))
    }

    def readable(db: SQL.Database): Boolean = {
      val select = Data.table.select(List(Data.name), Data.where_equal(session, theory, name))
      db.using_statement(select)(stmt => stmt.execute_query().next())
    }

    def read(db: SQL.Database, cache: XML.Cache): Option[Entry] = {
      val select =
        Data.table.select(List(Data.executable, Data.compressed, Data.body),
          Data.where_equal(session, theory, name))
      db.using_statement(select) { stmt =>
        val res = stmt.execute_query()
        if (res.next()) {
          val executable = res.bool(Data.executable)
          val compressed = res.bool(Data.compressed)
          val bytes = res.bytes(Data.body)
          val body = Future.value(compressed, bytes)
          Some(Entry(this, executable, body, cache))
        }
        else None
      }
    }
  }

  def read_theory_names(db: SQL.Database, session_name: String): List[String] = {
    val select =
      Data.table.select(List(Data.theory_name), Data.where_equal(session_name), distinct = true) +
        SQL.order_by(List(Data.theory_name))
    db.using_statement(select)(stmt =>
      stmt.execute_query().iterator(_.string(Data.theory_name)).toList)
  }

  def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] = {
    val select =
      Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) +
        SQL.order_by(List(Data.theory_name, Data.name))
    db.using_statement(select)(stmt =>
      stmt.execute_query().iterator(res =>
        Entry_Name(session = session_name,
          theory = res.string(Data.theory_name),
          name = res.string(Data.name))).toList)
  }

  def message(msg: String, theory_name: String,
name: String): String = msg + " " + quote(name) + " for theory " + quote(theory_name) object Entry { def apply( entry_name: Entry_Name, executable: Boolean, body: Future[(Boolean, Bytes)], cache: XML.Cache ): Entry = new Entry(entry_name, executable, body, cache) def empty(theory_name: String, name: String): Entry = Entry(Entry_Name(theory = theory_name, name = name), false, Future.value(false, Bytes.empty), XML.Cache.none) def make( session_name: String, args: Protocol.Export.Args, bytes: Bytes, cache: XML.Cache ): Entry = { val body = if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.compress)) else Future.value((false, bytes)) val entry_name = Entry_Name(session = session_name, theory = args.theory_name, name = args.name) Entry(entry_name, args.executable, body, cache) } } final class Entry private( val entry_name: Entry_Name, val executable: Boolean, body: Future[(Boolean, Bytes)], cache: XML.Cache ) { def session_name: String = entry_name.session def theory_name: String = entry_name.theory def name: String = entry_name.name override def toString: String = name def is_finished: Boolean = body.is_finished def cancel(): Unit = body.cancel() def compound_name: String = entry_name.compound_name def name_has_prefix(s: String): Boolean = name.startsWith(s) val name_elems: List[String] = explode_name(name) def name_extends(elems: List[String]): Boolean = name_elems.startsWith(elems) && name_elems != elems def bytes: Bytes = { val (compressed, bs) = body.join if (compressed) bs.uncompress(cache = cache.compress) else bs } def text: String = bytes.text def yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(bytes), cache = cache) def write(db: SQL.Database): Unit = { val (compressed, bs) = body.join db.using_statement(Data.table.insert()) { stmt => stmt.string(1) = session_name stmt.string(2) = theory_name stmt.string(3) = name stmt.bool(4) = executable stmt.bool(5) = compressed stmt.bytes(6) = bs stmt.execute() } } } def make_regex(pattern: String): Regex = { @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex = chs match { case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest) case '*' :: rest => make("[^:/]*" :: result, depth, rest) case '?' 
:: rest => make("[^:/]" :: result, depth, rest) case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest) case '{' :: rest => make("(" :: result, depth + 1, rest) case ',' :: rest if depth > 0 => make("|" :: result, depth, rest) case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest) case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest) case c :: rest => make(c.toString :: result, depth, rest) case Nil => result.reverse.mkString.r } make(Nil, 0, pattern.toList) } def make_matcher(pats: List[String]): Entry_Name => Boolean = { val regs = pats.map(make_regex) (entry_name: Entry_Name) => regs.exists(_.matches(entry_name.compound_name)) } /* database consumer thread */ def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer = new Consumer(db, cache, progress) class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) { private val errors = Synchronized[List[String]](Nil) private val consumer = Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")( bulk = { case (entry, _) => entry.is_finished }, consume = { (args: List[(Entry, Boolean)]) => val results = db.transaction { for ((entry, strict) <- args) yield { if (progress.stopped) { entry.cancel() Exn.Res(()) } else if (entry.entry_name.readable(db)) { if (strict) { val msg = message("Duplicate export", entry.theory_name, entry.name) errors.change(msg :: _) } Exn.Res(()) } else Exn.capture { entry.write(db) } } } (results, true) }) def make_entry(session_name: String, args0: Protocol.Export.Args, body: Bytes): Unit = { if (!progress.stopped && !body.is_empty) { val args = if (db.is_server) args0.copy(compress = false) else args0 consumer.send(Entry.make(session_name, args, body, cache) -> args.strict) } } def shutdown(close: Boolean = false): List[String] = { consumer.shutdown() if (close) db.close() errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil) } } /* context for database access */ def open_database_context(store: Sessions.Store): Database_Context = { val database_server = if (store.database_server) Some(store.open_database_server()) else None new Database_Context(store, database_server) } def open_session_context0(store: Sessions.Store, session: String): Session_Context = open_database_context(store).open_session0(session, close_database_context = true) def open_session_context( store: Sessions.Store, session_background: Sessions.Background, document_snapshot: Option[Document.Snapshot] = None ): Session_Context = { open_database_context(store).open_session( session_background, document_snapshot = document_snapshot, close_database_context = true) } class Database_Context private[Export]( val store: Sessions.Store, val database_server: Option[SQL.Database] ) extends AutoCloseable { database_context => override def toString: String = { val s = database_server match { case Some(db) => db.toString case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ") } "Database_Context(" + s + ")" } def cache: Term.Cache = store.cache def close(): Unit = database_server.foreach(_.close()) def open_database(session: String, output: Boolean = false): Session_Database = database_server match { case Some(db) => new Session_Database(session, db) case None => new Session_Database(session, store.open_database(session, output = output)) { override def close(): Unit = db.close() } } def open_session0(session: String, close_database_context: Boolean = false): Session_Context = 
open_session(Sessions.background0(session), close_database_context = close_database_context) def open_session( session_background: Sessions.Background, document_snapshot: Option[Document.Snapshot] = None, close_database_context: Boolean = false ): Session_Context = { val session_name = session_background.check_errors.session_name val session_hierarchy = session_background.sessions_structure.build_hierarchy(session_name) val session_databases = database_server match { case Some(db) => session_hierarchy.map(name => new Session_Database(name, db)) case None => val attempts = session_hierarchy.map(name => name -> store.try_open_database(name, server = false)) attempts.collectFirst({ case (name, None) => name }) match { case Some(bad) => for ((_, Some(db)) <- attempts) db.close() store.error_database(bad) case None => for ((name, Some(db)) <- attempts) yield { new Session_Database(name, db) { override def close(): Unit = this.db.close() } } } } new Session_Context( database_context, session_background, session_databases, document_snapshot) { override def close(): Unit = { session_databases.foreach(_.close()) if (close_database_context) database_context.close() } } } } class Session_Database private[Export](val session: String, val db: SQL.Database) extends AutoCloseable { def close(): Unit = () lazy private [Export] val theory_names: List[String] = read_theory_names(db, session) lazy private [Export] val entry_names: List[Entry_Name] = read_entry_names(db, session) } class Session_Context private[Export]( val database_context: Database_Context, session_background: Sessions.Background, db_hierarchy: List[Session_Database], val document_snapshot: Option[Document.Snapshot] ) extends AutoCloseable { session_context => def close(): Unit = () def cache: Term.Cache = database_context.cache def sessions_structure: Sessions.Structure = session_background.sessions_structure def session_base: Sessions.Base = session_background.base def session_name: String = if (document_snapshot.isDefined) Sessions.DRAFT else session_base.session_name def session_database(session: String = session_name): Option[Session_Database] = db_hierarchy.find(_.session == session) def session_db(session: String = session_name): Option[SQL.Database] = session_database(session = session).map(_.db) def session_stack: List[String] = ((if (document_snapshot.isDefined) List(session_name) else Nil) ::: db_hierarchy.map(_.session)).reverse private def select[A]( session: String, select: Session_Database => List[A], project: Entry_Name => A, sort_key: A => String ): List[A] = { def result(name: String): List[A] = if (name == Sessions.DRAFT) { (for { snapshot <- document_snapshot.iterator entry_name <- snapshot.all_exports.keysIterator } yield project(entry_name)).toSet.toList.sortBy(sort_key) } else session_database(name).map(select).getOrElse(Nil) if (session.nonEmpty) result(session) else session_stack.flatMap(result) } def entry_names(session: String = session_name): List[Entry_Name] = select(session, _.entry_names, identity, _.compound_name) def theory_names(session: String = session_name): List[String] = select(session, _.theory_names, _.theory, identity) def get(theory: String, name: String): Option[Entry] = { def snapshot_entry: Option[Entry] = for { snapshot <- document_snapshot entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name) entry <- snapshot.all_exports.get(entry_name) } yield entry def db_entry: Option[Entry] = db_hierarchy.view.map(database => Export.Entry_Name(session = database.session, theory = 
theory, name = name) .read(database.db, cache)) .collectFirst({ case Some(entry) => entry }) snapshot_entry orElse db_entry } def apply(theory: String, name: String, permissive: Boolean = false): Entry = get(theory, name) match { case None if permissive => Entry.empty(theory, name) case None => error("Missing export entry " + quote(compound_name(theory, name))) case Some(entry) => entry } def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context = new Theory_Context(session_context, theory, other_cache) def get_source_file(name: String): Option[Sessions.Source_File] = { val store = database_context.store (for { database <- db_hierarchy.iterator file <- store.read_sources(database.db, database.session, name = name).iterator } yield file).nextOption() } def source_file(name: String): Sessions.Source_File = get_source_file(name).getOrElse(error("Missing session source file " + quote(name))) def theory_source(theory: String, unicode_symbols: Boolean = false): String = { def snapshot_source: Option[String] = for { snapshot <- document_snapshot text <- snapshot.version.nodes.iterator.collectFirst( { case (name, node) if name.theory == theory => node.source }) if text.nonEmpty } yield Symbol.output(unicode_symbols, text) def db_source: Option[String] = { val theory_context = session_context.theory(theory) for { name <- theory_context.files0(permissive = true).headOption file <- get_source_file(name) } yield { val text0 = file.bytes.text if (unicode_symbols) Symbol.decode(text0) else text0 } } snapshot_source orElse db_source getOrElse "" } def classpath(): List[File.Content] = { (for { session <- session_stack.iterator info <- sessions_structure.get(session).iterator if info.export_classpath.nonEmpty matcher = make_matcher(info.export_classpath) entry_name <- entry_names(session = session).iterator if matcher(entry_name) entry <- get(entry_name.theory, entry_name.name).iterator } yield File.content(entry.entry_name.make_path(), entry.bytes)).toList } override def toString: String = "Export.Session_Context(" + commas_quote(session_stack) + ")" } class Theory_Context private[Export]( val session_context: Session_Context, val theory: String, other_cache: Option[Term.Cache] ) { def cache: Term.Cache = other_cache getOrElse session_context.cache def get(name: String): Option[Entry] = session_context.get(theory, name) def apply(name: String, permissive: Boolean = false): Entry = session_context.apply(theory, name, permissive = permissive) def yxml(name: String): XML.Body = get(name) match { case Some(entry) => entry.yxml case None => Nil } def document_id(): Option[Long] = apply(DOCUMENT_ID, permissive = true).text match { case Value.Long(id) => Some(id) case _ => None } def files0(permissive: Boolean = false): List[String] = split_lines(apply(FILES, permissive = permissive).text) def files(permissive: Boolean = false): Option[(String, List[String])] = files0(permissive = permissive) match { case Nil => None case a :: bs => Some((a, bs)) } override def toString: String = "Export.Theory_Context(" + quote(theory) + ")" } /* export to file-system */ def export_files( store: Sessions.Store, session_name: String, export_dir: Path, progress: Progress = new Progress, export_prune: Int = 0, export_list: Boolean = false, export_patterns: List[String] = Nil ): Unit = { using(store.open_database(session_name)) { db => val entry_names = read_entry_names(db, session_name) // list if (export_list) { for (entry_name <- entry_names) progress.echo(entry_name.compound_name) } // export if 
(export_patterns.nonEmpty) { val matcher = make_matcher(export_patterns) for { entry_name <- entry_names if matcher(entry_name) entry <- entry_name.read(db, store.cache) } { val path = export_dir + entry_name.make_path(prune = export_prune) progress.echo("export " + path + (if (entry.executable) " (executable)" else "")) Isabelle_System.make_directory(path.dir) val bytes = entry.bytes if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes) File.set_executable(path, entry.executable) } } } } /* Isabelle tool wrapper */ val default_export_dir: Path = Path.explode("export") val isabelle_tool = Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here, { args => /* arguments */ var export_dir = default_export_dir var dirs: List[Path] = Nil var export_list = false var no_build = false var options = Options.init() var export_prune = 0 var export_patterns: List[String] = Nil val getopts = Getopts(""" Usage: isabelle export [OPTIONS] SESSION Options are: -O DIR output directory for exported files (default: """ + default_export_dir + """) -d DIR include session directory -l list exports -n no build of session -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -p NUM prune path of exported files by NUM elements -x PATTERN extract files matching pattern (e.g. "*:**" for all) List or export theory exports for SESSION: named blobs produced by isabelle build. Option -l or -x is required; option -x may be repeated. The PATTERN language resembles glob patterns in the shell, with ? and * (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc], and variants {pattern1,pattern2,pattern3}. """, "O:" -> (arg => export_dir = Path.explode(arg)), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "l" -> (_ => export_list = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "p:" -> (arg => export_prune = Value.Int.parse(arg)), "x:" -> (arg => export_patterns ::= arg)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) if export_list || export_patterns.nonEmpty => session_name case _ => getopts.usage() } val progress = new Console_Progress() /* build */ if (!no_build) { val rc = progress.interrupt_handler { Build.build_logic(options, session_name, progress = progress, dirs = dirs) } if (rc != Process_Result.RC.ok) sys.exit(rc) } /* export files */ val store = Sessions.store(options) export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune, export_list = export_list, export_patterns = export_patterns) }) } diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala --- a/src/Pure/Tools/build_job.scala +++ b/src/Pure/Tools/build_job.scala @@ -1,601 +1,596 @@ /* Title: Pure/Tools/build_job.scala Author: Makarius Build job running prover process, with rudimentary PIDE session. 
*/ package isabelle import scala.collection.mutable import scala.util.matching.Regex object Build_Job { /* theory markup/messages from session database */ def read_theory( theory_context: Export.Theory_Context, unicode_symbols: Boolean = false ): Option[Document.Snapshot] = { def decode_bytes(bytes: Bytes): String = Symbol.output(unicode_symbols, UTF8.decode_permissive(bytes)) def read(name: String): Export.Entry = theory_context(name, permissive = true) def read_xml(name: String): XML.Body = YXML.parse_body(decode_bytes(read(name).bytes), cache = theory_context.cache) def read_source_file(name: String): Sessions.Source_File = theory_context.session_context.source_file(name) for { id <- theory_context.document_id() (thy_file, blobs_files) <- theory_context.files(permissive = true) } yield { val master_dir = Path.explode(Url.strip_base_name(thy_file).getOrElse( error("Cannot determine theory master directory: " + quote(thy_file)))) val blobs = blobs_files.map { name => val path = Path.explode(name) val src_path = File.relative_path(master_dir, path).getOrElse(path) val file = read_source_file(name) val bytes = file.bytes val text = decode_bytes(bytes) val chunk = Symbol.Text_Chunk(text) Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) -> Document.Blobs.Item(bytes, text, chunk, changed = false) } val thy_source = decode_bytes(read_source_file(thy_file).bytes) val thy_xml = read_xml(Export.MARKUP) val blobs_xml = for (i <- (1 to blobs.length).toList) yield read_xml(Export.MARKUP + i) val markups_index = Command.Markup_Index.make(blobs.map(_._1)) val markups = Command.Markups.make( for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml)) yield index -> Markup_Tree.from_XML(xml)) val results = Command.Results.make( for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES)) yield i -> elem) val command = Command.unparsed(thy_source, theory = true, id = id, node_name = Document.Node.Name(thy_file, theory = theory_context.theory), blobs_info = Command.Blobs_Info.make(blobs), markups = markups, results = results) val doc_blobs = Document.Blobs.make(blobs) Document.State.init.snippet(command, doc_blobs) } } /* print messages */ def print_log( options: Options, sessions: List[String], theories: List[String] = Nil, message_head: List[Regex] = Nil, message_body: List[Regex] = Nil, verbose: Boolean = false, progress: Progress = new Progress, margin: Double = Pretty.default_margin, breakgain: Double = Pretty.default_breakgain, metric: Pretty.Metric = Symbol.Metric, unicode_symbols: Boolean = false ): Unit = { val store = Sessions.store(options) val session = new Session(options, Resources.bootstrap) def check(filter: List[Regex], make_string: => String): Boolean = filter.isEmpty || { val s = Output.clean_yxml(make_string) filter.forall(r => r.findFirstIn(Output.clean_yxml(s)).nonEmpty) } def print(session_name: String): Unit = { using(Export.open_session_context0(store, session_name)) { session_context => val result = for { db <- session_context.session_db() theories = store.read_theories(db, session_name) errors = store.read_errors(db, session_name) info <- store.read_build(db, session_name) } yield (theories, errors, info.return_code) result match { case None => store.error_database(session_name) case Some((used_theories, errors, rc)) => theories.filterNot(used_theories.toSet) match { case Nil => case bad => error("Unknown theories " + commas_quote(bad)) } val print_theories = if (theories.isEmpty) used_theories else 
used_theories.filter(theories.toSet) for (thy <- print_theories) { val thy_heading = "\nTheory " + quote(thy) + " (in " + session_name + ")" + ":" read_theory(session_context.theory(thy), unicode_symbols = unicode_symbols) match { case None => progress.echo(thy_heading + " MISSING") case Some(snapshot) => val rendering = new Rendering(snapshot, options, session) val messages = rendering.text_messages(Text.Range.full) .filter(message => verbose || Protocol.is_exported(message.info)) if (messages.nonEmpty) { val line_document = Line.Document(snapshot.node.source) val buffer = new mutable.ListBuffer[String] for (Text.Info(range, elem) <- messages) { val line = line_document.position(range.start).line1 val pos = Position.Line_File(line, snapshot.node_name.node) def message_text: String = Protocol.message_text(elem, heading = true, pos = pos, margin = margin, breakgain = breakgain, metric = metric) val ok = check(message_head, Protocol.message_heading(elem, pos)) && check(message_body, XML.content(Pretty.unformatted(List(elem)))) if (ok) buffer += message_text } if (buffer.nonEmpty) { progress.echo(thy_heading) buffer.foreach(progress.echo) } } } } if (errors.nonEmpty) { val msg = Symbol.output(unicode_symbols, cat_lines(errors)) progress.echo("\nBuild errors:\n" + Output.error_message_text(msg)) } if (rc != Process_Result.RC.ok) { progress.echo("\n" + Process_Result.RC.print_long(rc)) } } } } val errors = new mutable.ListBuffer[String] for (session_name <- sessions) { Exn.interruptible_capture(print(session_name)) match { case Exn.Res(_) => case Exn.Exn(exn) => errors += Exn.message(exn) } } if (errors.nonEmpty) error(cat_lines(errors.toList)) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("log", "print messages from build database", Scala_Project.here, { args => /* arguments */ var message_head = List.empty[Regex] var message_body = List.empty[Regex] var unicode_symbols = false var theories: List[String] = Nil var margin = Pretty.default_margin var options = Options.init() var verbose = false val getopts = Getopts(""" Usage: isabelle log [OPTIONS] [SESSIONS ...] Options are: -H REGEX filter messages by matching against head -M REGEX filter messages by matching against body -T NAME restrict to given theories (multiple options possible) -U output Unicode symbols -m MARGIN margin for pretty printing (default: """ + margin + """) -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v print all messages, including information etc. Print messages from the build database of the given sessions, without any checks against current sources nor session structure: results from old sessions or failed builds can be printed as well. Multiple options -H and -M are conjunctive: all given patterns need to match. Patterns match any substring, but ^ or $ may be used to match the start or end explicitly. 
""", "H:" -> (arg => message_head = message_head ::: List(arg.r)), "M:" -> (arg => message_body = message_body ::: List(arg.r)), "T:" -> (arg => theories = theories ::: List(arg)), "U" -> (_ => unicode_symbols = true), "m:" -> (arg => margin = Value.Double.parse(arg)), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true)) val sessions = getopts(args) val progress = new Console_Progress() if (sessions.isEmpty) progress.echo_warning("No sessions to print") else { print_log(options, sessions, theories = theories, message_head = message_head, message_body = message_body, verbose = verbose, margin = margin, progress = progress, unicode_symbols = unicode_symbols) } }) } class Build_Job(progress: Progress, session_background: Sessions.Background, store: Sessions.Store, do_store: Boolean, log: Logger, session_setup: (String, Session) => Unit, val numa_node: Option[Int], command_timings0: List[Properties.T] ) { def session_name: String = session_background.session_name val info: Sessions.Info = session_background.sessions_structure(session_name) val options: Options = NUMA.policy_options(info.options, numa_node) val session_sources: Sessions.Sources = Sessions.Sources.load(session_background.base, cache = store.cache.compress) private val future_result: Future[Process_Result] = Future.thread("build", uninterruptible = true) { val parent = info.parent.getOrElse("") val env = Isabelle_System.settings( List("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString)) val is_pure = Sessions.is_pure(session_name) val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil val eval_store = if (do_store) { (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: List("ML_Heap.save_child " + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) } else Nil def session_blobs(node_name: Document.Node.Name): List[(Command.Blob, Document.Blobs.Item)] = session_background.base.theory_load_commands.get(node_name.theory) match { case None => Nil case Some(spans) => val syntax = session_background.base.theory_syntax(node_name) val master_dir = Path.explode(node_name.master_dir) for (span <- spans; file <- span.loaded_files(syntax).files) yield { val src_path = Path.explode(file) val blob_name = Document.Node.Name(File.symbolic_path(master_dir + src_path)) val bytes = session_sources(blob_name.node).bytes val text = bytes.text val chunk = Symbol.Text_Chunk(text) Command.Blob(blob_name, src_path, Some((SHA1.digest(bytes), chunk))) -> Document.Blobs.Item(bytes, text, chunk, changed = false) } } val resources = new Resources(session_background, log = log, command_timings = command_timings0) val session = new Session(options, resources) { override val cache: Term.Cache = store.cache override def build_blobs_info(node_name: Document.Node.Name): Command.Blobs_Info = Command.Blobs_Info.make(session_blobs(node_name)) override def build_blobs(node_name: Document.Node.Name): Document.Blobs = Document.Blobs.make(session_blobs(node_name)) } object Build_Session_Errors { private val promise: Promise[List[String]] = Future.promise def result: Exn.Result[List[String]] = promise.join_result def cancel(): Unit = promise.cancel() def apply(errs: List[String]): Unit = { try { promise.fulfill(errs) } catch { case _: IllegalStateException => } } } val export_consumer = Export.consumer(store.open_database(session_name, output = true), store.cache, progress = progress) val stdout = new StringBuilder(1000) val stderr = new StringBuilder(1000) val 
command_timings = new mutable.ListBuffer[Properties.T]
      val theory_timings = new mutable.ListBuffer[Properties.T]
      val session_timings = new mutable.ListBuffer[Properties.T]
      val runtime_statistics = new mutable.ListBuffer[Properties.T]
      val task_statistics = new mutable.ListBuffer[Properties.T]

      def fun(
        name: String,
        acc: mutable.ListBuffer[Properties.T],
        unapply: Properties.T => Option[Properties.T]
      ): (String, Session.Protocol_Function) = {
        name -> ((msg: Prover.Protocol_Output) =>
          unapply(msg.properties) match {
            case Some(props) => acc += props; true
            case _ => false
          })
      }

      session.init_protocol_handler(new Session.Protocol_Handler {
        override def exit(): Unit = Build_Session_Errors.cancel()

        private def build_session_finished(msg: Prover.Protocol_Output): Boolean = {
          val (rc, errors) =
            try {
              val (rc, errs) = {
                import XML.Decode._
                pair(int, list(x => x))(Symbol.decode_yxml(msg.text))
              }
              val errors =
                for (err <- errs) yield {
                  val prt = Protocol_Message.expose_no_reports(err)
                  Pretty.string_of(prt, metric = Symbol.Metric)
                }
              (rc, errors)
            }
            catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) }

          session.protocol_command("Prover.stop", rc.toString)
          Build_Session_Errors(errors)
          true
        }

        private def loading_theory(msg: Prover.Protocol_Output): Boolean =
          msg.properties match {
            case Markup.Loading_Theory(Markup.Name(name)) =>
              progress.theory(Progress.Theory(name, session = session_name))
              false
            case _ => false
          }

        private def export_(msg: Prover.Protocol_Output): Boolean =
          msg.properties match {
            case Protocol.Export(args) =>
              export_consumer.make_entry(session_name, args, msg.chunk)
              true
            case _ => false
          }

        override val functions: Session.Protocol_Functions =
          List(
            Markup.Build_Session_Finished.name -> build_session_finished,
            Markup.Loading_Theory.name -> loading_theory,
            Markup.EXPORT -> export_,
            fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply),
            fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply),
            fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply))
      })

      session.command_timings += Session.Consumer("command_timings") {
        case Session.Command_Timing(props) =>
          for {
            elapsed <- Markup.Elapsed.unapply(props)
            elapsed_time = Time.seconds(elapsed)
            if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold")
          } command_timings += props.filter(Markup.command_timing_property)
      }

      session.runtime_statistics += Session.Consumer("ML_statistics") {
        case Session.Runtime_Statistics(props) => runtime_statistics += props
      }

      session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories") {
        case snapshot =>
          if (!progress.stopped) {
-            val rendering = new Rendering(snapshot, options, session)
-
            def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit = {
              if (!progress.stopped) {
                val theory_name = snapshot.node_name.theory
                val args =
                  Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress)
                val body = Bytes(Symbol.encode(YXML.string_of_body(xml)))
                export_consumer.make_entry(session_name, args, body)
              }
            }

            def export_text(name: String, text: String, compress: Boolean = true): Unit =
              export_(name, List(XML.Text(text)), compress = compress)

            for (command <- snapshot.snippet_command) {
              export_text(Export.DOCUMENT_ID, command.id.toString, compress = false)
            }

            export_text(Export.FILES,
              cat_lines(snapshot.node_files.map(name => File.symbolic_path(name.path))),
              compress = false)

            for ((blob_name, i) <- snapshot.node_files.tail.zipWithIndex) {
              val xml = snapshot.switch(blob_name).xml_markup()
              export_(Export.MARKUP + (i + 1), xml)
            }
            export_(Export.MARKUP, snapshot.xml_markup())
            export_(Export.MESSAGES, snapshot.messages.map(_._1))
-
-            val citations = Library.distinct(rendering.citations(Text.Range.full).map(_.info))
-            export_text(Export.DOCUMENT_CITATIONS, cat_lines(citations))
          }
      }

      session.all_messages += Session.Consumer[Any]("build_session_output") {
        case msg: Prover.Output =>
          val message = msg.message
          if (msg.is_system) resources.log(Protocol.message_text(message))

          if (msg.is_stdout) {
            stdout ++= Symbol.encode(XML.content(message))
          }
          else if (msg.is_stderr) {
            stderr ++= Symbol.encode(XML.content(message))
          }
          else if (msg.is_exit) {
            val err =
              "Prover terminated" +
                (msg.properties match {
                  case Markup.Process_Result(result) => ": " + result.print_rc
                  case _ => ""
                })
            Build_Session_Errors(List(err))
          }
        case _ =>
      }

      session_setup(session_name, session)

      val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store)

      val process =
        Isabelle_Process.start(store, options, session, session_background,
          logic = parent, raw_ml_system = is_pure,
          use_prelude = use_prelude, eval_main = eval_main,
          cwd = info.dir.file, env = env)

      val build_errors =
        Isabelle_Thread.interrupt_handler(_ => process.terminate()) {
          Exn.capture { process.await_startup() } match {
            case Exn.Res(_) =>
              val resources_yxml = resources.init_session_yxml
              val encode_options: XML.Encode.T[Options] =
                options => session.prover_options(options).encode
              val args_yxml =
                YXML.string_of_body(
                  {
                    import XML.Encode._
                    pair(string, list(pair(encode_options, list(pair(string, properties)))))(
                      (session_name, info.theories))
                  })
              session.protocol_command("build_session", resources_yxml, args_yxml)
              Build_Session_Errors.result
            case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn)))
          }
        }

      val process_result =
        Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() }

      session.stop()

      val export_errors =
        export_consumer.shutdown(close = true).map(Output.error_message_text)

      val (document_output, document_errors) =
        try {
          if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty) {
            using(Export.open_database_context(store)) { database_context =>
              val documents =
                using(database_context.open_session(session_background)) { session_context =>
                  Document_Build.build_documents(
                    Document_Build.context(session_context, progress = progress),
                    output_sources = info.document_output, output_pdf = info.document_output)
                }
              using(database_context.open_database(session_name, output = true))(session_database =>
                documents.foreach(_.write(session_database.db, session_name)))
              (documents.flatMap(_.log_lines), Nil)
            }
          }
          else (Nil, Nil)
        }
        catch {
          case exn: Document_Build.Build_Error => (exn.log_lines, exn.log_errors)
          case Exn.Interrupt.ERROR(msg) => (Nil, List(msg))
        }

      val result = {
        val theory_timing =
          theory_timings.iterator.flatMap(
            {
              case props @ Markup.Name(name) => Some(name -> props)
              case _ => None
            }).toMap
        val used_theory_timings =
          for { (name, _) <- session_background.base.used_theories }
          yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory))

        val more_output =
          Library.trim_line(stdout.toString) ::
            command_timings.toList.map(Protocol.Command_Timing_Marker.apply) :::
            used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) :::
            session_timings.toList.map(Protocol.Session_Timing_Marker.apply) :::
            runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) :::
            task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) :::
            document_output
process_result.output(more_output) .error(Library.trim_line(stderr.toString)) .errors_rc(export_errors ::: document_errors) } build_errors match { case Exn.Res(build_errs) => val errs = build_errs ::: document_errors if (errs.nonEmpty) { result.error_rc.output( errs.flatMap(s => split_lines(Output.error_message_text(s))) ::: errs.map(Protocol.Error_Message_Marker.apply)) } else if (progress.stopped && result.ok) result.copy(rc = Process_Result.RC.interrupt) else result case Exn.Exn(Exn.Interrupt()) => if (result.ok) result.copy(rc = Process_Result.RC.interrupt) else result case Exn.Exn(exn) => throw exn } } def terminate(): Unit = future_result.cancel() def is_finished: Boolean = future_result.is_finished private val timeout_request: Option[Event_Timer.Request] = { if (info.timeout_ignored) None else Some(Event_Timer.request(Time.now() + info.timeout) { terminate() }) } def join: (Process_Result, Option[String]) = { val result1 = future_result.join val was_timeout = timeout_request match { case None => false case Some(request) => !request.cancel() } val result2 = if (result1.ok) result1 else if (was_timeout) result1.error(Output.error_message_text("Timeout")).timeout_rc else if (result1.interrupted) result1.error(Output.error_message_text("Interrupt")) else result1 val heap_digest = if (result2.ok && do_store && store.output_heap(session_name).is_file) { Some(ML_Heap.write_digest(store.output_heap(session_name))) } else None (result2, heap_digest) } }
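
For orientation, a minimal usage sketch of the Export API touched by this patch (not part of the patch itself). It assumes an Isabelle/Scala environment with the Pure modules shown above (Options, Sessions, Export, Console_Progress) on the classpath and an already built session; "Read_Exports_Sketch" and the chosen pattern are hypothetical names for illustration only.

import isabelle._

/* hypothetical sketch, not part of Isabelle: read exports back from a session
   database via Export.open_session_context0, using the same glob-like pattern
   language as "isabelle export -x PATTERN" (e.g. "*:**" for all entries) */
object Read_Exports_Sketch {
  def list_markup_exports(session: String): Unit = {
    val options = Options.init()
    val store = Sessions.store(options)
    val progress = new Console_Progress()

    // open the session database(s); closing the Session_Context also closes
    // the underlying Database_Context (close_database_context = true above)
    using(Export.open_session_context0(store, session)) { session_context =>
      // match only the per-theory PIDE markup entries
      val matcher = Export.make_matcher(List("*:" + Export.MARKUP))

      for {
        entry_name <- session_context.entry_names()
        if matcher(entry_name)
        entry <- session_context.get(entry_name.theory, entry_name.name).iterator
      } progress.echo(entry_name.compound_name + " (" + entry.bytes.text.length + " chars)")

      // per-theory view: list the source files recorded in the FILES export
      for (thy <- session_context.theory_names()) {
        val theory_context = session_context.theory(thy)
        val files = theory_context.files0(permissive = true)
        progress.echo(thy + ": " + files.length + " file(s)")
      }
    }
  }
}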