diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,982 +1,668 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Build and manage Isabelle sessions. */ package isabelle -import scala.collection.{SortedSet, mutable} +import scala.collection.SortedSet import scala.annotation.tailrec object Build { /** auxiliary **/ /* persistent build info */ sealed case class Session_Info( sources: String, input_heaps: List[String], output_heap: Option[String], return_code: Int) { def ok: Boolean = return_code == 0 } /* queue with scheduling information */ private object Queue { type Timings = (List[Properties.T], Double) def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings = { val no_timings: Timings = (Nil, 0.0) store.try_open_database(session_name) match { case None => no_timings case Some(db) => def ignore_error(msg: String) = { progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg)) no_timings } try { val command_timings = store.read_command_timings(db, session_name) val session_timing = store.read_session_timing(db, session_name) match { case Markup.Elapsed(t) => t case _ => 0.0 } (command_timings, session_timing) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("") } finally { db.close } } } def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double]) : Map[String, Double] = { val maximals = sessions_structure.build_graph.maximals.toSet def desc_timing(session_name: String): Double = { if (maximals.contains(session_name)) timing(session_name) else { val descendants = sessions_structure.build_descendants(List(session_name)).toSet val g = sessions_structure.build_graph.restrict(descendants) (0.0 :: g.maximals.flatMap(desc => { val ps = g.all_preds(List(desc)) if (ps.exists(p => !timing.isDefinedAt(p))) None else Some(ps.map(timing(_)).sum) })).max } } timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0) } def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store) : Queue = { val graph = sessions_structure.build_graph val names = graph.keys val timings = names.map(name => (name, load_timings(progress, store, name))) val command_timings = timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil) val session_timing = make_session_timing(sessions_structure, timings.map({ case (name, (_, t)) => (name, t) }).toMap) object Ordering extends scala.math.Ordering[String] { def compare_timing(name1: String, name2: String): Int = { val t1 = session_timing(name1) val t2 = session_timing(name2) if (t1 == 0.0 || t2 == 0.0) 0 else t1 compare t2 } def compare(name1: String, name2: String): Int = compare_timing(name2, name1) match { case 0 => sessions_structure(name2).timeout compare sessions_structure(name1).timeout match { case 0 => name1 compare name2 case ord => ord } case ord => ord } } new Queue(graph, SortedSet(names: _*)(Ordering), command_timings) } } private class Queue( graph: Graph[String, Sessions.Info], order: SortedSet[String], val command_timings: String => List[Properties.T]) { def is_inner(name: String): Boolean = !graph.is_maximal(name) def is_empty: Boolean = graph.is_empty def - (name: String): Queue = new Queue(graph.del_node(name), order - name, // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? 
command_timings) def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] = { val it = order.iterator.dropWhile(name => skip(name) || !graph.defined(name) // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? || !graph.is_minimal(name)) if (it.hasNext) { val name = it.next; Some((name, graph.get_node(name))) } else None } } - /* PIDE protocol handler */ - - /* job: running prover process */ - - private class Job(progress: Progress, - session_name: String, - val info: Sessions.Info, - deps: Sessions.Deps, - store: Sessions.Store, - do_store: Boolean, - presentation: Presentation.Context, - verbose: Boolean, - val numa_node: Option[Int], - command_timings0: List[Properties.T]) - { - val options: Options = NUMA.policy_options(info.options, numa_node) - - private val sessions_structure = deps.sessions_structure - - private val future_result: Future[Process_Result] = - Future.thread("build", uninterruptible = true) { - val parent = info.parent.getOrElse("") - val base = deps(parent) - - val env = - Isabelle_System.settings() + - ("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString) - - val is_pure = Sessions.is_pure(session_name) - - val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil - - val eval_store = - if (do_store) { - (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: - List("ML_Heap.save_child " + - ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) - } - else Nil - - val resources = new Resources(sessions_structure, base, command_timings = command_timings0) - val session = - new Session(options, resources) { - override val xml_cache: XML.Cache = store.xml_cache - override val xz_cache: XZ.Cache = store.xz_cache - } - - object Build_Session_Errors - { - private val promise: Promise[List[String]] = Future.promise - - def result: Exn.Result[List[String]] = promise.join_result - def cancel: Unit = promise.cancel - def apply(errs: List[String]) - { - try { promise.fulfill(errs) } - catch { case _: IllegalStateException => } - } - } - - val export_consumer = - Export.consumer(store.open_database(session_name, output = true), cache = store.xz_cache) - - val stdout = new StringBuilder(1000) - val stderr = new StringBuilder(1000) - val messages = new mutable.ListBuffer[XML.Elem] - val command_timings = new mutable.ListBuffer[Properties.T] - val theory_timings = new mutable.ListBuffer[Properties.T] - val session_timings = new mutable.ListBuffer[Properties.T] - val runtime_statistics = new mutable.ListBuffer[Properties.T] - val task_statistics = new mutable.ListBuffer[Properties.T] - val document_output = new mutable.ListBuffer[String] - - def fun( - name: String, - acc: mutable.ListBuffer[Properties.T], - unapply: Properties.T => Option[Properties.T]): (String, Session.Protocol_Function) = - { - name -> ((msg: Prover.Protocol_Output) => - unapply(msg.properties) match { - case Some(props) => acc += props; true - case _ => false - }) - } - - session.init_protocol_handler(new Session.Protocol_Handler - { - override def exit() { Build_Session_Errors.cancel } - - private def build_session_finished(msg: Prover.Protocol_Output): Boolean = - { - val (rc, errors) = - try { - val (rc, errs) = - { - import XML.Decode._ - pair(int, list(x => x))(Symbol.decode_yxml(msg.text)) - } - val errors = - for (err <- errs) yield { - val prt = Protocol_Message.expose_no_reports(err) - Pretty.string_of(prt, metric = Symbol.Metric) - } - (rc, errors) - } - catch { case ERROR(err) => (2, List(err)) } - - 
session.protocol_command("Prover.stop", rc.toString) - Build_Session_Errors(errors) - true - } - - private def loading_theory(msg: Prover.Protocol_Output): Boolean = - msg.properties match { - case Markup.Loading_Theory(name) => - progress.theory(Progress.Theory(name, session = session_name)) - true - case _ => false - } - - private def export(msg: Prover.Protocol_Output): Boolean = - msg.properties match { - case Protocol.Export(args) => - export_consumer(session_name, args, msg.bytes) - true - case _ => false - } - - private def command_timing(props: Properties.T): Option[Properties.T] = - for { - props1 <- Markup.Command_Timing.unapply(props) - elapsed <- Markup.Elapsed.unapply(props1) - elapsed_time = Time.seconds(elapsed) - if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold") - } yield props1.filter(p => Markup.command_timing_properties(p._1)) - - override val functions = - List( - Markup.Build_Session_Finished.name -> build_session_finished, - Markup.Loading_Theory.name -> loading_theory, - Markup.EXPORT -> export, - fun(Markup.Command_Timing.name, command_timings, command_timing), - fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply), - fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply), - fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply)) - }) - - session.runtime_statistics += Session.Consumer("ML_statistics") - { - case Session.Runtime_Statistics(props) => runtime_statistics += props - } - - session.all_messages += Session.Consumer[Any]("build_session_output") - { - case msg: Prover.Output => - val message = msg.message - if (msg.is_stdout) { - stdout ++= Symbol.encode(XML.content(message)) - } - else if (msg.is_stderr) { - stderr ++= Symbol.encode(XML.content(message)) - } - else if (Protocol.is_exported(message)) { - messages += message - } - else if (msg.is_exit) { - val err = - "Prover terminated" + - (msg.properties match { - case Markup.Process_Result(result) => ": " + result.print_rc - case _ => "" - }) - Build_Session_Errors(List(err)) - } - case _ => - } - - val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store) - - val process = - Isabelle_Process(session, options, sessions_structure, store, - logic = parent, raw_ml_system = is_pure, - use_prelude = use_prelude, eval_main = eval_main, - cwd = info.dir.file, env = env) - - val build_errors = - Isabelle_Thread.interrupt_handler(_ => process.terminate) { - Exn.capture { process.await_startup } match { - case Exn.Res(_) => - val resources_yxml = resources.init_session_yxml - val args_yxml = - YXML.string_of_body( - { - import XML.Encode._ - pair(string, list(pair(Options.encode, list(pair(string, properties)))))( - (session_name, info.theories)) - }) - session.protocol_command("build_session", resources_yxml, args_yxml) - Build_Session_Errors.result - case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn))) - } - } - - val process_result = - Isabelle_Thread.interrupt_handler(_ => process.terminate) { process.await_shutdown } - - val export_errors = - export_consumer.shutdown(close = true).map(Output.error_message_text) - - val document_errors = - try { - if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok) { - val documents = - if (info.documents.isEmpty) Nil - else { - val document_progress = - new Progress { - override def echo(msg: String): Unit = - document_output.synchronized { document_output += msg } - override def echo_document(path: Path): Unit = - 
progress.echo_document(path) - } - val documents = - Presentation.build_documents(session_name, deps, store, verbose = verbose, - verbose_latex = true, progress = document_progress) - using(store.open_database(session_name, output = true))(db => - for ((doc, pdf) <- documents) { - db.transaction { - Presentation.write_document(db, session_name, doc, pdf) - } - }) - documents - } - if (presentation.enabled(info)) { - val dir = Presentation.session_html(session_name, deps, store, presentation) - for ((doc, pdf) <- documents) Bytes.write(dir + doc.path.pdf, pdf) - if (verbose) progress.echo("Browser info at " + dir.absolute) - } - } - Nil - } - catch { case Exn.Interrupt.ERROR(msg) => List(msg) } - - val result = - { - val more_output = - Library.trim_line(stdout.toString) :: - messages.toList.map(message => - Symbol.encode(Protocol.message_text(List(message), metric = Symbol.Metric))) ::: - command_timings.toList.map(Protocol.Command_Timing_Marker.apply) ::: - theory_timings.toList.map(Protocol.Theory_Timing_Marker.apply) ::: - session_timings.toList.map(Protocol.Session_Timing_Marker.apply) ::: - runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) ::: - task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) ::: - document_output.toList - - val more_errors = - Library.trim_line(stderr.toString) :: export_errors ::: document_errors - - process_result.output(more_output).errors(more_errors) - } - - build_errors match { - case Exn.Res(build_errs) => - val errs = build_errs ::: document_errors - if (errs.isEmpty) result - else { - result.error_rc.output( - errs.flatMap(s => split_lines(Output.error_message_text(s))) ::: - errs.map(Protocol.Error_Message_Marker.apply)) - } - case Exn.Exn(Exn.Interrupt()) => - if (result.ok) result.copy(rc = Exn.Interrupt.return_code) else result - case Exn.Exn(exn) => throw exn - } - } - - def terminate: Unit = future_result.cancel - def is_finished: Boolean = future_result.is_finished - - private val timeout_request: Option[Event_Timer.Request] = - { - if (info.timeout > Time.zero) - Some(Event_Timer.request(Time.now() + info.timeout) { terminate }) - else None - } - - def join: (Process_Result, Option[String]) = - { - val result1 = future_result.join - - val was_timeout = - timeout_request match { - case None => false - case Some(request) => !request.cancel - } - - val result2 = - if (result1.interrupted) { - if (was_timeout) result1.error(Output.error_message_text("Timeout")).was_timeout - else result1.error(Output.error_message_text("Interrupt")) - } - else result1 - - val heap_digest = - if (result2.ok && do_store && store.output_heap(session_name).is_file) - Some(Sessions.write_heap_digest(store.output_heap(session_name))) - else None - - (result2, heap_digest) - } - } - - /** build with results **/ class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)]) { def sessions: Set[String] = results.keySet def cancelled(name: String): Boolean = results(name)._1.isEmpty def apply(name: String): Process_Result = results(name)._1.getOrElse(Process_Result(1)) def info(name: String): Sessions.Info = results(name)._2 val rc: Int = (0 /: results.iterator.map( { case (_, (Some(r), _)) => r.rc case (_, (None, _)) => 1 }))(_ max _) def ok: Boolean = rc == 0 override def toString: String = rc.toString } def session_finished(session_name: String, process_result: Process_Result): String = "Finished " + session_name + " (" + process_result.timing.message_resources + ")" def session_timing(session_name: String, 
build_log: Build_Log.Session_Info): String = { val props = build_log.session_timing val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1 val timing = Markup.Timing_Properties.parse(props) "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")" } def build( options: Options, selection: Sessions.Selection = Sessions.Selection.empty, presentation: Presentation.Context = Presentation.Context.none, progress: Progress = new Progress, check_unknown_files: Boolean = false, build_heap: Boolean = false, clean_build: Boolean = false, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Sessions.Info] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, list_files: Boolean = false, check_keywords: Set[String] = Set.empty, fresh_build: Boolean = false, no_build: Boolean = false, soft_build: Boolean = false, verbose: Boolean = false, export_files: Boolean = false): Results = { val build_options = options + "completion_limit=0" + "editor_tracing_messages=0" + "pide_reports=false" val store = Sessions.store(build_options) Isabelle_Fonts.init() /* session selection and dependencies */ val full_sessions = Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos) def sources_stamp(deps: Sessions.Deps, session_name: String): String = { val digests = full_sessions(session_name).meta_digest :: deps.sources(session_name) ::: deps.imported_sources(session_name) SHA1.digest_set(digests).toString } val deps = { val deps0 = Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true, verbose = verbose, list_files = list_files, check_keywords = check_keywords).check_errors if (soft_build && !fresh_build) { val outdated = deps0.sessions_structure.build_topological_order.flatMap(name => store.try_open_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) if build.ok && build.sources == sources_stamp(deps0, name) => None case _ => Some(name) } case None => Some(name) }) Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)), progress = progress, inlined_files = true).check_errors } else deps0 } /* check unknown files */ if (check_unknown_files) { val source_files = (for { (_, base) <- deps.session_bases.iterator (path, _) <- base.sources.iterator } yield path).toList val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file) val unknown_files = Mercurial.check_files(source_files)._2. 
filterNot(path => exclude_files.contains(path.canonical_file)) if (unknown_files.nonEmpty) { progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" + unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", "")) } } /* main build process */ val queue = Queue(progress, deps.sessions_structure, store) store.prepare_output_dir() if (clean_build) { for (name <- full_sessions.imports_descendants(full_sessions.imports_selection(selection))) { val (relevant, ok) = store.clean_output(name) if (relevant) { if (ok) progress.echo("Cleaned " + name) else progress.echo(name + " FAILED to clean") } } } // scheduler loop case class Result( current: Boolean, heap_digest: Option[String], process: Option[Process_Result], info: Sessions.Info) { def ok: Boolean = process match { case None => false case Some(res) => res.rc == 0 } } def sleep() { Isabelle_Thread.interrupt_handler(_ => progress.stop) { Time.seconds(0.5).sleep } } val numa_nodes = new NUMA.Nodes(numa_shuffling) @tailrec def loop( pending: Queue, - running: Map[String, (List[String], Job)], + running: Map[String, (List[String], Build_Job)], results: Map[String, Result]): Map[String, Result] = { def used_node(i: Int): Boolean = running.iterator.exists( { case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i }) if (pending.is_empty) results else { if (progress.stopped) { for ((_, (_, job)) <- running) job.terminate } running.find({ case (_, (_, job)) => job.is_finished }) match { case Some((session_name, (input_heaps, job))) => //{{{ finish job val (process_result, heap_digest) = job.join val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test) val process_result_tail = { val tail = job.info.options.int("process_output_tail") process_result.copy( out_lines = "(see also " + store.output_log(session_name).file.toString + ")" :: (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0))) } val build_log = Build_Log.Log_File(session_name, process_result.out_lines). parse_session_info( command_timings = true, theory_timings = true, ml_statistics = true, task_statistics = true) // write log file if (process_result.ok) { File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines)) } else File.write(store.output_log(session_name), terminate_lines(log_lines)) // write database using(store.open_database(session_name, output = true))(db => store.write_session_info(db, session_name, build_log = if (process_result.timeout) build_log.error("Timeout") else build_log, build = Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest, process_result.rc))) // messages process_result.err_lines.foreach(progress.echo) if (process_result.ok) { if (verbose) progress.echo(session_timing(session_name, build_log)) progress.echo(session_finished(session_name, process_result)) } else { progress.echo(session_name + " FAILED") if (!process_result.interrupted) progress.echo(process_result_tail.out) } loop(pending - session_name, running - session_name, results + (session_name -> Result(false, heap_digest, Some(process_result_tail), job.info))) //}}} case None if running.size < (max_jobs max 1) => //{{{ check/start next job pending.dequeue(running.isDefinedAt) match { case Some((session_name, info)) => val ancestor_results = deps.sessions_structure.build_requirements(List(session_name)). 
filterNot(_ == session_name).map(results(_)) val ancestor_heaps = ancestor_results.flatMap(_.heap_digest) val do_store = build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name) val (current, heap_digest) = { store.try_open_database(session_name) match { case Some(db) => using(db)(store.read_build(_, session_name)) match { case Some(build) => val heap_digest = store.find_heap_digest(session_name) val current = !fresh_build && build.ok && build.sources == sources_stamp(deps, session_name) && build.input_heaps == ancestor_heaps && build.output_heap == heap_digest && !(do_store && heap_digest.isEmpty) (current, heap_digest) case None => (false, None) } case None => (false, None) } } val all_current = current && ancestor_results.forall(_.current) if (all_current) loop(pending - session_name, running, results + (session_name -> Result(true, heap_digest, Some(Process_Result(0)), info))) else if (no_build) { progress.echo_if(verbose, "Skipping " + session_name + " ...") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, Some(Process_Result(1)), info))) } else if (ancestor_results.forall(_.ok) && !progress.stopped) { progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...") store.clean_output(session_name) using(store.open_database(session_name, output = true))( store.init_session_info(_, session_name)) val numa_node = numa_nodes.next(used_node) val job = - new Job(progress, session_name, info, deps, store, do_store, presentation, + new Build_Job(progress, session_name, info, deps, store, do_store, presentation, verbose, numa_node, queue.command_timings(session_name)) loop(pending, running + (session_name -> (ancestor_heaps, job)), results) } else { progress.echo(session_name + " CANCELLED") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, None, info))) } case None => sleep(); loop(pending, running, results) } ///}}} case None => sleep(); loop(pending, running, results) } } } /* build results */ val results0 = if (deps.is_empty) { progress.echo_warning("Nothing to build") Map.empty[String, Result] } else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) } val results = new Results( (for ((name, result) <- results0.iterator) yield (name, (result.process, result.info))).toMap) if (export_files) { for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) { val info = results.info(name) if (info.export_files.nonEmpty) { progress.echo("Exporting " + info.name + " ...") for ((dir, prune, pats) <- info.export_files) { Export.export_files(store, name, info.dir + dir, progress = if (verbose) progress else new Progress, export_prune = prune, export_patterns = pats) } } } } if (results.rc != 0 && (verbose || !no_build)) { val unfinished = (for { name <- results.sessions.iterator if !results(name).ok } yield name).toList.sorted progress.echo("Unfinished session(s): " + commas(unfinished)) } /* global browser info */ if (!no_build) { val browser_chapters = (for { (name, result) <- results0.iterator if result.ok info = full_sessions(name) if presentation.enabled(info) } yield (info.chapter, (name, info.description))).toList.groupBy(_._1). 
map({ case (chapter, es) => (chapter, es.map(_._2)) }).filterNot(_._2.isEmpty) val dir = presentation.dir(store) for ((chapter, entries) <- browser_chapters) Presentation.update_chapter_index(dir, chapter, entries) if (browser_chapters.nonEmpty) Presentation.make_global_index(dir) } results } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions", args => { val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS")) var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var numa_shuffling = false var presentation = Presentation.Context.none var requirements = false var soft_build = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_heap = false var clean_build = false var dirs: List[Path] = Nil var export_files = false var fresh_build = false var session_groups: List[String] = Nil var max_jobs = 1 var check_keywords: Set[String] = Set.empty var list_files = false var no_build = false var options = Options.init(opts = build_options) var verbose = false var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle build [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -N cyclic shuffling of NUMA CPU nodes (performance tuning) -P DIR enable HTML/PDF presentation in directory (":" for default) -R refer to requirements of selected sessions -S soft build: only observe changes of sources, not heap images -X NAME exclude sessions from group NAME and all descendants -a select all sessions -b build heap images -c clean build -d DIR include session directory -e export files from session specification into file-system -f fresh build -g NAME select session group NAME -j INT maximum number of parallel jobs (default 1) -k KEYWORD check theory sources for conflicts with proposed keywords -l list session source files -n no build -- test dependencies only -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose -x NAME exclude session NAME and all descendants Build and manage Isabelle sessions, depending on implicit settings: """ + Library.prefix_lines(" ", Build_Log.Settings.show()) + "\n", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "N" -> (_ => numa_shuffling = true), "P:" -> (arg => presentation = Presentation.Context.make(arg)), "R" -> (_ => requirements = true), "S" -> (_ => soft_build = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "b" -> (_ => build_heap = true), "c" -> (_ => clean_build = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "e" -> (_ => export_files = true), "f" -> (_ => fresh_build = true), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "k:" -> (arg => check_keywords = check_keywords + arg), "l" -> (_ => list_files = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) val start_date = Date.now() if (verbose) { progress.echo( "Started at " + Build_Log.print_date(start_date) + " (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")") 
progress.echo(Build_Log.Settings.show() + "\n") } val results = progress.interrupt_handler { build(options, selection = Sessions.Selection( requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions), presentation = presentation, progress = progress, check_unknown_files = Mercurial.is_repository(Path.explode("~~")), build_heap = build_heap, clean_build = clean_build, dirs = dirs, select_dirs = select_dirs, numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling), max_jobs = max_jobs, list_files = list_files, check_keywords = check_keywords, fresh_build = fresh_build, no_build = no_build, soft_build = soft_build, verbose = verbose, export_files = export_files) } val end_date = Date.now() val elapsed_time = end_date.time - start_date.time if (verbose) { progress.echo("\nFinished at " + Build_Log.print_date(end_date)) } val total_timing = (Timing.zero /: results.sessions.iterator.map(a => results(a).timing))(_ + _). copy(elapsed = elapsed_time) progress.echo(total_timing.message_resources) sys.exit(results.rc) }) /* build logic image */ def build_logic(options: Options, logic: String, progress: Progress = new Progress, build_heap: Boolean = false, dirs: List[Path] = Nil, fresh: Boolean = false, strict: Boolean = false): Int = { val selection = Sessions.Selection.session(logic) val rc = if (!fresh && build(options, selection = selection, build_heap = build_heap, no_build = true, dirs = dirs).ok) 0 else { progress.echo("Build started for Isabelle/" + logic + " ...") Build.build(options, selection = selection, progress = progress, build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc } if (strict && rc != 0) error("Failed to build Isabelle/" + logic) else rc } } diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala new file mode 100644 --- /dev/null +++ b/src/Pure/Tools/build_job.scala @@ -0,0 +1,320 @@ +/* Title: Pure/Tools/build_job.scala + Author: Makarius + +Build job running prover process, with rudimentary PIDE session. 
+*/ + +package isabelle + + +import scala.collection.mutable + + +class Build_Job(progress: Progress, + session_name: String, + val info: Sessions.Info, + deps: Sessions.Deps, + store: Sessions.Store, + do_store: Boolean, + presentation: Presentation.Context, + verbose: Boolean, + val numa_node: Option[Int], + command_timings0: List[Properties.T]) +{ + val options: Options = NUMA.policy_options(info.options, numa_node) + + private val sessions_structure = deps.sessions_structure + + private val future_result: Future[Process_Result] = + Future.thread("build", uninterruptible = true) { + val parent = info.parent.getOrElse("") + val base = deps(parent) + + val env = + Isabelle_System.settings() + + ("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString) + + val is_pure = Sessions.is_pure(session_name) + + val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil + + val eval_store = + if (do_store) { + (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: + List("ML_Heap.save_child " + + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) + } + else Nil + + val resources = new Resources(sessions_structure, base, command_timings = command_timings0) + val session = + new Session(options, resources) { + override val xml_cache: XML.Cache = store.xml_cache + override val xz_cache: XZ.Cache = store.xz_cache + } + + object Build_Session_Errors + { + private val promise: Promise[List[String]] = Future.promise + + def result: Exn.Result[List[String]] = promise.join_result + def cancel: Unit = promise.cancel + def apply(errs: List[String]) + { + try { promise.fulfill(errs) } + catch { case _: IllegalStateException => } + } + } + + val export_consumer = + Export.consumer(store.open_database(session_name, output = true), cache = store.xz_cache) + + val stdout = new StringBuilder(1000) + val stderr = new StringBuilder(1000) + val messages = new mutable.ListBuffer[XML.Elem] + val command_timings = new mutable.ListBuffer[Properties.T] + val theory_timings = new mutable.ListBuffer[Properties.T] + val session_timings = new mutable.ListBuffer[Properties.T] + val runtime_statistics = new mutable.ListBuffer[Properties.T] + val task_statistics = new mutable.ListBuffer[Properties.T] + val document_output = new mutable.ListBuffer[String] + + def fun( + name: String, + acc: mutable.ListBuffer[Properties.T], + unapply: Properties.T => Option[Properties.T]): (String, Session.Protocol_Function) = + { + name -> ((msg: Prover.Protocol_Output) => + unapply(msg.properties) match { + case Some(props) => acc += props; true + case _ => false + }) + } + + session.init_protocol_handler(new Session.Protocol_Handler + { + override def exit() { Build_Session_Errors.cancel } + + private def build_session_finished(msg: Prover.Protocol_Output): Boolean = + { + val (rc, errors) = + try { + val (rc, errs) = + { + import XML.Decode._ + pair(int, list(x => x))(Symbol.decode_yxml(msg.text)) + } + val errors = + for (err <- errs) yield { + val prt = Protocol_Message.expose_no_reports(err) + Pretty.string_of(prt, metric = Symbol.Metric) + } + (rc, errors) + } + catch { case ERROR(err) => (2, List(err)) } + + session.protocol_command("Prover.stop", rc.toString) + Build_Session_Errors(errors) + true + } + + private def loading_theory(msg: Prover.Protocol_Output): Boolean = + msg.properties match { + case Markup.Loading_Theory(name) => + progress.theory(Progress.Theory(name, session = session_name)) + true + case _ => false + } + + private def export(msg: 
Prover.Protocol_Output): Boolean = + msg.properties match { + case Protocol.Export(args) => + export_consumer(session_name, args, msg.bytes) + true + case _ => false + } + + private def command_timing(props: Properties.T): Option[Properties.T] = + for { + props1 <- Markup.Command_Timing.unapply(props) + elapsed <- Markup.Elapsed.unapply(props1) + elapsed_time = Time.seconds(elapsed) + if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold") + } yield props1.filter(p => Markup.command_timing_properties(p._1)) + + override val functions = + List( + Markup.Build_Session_Finished.name -> build_session_finished, + Markup.Loading_Theory.name -> loading_theory, + Markup.EXPORT -> export, + fun(Markup.Command_Timing.name, command_timings, command_timing), + fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply), + fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply), + fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply)) + }) + + session.runtime_statistics += Session.Consumer("ML_statistics") + { + case Session.Runtime_Statistics(props) => runtime_statistics += props + } + + session.all_messages += Session.Consumer[Any]("build_session_output") + { + case msg: Prover.Output => + val message = msg.message + if (msg.is_stdout) { + stdout ++= Symbol.encode(XML.content(message)) + } + else if (msg.is_stderr) { + stderr ++= Symbol.encode(XML.content(message)) + } + else if (Protocol.is_exported(message)) { + messages += message + } + else if (msg.is_exit) { + val err = + "Prover terminated" + + (msg.properties match { + case Markup.Process_Result(result) => ": " + result.print_rc + case _ => "" + }) + Build_Session_Errors(List(err)) + } + case _ => + } + + val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store) + + val process = + Isabelle_Process(session, options, sessions_structure, store, + logic = parent, raw_ml_system = is_pure, + use_prelude = use_prelude, eval_main = eval_main, + cwd = info.dir.file, env = env) + + val build_errors = + Isabelle_Thread.interrupt_handler(_ => process.terminate) { + Exn.capture { process.await_startup } match { + case Exn.Res(_) => + val resources_yxml = resources.init_session_yxml + val args_yxml = + YXML.string_of_body( + { + import XML.Encode._ + pair(string, list(pair(Options.encode, list(pair(string, properties)))))( + (session_name, info.theories)) + }) + session.protocol_command("build_session", resources_yxml, args_yxml) + Build_Session_Errors.result + case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn))) + } + } + + val process_result = + Isabelle_Thread.interrupt_handler(_ => process.terminate) { process.await_shutdown } + + val export_errors = + export_consumer.shutdown(close = true).map(Output.error_message_text) + + val document_errors = + try { + if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok) { + val documents = + if (info.documents.isEmpty) Nil + else { + val document_progress = + new Progress { + override def echo(msg: String): Unit = + document_output.synchronized { document_output += msg } + override def echo_document(path: Path): Unit = + progress.echo_document(path) + } + val documents = + Presentation.build_documents(session_name, deps, store, verbose = verbose, + verbose_latex = true, progress = document_progress) + using(store.open_database(session_name, output = true))(db => + for ((doc, pdf) <- documents) { + db.transaction { + Presentation.write_document(db, session_name, doc, pdf) + } 
+ }) + documents + } + if (presentation.enabled(info)) { + val dir = Presentation.session_html(session_name, deps, store, presentation) + for ((doc, pdf) <- documents) Bytes.write(dir + doc.path.pdf, pdf) + if (verbose) progress.echo("Browser info at " + dir.absolute) + } + } + Nil + } + catch { case Exn.Interrupt.ERROR(msg) => List(msg) } + + val result = + { + val more_output = + Library.trim_line(stdout.toString) :: + messages.toList.map(message => + Symbol.encode(Protocol.message_text(List(message), metric = Symbol.Metric))) ::: + command_timings.toList.map(Protocol.Command_Timing_Marker.apply) ::: + theory_timings.toList.map(Protocol.Theory_Timing_Marker.apply) ::: + session_timings.toList.map(Protocol.Session_Timing_Marker.apply) ::: + runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) ::: + task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) ::: + document_output.toList + + val more_errors = + Library.trim_line(stderr.toString) :: export_errors ::: document_errors + + process_result.output(more_output).errors(more_errors) + } + + build_errors match { + case Exn.Res(build_errs) => + val errs = build_errs ::: document_errors + if (errs.isEmpty) result + else { + result.error_rc.output( + errs.flatMap(s => split_lines(Output.error_message_text(s))) ::: + errs.map(Protocol.Error_Message_Marker.apply)) + } + case Exn.Exn(Exn.Interrupt()) => + if (result.ok) result.copy(rc = Exn.Interrupt.return_code) else result + case Exn.Exn(exn) => throw exn + } + } + + def terminate: Unit = future_result.cancel + def is_finished: Boolean = future_result.is_finished + + private val timeout_request: Option[Event_Timer.Request] = + { + if (info.timeout > Time.zero) + Some(Event_Timer.request(Time.now() + info.timeout) { terminate }) + else None + } + + def join: (Process_Result, Option[String]) = + { + val result1 = future_result.join + + val was_timeout = + timeout_request match { + case None => false + case Some(request) => !request.cancel + } + + val result2 = + if (result1.interrupted) { + if (was_timeout) result1.error(Output.error_message_text("Timeout")).was_timeout + else result1.error(Output.error_message_text("Interrupt")) + } + else result1 + + val heap_digest = + if (result2.ok && do_store && store.output_heap(session_name).is_file) + Some(Sessions.write_heap_digest(store.output_heap(session_name))) + else None + + (result2, heap_digest) + } +} diff --git a/src/Pure/build-jars b/src/Pure/build-jars --- a/src/Pure/build-jars +++ b/src/Pure/build-jars @@ -1,318 +1,319 @@ #!/usr/bin/env bash # # Author: Makarius # # build-jars - build Isabelle/Scala # # Requires proper Isabelle settings environment. 
## sources declare -a SOURCES=( src/HOL/Tools/Nitpick/kodkod.scala src/Pure/Admin/afp.scala src/Pure/Admin/build_csdp.scala src/Pure/Admin/build_cygwin.scala src/Pure/Admin/build_doc.scala src/Pure/Admin/build_e.scala src/Pure/Admin/build_fonts.scala src/Pure/Admin/build_history.scala src/Pure/Admin/build_jdk.scala src/Pure/Admin/build_log.scala src/Pure/Admin/build_polyml.scala src/Pure/Admin/build_release.scala src/Pure/Admin/build_spass.scala src/Pure/Admin/build_sqlite.scala src/Pure/Admin/build_status.scala src/Pure/Admin/build_verit.scala src/Pure/Admin/build_zipperposition.scala src/Pure/Admin/check_sources.scala src/Pure/Admin/ci_profile.scala src/Pure/Admin/components.scala src/Pure/Admin/isabelle_cronjob.scala src/Pure/Admin/isabelle_devel.scala src/Pure/Admin/jenkins.scala src/Pure/Admin/other_isabelle.scala src/Pure/Concurrent/consumer_thread.scala src/Pure/Concurrent/counter.scala src/Pure/Concurrent/delay.scala src/Pure/Concurrent/event_timer.scala src/Pure/Concurrent/future.scala src/Pure/Concurrent/isabelle_thread.scala src/Pure/Concurrent/mailbox.scala src/Pure/Concurrent/par_list.scala src/Pure/Concurrent/synchronized.scala src/Pure/GUI/color_value.scala src/Pure/GUI/gui.scala src/Pure/GUI/gui_thread.scala src/Pure/GUI/popup.scala src/Pure/GUI/wrap_panel.scala src/Pure/General/antiquote.scala src/Pure/General/bytes.scala src/Pure/General/cache.scala src/Pure/General/codepoint.scala src/Pure/General/comment.scala src/Pure/General/completion.scala src/Pure/General/csv.scala src/Pure/General/date.scala src/Pure/General/exn.scala src/Pure/General/file.scala src/Pure/General/file_watcher.scala src/Pure/General/graph.scala src/Pure/General/graph_display.scala src/Pure/General/graphics_file.scala src/Pure/General/http.scala src/Pure/General/json.scala src/Pure/General/linear_set.scala src/Pure/General/logger.scala src/Pure/General/long_name.scala src/Pure/General/mailman.scala src/Pure/General/mercurial.scala src/Pure/General/multi_map.scala src/Pure/General/output.scala src/Pure/General/path.scala src/Pure/General/position.scala src/Pure/General/pretty.scala src/Pure/General/properties.scala src/Pure/General/rdf.scala src/Pure/General/scan.scala src/Pure/General/sha1.scala src/Pure/General/sql.scala src/Pure/General/ssh.scala src/Pure/General/symbol.scala src/Pure/General/time.scala src/Pure/General/timing.scala src/Pure/General/untyped.scala src/Pure/General/url.scala src/Pure/General/utf8.scala src/Pure/General/uuid.scala src/Pure/General/value.scala src/Pure/General/word.scala src/Pure/General/xz.scala src/Pure/Isar/document_structure.scala src/Pure/Isar/keyword.scala src/Pure/Isar/line_structure.scala src/Pure/Isar/outer_syntax.scala src/Pure/Isar/parse.scala src/Pure/Isar/token.scala src/Pure/ML/ml_console.scala src/Pure/ML/ml_lex.scala src/Pure/ML/ml_process.scala src/Pure/ML/ml_statistics.scala src/Pure/ML/ml_syntax.scala src/Pure/PIDE/byte_message.scala src/Pure/PIDE/command.scala src/Pure/PIDE/command_span.scala src/Pure/PIDE/document.scala src/Pure/PIDE/document_id.scala src/Pure/PIDE/document_status.scala src/Pure/PIDE/editor.scala src/Pure/PIDE/headless.scala src/Pure/PIDE/line.scala src/Pure/PIDE/markup.scala src/Pure/PIDE/markup_tree.scala src/Pure/PIDE/protocol.scala src/Pure/PIDE/protocol_handlers.scala src/Pure/PIDE/protocol_message.scala src/Pure/PIDE/prover.scala src/Pure/PIDE/query_operation.scala src/Pure/PIDE/rendering.scala src/Pure/PIDE/resources.scala src/Pure/PIDE/session.scala src/Pure/PIDE/text.scala src/Pure/PIDE/xml.scala src/Pure/PIDE/yxml.scala 
src/Pure/ROOT.scala src/Pure/System/bash.scala src/Pure/System/command_line.scala src/Pure/System/cygwin.scala src/Pure/System/distribution.scala src/Pure/System/executable.scala src/Pure/System/getopts.scala src/Pure/System/isabelle_charset.scala src/Pure/System/isabelle_fonts.scala src/Pure/System/isabelle_platform.scala src/Pure/System/isabelle_process.scala src/Pure/System/isabelle_system.scala src/Pure/System/isabelle_tool.scala src/Pure/System/java_statistics.scala src/Pure/System/linux.scala src/Pure/System/mingw.scala src/Pure/System/numa.scala src/Pure/System/options.scala src/Pure/System/platform.scala src/Pure/System/posix_interrupt.scala src/Pure/System/process_result.scala src/Pure/System/progress.scala src/Pure/System/scala.scala src/Pure/System/system_channel.scala src/Pure/System/tty_loop.scala src/Pure/Thy/bibtex.scala src/Pure/Thy/export.scala src/Pure/Thy/export_theory.scala src/Pure/Thy/file_format.scala src/Pure/Thy/html.scala src/Pure/Thy/latex.scala src/Pure/Thy/presentation.scala src/Pure/Thy/sessions.scala src/Pure/Thy/thy_element.scala src/Pure/Thy/thy_header.scala src/Pure/Thy/thy_syntax.scala src/Pure/Tools/build.scala src/Pure/Tools/build_docker.scala + src/Pure/Tools/build_job.scala src/Pure/Tools/check_keywords.scala src/Pure/Tools/debugger.scala src/Pure/Tools/doc.scala src/Pure/Tools/dump.scala src/Pure/Tools/fontforge.scala src/Pure/Tools/main.scala src/Pure/Tools/mkroot.scala src/Pure/Tools/phabricator.scala src/Pure/Tools/print_operation.scala src/Pure/Tools/profiling_report.scala src/Pure/Tools/scala_project.scala src/Pure/Tools/server.scala src/Pure/Tools/server_commands.scala src/Pure/Tools/simplifier_trace.scala src/Pure/Tools/spell_checker.scala src/Pure/Tools/task_statistics.scala src/Pure/Tools/update.scala src/Pure/Tools/update_cartouches.scala src/Pure/Tools/update_comments.scala src/Pure/Tools/update_header.scala src/Pure/Tools/update_then.scala src/Pure/Tools/update_theorems.scala src/Pure/library.scala src/Pure/pure_thy.scala src/Pure/term.scala src/Pure/term_xml.scala src/Pure/thm_name.scala src/Tools/Graphview/graph_file.scala src/Tools/Graphview/graph_panel.scala src/Tools/Graphview/graphview.scala src/Tools/Graphview/layout.scala src/Tools/Graphview/main_panel.scala src/Tools/Graphview/metrics.scala src/Tools/Graphview/model.scala src/Tools/Graphview/mutator.scala src/Tools/Graphview/mutator_dialog.scala src/Tools/Graphview/mutator_event.scala src/Tools/Graphview/popups.scala src/Tools/Graphview/shapes.scala src/Tools/Graphview/tree_panel.scala src/Tools/VSCode/src/build_vscode.scala src/Tools/VSCode/src/channel.scala src/Tools/VSCode/src/document_model.scala src/Tools/VSCode/src/dynamic_output.scala src/Tools/VSCode/src/grammar.scala src/Tools/VSCode/src/preview_panel.scala src/Tools/VSCode/src/protocol.scala src/Tools/VSCode/src/server.scala src/Tools/VSCode/src/state_panel.scala src/Tools/VSCode/src/vscode_rendering.scala src/Tools/VSCode/src/vscode_resources.scala src/Tools/VSCode/src/vscode_spell_checker.scala ) ## diagnostics PRG="$(basename "$0")" function usage() { echo echo "Usage: isabelle $PRG [OPTIONS]" echo echo " Options are:" echo " -f fresh build" echo exit 1 } function fail() { echo "$1" >&2 exit 2 } [ -z "$ISABELLE_HOME" ] && fail "Missing Isabelle settings environment" ## process command line # options FRESH="" while getopts "f" OPT do case "$OPT" in f) FRESH=true ;; \?) 
usage ;; esac done shift $(($OPTIND - 1)) # args [ "$#" -ne 0 ] && usage ## target TARGET_DIR="lib/classes" TARGET_JAR="$TARGET_DIR/Pure.jar" TARGET_SHASUM="$TARGET_DIR/Pure.shasum" function target_shasum() { shasum -a1 -b "$TARGET_JAR" "${SOURCES[@]}" 2>/dev/null } function target_clean() { rm -rf "$TARGET_DIR" } [ -n "$FRESH" ] && target_clean ## build target_shasum | cmp "$TARGET_SHASUM" >/dev/null 2>/dev/null if [ "$?" -ne 0 ]; then echo "### Building Isabelle/Scala ..." target_clean BUILD_DIR="$TARGET_DIR/build" mkdir -p "$BUILD_DIR" ( export CLASSPATH="$(platform_path "$ISABELLE_CLASSPATH")" isabelle_scala scalac $ISABELLE_SCALAC_OPTIONS \ -d "$BUILD_DIR" "${SOURCES[@]}" ) || fail "Failed to compile sources" CHARSET_SERVICE="META-INF/services/java.nio.charset.spi.CharsetProvider" mkdir -p "$BUILD_DIR/$(dirname "$CHARSET_SERVICE")" echo isabelle.Isabelle_Charset_Provider > "$BUILD_DIR/$CHARSET_SERVICE" cp "$ISABELLE_HOME/lib/logo/isabelle_transparent-32.gif" "$BUILD_DIR/isabelle/." cp "$ISABELLE_HOME/lib/logo/isabelle_transparent.gif" "$BUILD_DIR/isabelle/." isabelle_jdk jar -c -f "$(platform_path "$TARGET_JAR")" -e isabelle.Main \ -C "$BUILD_DIR" META-INF \ -C "$BUILD_DIR" isabelle || fail "Failed to produce $TARGET_JAR" rm -rf "$BUILD_DIR" target_shasum > "$TARGET_SHASUM" fi
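
A minimal usage sketch, assuming the refactored classes above: Build.build remains the programmatic entry point (as Build.build_logic in this patch shows), while each scheduled session is now driven by the public Build_Job class from the new build_job.scala. The object name Build_Example, the session name "HOL", and the build_heap/max_jobs values below are illustrative assumptions, not part of the change.

  package isabelle

  // Hypothetical sketch: invoke a build programmatically, mirroring Build.build_logic.
  object Build_Example
  {
    def main(args: Array[String]): Unit =
    {
      val options = Options.init()
      val progress = new Console_Progress(verbose = true)

      val results =
        progress.interrupt_handler {
          Build.build(options,
            selection = Sessions.Selection.session("HOL"),  // illustrative session name
            progress = progress,
            build_heap = true,   // store a heap image, as with option -b
            max_jobs = 2)        // parallel jobs, as with option -j
        }

      // Results.ok / Results.rc aggregate the per-session Process_Result values.
      if (!results.ok) error("Build failed, return code " + results.rc)
    }
  }

On the command line the same request corresponds roughly to "isabelle build -b -j 2 -v HOL", per the option descriptions in the tool wrapper above; inside Build.build's scheduler loop each session is executed by a Build_Job instance, whose join method returns the Process_Result together with an optional heap digest, exactly as in the code moved to build_job.scala.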