diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,752 +1,751 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Command-line tools to build and manage Isabelle sessions. */ package isabelle import scala.collection.mutable import scala.util.matching.Regex object Build { /** "isabelle build" **/ /* options */ def hostname(options: Options): String = Isabelle_System.hostname(options.string("build_hostname")) def engine_name(options: Options): String = options.string("build_engine") /* results */ object Results { def apply(context: Build_Process.Context, results: Map[String, Process_Result]): Results = new Results(context.store, context.build_deps, results) } class Results private( val store: Store, val deps: Sessions.Deps, results: Map[String, Process_Result] ) { def cache: Term.Cache = store.cache def sessions_ok: List[String] = (for { name <- deps.sessions_structure.build_topological_order.iterator result <- results.get(name) if result.ok } yield name).toList def info(name: String): Sessions.Info = deps.sessions_structure(name) def sessions: Set[String] = results.keySet def cancelled(name: String): Boolean = !results(name).defined def apply(name: String): Process_Result = results(name).strict val rc: Int = results.valuesIterator.map(_.strict.rc).foldLeft(Process_Result.RC.ok)(_ max _) def ok: Boolean = rc == Process_Result.RC.ok def unfinished: List[String] = sessions.iterator.filterNot(apply(_).ok).toList.sorted override def toString: String = rc.toString } /* engine */ object Engine { lazy val services: List[Engine] = Isabelle_System.make_services(classOf[Engine]) def apply(name: String): Engine = services.find(_.name == name).getOrElse(error("Bad build engine " + quote(name))) } class Engine(val name: String) extends Isabelle_System.Service { override def toString: String = name def build_options(options: Options): Options = options + "completion_limit=0" + "editor_tracing_messages=0" def build_process(build_context: Build_Process.Context, build_progress: Progress): Build_Process = new Build_Process(build_context, build_progress) final def build_store(options: Options, cache: Term.Cache = Term.Cache.make()): Store = { val store = Store(build_options(options), cache = cache) Isabelle_System.make_directory(store.output_dir + Path.basic("log")) Isabelle_Fonts.init() store } final def run(context: Build_Process.Context, progress: Progress): Results = Isabelle_Thread.uninterruptible { using(build_process(context, progress)) { build_process => Results(context, build_process.run()) } } } class Default_Engine extends Engine("") { override def toString: String = "" } /* build */ def build( options: Options, selection: Sessions.Selection = Sessions.Selection.empty, browser_info: Browser_Info.Config = Browser_Info.Config.none, progress: Progress = new Progress, check_unknown_files: Boolean = false, build_heap: Boolean = false, clean_build: Boolean = false, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Sessions.Info] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, list_files: Boolean = false, check_keywords: Set[String] = Set.empty, fresh_build: Boolean = false, no_build: Boolean = false, soft_build: Boolean = false, export_files: Boolean = false, augment_options: String => List[Options.Spec] = _ => Nil, session_setup: (String, Session) => Unit = (_, _) => (), cache: Term.Cache = Term.Cache.make() ): Results = { val build_engine = 
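/* Illustration: how Results.rc (above) aggregates per-session return codes.
   A minimal self-contained sketch, with plain Ints standing in for
   Process_Result.RC values: the overall code is the maximum over all strict
   per-session codes, so one failure dominates any number of successes.

     val ok = 0
     val codes = List(0, 0, 1, 0)          // per-session strict return codes
     val rc = codes.foldLeft(ok)(_ max _)  // worst result wins
     assert(rc == 1)
*/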
Engine(engine_name(options)) val store = build_engine.build_store(options, cache = cache) val build_options = store.options /* session selection and dependencies */ val full_sessions = Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos, augment_options = augment_options) val full_sessions_selection = full_sessions.imports_selection(selection) val build_deps = { val deps0 = Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true, list_files = list_files, check_keywords = check_keywords).check_errors if (soft_build && !fresh_build) { val outdated = deps0.sessions_structure.build_topological_order.flatMap(name => store.try_open_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) if build.ok => val session_options = deps0.sessions_structure(name).options val session_sources = deps0.sources_shasum(name) if (Sessions.eq_sources(session_options, build.sources, session_sources)) None else Some(name) case _ => Some(name) } case None => Some(name) }) Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)), progress = progress, inlined_files = true).check_errors } else deps0 } /* check unknown files */ if (check_unknown_files) { val source_files = (for { (_, base) <- build_deps.session_bases.iterator (path, _) <- base.session_sources.iterator } yield path).toList Mercurial.check_files(source_files)._2 match { case Nil => case unknown_files => progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" + unknown_files.map(File.standard_path).sorted.mkString("\n ", "\n ", "")) } } /* build process and results */ val build_context = - Build_Process.init_context(store, build_deps, progress = progress, - hostname = hostname(build_options), build_heap = build_heap, - numa_shuffling = numa_shuffling, max_jobs = max_jobs, fresh_build = fresh_build, - no_build = no_build, session_setup = session_setup, master = true) + Build_Process.Context(store, build_deps, hostname = hostname(build_options), + build_heap = build_heap, numa_shuffling = numa_shuffling, max_jobs = max_jobs, + fresh_build = fresh_build, no_build = no_build, session_setup = session_setup, + master = true) if (clean_build) { using_optional(store.maybe_open_database_server()) { database_server => for (name <- full_sessions.imports_descendants(full_sessions_selection)) { store.clean_output(database_server, name) match { case None => case Some(true) => progress.echo("Cleaned " + name) case Some(false) => progress.echo(name + " FAILED to clean") } } } } val results = build_engine.run(build_context, progress) if (export_files) { for (name <- full_sessions_selection.iterator if results(name).ok) { val info = results.info(name) if (info.export_files.nonEmpty) { progress.echo("Exporting " + info.name + " ...") for ((dir, prune, pats) <- info.export_files) { Export.export_files(store, name, info.dir + dir, progress = if (progress.verbose) progress else new Progress, export_prune = prune, export_patterns = pats) } } } } val presentation_sessions = results.sessions_ok.filter(name => browser_info.enabled(results.info(name))) if (presentation_sessions.nonEmpty && !progress.stopped) { Browser_Info.build(browser_info, results.store, results.deps, presentation_sessions, progress = progress) } if (!results.ok && (progress.verbose || !no_build)) { progress.echo("Unfinished session(s): " + commas(results.unfinished)) } results } /* build logic image */ def build_logic(options: Options, 
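/* Illustration: invoking the build() entry point programmatically. A hedged
   sketch -- the session name and job count are hypothetical -- mirroring
   what build_logic() below does for a single target session.

     val options = Options.init()
     val progress = new Console_Progress()
     val results =
       Build.build(options,
         selection = Sessions.Selection.session("HOL"),  // hypothetical target
         progress = progress,
         max_jobs = 2)
     if (!results.ok) error("Unfinished session(s): " + commas(results.unfinished))
*/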
logic: String, progress: Progress = new Progress, build_heap: Boolean = false, dirs: List[Path] = Nil, fresh: Boolean = false, strict: Boolean = false ): Int = { val selection = Sessions.Selection.session(logic) val rc = if (!fresh && build(options, selection = selection, build_heap = build_heap, no_build = true, dirs = dirs).ok) Process_Result.RC.ok else { progress.echo("Build started for Isabelle/" + logic + " ...") build(options, selection = selection, progress = progress, build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc } if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic) else rc } /* command-line wrapper */ val isabelle_tool1 = Isabelle_Tool("build", "build and manage Isabelle sessions", Scala_Project.here, { args => var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var numa_shuffling = false var browser_info = Browser_Info.Config.none var requirements = false var soft_build = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_heap = false var clean_build = false var dirs: List[Path] = Nil var export_files = false var fresh_build = false var session_groups: List[String] = Nil var max_jobs = 1 var check_keywords: Set[String] = Set.empty var list_files = false var no_build = false var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS) var verbose = false var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle build [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -N cyclic shuffling of NUMA CPU nodes (performance tuning) -P DIR enable HTML/PDF presentation in directory (":" for default) -R refer to requirements of selected sessions -S soft build: only observe changes of sources, not heap images -X NAME exclude sessions from group NAME and all descendants -a select all sessions -b build heap images -c clean build -d DIR include session directory -e export files from session specification into file-system -f fresh build -g NAME select session group NAME -j INT maximum number of parallel jobs (default 1) -k KEYWORD check theory sources for conflicts with proposed keywords -l list session source files -n no build -- take existing session build databases -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose -x NAME exclude session NAME and all descendants Build and manage Isabelle sessions: ML heaps, session databases, documents. 
Notable system options: see "isabelle options -l -t build" Notable system settings: """ + Library.indent_lines(4, Build_Log.Settings.show()) + "\n", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "N" -> (_ => numa_shuffling = true), "P:" -> (arg => browser_info = Browser_Info.Config.make(arg)), "R" -> (_ => requirements = true), "S" -> (_ => soft_build = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "b" -> (_ => build_heap = true), "c" -> (_ => clean_build = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "e" -> (_ => export_files = true), "f" -> (_ => fresh_build = true), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "k:" -> (arg => check_keywords = check_keywords + arg), "l" -> (_ => list_files = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) val start_date = Date.now() progress.echo( "Started at " + Build_Log.print_date(start_date) + " (" + Isabelle_System.ml_identifier() + " on " + hostname(options) +")", verbose = true) progress.echo(Build_Log.Settings.show() + "\n", verbose = true) val results = progress.interrupt_handler { build(options, selection = Sessions.Selection( requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions), browser_info = browser_info, progress = progress, check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME), build_heap = build_heap, clean_build = clean_build, dirs = dirs, select_dirs = select_dirs, numa_shuffling = Host.numa_check(progress, numa_shuffling), max_jobs = max_jobs, list_files = list_files, check_keywords = check_keywords, fresh_build = fresh_build, no_build = no_build, soft_build = soft_build, export_files = export_files) } val stop_date = Date.now() val elapsed_time = stop_date.time - start_date.time progress.echo("\nFinished at " + Build_Log.print_date(stop_date), verbose = true) val total_timing = results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _). 
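/* Illustration: why the total timing below overrides its "elapsed" field.
   Summing per-session timings is right for cpu/gc, but sessions run in
   parallel, so their elapsed times overlap; the wall-clock span of the whole
   run is the meaningful total. Simplified sketch with a stand-in Timing
   class:

     case class Timing(elapsed: Double, cpu: Double, gc: Double) {
       def +(t: Timing): Timing = Timing(elapsed + t.elapsed, cpu + t.cpu, gc + t.gc)
     }
     val zero = Timing(0.0, 0.0, 0.0)
     val per_session = List(Timing(60.0, 55.0, 2.0), Timing(90.0, 80.0, 3.0))
     val wall_clock = 95.0  // both sessions ran concurrently
     val total = per_session.foldLeft(zero)(_ + _).copy(elapsed = wall_clock)
     // total.cpu == 135.0, while total.elapsed == 95.0 rather than 150.0
*/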
copy(elapsed = elapsed_time) progress.echo(total_timing.message_resources) sys.exit(results.rc) }) /** "isabelle build_worker" **/ /* identified builds */ def read_builds(options: Options): List[Build_Process.Build] = using_option(Store(options).maybe_open_build_database())( Build_Process.read_builds).getOrElse(Nil).filter(_.active) def print_builds(options: Options, builds: List[Build_Process.Build]): String = using_optional(Store(options).maybe_open_build_database()) { build_database => val print_database = build_database match { case None => "" case Some(db) => " (database: " + db + ")" } if (builds.isEmpty) "No build processes available" + print_database else { "Available build processes" + print_database + (for ((build, i) <- builds.iterator.zipWithIndex) yield { "\n " + (i + 1) + ": " + build.build_uuid + " (platform: " + build.ml_platform + ", start: " + Build_Log.print_date(build.start) + ")" }).mkString } } def id_builds( options: Options, id: String, builds: List[Build_Process.Build] ): Build_Process.Build = (id, builds.length) match { case (Value.Int(i), n) if 1 <= i && i <= n => builds(i - 1) case (UUID(_), _) if builds.exists(_.build_uuid == id) => builds.find(_.build_uuid == id).get case ("", 0) => error(print_builds(options, builds)) case ("", 1) => builds.head case _ => cat_error("Cannot identify build process " + quote(id), print_builds(options, builds)) } /* build_worker */ def build_worker( options: Options, build_master: Build_Process.Build, progress: Progress = new Progress, dirs: List[Path] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, cache: Term.Cache = Term.Cache.make() ): Results = { val build_engine = Engine(engine_name(options)) val store = build_engine.build_store(options, cache = cache) val build_options = store.options val sessions_structure = Sessions.load_structure(build_options, dirs = dirs). selection(Sessions.Selection(sessions = build_master.sessions)) val build_deps = Sessions.deps(sessions_structure, progress = progress, inlined_files = true).check_errors val build_context = - Build_Process.init_context(store, build_deps, progress = progress, - hostname = hostname(build_options), numa_shuffling = numa_shuffling, max_jobs = max_jobs, - build_uuid = build_master.build_uuid) + Build_Process.Context(store, build_deps, hostname = hostname(build_options), + numa_shuffling = numa_shuffling, max_jobs = max_jobs, build_uuid = build_master.build_uuid) build_engine.run(build_context, progress) } /* command-line wrapper */ val isabelle_tool2 = Isabelle_Tool("build_worker", "external worker for session build process", Scala_Project.here, { args => var build_id = "" var list_builds = false var numa_shuffling = false var dirs: List[Path] = Nil var max_jobs = 1 var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS ::: List(Options.Spec.make("build_database_test"))) var verbose = false val getopts = Getopts(""" Usage: isabelle build_worker [OPTIONS] Options are: -N cyclic shuffling of NUMA CPU nodes (performance tuning) -d DIR include session directory -i ID identify build process, either via index (starting from 1) or Universally Unique Identifier (UUID) -j INT maximum number of parallel jobs (default 1) -l list build processes -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose Run as external worker for session build process, as identified via option -i. The latter can be omitted, if there is exactly one build. 
""", "N" -> (_ => numa_shuffling = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "i:" -> (arg => build_id = arg), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "l" -> (_ => list_builds = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress(verbose = verbose) val builds = read_builds(options) if (list_builds) progress.echo(print_builds(options, builds)) if (!list_builds || build_id.nonEmpty) { val build = id_builds(options, build_id, builds) val results = progress.interrupt_handler { build_worker(options, build, progress = progress, dirs = dirs, numa_shuffling = Host.numa_check(progress, numa_shuffling), max_jobs = max_jobs) } sys.exit(results.rc) } }) /** "isabelle build_log" **/ /* theory markup/messages from session database */ def read_theory( theory_context: Export.Theory_Context, unicode_symbols: Boolean = false ): Option[Document.Snapshot] = { def decode_bytes(bytes: Bytes): String = Symbol.output(unicode_symbols, UTF8.decode_permissive(bytes)) def read(name: String): Export.Entry = theory_context(name, permissive = true) def read_xml(name: String): XML.Body = YXML.parse_body(decode_bytes(read(name).bytes), cache = theory_context.cache) def read_source_file(name: String): Store.Source_File = theory_context.session_context.source_file(name) for { id <- theory_context.document_id() (thy_file, blobs_files) <- theory_context.files(permissive = true) } yield { val master_dir = Path.explode(Url.strip_base_name(thy_file).getOrElse( error("Cannot determine theory master directory: " + quote(thy_file)))) val blobs = blobs_files.map { name => val path = Path.explode(name) val src_path = File.relative_path(master_dir, path).getOrElse(path) val file = read_source_file(name) val bytes = file.bytes val text = decode_bytes(bytes) val chunk = Symbol.Text_Chunk(text) Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) -> Document.Blobs.Item(bytes, text, chunk, changed = false) } val thy_source = decode_bytes(read_source_file(thy_file).bytes) val thy_xml = read_xml(Export.MARKUP) val blobs_xml = for (i <- (1 to blobs.length).toList) yield read_xml(Export.MARKUP + i) val markups_index = Command.Markup_Index.make(blobs.map(_._1)) val markups = Command.Markups.make( for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml)) yield index -> Markup_Tree.from_XML(xml)) val results = Command.Results.make( for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES)) yield i -> elem) val command = Command.unparsed(thy_source, theory = true, id = id, node_name = Document.Node.Name(thy_file, theory = theory_context.theory), blobs_info = Command.Blobs_Info.make(blobs), markups = markups, results = results) val doc_blobs = Document.Blobs.make(blobs) Document.State.init.snippet(command, doc_blobs) } } /* print messages */ def print_log( options: Options, sessions: List[String], theories: List[String] = Nil, message_head: List[Regex] = Nil, message_body: List[Regex] = Nil, progress: Progress = new Progress, margin: Double = Pretty.default_margin, breakgain: Double = Pretty.default_breakgain, metric: Pretty.Metric = Symbol.Metric, unicode_symbols: Boolean = false ): Unit = { val store = Store(options) val session = new Session(options, Resources.bootstrap) def check(filter: List[Regex], make_string: => String): Boolean = filter.isEmpty || { val s = Output.clean_yxml(make_string) filter.forall(r => 
r.findFirstIn(s).nonEmpty) } def print(session_name: String): Unit = { using(Export.open_session_context0(store, session_name)) { session_context => val result = for { db <- session_context.session_db() theories = store.read_theories(db, session_name) errors = store.read_errors(db, session_name) info <- store.read_build(db, session_name) } yield (theories, errors, info.return_code) result match { case None => store.error_database(session_name) case Some((used_theories, errors, rc)) => theories.filterNot(used_theories.toSet) match { case Nil => case bad => error("Unknown theories " + commas_quote(bad)) } val print_theories = if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet) for (thy <- print_theories) { val thy_heading = "\nTheory " + quote(thy) + " (in " + session_name + ")" + ":" Build_Job.read_theory(session_context.theory(thy), unicode_symbols = unicode_symbols) match { case None => progress.echo(thy_heading + " MISSING") case Some(snapshot) => val rendering = new Rendering(snapshot, options, session) val messages = rendering.text_messages(Text.Range.full) .filter(message => progress.verbose || Protocol.is_exported(message.info)) if (messages.nonEmpty) { val line_document = Line.Document(snapshot.node.source) val buffer = new mutable.ListBuffer[String] for (Text.Info(range, elem) <- messages) { val line = line_document.position(range.start).line1 val pos = Position.Line_File(line, snapshot.node_name.node) def message_text: String = Protocol.message_text(elem, heading = true, pos = pos, margin = margin, breakgain = breakgain, metric = metric) val ok = check(message_head, Protocol.message_heading(elem, pos)) && check(message_body, XML.content(Pretty.unformatted(List(elem)))) if (ok) buffer += message_text } if (buffer.nonEmpty) { progress.echo(thy_heading) buffer.foreach(progress.echo(_)) } } } } if (errors.nonEmpty) { val msg = Symbol.output(unicode_symbols, cat_lines(errors)) progress.echo("\nBuild errors:\n" + Output.error_message_text(msg)) } if (rc != Process_Result.RC.ok) { progress.echo("\n" + Process_Result.RC.print_long(rc)) } } } } val errors = new mutable.ListBuffer[String] for (session_name <- sessions) { Exn.interruptible_capture(print(session_name)) match { case Exn.Res(_) => case Exn.Exn(exn) => errors += Exn.message(exn) } } if (errors.nonEmpty) error(cat_lines(errors.toList)) } /* command-line wrapper */ val isabelle_tool3 = Isabelle_Tool("build_log", "print messages from session build database", Scala_Project.here, { args => /* arguments */ var message_head = List.empty[Regex] var message_body = List.empty[Regex] var unicode_symbols = false var theories: List[String] = Nil var margin = Pretty.default_margin var options = Options.init() var verbose = false val getopts = Getopts(""" Usage: isabelle build_log [OPTIONS] [SESSIONS ...] Options are: -H REGEX filter messages by matching against head -M REGEX filter messages by matching against body -T NAME restrict to given theories (multiple options possible) -U output Unicode symbols -m MARGIN margin for pretty printing (default: """ + margin + """) -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v print all messages, including information etc. Print messages from the session build database of the given sessions, without any checks against current sources or session structure: results from old sessions or failed builds can be printed as well. Multiple options -H and -M are conjunctive: all given patterns need to match.
Patterns match any substring, but ^ or $ may be used to match the start or end explicitly. """, "H:" -> (arg => message_head = message_head ::: List(arg.r)), "M:" -> (arg => message_body = message_body ::: List(arg.r)), "T:" -> (arg => theories = theories ::: List(arg)), "U" -> (_ => unicode_symbols = true), "m:" -> (arg => margin = Value.Double.parse(arg)), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true)) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) if (sessions.isEmpty) progress.echo_warning("No sessions to print") else { print_log(options, sessions, theories = theories, message_head = message_head, message_body = message_body, margin = margin, progress = progress, unicode_symbols = unicode_symbols) } }) } diff --git a/src/Pure/Tools/build_job.scala b/src/Pure/Tools/build_job.scala --- a/src/Pure/Tools/build_job.scala +++ b/src/Pure/Tools/build_job.scala @@ -1,595 +1,600 @@ /* Title: Pure/Tools/build_job.scala Author: Makarius Build job running prover process, with rudimentary PIDE session. */ package isabelle import scala.collection.mutable trait Build_Job { def cancel(): Unit = () def is_finished: Boolean = false def join: (Process_Result, SHA1.Shasum) = (Process_Result.undefined, SHA1.no_shasum) } object Build_Job { /* build session */ def start_session( build_context: Build_Process.Context, + session_context: Session_Context, progress: Progress, log: Logger, database_server: Option[SQL.Database], session_background: Sessions.Background, + sources_shasum: SHA1.Shasum, input_shasum: SHA1.Shasum, - node_info: Host.Node_Info + node_info: Host.Node_Info, + store_heap: Boolean ): Session_Job = { - new Session_Job(build_context, progress, log, database_server, - session_background, input_shasum, node_info) + new Session_Job(build_context, session_context, progress, log, database_server, + session_background, sources_shasum, input_shasum, node_info, store_heap) } object Session_Context { def load( build_uuid: String, name: String, deps: List[String], ancestors: List[String], session_prefs: String, sources_shasum: SHA1.Shasum, timeout: Time, store: Store, progress: Progress = new Progress ): Session_Context = { def default: Session_Context = Session_Context( name, deps, ancestors, session_prefs, sources_shasum, timeout, Time.zero, Bytes.empty, build_uuid) store.try_open_database(name) match { case None => default case Some(db) => def ignore_error(msg: String) = { progress.echo_warning( "Ignoring bad database " + db + " for session " + quote(name) + if_proper(msg, ":\n" + msg)) default } try { val command_timings = store.read_command_timings(db, name) val elapsed = store.read_session_timing(db, name) match { case Markup.Elapsed(s) => Time.seconds(s) case _ => Time.zero } new Session_Context( name, deps, ancestors, session_prefs, sources_shasum, timeout, elapsed, command_timings, build_uuid) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("XML.Error") } finally { db.close() } } } } sealed case class Session_Context( name: String, deps: List[String], ancestors: List[String], session_prefs: String, sources_shasum: SHA1.Shasum, timeout: Time, old_time: Time, old_command_timings_blob: Bytes, build_uuid: String ) extends Library.Named class Session_Job private[Build_Job]( build_context: Build_Process.Context, + session_context: Session_Context, progress: Progress, log: Logger, database_server: Option[SQL.Database], session_background: 
Sessions.Background, + sources_shasum: SHA1.Shasum, input_shasum: SHA1.Shasum, - node_info: Host.Node_Info + node_info: Host.Node_Info, + store_heap: Boolean ) extends Build_Job { private val store = build_context.store def session_name: String = session_background.session_name private val info: Sessions.Info = session_background.sessions_structure(session_name) private val options: Options = node_info.process_policy(info.options) private val session_sources = Store.Sources.load(session_background.base, cache = store.cache.compress) - private val store_heap = build_context.store_heap(session_name) - private val future_result: Future[(Process_Result, SHA1.Shasum)] = Future.thread("build", uninterruptible = true) { val env = Isabelle_System.settings( List("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString)) val session_heaps = session_background.info.parent match { case None => Nil case Some(logic) => ML_Process.session_heaps(store, session_background, logic = logic) } val use_prelude = if (session_heaps.isEmpty) Thy_Header.ml_roots.map(_._1) else Nil val eval_store = if (store_heap) { (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: List("ML_Heap.save_child " + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) } else Nil def session_blobs(node_name: Document.Node.Name): List[(Command.Blob, Document.Blobs.Item)] = session_background.base.theory_load_commands.get(node_name.theory) match { case None => Nil case Some(spans) => val syntax = session_background.base.theory_syntax(node_name) val master_dir = Path.explode(node_name.master_dir) for (span <- spans; file <- span.loaded_files(syntax).files) yield { val src_path = Path.explode(file) val blob_name = Document.Node.Name(File.symbolic_path(master_dir + src_path)) val bytes = session_sources(blob_name.node).bytes val text = bytes.text val chunk = Symbol.Text_Chunk(text) Command.Blob(blob_name, src_path, Some((SHA1.digest(bytes), chunk))) -> Document.Blobs.Item(bytes, text, chunk, changed = false) } } /* session */ val resources = new Resources(session_background, log = log, - command_timings = build_context.old_command_timings(session_name)) + command_timings = + Properties.uncompress(session_context.old_command_timings_blob, cache = store.cache)) val session = new Session(options, resources) { override val cache: Term.Cache = store.cache override def build_blobs_info(node_name: Document.Node.Name): Command.Blobs_Info = Command.Blobs_Info.make(session_blobs(node_name)) override def build_blobs(node_name: Document.Node.Name): Document.Blobs = Document.Blobs.make(session_blobs(node_name)) } object Build_Session_Errors { private val promise: Promise[List[String]] = Future.promise def result: Exn.Result[List[String]] = promise.join_result def cancel(): Unit = promise.cancel() def apply(errs: List[String]): Unit = { try { promise.fulfill(errs) } catch { case _: IllegalStateException => } } } val export_consumer = Export.consumer(store.open_database(session_name, output = true), store.cache, progress = progress) val stdout = new StringBuilder(1000) val stderr = new StringBuilder(1000) val command_timings = new mutable.ListBuffer[Properties.T] val theory_timings = new mutable.ListBuffer[Properties.T] val session_timings = new mutable.ListBuffer[Properties.T] val runtime_statistics = new mutable.ListBuffer[Properties.T] val task_statistics = new mutable.ListBuffer[Properties.T] def fun( name: String, acc: mutable.ListBuffer[Properties.T], unapply: Properties.T => 
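/* Illustration: the shape of the "fun" helper whose signature is completed
   below. It adapts a property-extracting unapply into a protocol function
   that appends every successfully parsed property list to a mutable buffer
   and reports whether the message was consumed. Simplified stand-alone
   sketch (Props stands in for Properties.T):

     import scala.collection.mutable
     type Props = List[(String, String)]
     def accumulate(acc: mutable.ListBuffer[Props], unapply: Props => Option[Props])
         : Props => Boolean =
       props =>
         unapply(props) match {
           case Some(ps) => acc += ps; true  // consumed: recorded for the log
           case None => false                // not ours: leave to other handlers
         }
*/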
Option[Properties.T] ): (String, Session.Protocol_Function) = { name -> ((msg: Prover.Protocol_Output) => unapply(msg.properties) match { case Some(props) => acc += props; true case _ => false }) } session.init_protocol_handler(new Session.Protocol_Handler { override def exit(): Unit = Build_Session_Errors.cancel() private def build_session_finished(msg: Prover.Protocol_Output): Boolean = { val (rc, errors) = try { val (rc, errs) = { import XML.Decode._ pair(int, list(x => x))(Symbol.decode_yxml(msg.text)) } val errors = for (err <- errs) yield { val prt = Protocol_Message.expose_no_reports(err) Pretty.string_of(prt, metric = Symbol.Metric) } (rc, errors) } catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) } session.protocol_command("Prover.stop", rc.toString) Build_Session_Errors(errors) true } private def loading_theory(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Markup.Loading_Theory(Markup.Name(name)) => progress.theory(Progress.Theory(name, session = session_name)) false case _ => false } private def export_(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Protocol.Export(args) => export_consumer.make_entry(session_name, args, msg.chunk) true case _ => false } override val functions: Session.Protocol_Functions = List( Markup.Build_Session_Finished.name -> build_session_finished, Markup.Loading_Theory.name -> loading_theory, Markup.EXPORT -> export_, fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply), fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply), fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply)) }) session.command_timings += Session.Consumer("command_timings") { case Session.Command_Timing(props) => for { elapsed <- Markup.Elapsed.unapply(props) elapsed_time = Time.seconds(elapsed) if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold") } command_timings += props.filter(Markup.command_timing_property) } session.runtime_statistics += Session.Consumer("ML_statistics") { case Session.Runtime_Statistics(props) => runtime_statistics += props } session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories") { case snapshot => if (!progress.stopped) { def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit = { if (!progress.stopped) { val theory_name = snapshot.node_name.theory val args = Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress) val body = Bytes(Symbol.encode(YXML.string_of_body(xml))) export_consumer.make_entry(session_name, args, body) } } def export_text(name: String, text: String, compress: Boolean = true): Unit = export_(name, List(XML.Text(text)), compress = compress) for (command <- snapshot.snippet_command) { export_text(Export.DOCUMENT_ID, command.id.toString, compress = false) } export_text(Export.FILES, cat_lines(snapshot.node_files.map(name => File.symbolic_path(name.path))), compress = false) for ((blob_name, i) <- snapshot.node_files.tail.zipWithIndex) { val xml = snapshot.switch(blob_name).xml_markup() export_(Export.MARKUP + (i + 1), xml) } export_(Export.MARKUP, snapshot.xml_markup()) export_(Export.MESSAGES, snapshot.messages.map(_._1)) } } session.all_messages += Session.Consumer[Any]("build_session_output") { case msg: Prover.Output => val message = msg.message if (msg.is_system) resources.log(Protocol.message_text(message)) if (msg.is_stdout) { stdout ++= Symbol.encode(XML.content(message)) } else if 
(msg.is_stderr) { stderr ++= Symbol.encode(XML.content(message)) } else if (msg.is_exit) { val err = "Prover terminated" + (msg.properties match { case Markup.Process_Result(result) => ": " + result.print_rc case _ => "" }) Build_Session_Errors(List(err)) } case _ => } build_context.session_setup(session_name, session) val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store) /* process */ val process = Isabelle_Process.start(options, session, session_background, session_heaps, use_prelude = use_prelude, eval_main = eval_main, cwd = info.dir.file, env = env) val timeout_request: Option[Event_Timer.Request] = if (info.timeout_ignored) None else Some(Event_Timer.request(Time.now() + info.timeout) { process.terminate() }) val build_errors = Isabelle_Thread.interrupt_handler(_ => process.terminate()) { Exn.capture { process.await_startup() } match { case Exn.Res(_) => val resources_yxml = resources.init_session_yxml val encode_options: XML.Encode.T[Options] = options => session.prover_options(options).encode val args_yxml = YXML.string_of_body( { import XML.Encode._ pair(string, list(pair(encode_options, list(pair(string, properties)))))( (session_name, info.theories)) }) session.protocol_command("build_session", resources_yxml, args_yxml) Build_Session_Errors.result case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn))) } } val result0 = Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() } val was_timeout = timeout_request match { case None => false case Some(request) => !request.cancel() } session.stop() val export_errors = export_consumer.shutdown(close = true).map(Output.error_message_text) val (document_output, document_errors) = try { if (build_errors.isInstanceOf[Exn.Res[_]] && result0.ok && info.documents.nonEmpty) { using(Export.open_database_context(store)) { database_context => val documents = using(database_context.open_session(session_background)) { session_context => Document_Build.build_documents( Document_Build.context(session_context, progress = progress), output_sources = info.document_output, output_pdf = info.document_output) } using(database_context.open_database(session_name, output = true))(session_database => documents.foreach(_.write(session_database.db, session_name))) (documents.flatMap(_.log_lines), Nil) } } else (Nil, Nil) } catch { case exn: Document_Build.Build_Error => (exn.log_lines, exn.log_errors) case Exn.Interrupt.ERROR(msg) => (Nil, List(msg)) } /* process result */ val result1 = { val theory_timing = theory_timings.iterator.flatMap( { case props @ Markup.Name(name) => Some(name -> props) case _ => None }).toMap val used_theory_timings = for { (name, _) <- session_background.base.used_theories } yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory)) val more_output = Library.trim_line(stdout.toString) :: command_timings.toList.map(Protocol.Command_Timing_Marker.apply) ::: used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) ::: session_timings.toList.map(Protocol.Session_Timing_Marker.apply) ::: runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) ::: task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) ::: document_output result0.output(more_output) .error(Library.trim_line(stderr.toString)) .errors_rc(export_errors ::: document_errors) } val result2 = build_errors match { case Exn.Res(build_errs) => val errs = build_errs ::: document_errors if (errs.nonEmpty) { result1.error_rc.output( errs.flatMap(s => split_lines(Output.error_message_text(s))) 
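/* Illustration: the return-code escalation implemented around this point,
   as a hedged summary. Build/document errors force an error rc with marked
   messages; an interrupt turns an otherwise ok result into rc "interrupt";
   a timeout that caused the failure rewrites the rc last. Sketch of the
   precedence, with plain Ints standing in for Process_Result.RC values:

     val (ok, interrupt, timeout) = (0, 130, 142)
     def final_rc(rc: Int, was_timeout: Boolean, interrupted: Boolean): Int =
       if (rc == ok) ok
       else if (was_timeout) timeout   // timeout dominates a plain failure
       else if (interrupted) interrupt
       else rc
*/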
::: errs.map(Protocol.Error_Message_Marker.apply)) } else if (progress.stopped && result1.ok) result1.copy(rc = Process_Result.RC.interrupt) else result1 case Exn.Exn(Exn.Interrupt()) => if (result1.ok) result1.copy(rc = Process_Result.RC.interrupt) else result1 case Exn.Exn(exn) => throw exn } val process_result = if (result2.ok) result2 else if (was_timeout) result2.error(Output.error_message_text("Timeout")).timeout_rc else if (result2.interrupted) result2.error(Output.error_message_text("Interrupt")) else result2 /* output heap */ val output_shasum = { val heap = store.output_heap(session_name) if (process_result.ok && store_heap && heap.is_file) { val slice = Space.MiB(options.real("build_database_slice")).bytes val digest = ML_Heap.store(database_server, session_name, heap, slice) SHA1.shasum(digest, session_name) } else SHA1.no_shasum } val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test) val build_log = Build_Log.Log_File(session_name, process_result.out_lines). parse_session_info( command_timings = true, theory_timings = true, ml_statistics = true, task_statistics = true) // write log file if (process_result.ok) { File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines)) } else File.write(store.output_log(session_name), terminate_lines(log_lines)) // write database using(store.open_database(session_name, output = true))(db => store.write_session_info(db, session_name, session_sources, build_log = if (process_result.timeout) build_log.error("Timeout") else build_log, build = Store.Build_Info( - sources = build_context.sources_shasum(session_name), + sources = sources_shasum, input_heaps = input_shasum, output_heap = output_shasum, process_result.rc, build_context.build_uuid))) // messages process_result.err_lines.foreach(progress.echo(_)) if (process_result.ok) { val props = build_log.session_timing val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1 val timing = Markup.Timing_Properties.get(props) progress.echo( "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")", verbose = true) progress.echo( "Finished " + session_name + " (" + process_result.timing.message_resources + ")") } else { progress.echo( session_name + " FAILED (see also \"isabelle build_log -H Error " + session_name + "\")") if (!process_result.interrupted) { val tail = info.options.int("process_output_tail") val suffix = if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0) val prefix = if (log_lines.length == suffix.length) Nil else List("...") progress.echo(Library.trim_line(cat_lines(prefix ::: suffix))) } } (process_result.copy(out_lines = log_lines), output_shasum) } override def cancel(): Unit = future_result.cancel() override def is_finished: Boolean = future_result.is_finished override def join: (Process_Result, SHA1.Shasum) = future_result.join } /* theory markup/messages from session database */ def read_theory( theory_context: Export.Theory_Context, unicode_symbols: Boolean = false ): Option[Document.Snapshot] = { def decode_bytes(bytes: Bytes): String = Symbol.output(unicode_symbols, UTF8.decode_permissive(bytes)) def read(name: String): Export.Entry = theory_context(name, permissive = true) def read_xml(name: String): XML.Body = YXML.parse_body(decode_bytes(read(name).bytes), cache = theory_context.cache) def read_source_file(name: String): Store.Source_File = theory_context.session_context.source_file(name) for { id <- theory_context.document_id() (thy_file, blobs_files) <- 
theory_context.files(permissive = true) } yield { val master_dir = Path.explode(Url.strip_base_name(thy_file).getOrElse( error("Cannot determine theory master directory: " + quote(thy_file)))) val blobs = blobs_files.map { name => val path = Path.explode(name) val src_path = File.relative_path(master_dir, path).getOrElse(path) val file = read_source_file(name) val bytes = file.bytes val text = decode_bytes(bytes) val chunk = Symbol.Text_Chunk(text) Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) -> Document.Blobs.Item(bytes, text, chunk, changed = false) } val thy_source = decode_bytes(read_source_file(thy_file).bytes) val thy_xml = read_xml(Export.MARKUP) val blobs_xml = for (i <- (1 to blobs.length).toList) yield read_xml(Export.MARKUP + i) val markups_index = Command.Markup_Index.make(blobs.map(_._1)) val markups = Command.Markups.make( for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml)) yield index -> Markup_Tree.from_XML(xml)) val results = Command.Results.make( for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES)) yield i -> elem) val command = Command.unparsed(thy_source, theory = true, id = id, node_name = Document.Node.Name(thy_file, theory = theory_context.theory), blobs_info = Command.Blobs_Info.make(blobs), markups = markups, results = results) val doc_blobs = Document.Blobs.make(blobs) Document.State.init.snippet(command, doc_blobs) } } } diff --git a/src/Pure/Tools/build_process.scala b/src/Pure/Tools/build_process.scala --- a/src/Pure/Tools/build_process.scala +++ b/src/Pure/Tools/build_process.scala @@ -1,1113 +1,1138 @@ /* Title: Pure/Tools/build_process.scala Author: Makarius Build process for sessions, with build database, optional heap, and optional presentation. 
*/ package isabelle import scala.collection.immutable.SortedMap import scala.math.Ordering import scala.annotation.tailrec object Build_Process { /** static context **/ - def init_context( + sealed case class Context( store: Store, - build_deps: Sessions.Deps, - progress: Progress = new Progress, + build_deps: isabelle.Sessions.Deps, ml_platform: String = Isabelle_System.getenv("ML_PLATFORM"), hostname: String = Isabelle_System.hostname(), numa_shuffling: Boolean = false, build_heap: Boolean = false, max_jobs: Int = 1, fresh_build: Boolean = false, no_build: Boolean = false, session_setup: (String, Session) => Unit = (_, _) => (), build_uuid: String = UUID.random().toString, - master: Boolean = false, - ): Context = { - val sessions_structure = build_deps.sessions_structure - val build_graph = sessions_structure.build_graph - - val sessions = - Map.from( - for ((name, (info, _)) <- build_graph.iterator) - yield { - val deps = info.parent.toList - val ancestors = sessions_structure.build_requirements(deps) - val sources_shasum = build_deps.sources_shasum(name) - val session_context = - Build_Job.Session_Context.load( - build_uuid, name, deps, ancestors, info.session_prefs, sources_shasum, - info.timeout, store, progress = progress) - name -> session_context - }) - - val sessions_time = { - val maximals = build_graph.maximals.toSet - def descendants_time(name: String): Double = { - if (maximals.contains(name)) sessions(name).old_time.seconds - else { - val descendants = build_graph.all_succs(List(name)).toSet - val g = build_graph.restrict(descendants) - (0.0 :: g.maximals.flatMap { desc => - val ps = g.all_preds(List(desc)) - if (ps.exists(p => !sessions.isDefinedAt(p))) None - else Some(ps.map(p => sessions(p).old_time.seconds).sum) - }).max - } - } - Map.from( - for (name <- sessions.keysIterator) - yield name -> descendants_time(name)).withDefaultValue(0.0) - } - - val ordering = - new Ordering[String] { - def compare(name1: String, name2: String): Int = - sessions_time(name2) compare sessions_time(name1) match { - case 0 => - sessions(name2).timeout compare sessions(name1).timeout match { - case 0 => name1 compare name2 - case ord => ord - } - case ord => ord - } - } - - val numa_nodes = Host.numa_nodes(enabled = numa_shuffling) - - new Context(store, build_deps, sessions, ordering, ml_platform, hostname, numa_nodes, - build_heap = build_heap, max_jobs = max_jobs, fresh_build = fresh_build, - no_build = no_build, session_setup, build_uuid = build_uuid, master = master) - } - - final class Context private[Build_Process]( - val store: Store, - val build_deps: Sessions.Deps, - val sessions: State.Sessions, - val ordering: Ordering[String], - val ml_platform: String, - val hostname: String, - val numa_nodes: List[Int], - val build_heap: Boolean, - val max_jobs: Int, - val fresh_build: Boolean, - val no_build: Boolean, - val session_setup: (String, Session) => Unit, - val build_uuid: String, - val master: Boolean + master: Boolean = false ) { override def toString: String = "Build_Process.Context(build_uuid = " + quote(build_uuid) + if_proper(master, ", master = true") + ")" def build_options: Options = store.options - def sessions_structure: Sessions.Structure = build_deps.sessions_structure - - def sources_shasum(name: String): SHA1.Shasum = sessions(name).sources_shasum - - def old_command_timings(name: String): List[Properties.T] = - sessions.get(name) match { - case Some(session_context) => - Properties.uncompress(session_context.old_command_timings_blob, cache = store.cache) - case 
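/* Illustration: with this change, Context is a plain case class of static
   parameters only; the per-session graph formerly computed in init_context
   moves into the dynamic State (see the new Sessions class below). Hedged
   construction sketch matching the updated call sites in build.scala
   (hostname and max_jobs values are hypothetical):

     val context =
       Build_Process.Context(store, build_deps,
         hostname = "worker1",
         numa_shuffling = false,
         max_jobs = 4,
         master = true)
     val sessions = Build_Process.Sessions.empty.init(context, progress)
*/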
None => Nil - } - - def store_heap(name: String): Boolean = - build_heap || Sessions.is_pure(name) || - sessions.valuesIterator.exists(_.ancestors.contains(name)) + def sessions_structure: isabelle.Sessions.Structure = build_deps.sessions_structure def worker_active: Boolean = max_jobs > 0 } /** dynamic state **/ case class Build( build_uuid: String, // Database_Progress.context_uuid ml_platform: String, options: String, start: Date, stop: Option[Date], sessions: List[String] ) { def active: Boolean = stop.isEmpty } case class Worker( worker_uuid: String, // Database_Progress.agent_uuid build_uuid: String, start: Date, stamp: Date, stop: Option[Date], serial: Long ) case class Task( name: String, deps: List[String], info: JSON.Object.T, build_uuid: String ) { def is_ready: Boolean = deps.isEmpty def resolve(dep: String): Task = if (deps.contains(dep)) copy(deps = deps.filterNot(_ == dep)) else this } case class Job( name: String, worker_uuid: String, build_uuid: String, node_info: Host.Node_Info, build: Option[Build_Job] ) extends Library.Named { def no_build: Job = copy(build = None) } case class Result( name: String, worker_uuid: String, build_uuid: String, node_info: Host.Node_Info, process_result: Process_Result, output_shasum: SHA1.Shasum, current: Boolean ) extends Library.Named { def ok: Boolean = process_result.ok } + object Sessions { + type Graph = isabelle.Graph[String, Build_Job.Session_Context] + val empty: Sessions = new Sessions(Graph.string) + } + + final class Sessions private(val graph: Sessions.Graph) { + override def toString: String = graph.toString + + def apply(name: String): Build_Job.Session_Context = graph.get_node(name) + + def iterator: Iterator[Build_Job.Session_Context] = + for (name <- graph.topological_order.iterator) yield apply(name) + + def make(new_graph: Sessions.Graph): Sessions = + if (graph == new_graph) this + else { + new Sessions( + new_graph.iterator.foldLeft(new_graph) { + case (g, (name, (session, _))) => g.add_deps_acyclic(name, session.deps) + }) + } + + def pull( + data_domain: Set[String], + data: Set[String] => List[Build_Job.Session_Context] + ): Sessions = { + val dom = data_domain -- iterator.map(_.name) + make(data(dom).foldLeft(graph.restrict(dom)) { case (g, e) => g.new_node(e.name, e) }) + } + + def init(build_context: Context, progress: Progress = new Progress): Sessions = { + val sessions_structure = build_context.sessions_structure + make( + sessions_structure.build_graph.iterator.foldLeft(graph) { + case (graph0, (name, (info, _))) => + val deps = info.parent.toList + val prefs = info.session_prefs + val ancestors = sessions_structure.build_requirements(deps) + val sources_shasum = build_context.build_deps.sources_shasum(name) + + if (graph0.defined(name)) { + val session0 = graph0.get_node(name) + val prefs0 = session0.session_prefs + val ancestors0 = session0.ancestors + val sources_shasum0 = session0.sources_shasum + + def err(msg: String, a: String, b: String): Nothing = + error("Conflicting dependencies for session " + quote(name) + ": " + + msg + "\n" + a + "\nvs.\n" + b) + + if (prefs0 != prefs) { + err("preferences disagree", + Symbol.cartouche_decoded(prefs0), Symbol.cartouche_decoded(prefs)) + } + if (ancestors0 != ancestors) { + err("ancestors disagree", commas_quote(ancestors0), commas_quote(ancestors)) + } + if (sources_shasum0 != sources_shasum) { + val a = sources_shasum0 - sources_shasum + val b = sources_shasum - sources_shasum0 + err("sources disagree", a.toString, b.toString) + } + + graph0 + } + else { + val 
session = + Build_Job.Session_Context.load( + build_context.build_uuid, name, deps, ancestors, prefs, sources_shasum, + info.timeout, build_context.store, progress = progress) + graph0.new_node(name, session) + } + } + ) + } + + lazy val max_time: Map[String, Double] = { + val maximals = graph.maximals.toSet + def descendants_time(name: String): Double = { + if (maximals.contains(name)) apply(name).old_time.seconds + else { + val descendants = graph.all_succs(List(name)).toSet + val g = graph.restrict(descendants) + (0.0 :: g.maximals.flatMap { desc => + val ps = g.all_preds(List(desc)) + if (ps.exists(p => !graph.defined(p))) None + else Some(ps.map(p => apply(p).old_time.seconds).sum) + }).max + } + } + Map.from( + for (name <- graph.keys_iterator) + yield name -> descendants_time(name)).withDefaultValue(0.0) + } + + lazy val ordering: Ordering[String] = + (a: String, b: String) => + max_time(b) compare max_time(a) match { + case 0 => + apply(b).timeout compare apply(a).timeout match { + case 0 => a compare b + case ord => ord + } + case ord => ord + } + } + sealed case class Snapshot( builds: List[Build], // available build configurations workers: List[Worker], // available worker processes - sessions: State.Sessions, // static build targets + sessions: Sessions, // static build targets pending: State.Pending, // dynamic build "queue" running: State.Running, // presently running jobs results: State.Results) // finished results object State { - type Sessions = Map[String, Build_Job.Session_Context] type Pending = List[Task] type Running = Map[String, Job] type Results = Map[String, Result] def inc_serial(serial: Long): Long = { require(serial < java.lang.Long.MAX_VALUE, "serial overflow") serial + 1 } } sealed case class State( serial: Long = 0, - sessions: State.Sessions = Map.empty, + numa_nodes: List[Int] = Nil, + sessions: Sessions = Sessions.empty, pending: State.Pending = Nil, running: State.Running = Map.empty, results: State.Results = Map.empty ) { require(serial >= 0, "serial underflow") def inc_serial: State = copy(serial = State.inc_serial(serial)) def set_serial(i: Long): State = { require(serial <= i, "non-monotonic change of serial") copy(serial = i) } def finished: Boolean = pending.isEmpty def remove_pending(name: String): State = copy(pending = pending.flatMap( entry => if (entry.name == name) None else Some(entry.resolve(name)))) def is_running(name: String): Boolean = running.isDefinedAt(name) def stop_running(): Unit = for (job <- running.valuesIterator; build <- job.build) build.cancel() def finished_running(): List[Job] = List.from( for (job <- running.valuesIterator; build <- job.build if build.is_finished) yield job) def add_running(job: Job): State = copy(running = running + (job.name -> job)) def remove_running(name: String): State = copy(running = running - name) def make_result( result_name: (String, String, String), process_result: Process_Result, output_shasum: SHA1.Shasum, node_info: Host.Node_Info = Host.Node_Info.none, current: Boolean = false ): State = { val (name, worker_uuid, build_uuid) = result_name val result = Result(name, worker_uuid, build_uuid, node_info, process_result, output_shasum, current) copy(results = results + (name -> result)) } } /** SQL data model **/ object Data extends SQL.Data("isabelle_build") { val database: Path = Path.explode("$ISABELLE_HOME_USER/build.db") def pull_data[A <: Library.Named]( data_domain: Set[String], data_iterator: Set[String] => Iterator[A], old_data: Map[String, A] ): Map[String, A] = { val dom = 
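/* Illustration: the scheduling heuristic behind max_time and ordering
   above. Every session is weighted by the largest accumulated old build
   time along any path through its descendants, so sessions that unblock
   long downstream chains start first; ties fall back to timeout, then name.
   Simplified comparator sketch (timeout tie-break omitted):

     def compare(time: Map[String, Double])(a: String, b: String): Int =
       time(b).compare(time(a)) match {  // larger descendant time first
         case 0 => a.compare(b)          // stable alphabetic tie-break
         case ord => ord
       }
*/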
data_domain -- old_data.keysIterator val data = old_data -- old_data.keysIterator.filterNot(data_domain) if (dom.isEmpty) data else data_iterator(dom).foldLeft(data) { case (map, a) => map + (a.name -> a) } } def pull0[A <: Library.Named]( new_data: Map[String, A], old_data: Map[String, A] ): Map[String, A] = { pull_data(new_data.keySet, dom => new_data.valuesIterator.filter(a => dom(a.name)), old_data) } def pull1[A <: Library.Named]( data_domain: Set[String], data_base: Set[String] => Map[String, A], old_data: Map[String, A] ): Map[String, A] = { pull_data(data_domain, dom => data_base(dom).valuesIterator, old_data) } object Generic { val build_uuid = SQL.Column.string("build_uuid") val worker_uuid = SQL.Column.string("worker_uuid") val name = SQL.Column.string("name") def sql( build_uuid: String = "", worker_uuid: String = "", names: Iterable[String] = Nil ): SQL.Source = SQL.and( if_proper(build_uuid, Generic.build_uuid.equal(build_uuid)), if_proper(worker_uuid, Generic.worker_uuid.equal(worker_uuid)), if_proper(names, Generic.name.member(names))) def sql_where( build_uuid: String = "", worker_uuid: String = "", names: Iterable[String] = Nil ): SQL.Source = { SQL.where(sql(build_uuid = build_uuid, worker_uuid = worker_uuid, names = names)) } } /* base table */ object Base { val build_uuid = Generic.build_uuid.make_primary_key val ml_platform = SQL.Column.string("ml_platform") val options = SQL.Column.string("options") val start = SQL.Column.date("start") val stop = SQL.Column.date("stop") val table = make_table("", List(build_uuid, ml_platform, options, start, stop)) } def read_builds(db: SQL.Database, build_uuid: String = ""): List[Build] = { val builds = db.execute_query_statement( Base.table.select(sql = Generic.sql_where(build_uuid = build_uuid)), List.from[Build], { res => val build_uuid = res.string(Base.build_uuid) val ml_platform = res.string(Base.ml_platform) val options = res.string(Base.options) val start = res.date(Base.start) val stop = res.get_date(Base.stop) Build(build_uuid, ml_platform, options, start, stop, Nil) }) for (build <- builds.sortBy(_.start)(Date.Ordering)) yield { val sessions = Data.read_sessions_domain(db, build_uuid = build.build_uuid) build.copy(sessions = sessions.toList.sorted) } } def start_build( db: SQL.Database, build_uuid: String, ml_platform: String, options: String ): Unit = { db.execute_statement(Base.table.insert(), body = { stmt => stmt.string(1) = build_uuid stmt.string(2) = ml_platform stmt.string(3) = options stmt.date(4) = db.now() stmt.date(5) = None }) } def stop_build(db: SQL.Database, build_uuid: String): Unit = db.execute_statement( Base.table.update(List(Base.stop), sql = Base.build_uuid.where_equal(build_uuid)), body = { stmt => stmt.date(1) = db.now() }) def clean_build(db: SQL.Database): Unit = { val old = db.execute_query_statement( Base.table.select(List(Base.build_uuid), sql = SQL.where(Base.stop.defined)), List.from[String], res => res.string(Base.build_uuid)) if (old.nonEmpty) { for (table <- build_uuid_tables) { db.execute_statement(table.delete(sql = Generic.build_uuid.where_member(old))) } } } /* sessions */ object Sessions { val name = Generic.name.make_primary_key val deps = SQL.Column.string("deps") val ancestors = SQL.Column.string("ancestors") val options = SQL.Column.string("options") val sources = SQL.Column.string("sources") val timeout = SQL.Column.long("timeout") val old_time = SQL.Column.long("old_time") val old_command_timings = SQL.Column.bytes("old_command_timings") val build_uuid = Generic.build_uuid val table =
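/* Illustration: pull_data (above) keeps a local map in sync with a database
   domain: names that vanished from data_domain are dropped, entries still
   present are reused, and only genuinely new names are fetched.
   Self-contained sketch of the same discipline:

     case class Item(name: String)
     def pull(domain: Set[String], fetch: Set[String] => Iterator[Item],
         old: Map[String, Item]): Map[String, Item] = {
       val dom = domain -- old.keysIterator                  // new names only
       val kept = old -- old.keysIterator.filterNot(domain)  // drop vanished
       if (dom.isEmpty) kept
       else fetch(dom).foldLeft(kept) { case (m, a) => m + (a.name -> a) }
     }
*/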
make_table("sessions", List(name, deps, ancestors, options, sources, timeout, old_time, old_command_timings, build_uuid)) } def read_sessions_domain(db: SQL.Database, build_uuid: String = ""): Set[String] = db.execute_query_statement( Sessions.table.select(List(Sessions.name), sql = if_proper(build_uuid, Sessions.name.where_equal(build_uuid))), Set.from[String], res => res.string(Sessions.name)) def read_sessions(db: SQL.Database, names: Iterable[String] = Nil, build_uuid: String = "" - ): State.Sessions = + ): List[Build_Job.Session_Context] = { db.execute_query_statement( Sessions.table.select( sql = SQL.where_and( if_proper(names, Sessions.name.member(names)), if_proper(build_uuid, Sessions.build_uuid.equal(build_uuid))) ), - Map.from[String, Build_Job.Session_Context], + List.from[Build_Job.Session_Context], { res => val name = res.string(Sessions.name) val deps = split_lines(res.string(Sessions.deps)) val ancestors = split_lines(res.string(Sessions.ancestors)) val options = res.string(Sessions.options) val sources_shasum = SHA1.fake_shasum(res.string(Sessions.sources)) val timeout = Time.ms(res.long(Sessions.timeout)) val old_time = Time.ms(res.long(Sessions.old_time)) val old_command_timings_blob = res.bytes(Sessions.old_command_timings) val build_uuid = res.string(Sessions.build_uuid) - name -> Build_Job.Session_Context(name, deps, ancestors, options, sources_shasum, + Build_Job.Session_Context(name, deps, ancestors, options, sources_shasum, timeout, old_time, old_command_timings_blob, build_uuid) } ) + } - def update_sessions(db: SQL.Database, sessions: State.Sessions): Boolean = { + def update_sessions(db: SQL.Database, sessions: Build_Process.Sessions): Boolean = { val old_sessions = read_sessions_domain(db) - val insert = sessions.iterator.filterNot(p => old_sessions.contains(p._1)).toList + val insert = sessions.iterator.filterNot(s => old_sessions.contains(s.name)).toList - for ((name, session) <- insert) { + for (session <- insert) { db.execute_statement(Sessions.table.insert(), body = { stmt => - stmt.string(1) = name + stmt.string(1) = session.name stmt.string(2) = cat_lines(session.deps) stmt.string(3) = cat_lines(session.ancestors) stmt.string(4) = session.session_prefs stmt.string(5) = session.sources_shasum.toString stmt.long(6) = session.timeout.ms stmt.long(7) = session.old_time.ms stmt.bytes(8) = session.old_command_timings_blob stmt.string(9) = session.build_uuid }) } insert.nonEmpty } /* workers */ object Workers { val worker_uuid = Generic.worker_uuid.make_primary_key val build_uuid = Generic.build_uuid val start = SQL.Column.date("start") val stamp = SQL.Column.date("stamp") val stop = SQL.Column.date("stop") val serial = SQL.Column.long("serial") val table = make_table("workers", List(worker_uuid, build_uuid, start, stamp, stop, serial)) } def read_serial(db: SQL.Database): Long = db.execute_query_statementO[Long]( Workers.table.select(List(Workers.serial.max)), _.long(Workers.serial)).getOrElse(0L) def read_workers( db: SQL.Database, build_uuid: String = "", worker_uuid: String = "" ): List[Worker] = { db.execute_query_statement( Workers.table.select( sql = Generic.sql_where(build_uuid = build_uuid, worker_uuid = worker_uuid)), List.from[Worker], { res => Worker( worker_uuid = res.string(Workers.worker_uuid), build_uuid = res.string(Workers.build_uuid), start = res.date(Workers.start), stamp = res.date(Workers.stamp), stop = res.get_date(Workers.stop), serial = res.long(Workers.serial)) }) } def start_worker( db: SQL.Database, worker_uuid: String, build_uuid: 

    def start_worker(
      db: SQL.Database,
      worker_uuid: String,
      build_uuid: String,
      serial: Long
    ): Unit = {
      def err(msg: String): Nothing =
        error("Cannot start worker " + worker_uuid + if_proper(msg, "\n" + msg))

      val build_stop =
        db.execute_query_statementO(
          Base.table.select(List(Base.stop), sql = Base.build_uuid.where_equal(build_uuid)),
          res => res.get_date(Base.stop))

      build_stop match {
        case Some(None) =>
        case Some(Some(_)) => err("for already stopped build process " + build_uuid)
        case None => err("for unknown build process " + build_uuid)
      }

      db.execute_statement(Workers.table.insert(), body =
        { stmt =>
          val now = db.now()
          stmt.string(1) = worker_uuid
          stmt.string(2) = build_uuid
          stmt.date(3) = now
          stmt.date(4) = now
          stmt.date(5) = None
          stmt.long(6) = serial
        })
    }

    def stamp_worker(
      db: SQL.Database,
      worker_uuid: String,
      serial: Long,
      stop: Boolean = false
    ): Unit = {
      val sql =
        Workers.table.update(List(Workers.stamp, Workers.stop, Workers.serial),
          sql = Workers.worker_uuid.where_equal(worker_uuid))
      db.execute_statement(sql, body =
        { stmt =>
          val now = db.now()
          stmt.date(1) = now
          stmt.date(2) = if (stop) Some(now) else None
          stmt.long(3) = serial
        })
    }


    /* pending jobs */

    object Pending {
      val name = Generic.name.make_primary_key
      val deps = SQL.Column.string("deps")
      val info = SQL.Column.string("info")
      val build_uuid = Generic.build_uuid

      val table = make_table("pending", List(name, deps, info, build_uuid))
    }

    def read_pending(db: SQL.Database): List[Task] =
      db.execute_query_statement(
        Pending.table.select(sql = SQL.order_by(List(Pending.name))),
        List.from[Task],
        { res =>
          val name = res.string(Pending.name)
          val deps = res.string(Pending.deps)
          val info = res.string(Pending.info)
          val build_uuid = res.string(Pending.build_uuid)
          Task(name, split_lines(deps), JSON.Object.parse(info), build_uuid)
        })

    def update_pending(db: SQL.Database, pending: State.Pending): Boolean = {
      val old_pending = read_pending(db)
      val (delete, insert) = Library.symmetric_difference(old_pending, pending)

      if (delete.nonEmpty) {
        db.execute_statement(
          Pending.table.delete(sql = Generic.sql_where(names = delete.map(_.name))))
      }

      for (task <- insert) {
        db.execute_statement(Pending.table.insert(), body =
          { stmt =>
            stmt.string(1) = task.name
            stmt.string(2) = cat_lines(task.deps)
            stmt.string(3) = JSON.Format(task.info)
            stmt.string(4) = task.build_uuid
          })
      }

      delete.nonEmpty || insert.nonEmpty
    }


    /* running jobs */

    object Running {
      val name = Generic.name.make_primary_key
      val worker_uuid = Generic.worker_uuid
      val build_uuid = Generic.build_uuid
      val hostname = SQL.Column.string("hostname")
      val numa_node = SQL.Column.int("numa_node")

      val table = make_table("running", List(name, worker_uuid, build_uuid, hostname, numa_node))
    }

    def read_running(db: SQL.Database): State.Running =
      db.execute_query_statement(
        Running.table.select(sql = SQL.order_by(List(Running.name))),
        Map.from[String, Job],
        { res =>
          val name = res.string(Running.name)
          val worker_uuid = res.string(Running.worker_uuid)
          val build_uuid = res.string(Running.build_uuid)
          val hostname = res.string(Running.hostname)
          val numa_node = res.get_int(Running.numa_node)
          name -> Job(name, worker_uuid, build_uuid, Host.Node_Info(hostname, numa_node), None)
        }
      )
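
    /* Example (illustrative sketch with plain strings): update_pending and
       update_running below persist only the delta between the database image
       and the in-memory state, via Library.symmetric_difference:

         val old_list = List("a", "b")   // database image
         val new_list = List("b", "c")   // in-memory state
         val (delete, insert) = Library.symmetric_difference(old_list, new_list)
         // delete = List("a"), insert = List("c")
    */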
    def update_running(db: SQL.Database, running: State.Running): Boolean = {
      val running0 = read_running(db).valuesIterator.toList
      val running1 = running.valuesIterator.map(_.no_build).toList
      val (delete, insert) = Library.symmetric_difference(running0, running1)

      if (delete.nonEmpty) {
        db.execute_statement(
          Running.table.delete(sql = Generic.sql_where(names = delete.map(_.name))))
      }

      for (job <- insert) {
        db.execute_statement(Running.table.insert(), body =
          { stmt =>
            stmt.string(1) = job.name
            stmt.string(2) = job.worker_uuid
            stmt.string(3) = job.build_uuid
            stmt.string(4) = job.node_info.hostname
            stmt.int(5) = job.node_info.numa_node
          })
      }

      delete.nonEmpty || insert.nonEmpty
    }


    /* job results */

    object Results {
      val name = Generic.name.make_primary_key
      val worker_uuid = Generic.worker_uuid
      val build_uuid = Generic.build_uuid
      val hostname = SQL.Column.string("hostname")
      val numa_node = SQL.Column.int("numa_node")
      val rc = SQL.Column.int("rc")
      val out = SQL.Column.string("out")
      val err = SQL.Column.string("err")
      val timing_elapsed = SQL.Column.long("timing_elapsed")
      val timing_cpu = SQL.Column.long("timing_cpu")
      val timing_gc = SQL.Column.long("timing_gc")
      val output_shasum = SQL.Column.string("output_shasum")
      val current = SQL.Column.bool("current")

      val table =
        make_table("results",
          List(name, worker_uuid, build_uuid, hostname, numa_node,
            rc, out, err, timing_elapsed, timing_cpu, timing_gc, output_shasum, current))
    }

    def read_results_domain(db: SQL.Database): Set[String] =
      db.execute_query_statement(
        Results.table.select(List(Results.name)),
        Set.from[String], res => res.string(Results.name))

    def read_results(db: SQL.Database, names: Iterable[String] = Nil): State.Results =
      db.execute_query_statement(
        Results.table.select(sql = if_proper(names, Results.name.where_member(names))),
        Map.from[String, Result],
        { res =>
          val name = res.string(Results.name)
          val worker_uuid = res.string(Results.worker_uuid)
          val build_uuid = res.string(Results.build_uuid)
          val hostname = res.string(Results.hostname)
          val numa_node = res.get_int(Results.numa_node)
          val node_info = Host.Node_Info(hostname, numa_node)
          val rc = res.int(Results.rc)
          val out = res.string(Results.out)
          val err = res.string(Results.err)
          val timing =
            res.timing(Results.timing_elapsed, Results.timing_cpu, Results.timing_gc)
          val process_result =
            Process_Result(rc,
              out_lines = split_lines(out),
              err_lines = split_lines(err),
              timing = timing)
          val output_shasum = SHA1.fake_shasum(res.string(Results.output_shasum))
          val current = res.bool(Results.current)
          name ->
            Result(name, worker_uuid, build_uuid, node_info, process_result,
              output_shasum, current)
        }
      )

    def update_results(db: SQL.Database, results: State.Results): Boolean = {
      val old_results = read_results_domain(db)
      val insert = results.valuesIterator.filterNot(res => old_results.contains(res.name)).toList

      for (result <- insert) {
        val process_result = result.process_result
        db.execute_statement(Results.table.insert(), body =
          { stmt =>
            stmt.string(1) = result.name
            stmt.string(2) = result.worker_uuid
            stmt.string(3) = result.build_uuid
            stmt.string(4) = result.node_info.hostname
            stmt.int(5) = result.node_info.numa_node
            stmt.int(6) = process_result.rc
            stmt.string(7) = cat_lines(process_result.out_lines)
            stmt.string(8) = cat_lines(process_result.err_lines)
            stmt.long(9) = process_result.timing.elapsed.ms
            stmt.long(10) = process_result.timing.cpu.ms
            stmt.long(11) = process_result.timing.gc.ms
            stmt.string(12) = result.output_shasum.toString
            stmt.bool(13) = result.current
          })
      }

      insert.nonEmpty
    }
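
    /* Example (illustrative sketch): results are insert-only -- update_results
       writes rows only for names absent from read_results_domain(db) and never
       mutates existing rows, so a repeated call with unchanged state is a no-op:

         update_results(db, state.results)   // true iff new rows were inserted
         update_results(db, state.results)   // false: domain already covered
    */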

    /* collective operations */

    override val tables =
      SQL.Tables(
        Base.table,
        Workers.table,
        Sessions.table,
        Pending.table,
        Running.table,
        Results.table)

    val build_uuid_tables =
      tables.filter(table =>
        table.columns.exists(column => column.name == Generic.build_uuid.name))

    def pull_database(
      db: SQL.Database,
      worker_uuid: String,
      hostname: String,
      state: State
    ): State = {
      val serial_db = read_serial(db)
      if (serial_db == state.serial) state
      else {
        val serial = serial_db max state.serial
        stamp_worker(db, worker_uuid, serial)

-       val sessions = pull1(read_sessions_domain(db), read_sessions(db, _), state.sessions)
+       val sessions = state.sessions.pull(read_sessions_domain(db), read_sessions(db, _))
        val pending = read_pending(db)
        val running = pull0(read_running(db), state.running)
        val results = pull1(read_results_domain(db), read_results(db, _), state.results)

        state.copy(serial = serial, sessions = sessions, pending = pending,
          running = running, results = results)
      }
    }

    def update_database(
      db: SQL.Database,
      worker_uuid: String,
      build_uuid: String,
      hostname: String,
      state: State
    ): State = {
      val changed =
        List(
          update_sessions(db, state.sessions),
          update_pending(db, state.pending),
          update_running(db, state.running),
          update_results(db, state.results))

      val serial0 = state.serial
      val serial = if (changed.exists(identity)) State.inc_serial(serial0) else serial0

      stamp_worker(db, worker_uuid, serial)
      state.set_serial(serial)
    }
  }

  def read_builds(db: SQL.Database): List[Build] =
    Data.transaction_lock(db, create = true) { Data.read_builds(db) }
}



/** main process **/

class Build_Process(
  protected final val build_context: Build_Process.Context,
  protected final val build_progress: Progress
)
extends AutoCloseable {
  /* context */

  protected final val store: Store = build_context.store
  protected final val build_options: Options = store.options
- protected final val build_deps: Sessions.Deps = build_context.build_deps
+ protected final val build_deps: isabelle.Sessions.Deps = build_context.build_deps
  protected final val hostname: String = build_context.hostname
  protected final val build_uuid: String = build_context.build_uuid


  /* progress backed by database */

  private val _database_server: Option[SQL.Database] =
    try { store.maybe_open_database_server() }
    catch { case exn: Throwable => close(); throw exn }

  private val _build_database: Option[SQL.Database] =
    try {
      for (db <- store.maybe_open_build_database()) yield {
        val shared_db = db.is_postgresql
        Build_Process.Data.transaction_lock(db, create = true) {
          Build_Process.Data.clean_build(db)
          if (shared_db) Store.Data.tables.lock(db, create = true)
        }
        Build_Process.Data.vacuum(db,
          more_tables = if (shared_db) Store.Data.tables else SQL.Tables.empty)
        db
      }
    }
    catch { case exn: Throwable => close(); throw exn }

  private val _host_database: Option[SQL.Database] =
    try { store.maybe_open_build_database(path = Host.Data.database) }
    catch { case exn: Throwable => close(); throw exn }

  protected val (progress, worker_uuid) = synchronized {
    _build_database match {
      case None => (build_progress, UUID.random().toString)
      case Some(db) =>
        try {
          val progress_db = store.open_build_database(Progress.Data.database)
          val progress =
            new Database_Progress(progress_db, build_progress,
              hostname = hostname, context_uuid = build_uuid)
          (progress, progress.agent_uuid)
        }
        catch { case exn: Throwable => close(); throw exn }
    }
  }

  protected val log: Logger = Logger.make_system_log(progress, build_options)

  def close(): Unit = synchronized {
    Option(_database_server).flatten.foreach(_.close())
    Option(_build_database).flatten.foreach(_.close())
    Option(_host_database).flatten.foreach(_.close())
    progress match {
      case db_progress: Database_Progress =>
        db_progress.exit()
        db_progress.db.close()
      case _ =>
    }
  }
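
  /* Note (illustrative sketch): close() may be invoked from the catch clauses
     above while later vals are still uninitialized (null); Option(x).flatten
     copes with both situations:

       Option(null: Option[SQL.Database]).flatten   // None: val not yet set
       Option(_database_server).flatten             // the handle, if open
  */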
  /* global state: internal var vs. external database */

  private var _state: Build_Process.State = Build_Process.State()

  protected def synchronized_database[A](body: => A): A = synchronized {
    _build_database match {
      case None => body
      case Some(db) =>
        Build_Process.Data.transaction_lock(db) {
          progress.asInstanceOf[Database_Progress].sync()
          _state = Build_Process.Data.pull_database(db, worker_uuid, hostname, _state)
          val res = body
          _state = Build_Process.Data.update_database(db, worker_uuid, build_uuid, hostname, _state)
          res
        }
    }
  }


  /* policy operations */

  protected def init_state(state: Build_Process.State): Build_Process.State = {
-   val sessions1 =
-     build_context.sessions.foldLeft(state.sessions) { case (map, (name, session)) =>
-       if (state.sessions.isDefinedAt(name)) map
-       else map + (name -> session)
-     }
+   val sessions1 = state.sessions.init(build_context, progress = build_progress)

    val old_pending = state.pending.iterator.map(_.name).toSet
    val new_pending =
      List.from(
-       for {
-         (name, session_context) <- build_context.sessions.iterator
-         if !old_pending(name)
-       } yield Build_Process.Task(name, session_context.deps, JSON.Object.empty, build_uuid))
+       for (session <- sessions1.iterator if !old_pending(session.name))
+         yield Build_Process.Task(session.name, session.deps, JSON.Object.empty, build_uuid))
    val pending1 = new_pending ::: state.pending

-   state.copy(sessions = sessions1, pending = pending1)
+   state.copy(
+     numa_nodes = Host.numa_nodes(enabled = build_context.numa_shuffling),
+     sessions = sessions1,
+     pending = pending1)
  }

  protected def next_job(state: Build_Process.State): Option[String] =
    if (progress.stopped || state.running.size < build_context.max_jobs) {
      state.pending.filter(entry => entry.is_ready && !state.is_running(entry.name))
-       .sortBy(_.name)(build_context.ordering)
+       .sortBy(_.name)(state.sessions.ordering)
        .headOption.map(_.name)
    }
    else None
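
  /* Example (illustrative sketch; session name hypothetical): every state
     transition is bracketed by synchronized_database as pull -> body -> push,
     so concurrent workers sharing one build database converge via the serial
     counter:

       synchronized_database {                   // Data.transaction_lock(db) { ... }
         // _state = pull_database(...)          -- import newer rows
         _state = start_session(_state, "HOL")   // some state-changing body
         // _state = update_database(...)        -- export the delta, bump serial
       }
  */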
  protected def start_session(state: Build_Process.State, session_name: String): Build_Process.State = {
    val ancestor_results =
-     for (a <- build_context.sessions(session_name).ancestors) yield state.results(a)
+     for (a <- state.sessions(session_name).ancestors) yield state.results(a)
+
+   val sources_shasum = state.sessions(session_name).sources_shasum

    val input_shasum =
      if (ancestor_results.isEmpty) ML_Process.bootstrap_shasum()
      else SHA1.flat_shasum(ancestor_results.map(_.output_shasum))

-   val store_heap = build_context.store_heap(session_name)
+   val store_heap =
+     build_context.build_heap || Sessions.is_pure(session_name) ||
+     state.sessions.iterator.exists(_.ancestors.contains(session_name))

    val (current, output_shasum) =
      store.check_output(session_name,
        session_options = build_context.sessions_structure(session_name).options,
-       sources_shasum = build_context.sources_shasum(session_name),
+       sources_shasum = sources_shasum,
        input_shasum = input_shasum,
        fresh_build = build_context.fresh_build,
        store_heap = store_heap)

    val finished = current && ancestor_results.forall(_.current)
    val skipped = build_context.no_build
    val cancelled = progress.stopped || !ancestor_results.forall(_.ok)

    if (!skipped && !cancelled) {
      ML_Heap.restore(_database_server, session_name, store.output_heap(session_name),
        cache = store.cache.compress)
    }

    val result_name = (session_name, worker_uuid, build_uuid)

    if (finished) {
      state
        .remove_pending(session_name)
        .make_result(result_name, Process_Result.ok, output_shasum, current = true)
    }
    else if (skipped) {
      progress.echo("Skipping " + session_name + " ...", verbose = true)
      state.
        remove_pending(session_name).
        make_result(result_name, Process_Result.error, output_shasum)
    }
    else if (cancelled) {
      progress.echo(session_name + " CANCELLED")
      state
        .remove_pending(session_name)
        .make_result(result_name, Process_Result.undefined, output_shasum)
    }
    else {
      def used_nodes: Set[Int] =
        Set.from(for (job <- state.running.valuesIterator; i <- job.node_info.numa_node) yield i)
      val numa_node =
        for {
          db <- _host_database
-         n <- Host.next_numa_node(db, hostname, build_context.numa_nodes, used_nodes)
+         n <- Host.next_numa_node(db, hostname, state.numa_nodes, used_nodes)
        } yield n
      val node_info = Host.Node_Info(hostname, numa_node)

      progress.echo(
        (if (store_heap) "Building " else "Running ") + session_name +
          if_proper(node_info.numa_node, " on " + node_info) + " ...")

      store.clean_output(_database_server, session_name, session_init = true)

+     val session = state.sessions(session_name)
+
      val build =
-       Build_Job.start_session(build_context, progress, log, _database_server,
-         build_deps.background(session_name), input_shasum, node_info)
+       Build_Job.start_session(build_context, session, progress, log, _database_server,
+         build_deps.background(session_name), sources_shasum, input_shasum, node_info,
+         store_heap)

      val job = Build_Process.Job(session_name, worker_uuid, build_uuid, node_info, Some(build))

      state.add_running(job)
    }
  }


  /* build process roles */

  final def is_session_name(job_name: String): Boolean =
    !Long_Name.is_qualified(job_name)

  protected final def start_build(): Unit = synchronized_database {
    for (db <- _build_database) {
      Build_Process.Data.start_build(db, build_uuid, build_context.ml_platform,
        build_context.sessions_structure.session_prefs)
    }
  }

  protected final def stop_build(): Unit = synchronized_database {
    for (db <- _build_database) {
      Build_Process.Data.stop_build(db, build_uuid)
    }
  }

  protected final def start_worker(): Unit = synchronized_database {
    for (db <- _build_database) {
      _state = _state.inc_serial
      Build_Process.Data.start_worker(db, worker_uuid, build_uuid, _state.serial)
    }
  }

  protected final def stop_worker(): Unit = synchronized_database {
    for (db <- _build_database) {
      Build_Process.Data.stamp_worker(db, worker_uuid, _state.serial, stop = true)
    }
  }


  /* run */

  def run(): Map[String, Process_Result] = {
    if (build_context.master) synchronized_database { _state = init_state(_state) }

    def finished(): Boolean = synchronized_database { _state.finished }

    def sleep(): Unit =
      Isabelle_Thread.interrupt_handler(_ => progress.stop()) {
        build_options.seconds("editor_input_delay").sleep()
      }

    def start_job(): Boolean = synchronized_database {
      next_job(_state) match {
        case Some(name) =>
          if (is_session_name(name)) {
            _state = start_session(_state, name)
            true
          }
          else error("Unsupported build job name " + quote(name))
        case None => false
      }
    }
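
    /* Sketch of the loop below (illustrative): each round harvests finished
       jobs under synchronized_database, then either starts one job or sleeps
       interruptibly for editor_input_delay:

         while (!finished()) {
           // harvest _state.finished_running(), record results
           if (!start_job()) sleep()
         }
    */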
    if (finished()) {
      progress.echo_warning("Nothing to build")
      Map.empty[String, Process_Result]
    }
    else {
      if (build_context.master) start_build()
      start_worker()
      if (build_context.master && !build_context.worker_active) {
        progress.echo("Waiting for external workers ...")
      }

      try {
        while (!finished()) {
          synchronized_database {
            if (progress.stopped) _state.stop_running()

            for (job <- _state.finished_running()) {
              val result_name = (job.name, worker_uuid, build_uuid)
              val (process_result, output_shasum) = job.build.get.join
              _state = _state.
                remove_pending(job.name).
                remove_running(job.name).
                make_result(result_name, process_result, output_shasum,
                  node_info = job.node_info)
            }
          }

          if (!start_job()) sleep()
        }
      }
      finally {
        stop_worker()
        if (build_context.master) stop_build()
      }

      synchronized_database {
        for ((name, result) <- _state.results) yield name -> result.process_result
      }
    }
  }


  /* snapshot */

  def snapshot(): Build_Process.Snapshot = synchronized_database {
    val (builds, workers) =
      _build_database match {
        case None => (Nil, Nil)
        case Some(db) =>
          (Build_Process.Data.read_builds(db),
           Build_Process.Data.read_workers(db))
      }
    Build_Process.Snapshot(
      builds = builds,
      workers = workers,
      sessions = _state.sessions,
      pending = _state.pending,
      running = _state.running,
      results = _state.results)
  }


  /* toString */

  override def toString: String =
    "Build_Process(worker_uuid = " + quote(worker_uuid) + ", build_uuid = " + quote(build_uuid) +
      if_proper(build_context.master, ", master = true") + ")"
}
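
/* Example (illustrative sketch; variable names hypothetical): snapshot()
   combines the database image (builds, workers) with this worker's in-memory
   state, suitable for monitoring tools:

     val snap = build_process.snapshot()
     println("pending: " + snap.pending.size +
       ", running: " + snap.running.size +
       ", results: " + snap.results.size)
*/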