diff --git a/src/Pure/PIDE/session.ML b/src/Pure/PIDE/session.ML
--- a/src/Pure/PIDE/session.ML
+++ b/src/Pure/PIDE/session.ML
@@ -1,97 +1,97 @@
 (*  Title:      Pure/PIDE/session.ML
     Author:     Makarius
 
 Prover session: persistent state of logic image.
 *)
 
 signature SESSION =
 sig
   val get_name: unit -> string
   val welcome: unit -> string
   val get_keywords: unit -> Keyword.keywords
-  val init: HTML.symbols -> bool -> bool -> Path.T -> string -> string -> (string * string) list ->
+  val init: HTML.symbols -> bool -> Path.T -> string -> string -> (string * string) list ->
     (Path.T * Path.T) list -> Path.T -> string -> string * string -> bool -> unit
   val shutdown: unit -> unit
   val finish: unit -> unit
   val protocol_handler: string -> unit
   val init_protocol_handlers: unit -> unit
 end;
 
 structure Session: SESSION =
 struct
 
 (** persistent session information **)
 
 val session = Synchronized.var "Session.session" ({chapter = "", name = ""}, true);
 
 fun get_name () = #name (#1 (Synchronized.value session));
 
 fun description () = "Isabelle/" ^ get_name ();
 
 fun welcome () =
   if Distribution.is_identified then
     "Welcome to " ^ description () ^ " (" ^ Distribution.version ^ ")"
   else "Unofficial version of " ^ description () ^ " (" ^ Distribution.version ^ ")";
 
 
 (* base syntax *)
 
 val keywords = Synchronized.var "Session.keywords" Keyword.empty_keywords;
 
 fun get_keywords () = Synchronized.value keywords;
 
 fun update_keywords () =
   Synchronized.change keywords
     (K (fold (curry Keyword.merge_keywords o Thy_Header.get_keywords o Thy_Info.get_theory)
       (Thy_Info.get_names ()) Keyword.empty_keywords));
 
 
 (* init *)
 
-fun init symbols build info info_path doc doc_output doc_variants doc_files graph_file
+fun init symbols info info_path doc doc_output doc_variants doc_files graph_file
     parent (chapter, name) verbose =
  (Synchronized.change session
     (fn ({name = parent_name, ...}, parent_finished) =>
       if parent_name <> parent orelse not parent_finished then
         error ("Unfinished parent session " ^ quote parent ^ " for " ^ quote name)
       else ({chapter = chapter, name = name}, false));
-  Present.init symbols build info info_path (if doc = "false" then "" else doc)
+  Present.init symbols info info_path (if doc = "false" then "" else doc)
     doc_output doc_variants doc_files graph_file (chapter, name) verbose);
 
 
 (* finish *)
 
 fun shutdown () =
  (Execution.shutdown ();
   Event_Timer.shutdown ();
   Future.shutdown ());
 
 fun finish () =
  (shutdown ();
   Thy_Info.finish ();
   Present.finish ();
   shutdown ();
   update_keywords ();
   Synchronized.change session (apsnd (K true)));
 
 
 
 (** protocol handlers **)
 
 val protocol_handlers = Synchronized.var "protocol_handlers" ([]: string list);
 
 fun protocol_handler name =
   if Thread_Data.is_virtual then ()
   else
     Synchronized.change protocol_handlers (fn handlers =>
      (Output.try_protocol_message (Markup.protocol_handler name) [];
       if not (member (op =) handlers name) then ()
       else warning ("Redefining protocol handler: " ^ quote name);
       update (op =) name handlers));
 
 fun init_protocol_handlers () =
   if Thread_Data.is_virtual then ()
   else
     Synchronized.value protocol_handlers
     |> List.app (fn name => Output.try_protocol_message (Markup.protocol_handler name) []);
 
 end;
diff --git a/src/Pure/Thy/present.ML b/src/Pure/Thy/present.ML
--- a/src/Pure/Thy/present.ML
+++ b/src/Pure/Thy/present.ML
@@ -1,325 +1,322 @@
 (*  Title:      Pure/Thy/present.ML
     Author:     Markus Wenzel and Stefan Berghofer, TU Muenchen
 
 Theory presentation: HTML and PDF-LaTeX documents.
*) signature PRESENT = sig val get_bibtex_entries: theory -> string list val theory_qualifier: theory -> string val document_option: Options.T -> {enabled: bool, disabled: bool} val document_variants: Options.T -> (string * string) list - val init: HTML.symbols -> bool -> bool -> Path.T -> string -> string -> (string * string) list -> + val init: HTML.symbols -> bool -> Path.T -> string -> string -> (string * string) list -> (Path.T * Path.T) list -> Path.T -> string * string -> bool -> unit val finish: unit -> unit val theory_output: theory -> string list -> unit val begin_theory: string list -> int -> (unit -> HTML.text) -> theory -> theory end; structure Present: PRESENT = struct (** paths **) val tex_ext = Path.ext "tex"; val tex_path = tex_ext o Path.basic; val html_ext = Path.ext "html"; val html_path = html_ext o Path.basic; val index_path = Path.basic "index.html"; val readme_html_path = Path.basic "README.html"; val doc_indexN = "session"; val session_graph_path = Path.basic "session_graph.pdf"; fun show_path path = Path.implode (Path.expand (File.full_path Path.current path)); (** additional theory data **) structure Browser_Info = Theory_Data ( type T = {chapter: string, name: string, bibtex_entries: string list}; val empty = {chapter = "Unsorted", name = "Unknown", bibtex_entries = []}: T; fun extend _ = empty; fun merge _ = empty; ); val _ = Theory.setup (Browser_Info.put {chapter = Context.PureN, name = Context.PureN, bibtex_entries = []}); val get_bibtex_entries = #bibtex_entries o Browser_Info.get; (** global browser info state **) (* type theory_info *) type theory_info = {tex_source: string list, html_source: string}; fun make_theory_info (tex_source, html_source) = {tex_source = tex_source, html_source = html_source}: theory_info; fun map_theory_info f {tex_source, html_source} = make_theory_info (f (tex_source, html_source)); (* type browser_info *) type browser_info = {theories: theory_info Symtab.table, tex_index: (int * string) list, html_index: (int * string) list}; fun make_browser_info (theories, tex_index, html_index) : browser_info = {theories = theories, tex_index = tex_index, html_index = html_index}; val empty_browser_info = make_browser_info (Symtab.empty, [], []); fun map_browser_info f {theories, tex_index, html_index} = make_browser_info (f (theories, tex_index, html_index)); (* state *) val browser_info = Synchronized.var "browser_info" empty_browser_info; fun change_browser_info f = Synchronized.change browser_info (map_browser_info f); fun init_theory_info name info = change_browser_info (fn (theories, tex_index, html_index) => (Symtab.update (name, info) theories, tex_index, html_index)); fun change_theory_info name f = change_browser_info (fn (theories, tex_index, html_index) => (case Symtab.lookup theories name of NONE => error ("Browser info: cannot access theory document " ^ quote name) | SOME info => (Symtab.update (name, map_theory_info f info) theories, tex_index, html_index))); fun add_tex_index txt = change_browser_info (fn (theories, tex_index, html_index) => (theories, txt :: tex_index, html_index)); fun add_html_index txt = change_browser_info (fn (theories, tex_index, html_index) => (theories, tex_index, txt :: html_index)); (** global session state **) (* session_info *) type session_info = {symbols: HTML.symbols, name: string, chapter: string, info_path: Path.T, info: bool, doc_format: string, doc_output: Path.T option, doc_files: (Path.T * Path.T) list, graph_file: Path.T, documents: (string * string) list, verbose: bool, readme: 
Path.T option}; fun make_session_info (symbols, name, chapter, info_path, info, doc_format, doc_output, doc_files, graph_file, documents, verbose, readme) = {symbols = symbols, name = name, chapter = chapter, info_path = info_path, info = info, doc_format = doc_format, doc_output = doc_output, doc_files = doc_files, graph_file = graph_file, documents = documents, verbose = verbose, readme = readme}: session_info; (* state *) val session_info = Unsynchronized.ref (NONE: session_info option); fun with_session_info x f = (case ! session_info of NONE => x | SOME info => f info); val theory_qualifier = Resources.theory_qualifier o Context.theory_long_name; fun is_session_theory thy = (case ! session_info of NONE => false | SOME {name, ...} => name = theory_qualifier thy); (** document preparation **) (* options *) fun document_option options = (case Options.string options "document" of "" => {enabled = false, disabled = false} | "false" => {enabled = false, disabled = true} | _ => {enabled = true, disabled = false}); fun document_variants options = let val variants = space_explode ":" (Options.string options "document_variants") |> map (fn s => (case space_explode "=" s of [name] => (name, "") | [name, tags] => (name, tags) | _ => error ("Malformed document variant specification: " ^ quote s))); val _ = (case duplicates (op =) (map #1 variants) of [] => () | dups => error ("Duplicate document variants: " ^ commas_quote dups)); in variants end; (* init session *) -fun init symbols build info info_path doc document_output doc_variants doc_files graph_file +fun init symbols info info_path doc document_output doc_variants doc_files graph_file (chapter, name) verbose = - if not build andalso not info andalso doc = "" then - (Synchronized.change browser_info (K empty_browser_info); session_info := NONE) - else - let - val doc_output = - if document_output = "" then NONE else SOME (Path.explode document_output); + let + val doc_output = + if document_output = "" then NONE else SOME (Path.explode document_output); - val documents = if doc = "" orelse null doc_files then [] else doc_variants; - val readme = if File.exists readme_html_path then SOME readme_html_path else NONE; + val documents = if doc = "" orelse null doc_files then [] else doc_variants; + val readme = if File.exists readme_html_path then SOME readme_html_path else NONE; - val docs = - (case readme of NONE => [] | SOME p => [(Url.File p, "README")]) @ - map (fn (name, _) => (Url.File (Path.ext doc (Path.basic name)), name)) documents; - in - session_info := - SOME (make_session_info (symbols, name, chapter, info_path, info, doc, - doc_output, doc_files, graph_file, documents, verbose, readme)); - Synchronized.change browser_info (K empty_browser_info); - add_html_index (0, HTML.begin_session_index symbols name (Url.File session_graph_path) docs) - end; + val docs = + (case readme of NONE => [] | SOME p => [(Url.File p, "README")]) @ + map (fn (name, _) => (Url.File (Path.ext doc (Path.basic name)), name)) documents; + in + session_info := + SOME (make_session_info (symbols, name, chapter, info_path, info, doc, + doc_output, doc_files, graph_file, documents, verbose, readme)); + Synchronized.change browser_info (K empty_browser_info); + add_html_index (0, HTML.begin_session_index symbols name (Url.File session_graph_path) docs) + end; (* isabelle tool wrappers *) fun isabelle_document {verbose} format name tags dir = let val script = "isabelle document -d " ^ File.bash_path dir ^ " -o " ^ Bash.string format ^ " -n " ^ Bash.string name ^ 
(if tags = "" then "" else " -t " ^ Bash.string tags); val doc_path = Path.appends [dir, Path.parent, Path.basic name |> Path.ext format]; val _ = if verbose then writeln script else (); val {out, err, rc, ...} = Bash.process script; val _ = if verbose then writeln (trim_line (normalize_lines out)) else (); val _ = if not (File.exists doc_path) orelse rc <> 0 then error (trim_line err) else (); in doc_path end; (* finish session -- output all generated text *) fun sorted_index index = map snd (sort (int_ord o apply2 fst) (rev index)); fun index_buffer index = Buffer.add (implode (sorted_index index)) Buffer.empty; fun write_tex src name path = File.write_buffer (Path.append path (tex_path name)) src; fun write_tex_index tex_index path = write_tex (index_buffer tex_index) doc_indexN path; fun finish () = with_session_info () (fn {name, chapter, info, info_path, doc_format, doc_output, doc_files, graph_file, documents, verbose, readme, ...} => let val {theories, tex_index, html_index} = Synchronized.value browser_info; val thys = Symtab.dest theories; val session_prefix = Path.append (Path.append info_path (Path.basic chapter)) (Path.basic name); fun finish_html (a, {html_source, ...}: theory_info) = File.write (Path.append session_prefix (html_path a)) html_source; val _ = if info then (Isabelle_System.mkdirs session_prefix; File.write_buffer (Path.append session_prefix index_path) (index_buffer html_index |> Buffer.add HTML.end_document); (case readme of NONE => () | SOME path => Isabelle_System.copy_file path session_prefix); List.app finish_html thys; if verbose then Output.physical_stderr ("Browser info at " ^ show_path session_prefix ^ "\n") else ()) else (); fun document_job doc_prefix backdrop (doc_name, tags) = let val doc_dir = Path.append doc_prefix (Path.basic doc_name); fun purge () = if backdrop then Isabelle_System.rm_tree doc_dir else (); val _ = purge (); val _ = Isabelle_System.mkdirs doc_dir; val _ = Isabelle_System.bash ("isabelle latex -o sty " ^ File.bash_path (Path.append doc_dir (Path.basic "root.tex"))); val _ = List.app (fn file => Isabelle_System.copy_file_base file doc_dir) doc_files; val _ = Isabelle_System.copy_file graph_file (Path.append doc_dir session_graph_path); val _ = write_tex_index tex_index doc_dir; val _ = List.app (fn (a, {tex_source, ...}) => write_tex (fold Buffer.add tex_source Buffer.empty) a doc_dir) thys; in fn () => (isabelle_document {verbose = true} doc_format doc_name tags doc_dir before purge (), fn doc => if verbose orelse not backdrop then Output.physical_stderr ("Document at " ^ show_path doc ^ "\n") else ()) end; val jobs = (if info orelse is_none doc_output then map (document_job session_prefix true) documents else []) @ (case doc_output of NONE => [] | SOME path => map (document_job path false) documents); val _ = jobs |> Par_List.map (fn job => job ()) |> List.app (op |>); in Synchronized.change browser_info (K empty_browser_info); session_info := NONE end); (* theory elements *) fun theory_output thy output = with_session_info () (fn _ => if is_session_theory thy then (change_theory_info (Context.theory_name thy) o apfst) (K output) else ()); fun theory_link (curr_chapter, curr_session) thy = let val {chapter, name = session, ...} = Browser_Info.get thy; val link = html_path (Context.theory_name thy); in if curr_session = session then SOME link else if curr_chapter = chapter then SOME (Path.appends [Path.parent, Path.basic session, link]) else if chapter = Context.PureN then NONE else SOME (Path.appends [Path.parent, Path.parent, 
Path.basic chapter, Path.basic session, link]) end; fun begin_theory bibtex_entries update_time mk_text thy = with_session_info thy (fn {symbols, name = session_name, chapter, ...} => let val name = Context.theory_name thy; val parent_specs = Theory.parents_of thy |> map (fn parent => (Option.map Url.File (theory_link (chapter, session_name) parent), (Context.theory_name parent))); val html_source = HTML.theory symbols name parent_specs (mk_text ()); val _ = init_theory_info name (make_theory_info ([], html_source)); val bibtex_entries' = if is_session_theory thy then (add_html_index (update_time, HTML.theory_entry symbols (Url.File (html_path name), name)); add_tex_index (update_time, Latex.theory_entry name); bibtex_entries) else []; in thy |> Browser_Info.put {chapter = chapter, name = session_name, bibtex_entries = bibtex_entries'} end); end; diff --git a/src/Pure/Tools/build.ML b/src/Pure/Tools/build.ML --- a/src/Pure/Tools/build.ML +++ b/src/Pure/Tools/build.ML @@ -1,250 +1,248 @@ (* Title: Pure/Tools/build.ML Author: Makarius Build Isabelle sessions. *) signature BUILD = sig val build: string -> unit end; structure Build: BUILD = struct (* command timings *) type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session timing *) fun session_timing name verbose f x = let val start = Timing.start (); val y = f x; val timing = Timing.result start; val threads = string_of_int (Multithreading.max_threads ()); val factor = Time.toReal (#cpu timing) / Time.toReal (#elapsed timing) |> Real.fmt (StringCvt.FIX (SOME 2)); val timing_props = [("threads", threads)] @ Markup.timing_properties timing @ [("factor", factor)]; val _ = writeln ("\fTiming = " ^ YXML.string_of_body (XML.Encode.properties timing_props)); val _ = if verbose then Output.physical_stderr ("Timing " ^ name ^ " (" ^ threads ^ " threads, " ^ Timing.message timing ^ ", factor " ^ factor ^ ")\n") else (); in y end; (* protocol messages *) fun protocol_message props output = (case props of function :: args => if function = Markup.ML_statistics orelse function = Markup.task_statistics then Protocol_Message.inline (#2 function) args else if function = Markup.command_timing then let val name = the_default "" (Properties.get args Markup.nameN); val pos = Position.of_properties args; val {elapsed, ...} = Markup.parse_timing_properties args; val is_significant = Timing.is_relevant_time elapsed andalso elapsed >= Options.default_seconds "command_timing_threshold"; in if is_significant then (case approximative_id name pos of SOME id => Protocol_Message.inline (#2 function) (Markup.command_timing_properties id elapsed) | NONE => ()) else () end else if 
function = Markup.theory_timing then Protocol_Message.inline (#2 function) args else (case Markup.dest_loading_theory props of SOME name => writeln ("\floading_theory = " ^ name) | NONE => Export.protocol_message props output) | [] => raise Output.Protocol_Message props); (* build theories *) fun build_theories symbols bibtex_entries last_timing qualifier master_dir (options, thys) = let val context = {options = options, symbols = symbols, bibtex_entries = bibtex_entries, last_timing = last_timing}; val condition = space_explode "," (Options.string options "condition"); val conds = filter_out (can getenv_strict) condition; in if null conds then (Options.set_default options; Isabelle_Process.init_options (); Future.fork I; (Thy_Info.use_theories context qualifier master_dir |> (case Options.string options "profiling" of "" => I | "time" => profile_time | "allocations" => profile_allocations | bad => error ("Bad profiling option: " ^ quote bad)) |> Unsynchronized.setmp print_mode (space_explode "," (Options.string options "print_mode") @ print_mode_value ())) thys) else Output.physical_stderr ("Skipping theories " ^ commas_quote (map #1 thys) ^ " (undefined " ^ commas conds ^ ")\n") end; (* build session *) datatype args = Args of {symbol_codes: (string * int) list, command_timings: Properties.T list, - do_output: bool, verbose: bool, browser_info: Path.T, document_files: (Path.T * Path.T) list, graph_file: Path.T, parent_name: string, chapter: string, name: string, master_dir: Path.T, theories: (Options.T * (string * Position.T) list) list, session_positions: (string * Properties.T) list, session_directories: (string * string) list, doc_names: string list, global_theories: (string * string) list, loaded_theories: string list, bibtex_entries: string list}; fun decode_args yxml = let open XML.Decode; val position = Position.of_properties o properties; - val (symbol_codes, (command_timings, (do_output, (verbose, (browser_info, + val (symbol_codes, (command_timings, (verbose, (browser_info, (document_files, (graph_file, (parent_name, (chapter, (name, (master_dir, (theories, (session_positions, (session_directories, (doc_names, (global_theories, - (loaded_theories, bibtex_entries))))))))))))))))) = - pair (list (pair string int)) (pair (list properties) (pair bool (pair bool (pair string + (loaded_theories, bibtex_entries)))))))))))))))) = + pair (list (pair string int)) (pair (list properties) (pair bool (pair string (pair (list (pair string string)) (pair string (pair string (pair string (pair string (pair string (pair (((list (pair Options.decode (list (pair string position)))))) (pair (list (pair string properties)) (pair (list (pair string string)) (pair (list string) - (pair (list (pair string string)) (pair (list string) (list string))))))))))))))))) + (pair (list (pair string string)) (pair (list string) (list string)))))))))))))))) (YXML.parse_body yxml); in - Args {symbol_codes = symbol_codes, command_timings = command_timings, do_output = do_output, + Args {symbol_codes = symbol_codes, command_timings = command_timings, verbose = verbose, browser_info = Path.explode browser_info, document_files = map (apply2 Path.explode) document_files, graph_file = Path.explode graph_file, parent_name = parent_name, chapter = chapter, name = name, master_dir = Path.explode master_dir, theories = theories, session_positions = session_positions, session_directories = session_directories, doc_names = doc_names, global_theories = global_theories, loaded_theories = loaded_theories, bibtex_entries = 
bibtex_entries} end; -fun build_session (Args {symbol_codes, command_timings, do_output, verbose, browser_info, +fun build_session (Args {symbol_codes, command_timings, verbose, browser_info, document_files, graph_file, parent_name, chapter, name, master_dir, theories, session_positions, session_directories, doc_names, global_theories, loaded_theories, bibtex_entries}) = let val symbols = HTML.make_symbols symbol_codes; val _ = Resources.init_session_base {session_positions = session_positions, session_directories = session_directories, docs = doc_names, global_theories = global_theories, loaded_theories = loaded_theories}; val _ = Session.init symbols - do_output (Options.default_bool "browser_info") browser_info (Options.default_string "document") (Options.default_string "document_output") (Present.document_variants (Options.default ())) document_files graph_file parent_name (chapter, name) verbose; val last_timing = get_timings (fold update_timings command_timings empty_timings); val res1 = theories |> (List.app (build_theories symbols bibtex_entries last_timing name master_dir) |> session_timing name verbose |> Exn.capture); val res2 = Exn.capture Session.finish (); val _ = Resources.finish_session_base (); val _ = Par_Exn.release_all [res1, res2]; in () end; (*command-line tool*) fun build args_file = let val _ = SHA1.test_samples (); val _ = Options.load_default (); val _ = Isabelle_Process.init_options (); val args = decode_args (File.read (Path.explode args_file)); fun error_message msg = writeln ("\ferror_message = " ^ encode_lines (YXML.content_of msg)); val _ = Unsynchronized.setmp Private_Output.protocol_message_fn protocol_message build_session args handle exn => (List.app error_message (Runtime.exn_message_list exn); Exn.reraise exn); val _ = Private_Output.protocol_message_fn := Output.protocol_message_undefined; val _ = Options.reset_default (); in () end; (*PIDE version*) val _ = Isabelle_Process.protocol_command "build_session" (fn [args_yxml] => let val args = decode_args args_yxml; val result = (build_session args; "") handle exn => (Runtime.exn_message exn handle _ (*sic!*) => "Exception raised, but failed to print details!"); in Output.protocol_message Markup.build_session_finished [XML.Text result] end | _ => raise Match); end; diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,875 +1,875 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Build and manage Isabelle sessions. 
*/ package isabelle import scala.collection.SortedSet import scala.annotation.tailrec object Build { /** auxiliary **/ /* persistent build info */ sealed case class Session_Info( sources: String, input_heaps: List[String], output_heap: Option[String], return_code: Int) { def ok: Boolean = return_code == 0 } /* queue with scheduling information */ private object Queue { type Timings = (List[Properties.T], Double) def load_timings(progress: Progress, store: Sessions.Store, name: String): Timings = { val no_timings: Timings = (Nil, 0.0) store.access_database(name) match { case None => no_timings case Some(db) => def ignore_error(msg: String) = { progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg)) no_timings } try { val command_timings = store.read_command_timings(db, name) val session_timing = store.read_session_timing(db, name) match { case Markup.Elapsed(t) => t case _ => 0.0 } (command_timings, session_timing) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("") } finally { db.close } } } def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double]) : Map[String, Double] = { val maximals = sessions_structure.build_graph.maximals.toSet def desc_timing(name: String): Double = { if (maximals.contains(name)) timing(name) else { val descendants = sessions_structure.build_descendants(List(name)).toSet val g = sessions_structure.build_graph.restrict(descendants) (0.0 :: g.maximals.flatMap(desc => { val ps = g.all_preds(List(desc)) if (ps.exists(p => timing.get(p).isEmpty)) None else Some(ps.map(timing(_)).sum) })).max } } timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0) } def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store) : Queue = { val graph = sessions_structure.build_graph val names = graph.keys val timings = names.map(name => (name, load_timings(progress, store, name))) val command_timings = timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil) val session_timing = make_session_timing(sessions_structure, timings.map({ case (name, (_, t)) => (name, t) }).toMap) object Ordering extends scala.math.Ordering[String] { def compare_timing(name1: String, name2: String): Int = { val t1 = session_timing(name1) val t2 = session_timing(name2) if (t1 == 0.0 || t2 == 0.0) 0 else t1 compare t2 } def compare(name1: String, name2: String): Int = compare_timing(name2, name1) match { case 0 => sessions_structure(name2).timeout compare sessions_structure(name1).timeout match { case 0 => name1 compare name2 case ord => ord } case ord => ord } } new Queue(graph, SortedSet(names: _*)(Ordering), command_timings) } } private class Queue( graph: Graph[String, Sessions.Info], order: SortedSet[String], val command_timings: String => List[Properties.T]) { def is_inner(name: String): Boolean = !graph.is_maximal(name) def is_empty: Boolean = graph.is_empty def - (name: String): Queue = new Queue(graph.del_node(name), order - name, // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? command_timings) def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] = { val it = order.iterator.dropWhile(name => skip(name) || !graph.defined(name) // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? 
|| !graph.is_minimal(name)) if (it.hasNext) { val name = it.next; Some((name, graph.get_node(name))) } else None } } /* PIDE protocol handler */ class Handler(progress: Progress, session: Session, session_name: String) extends Session.Protocol_Handler { val result_error: Promise[String] = Future.promise override def exit() { result_error.cancel } private def build_session_finished(msg: Prover.Protocol_Output): Boolean = { val error_message = try { Pretty.string_of(Symbol.decode_yxml(msg.text)) } catch { case ERROR(msg) => msg } result_error.fulfill(error_message) session.send_stop() true } private def loading_theory(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Markup.Loading_Theory(name) => progress.theory(Progress.Theory(name, session = session_name)) true case _ => false } val functions = List( Markup.BUILD_SESSION_FINISHED -> build_session_finished, Markup.LOADING_THEORY -> loading_theory) } /* job: running prover process */ private class Job(progress: Progress, name: String, val info: Sessions.Info, deps: Sessions.Deps, store: Sessions.Store, do_output: Boolean, verbose: Boolean, pide: Boolean, val numa_node: Option[Int], command_timings: List[Properties.T]) { private val options = NUMA.policy_options(info.options, numa_node) private val sessions_structure = deps.sessions_structure private val graph_file = Isabelle_System.tmp_file("session_graph", "pdf") isabelle.graphview.Graph_File.write(options, graph_file, deps(name).session_graph_display) private val export_tmp_dir = Isabelle_System.tmp_dir("export") private val export_consumer = Export.consumer(store.open_database(name, output = true), cache = store.xz_cache) private val future_result: Future[Process_Result] = Future.thread("build") { val parent = info.parent.getOrElse("") val base = deps(name) val args_yxml = YXML.string_of_body( { import XML.Encode._ - pair(list(pair(string, int)), pair(list(properties), pair(bool, pair(bool, + pair(list(pair(string, int)), pair(list(properties), pair(bool, pair(Path.encode, pair(list(pair(Path.encode, Path.encode)), pair(string, pair(string, pair(string, pair(string, pair(Path.encode, pair(list(pair(Options.encode, list(pair(string, properties)))), pair(list(pair(string, properties)), pair(list(pair(string, string)), pair(list(string), pair(list(pair(string, string)), - pair(list(string), list(string))))))))))))))))))( - (Symbol.codes, (command_timings, (do_output, (verbose, + pair(list(string), list(string)))))))))))))))))( + (Symbol.codes, (command_timings, (verbose, (store.browser_info, (info.document_files, (File.standard_path(graph_file), (parent, (info.chapter, (name, (Path.current, (info.theories, (sessions_structure.session_positions, (sessions_structure.dest_session_directories, (base.doc_names, (base.global_theories.toList, - (base.loaded_theories.keys, info.bibtex_entries.map(_.info))))))))))))))))))) + (base.loaded_theories.keys, info.bibtex_entries.map(_.info)))))))))))))))))) }) val env = Isabelle_System.settings() + ("ISABELLE_EXPORT_TMP" -> File.standard_path(export_tmp_dir)) + ("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString) val is_pure = Sessions.is_pure(name) def save_heap: String = (if (info.theories.isEmpty) "" else "ML_Heap.share_common_data (); ") + "ML_Heap.save_child " + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(name))) if (pide && !is_pure) { val resources = new Resources(sessions_structure, deps(parent)) val session = new Session(options, resources) val handler = new Handler(progress, session, name) 
session.init_protocol_handler(handler) val process = Isabelle_Process(session, options, sessions_structure, store, logic = parent, cwd = info.dir.file, env = env).await_startup session.protocol_command("build_session", args_yxml) val result = process.join handler.result_error.join match { case "" => result case msg => result.copy( rc = result.rc max 1, out_lines = result.out_lines ::: split_lines(Output.error_message_text(msg))) } } else { val args_file = Isabelle_System.tmp_file("build") File.write(args_file, args_yxml) val eval = "Command_Line.tool0 (fn () => (" + "Build.build " + ML_Syntax.print_string_bytes(File.standard_path(args_file)) + (if (is_pure) "; Theory.install_pure (Thy_Info.get_theory Context.PureN)" else "") + (if (do_output) "; " + save_heap else "") + "));" val process = if (is_pure) { ML_Process(options, deps.sessions_structure, store, raw_ml_system = true, args = (for ((root, _) <- Thy_Header.ml_roots) yield List("--use", root)).flatten ::: List("--eval", eval), cwd = info.dir.file, env = env, cleanup = () => args_file.delete) } else { ML_Process(options, deps.sessions_structure, store, logic = parent, args = List("--eval", eval), cwd = info.dir.file, env = env, cleanup = () => args_file.delete) } process.result( progress_stdout = (line: String) => Library.try_unprefix("\floading_theory = ", line) match { case Some(theory) => progress.theory(Progress.Theory(theory, session = name)) case None => for { text <- Library.try_unprefix("\fexport = ", line) (args, path) <- Markup.Export.dest_inline(XML.Decode.properties(YXML.parse_body(text))) } { val body = try { Bytes.read(path) } catch { case ERROR(msg) => error("Failed to read export " + quote(args.compound_name) + "\n" + msg) } path.file.delete export_consumer(name, args, body) } }, progress_limit = options.int("process_output_limit") match { case 0 => None case m => Some(m * 1000000L) }, strict = false) } } def terminate: Unit = future_result.cancel def is_finished: Boolean = future_result.is_finished private val timeout_request: Option[Event_Timer.Request] = { if (info.timeout > Time.zero) Some(Event_Timer.request(Time.now() + info.timeout) { terminate }) else None } def join: (Process_Result, Option[String]) = { val result0 = future_result.join val result1 = export_consumer.shutdown(close = true).map(Output.error_message_text) match { case Nil => result0 case errs => result0.errors(errs).error_rc } Isabelle_System.rm_tree(export_tmp_dir) if (result1.ok) Present.finish(progress, store.browser_info, graph_file, info, name) graph_file.delete val was_timeout = timeout_request match { case None => false case Some(request) => !request.cancel } val result2 = if (result1.interrupted) { if (was_timeout) result1.error(Output.error_message_text("Timeout")).was_timeout else result1.error(Output.error_message_text("Interrupt")) } else result1 val heap_digest = if (result2.ok && do_output && store.output_heap(name).is_file) Some(Sessions.write_heap_digest(store.output_heap(name))) else None (result2, heap_digest) } } /** build with results **/ class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)]) { def sessions: Set[String] = results.keySet def cancelled(name: String): Boolean = results(name)._1.isEmpty def apply(name: String): Process_Result = results(name)._1.getOrElse(Process_Result(1)) def info(name: String): Sessions.Info = results(name)._2 val rc: Int = (0 /: results.iterator.map( { case (_, (Some(r), _)) => r.rc case (_, (None, _)) => 1 }))(_ max _) def ok: Boolean = rc == 0 override def 
toString: String = rc.toString } def build( options: Options, progress: Progress = No_Progress, check_unknown_files: Boolean = false, build_heap: Boolean = false, clean_build: Boolean = false, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Sessions.Info] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, list_files: Boolean = false, check_keywords: Set[String] = Set.empty, fresh_build: Boolean = false, no_build: Boolean = false, soft_build: Boolean = false, verbose: Boolean = false, export_files: Boolean = false, pide: Boolean = false, requirements: Boolean = false, all_sessions: Boolean = false, base_sessions: List[String] = Nil, exclude_session_groups: List[String] = Nil, exclude_sessions: List[String] = Nil, session_groups: List[String] = Nil, sessions: List[String] = Nil, selection: Sessions.Selection = Sessions.Selection.empty): Results = { val build_options = options.int.update("completion_limit", 0).bool.update("ML_statistics", true) val store = Sessions.store(build_options) Isabelle_Fonts.init() /* session selection and dependencies */ val full_sessions = Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos) def sources_stamp(deps: Sessions.Deps, name: String): String = { val digests = full_sessions(name).meta_digest :: deps.sources(name) ::: deps.imported_sources(name) SHA1.digest(cat_lines(digests.map(_.toString).sorted)).toString } val selection1 = Sessions.Selection(requirements, all_sessions, base_sessions, exclude_session_groups, exclude_sessions, session_groups, sessions) ++ selection val deps = { val deps0 = Sessions.deps(full_sessions.selection(selection1), progress = progress, inlined_files = true, verbose = verbose, list_files = list_files, check_keywords = check_keywords).check_errors if (soft_build && !fresh_build) { val outdated = deps0.sessions_structure.build_topological_order.flatMap(name => store.access_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) if build.ok && build.sources == sources_stamp(deps0, name) => None case _ => Some(name) } case None => Some(name) }) Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)), progress = progress, inlined_files = true).check_errors } else deps0 } /* check unknown files */ if (check_unknown_files) { val source_files = (for { (_, base) <- deps.session_bases.iterator (path, _) <- base.sources.iterator } yield path).toList val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file) val unknown_files = Mercurial.check_files(source_files)._2. 
filterNot(path => exclude_files.contains(path.canonical_file)) if (unknown_files.nonEmpty) { progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" + unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", "")) } } /* main build process */ val queue = Queue(progress, deps.sessions_structure, store) store.prepare_output_dir() if (clean_build) { for (name <- full_sessions.imports_descendants(full_sessions.imports_selection(selection1))) { val (relevant, ok) = store.clean_output(name) if (relevant) { if (ok) progress.echo("Cleaned " + name) else progress.echo(name + " FAILED to clean") } } } // scheduler loop case class Result( current: Boolean, heap_digest: Option[String], process: Option[Process_Result], info: Sessions.Info) { def ok: Boolean = process match { case None => false case Some(res) => res.rc == 0 } } def sleep() { try { Thread.sleep(500) } catch { case Exn.Interrupt() => Exn.Interrupt.impose() } } val numa_nodes = new NUMA.Nodes(numa_shuffling) @tailrec def loop( pending: Queue, running: Map[String, (List[String], Job)], results: Map[String, Result]): Map[String, Result] = { def used_node(i: Int): Boolean = running.iterator.exists( { case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i }) if (pending.is_empty) results else { if (progress.stopped) for ((_, (_, job)) <- running) job.terminate running.find({ case (_, (_, job)) => job.is_finished }) match { case Some((name, (input_heaps, job))) => //{{{ finish job val (process_result, heap_digest) = job.join val log_lines = process_result.out_lines.filterNot(_.startsWith("\f")) val process_result_tail = { val tail = job.info.options.int("process_output_tail") process_result.copy( out_lines = "(see also " + store.output_log(name).file.toString + ")" :: (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0))) } // write log file if (process_result.ok) { File.write_gzip(store.output_log_gz(name), terminate_lines(log_lines)) } else File.write(store.output_log(name), terminate_lines(log_lines)) // write database { val build_log = Build_Log.Log_File(name, process_result.out_lines). parse_session_info( command_timings = true, theory_timings = true, ml_statistics = true, task_statistics = true) using(store.open_database(name, output = true))(db => store.write_session_info(db, name, build_log = if (process_result.timeout) build_log.error("Timeout") else build_log, build = Session_Info(sources_stamp(deps, name), input_heaps, heap_digest, process_result.rc))) } // messages process_result.err_lines.foreach(progress.echo) if (process_result.ok) progress.echo("Finished " + name + " (" + process_result.timing.message_resources + ")") else { progress.echo(name + " FAILED") if (!process_result.interrupted) progress.echo(process_result_tail.out) } loop(pending - name, running - name, results + (name -> Result(false, heap_digest, Some(process_result_tail), job.info))) //}}} case None if running.size < (max_jobs max 1) => //{{{ check/start next job pending.dequeue(running.isDefinedAt) match { case Some((name, info)) => val ancestor_results = deps.sessions_structure.build_requirements(List(name)).filterNot(_ == name). 
map(results(_)) val ancestor_heaps = ancestor_results.flatMap(_.heap_digest) val do_output = build_heap || Sessions.is_pure(name) || queue.is_inner(name) val (current, heap_digest) = { store.access_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) => val heap_digest = store.find_heap_digest(name) val current = !fresh_build && build.ok && build.sources == sources_stamp(deps, name) && build.input_heaps == ancestor_heaps && build.output_heap == heap_digest && !(do_output && heap_digest.isEmpty) (current, heap_digest) case None => (false, None) } case None => (false, None) } } val all_current = current && ancestor_results.forall(_.current) if (all_current) loop(pending - name, running, results + (name -> Result(true, heap_digest, Some(Process_Result(0)), info))) else if (no_build) { if (verbose) progress.echo("Skipping " + name + " ...") loop(pending - name, running, results + (name -> Result(false, heap_digest, Some(Process_Result(1)), info))) } else if (ancestor_results.forall(_.ok) && !progress.stopped) { progress.echo((if (do_output) "Building " else "Running ") + name + " ...") store.clean_output(name) using(store.open_database(name, output = true))(store.init_session_info(_, name)) val numa_node = numa_nodes.next(used_node) val job = new Job(progress, name, info, deps, store, do_output, verbose, pide = pide, numa_node, queue.command_timings(name)) loop(pending, running + (name -> (ancestor_heaps, job)), results) } else { progress.echo(name + " CANCELLED") loop(pending - name, running, results + (name -> Result(false, heap_digest, None, info))) } case None => sleep(); loop(pending, running, results) } ///}}} case None => sleep(); loop(pending, running, results) } } } /* build results */ val results0 = if (deps.is_empty) { progress.echo_warning("Nothing to build") Map.empty[String, Result] } else loop(queue, Map.empty, Map.empty) val results = new Results( (for ((name, result) <- results0.iterator) yield (name, (result.process, result.info))).toMap) if (export_files) { for (name <- full_sessions.imports_selection(selection1).iterator if results(name).ok) { val info = results.info(name) if (info.export_files.nonEmpty) { progress.echo("Exporting " + info.name + " ...") for ((dir, prune, pats) <- info.export_files) { Export.export_files(store, name, info.dir + dir, progress = if (verbose) progress else No_Progress, export_prune = prune, export_patterns = pats) } } } } if (results.rc != 0 && (verbose || !no_build)) { val unfinished = (for { name <- results.sessions.iterator if !results(name).ok } yield name).toList.sorted progress.echo("Unfinished session(s): " + commas(unfinished)) } /* global browser info */ if (!no_build) { val browser_chapters = (for { (name, result) <- results0.iterator if result.ok info = full_sessions(name) if info.options.bool("browser_info") } yield (info.chapter, (name, info.description))).toList.groupBy(_._1). 
map({ case (chapter, es) => (chapter, es.map(_._2)) }).filterNot(_._2.isEmpty) for ((chapter, entries) <- browser_chapters) Present.update_chapter_index(store.browser_info, chapter, entries) if (browser_chapters.nonEmpty) Present.make_global_index(store.browser_info) } results } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions", args => { val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS")) var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var numa_shuffling = false var pide = false var requirements = false var soft_build = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_heap = false var clean_build = false var dirs: List[Path] = Nil var export_files = false var fresh_build = false var session_groups: List[String] = Nil var max_jobs = 1 var check_keywords: Set[String] = Set.empty var list_files = false var no_build = false var options = Options.init(opts = build_options) var verbose = false var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle build [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -N cyclic shuffling of NUMA CPU nodes (performance tuning) -P build via PIDE protocol -R operate on requirements of selected sessions -S soft build: only observe changes of sources, not heap images -X NAME exclude sessions from group NAME and all descendants -a select all sessions -b build heap images -c clean build -d DIR include session directory -e export files from session specification into file-system -f fresh build -g NAME select session group NAME -j INT maximum number of parallel jobs (default 1) -k KEYWORD check theory sources for conflicts with proposed keywords -l list session source files -n no build -- test dependencies only -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose -x NAME exclude session NAME and all descendants Build and manage Isabelle sessions, depending on implicit settings: """ + Library.prefix_lines(" ", Build_Log.Settings.show()) + "\n", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "N" -> (_ => numa_shuffling = true), "P" -> (_ => pide = true), "R" -> (_ => requirements = true), "S" -> (_ => soft_build = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "b" -> (_ => build_heap = true), "c" -> (_ => clean_build = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "e" -> (_ => export_files = true), "f" -> (_ => fresh_build = true), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "k:" -> (arg => check_keywords = check_keywords + arg), "l" -> (_ => list_files = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) val start_date = Date.now() if (verbose) { progress.echo( "Started at " + Build_Log.print_date(start_date) + " (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")") progress.echo(Build_Log.Settings.show() + "\n") } val results = progress.interrupt_handler { build(options, progress = 
progress, check_unknown_files = Mercurial.is_repository(Path.explode("~~")), build_heap = build_heap, clean_build = clean_build, dirs = dirs, select_dirs = select_dirs, numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling), max_jobs = max_jobs, list_files = list_files, check_keywords = check_keywords, fresh_build = fresh_build, no_build = no_build, soft_build = soft_build, verbose = verbose, export_files = export_files, pide = pide, requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions) } val end_date = Date.now() val elapsed_time = end_date.time - start_date.time if (verbose) { progress.echo("\nFinished at " + Build_Log.print_date(end_date)) } val total_timing = (Timing.zero /: results.sessions.iterator.map(a => results(a).timing))(_ + _). copy(elapsed = elapsed_time) progress.echo(total_timing.message_resources) sys.exit(results.rc) }) /* build logic image */ def build_logic(options: Options, logic: String, progress: Progress = No_Progress, build_heap: Boolean = false, dirs: List[Path] = Nil, fresh: Boolean = false, strict: Boolean = false): Int = { val rc = if (!fresh && build(options, build_heap = build_heap, no_build = true, dirs = dirs, sessions = List(logic)).ok) 0 else { progress.echo("Build started for Isabelle/" + logic + " ...") Build.build(options, progress = progress, build_heap = build_heap, fresh_build = fresh, dirs = dirs, sessions = List(logic)).rc } if (strict && rc != 0) error("Failed to build Isabelle/" + logic) else rc } }
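
Usage sketch (illustration only, not part of the changeset): calling the build_logic entry point defined above from other Isabelle/Scala code. It assumes the enclosing isabelle package context, with Options.init() and Console_Progress available as in the command-line wrapper; the session name "HOL" and the option values are placeholders.

  // Hypothetical caller of Build.build_logic: (re)build a logic image if needed.
  val options = Options.init()
  val progress = new Console_Progress(verbose = true)
  val rc =
    Build.build_logic(options, "HOL",
      progress = progress,
      build_heap = true,   // store the resulting heap image
      fresh = false,       // reuse an existing up-to-date build
      strict = false)      // return the rc instead of raising an error on failure
  if (rc != 0) progress.echo("Building Isabelle/HOL returned rc = " + rc)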