diff --git a/src/Pure/PIDE/session.ML b/src/Pure/PIDE/session.ML --- a/src/Pure/PIDE/session.ML +++ b/src/Pure/PIDE/session.ML @@ -1,72 +1,70 @@ (* Title: Pure/PIDE/session.ML Author: Makarius Prover session: persistent state of logic image. *) signature SESSION = sig val get_name: unit -> string val welcome: unit -> string val get_keywords: unit -> Keyword.keywords - val init: bool -> Path.T -> string list -> string -> string * string -> bool -> unit + val init: string -> string * string -> unit val shutdown: unit -> unit val finish: unit -> unit end; structure Session: SESSION = struct (** persistent session information **) val session = Synchronized.var "Session.session" ({chapter = "", name = ""}, true); fun get_name () = #name (#1 (Synchronized.value session)); fun description () = "Isabelle/" ^ get_name (); fun welcome () = if Distribution.is_identified then "Welcome to " ^ description () ^ " (" ^ Distribution.version ^ ")" else "Unofficial version of " ^ description () ^ " (" ^ Distribution.version ^ ")"; (* base syntax *) val keywords = Synchronized.var "Session.keywords" Keyword.empty_keywords; fun get_keywords () = Synchronized.value keywords; fun update_keywords () = Synchronized.change keywords (K (fold (curry Keyword.merge_keywords o Thy_Header.get_keywords o Thy_Info.get_theory) (Thy_Info.get_names ()) Keyword.empty_keywords)); (* init *) -fun init info info_path documents parent (chapter, name) verbose = +fun init parent (chapter, name) = (Synchronized.change session (fn ({name = parent_name, ...}, parent_finished) => if parent_name <> parent orelse not parent_finished then error ("Unfinished parent session " ^ quote parent ^ " for " ^ quote name) - else ({chapter = chapter, name = name}, false)); - Present.init info info_path documents (chapter, name) verbose); + else ({chapter = chapter, name = name}, false))); (* finish *) fun shutdown () = (Execution.shutdown (); Event_Timer.shutdown (); Future.shutdown ()); fun finish () = (shutdown (); Par_List.map (Global_Theory.get_thm_names o Thy_Info.get_theory) (Thy_Info.get_names ()); Thy_Info.finish (); - Present.finish (); shutdown (); update_keywords (); Synchronized.change session (apsnd (K true))); end; diff --git a/src/Pure/Thy/html.ML b/src/Pure/Thy/html.ML --- a/src/Pure/Thy/html.ML +++ b/src/Pure/Thy/html.ML @@ -1,168 +1,146 @@ (* Title: Pure/Thy/html.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen + Author: Makarius HTML presentation elements. 
*) signature HTML = sig type symbols val make_symbols: (string * int) list -> symbols val no_symbols: symbols val present_span: symbols -> Keyword.keywords -> Command_Span.span -> Output.output type text = string + val name: symbols -> string -> text + val keyword: symbols -> string -> text + val command: symbols -> string -> text + val href_name: string -> string -> string + val href_path: Url.T -> string -> string + val href_opt_path: Url.T option -> string -> string val begin_document: symbols -> string -> text val end_document: text - val begin_session_index: symbols -> string -> Url.T -> (Url.T * string) list -> text - val theory_entry: symbols -> Url.T * string -> text - val theory: symbols -> string -> (Url.T option * string) list -> text -> text end; structure HTML: HTML = struct (* common markup *) fun span "" = ("", "") | span class = ("", ""); val enclose_span = uncurry enclose o span; val hidden = enclose_span Markup.hiddenN; (* symbol output *) abstype symbols = Symbols of string Symtab.table with fun make_symbols codes = let val mapping = map (apsnd (fn c => "&#" ^ Value.print_int c ^ ";")) codes @ [("'", "'"), ("\<^bsub>", hidden "⇘" ^ ""), ("\<^esub>", hidden "⇙" ^ ""), ("\<^bsup>", hidden "⇗" ^ ""), ("\<^esup>", hidden "⇖" ^ "")]; in Symbols (fold Symtab.update mapping Symtab.empty) end; val no_symbols = make_symbols []; fun output_sym (Symbols tab) s = (case Symtab.lookup tab s of SOME x => x | NONE => XML.text s); end; local fun output_markup (bg, en) symbols s1 s2 = hidden s1 ^ enclose bg en (output_sym symbols s2); val output_sub = output_markup ("", ""); val output_sup = output_markup ("", ""); val output_bold = output_markup (span "bold"); fun output_syms _ [] result = implode (rev result) | output_syms symbols (s1 :: rest) result = let val (s2, ss) = (case rest of [] => ("", []) | s2 :: ss => (s2, ss)); val (s, r) = if s1 = "\<^sub>" then (output_sub symbols "⇩" s2, ss) else if s1 = "\<^sup>" then (output_sup symbols "⇧" s2, ss) else if s1 = "\<^bold>" then (output_bold symbols "❙" s2, ss) else (output_sym symbols s1, rest); in output_syms symbols r (s :: result) end; in fun output symbols str = output_syms symbols (Symbol.explode str) []; end; (* presentation *) fun present_span symbols keywords = let fun present_markup (name, props) = (case Properties.get props Markup.kindN of SOME kind => if kind = Markup.commandN orelse kind = Markup.keywordN then enclose_span kind else I | NONE => I) #> enclose_span name; fun present_token tok = fold_rev present_markup (Token.markups keywords tok) (output symbols (Token.unparse tok)); in implode o map present_token o Command_Span.content end; (** HTML markup **) type text = string; (* atoms *) val name = enclose "" "" oo output; val keyword = enclose "" "" oo output; val command = enclose "" "" oo output; (* misc *) fun href_name s txt = "" ^ txt ^ ""; fun href_path path txt = href_name (Url.implode path) txt; fun href_opt_path NONE txt = txt | href_opt_path (SOME p) txt = href_path p txt; -fun para txt = "\n

" ^ txt ^ "

\n"; - (* document *) fun begin_document symbols title = XML.header ^ "\n" ^ "\n" ^ "\n" ^ "\n" ^ "" ^ output symbols (title ^ " (" ^ Distribution.version ^ ")") ^ "\n" ^ "\n" ^ "\n" ^ "\n" ^ "\n" ^ "
" ^ "

" ^ output symbols title ^ "

\n"; val end_document = "\n
\n\n\n"; - -(* session index *) - -fun begin_session_index symbols session graph docs = - begin_document symbols ("Session " ^ output symbols session) ^ - para ("View " ^ href_path graph "theory dependencies" ^ - implode (map (fn (p, name) => "
\nView " ^ href_path p name) docs)) ^ - "\n\n
\n

Theories

\n
\n
\n
" "
\n" txt ^ - end_document; - end; diff --git a/src/Pure/Thy/html.scala b/src/Pure/Thy/html.scala --- a/src/Pure/Thy/html.scala +++ b/src/Pure/Thy/html.scala @@ -1,398 +1,401 @@ /* Title: Pure/Thy/html.scala Author: Makarius HTML presentation elements. */ package isabelle object HTML { /* output text with control symbols */ private val control = Map( Symbol.sub -> "sub", Symbol.sub_decoded -> "sub", Symbol.sup -> "sup", Symbol.sup_decoded -> "sup", Symbol.bold -> "b", Symbol.bold_decoded -> "b") private val control_block = Map( Symbol.bsub -> "", Symbol.bsub_decoded -> "", Symbol.esub -> "", Symbol.esub_decoded -> "", Symbol.bsup -> "", Symbol.bsup_decoded -> "", Symbol.esup -> "", Symbol.esup_decoded -> "") def is_control(sym: Symbol.Symbol): Boolean = control.isDefinedAt(sym) def output_char_permissive(c: Char, s: StringBuilder) { c match { case '<' => s ++= "<" case '>' => s ++= ">" case '&' => s ++= "&" case _ => s += c } } def output(text: String, s: StringBuilder, hidden: Boolean, permissive: Boolean) { def output_char(c: Char): Unit = if (permissive) output_char_permissive(c, s) else XML.output_char(c, s) def output_string(str: String): Unit = str.iterator.foreach(output_char) def output_hidden(body: => Unit): Unit = if (hidden) { s ++= ""; body; s ++= "" } def output_symbol(sym: Symbol.Symbol): Unit = if (sym != "") { control_block.get(sym) match { case Some(html) if html.startsWith(" s ++= html; output_hidden(output_string(sym)) case Some(html) => output_hidden(output_string(sym)); s ++= html case None => if (hidden && Symbol.is_control_encoded(sym)) { output_hidden(output_string(Symbol.control_prefix)) s ++= "" output_string(Symbol.control_name(sym).get) s ++= "" output_hidden(output_string(Symbol.control_suffix)) } else output_string(sym) } } var ctrl = "" for (sym <- Symbol.iterator(text)) { if (is_control(sym)) { output_symbol(ctrl); ctrl = sym } else { control.get(ctrl) match { case Some(elem) if Symbol.is_controllable(sym) => output_hidden(output_symbol(ctrl)) s += '<'; s ++= elem; s += '>' output_symbol(sym) s ++= "' case _ => output_symbol(ctrl) output_symbol(sym) } ctrl = "" } } output_symbol(ctrl) } def output(text: String): String = Library.make_string(output(text, _, hidden = false, permissive = true)) /* output XML as HTML */ private val structural_elements = Set("head", "body", "meta", "div", "pre", "p", "title", "h1", "h2", "h3", "h4", "h5", "h6", "ul", "ol", "dl", "li", "dt", "dd") def output(body: XML.Body, s: StringBuilder, hidden: Boolean, structural: Boolean) { def elem(markup: Markup) { s ++= markup.name for ((a, b) <- markup.properties) { s += ' ' s ++= a s += '=' s += '"' output(b, s, hidden = hidden, permissive = false) s += '"' } } def tree(t: XML.Tree): Unit = t match { case XML.Elem(markup, Nil) => s += '<'; elem(markup); s ++= "/>" case XML.Elem(markup, ts) => if (structural && structural_elements(markup.name)) s += '\n' s += '<'; elem(markup); s += '>' ts.foreach(tree) s ++= "' if (structural && structural_elements(markup.name)) s += '\n' case XML.Text(txt) => output(txt, s, hidden = hidden, permissive = true) } body.foreach(tree) } def output(body: XML.Body, hidden: Boolean, structural: Boolean): String = Library.make_string(output(body, _, hidden, structural)) def output(tree: XML.Tree, hidden: Boolean, structural: Boolean): String = output(List(tree), hidden, structural) /* attributes */ class Attribute(val name: String, value: String) { def xml: XML.Attribute = name -> value def apply(elem: XML.Elem): XML.Elem = elem + xml } def id(s: String): 
Attribute = new Attribute("id", s) def class_(name: String): Attribute = new Attribute("class", name) def width(w: Int): Attribute = new Attribute("width", w.toString) def height(h: Int): Attribute = new Attribute("height", h.toString) def size(w: Int, h: Int)(elem: XML.Elem): XML.Elem = width(w)(height(h)(elem)) /* structured markup operators */ def text(txt: String): XML.Body = if (txt.isEmpty) Nil else List(XML.Text(txt)) val no_text: XML.Tree = XML.Text("") val break: XML.Body = List(XML.elem("br")) + val nl: XML.Body = List(XML.Text("\n")) class Operator(val name: String) { def apply(body: XML.Body): XML.Elem = XML.elem(name, body) def apply(att: Attribute, body: XML.Body): XML.Elem = att(apply(body)) def apply(c: String, body: XML.Body): XML.Elem = apply(class_(c), body) } class Heading(name: String) extends Operator(name) { def apply(txt: String): XML.Elem = super.apply(text(txt)) def apply(att: Attribute, txt: String): XML.Elem = super.apply(att, text(txt)) def apply(c: String, txt: String): XML.Elem = super.apply(c, text(txt)) } val div = new Operator("div") val span = new Operator("span") val pre = new Operator("pre") val par = new Operator("p") val emph = new Operator("em") val bold = new Operator("b") val code = new Operator("code") val item = new Operator("li") val list = new Operator("ul") val enum = new Operator("ol") val descr = new Operator("dl") val dt = new Operator("dt") val dd = new Operator("dd") val title = new Heading("title") val chapter = new Heading("h1") val section = new Heading("h2") val subsection = new Heading("h3") val subsubsection = new Heading("h4") val paragraph = new Heading("h5") val subparagraph = new Heading("h6") def itemize(items: List[XML.Body]): XML.Elem = list(items.map(item(_))) def enumerate(items: List[XML.Body]): XML.Elem = enum(items.map(item(_))) def description(items: List[(XML.Body, XML.Body)]): XML.Elem = descr(items.flatMap({ case (x, y) => List(dt(x), dd(y)) })) - def link(href: String, body: XML.Body = Nil): XML.Elem = + def link(href: String, body: XML.Body): XML.Elem = XML.Elem(Markup("a", List("href" -> href)), if (body.isEmpty) text(href) else body) + def link(path: Path, body: XML.Body): XML.Elem = link(path.implode, body) + def image(src: String, alt: String = ""): XML.Elem = XML.Elem(Markup("img", List("src" -> src) ::: proper_string(alt).map("alt" -> _).toList), Nil) def source(body: XML.Body): XML.Elem = pre("source", body) def source(src: String): XML.Elem = source(text(src)) def style(s: String): XML.Elem = XML.elem("style", text(s)) def style_file(href: String): XML.Elem = XML.Elem(Markup("link", List("rel" -> "stylesheet", "type" -> "text/css", "href" -> href)), Nil) def style_file(path: Path): XML.Elem = style_file(Url.print_file(path.file)) def script(s: String): XML.Elem = XML.elem("script", text(s)) def script_file(href: String): XML.Elem = XML.Elem(Markup("script", List("src" -> href)), Nil) def script_file(path: Path): XML.Elem = script_file(Url.print_file(path.file)) /* messages */ // background val writeln_message: Attribute = class_("writeln_message") val warning_message: Attribute = class_("warning_message") val error_message: Attribute = class_("error_message") // underline val writeln: Attribute = class_("writeln") val warning: Attribute = class_("warning") val error: Attribute = class_("error") /* tooltips */ def tooltip(item: XML.Body, tip: XML.Body): XML.Elem = span(item ::: List(div("tooltip", tip))) def tooltip_errors(item: XML.Body, msgs: List[XML.Body]): XML.Elem = HTML.error(tooltip(item, 
msgs.map(msg => error_message(pre(msg))))) /* GUI elements */ object GUI { private def optional_value(text: String): XML.Attributes = proper_string(text).map(a => "value" -> a).toList private def optional_name(name: String): XML.Attributes = proper_string(name).map(a => "name" -> a).toList private def optional_title(tooltip: String): XML.Attributes = proper_string(tooltip).map(a => "title" -> a).toList private def optional_submit(submit: Boolean): XML.Attributes = if (submit) List("onChange" -> "this.form.submit()") else Nil private def optional_checked(selected: Boolean): XML.Attributes = if (selected) List("checked" -> "") else Nil private def optional_action(action: String): XML.Attributes = proper_string(action).map(a => "action" -> a).toList private def optional_onclick(script: String): XML.Attributes = proper_string(script).map(a => "onclick" -> a).toList private def optional_onchange(script: String): XML.Attributes = proper_string(script).map(a => "onchange" -> a).toList def button(body: XML.Body, name: String = "", tooltip: String = "", submit: Boolean = false, script: String = ""): XML.Elem = XML.Elem( Markup("button", List("type" -> (if (submit) "submit" else "button"), "value" -> "true") ::: optional_name(name) ::: optional_title(tooltip) ::: optional_onclick(script)), body) def checkbox(body: XML.Body, name: String = "", tooltip: String = "", submit: Boolean = false, selected: Boolean = false, script: String = ""): XML.Elem = XML.elem("label", XML.elem( Markup("input", List("type" -> "checkbox", "value" -> "true") ::: optional_name(name) ::: optional_title(tooltip) ::: optional_submit(submit) ::: optional_checked(selected) ::: optional_onchange(script))) :: body) def text_field(columns: Int = 0, text: String = "", name: String = "", tooltip: String = "", submit: Boolean = false, script: String = ""): XML.Elem = XML.elem(Markup("input", List("type" -> "text") ::: (if (columns > 0) List("size" -> columns.toString) else Nil) ::: optional_value(text) ::: optional_name(name) ::: optional_title(tooltip) ::: optional_submit(submit) ::: optional_onchange(script))) def parameter(text: String = "", name: String = ""): XML.Elem = XML.elem( Markup("input", List("type" -> "hidden") ::: optional_value(text) ::: optional_name(name))) def form(body: XML.Body, name: String = "", action: String = "", http_post: Boolean = false) : XML.Elem = XML.Elem( Markup("form", optional_name(name) ::: optional_action(action) ::: (if (http_post) List("method" -> "post") else Nil)), body) } /* GUI layout */ object Wrap_Panel { object Alignment extends Enumeration { val left, right, center = Value } def apply(contents: List[XML.Elem], name: String = "", action: String = "", alignment: Alignment.Value = Alignment.right): XML.Elem = { val body = Library.separate(XML.Text(" "), contents) GUI.form(List(div(body) + ("style" -> ("text-align: " + alignment))), name = name, action = action) } } /* document */ val header: String = XML.header + """ """ val head_meta: XML.Elem = XML.Elem(Markup("meta", List("http-equiv" -> "Content-Type", "content" -> "text/html; charset=utf-8")), Nil) def output_document(head: XML.Body, body: XML.Body, css: String = "isabelle.css", hidden: Boolean = true, structural: Boolean = true): String = { cat_lines( List(header, output( XML.elem("head", head_meta :: (if (css == "") Nil else List(style_file(css))) ::: head), hidden = hidden, structural = structural), output(XML.elem("body", body), hidden = hidden, structural = structural))) } /* fonts */ def fonts_url(): String => String = (for 
(entry <- Isabelle_Fonts.fonts(hidden = true)) yield (entry.path.file_name -> Url.print_file(entry.path.file))).toMap def fonts_dir(prefix: String)(ttf_name: String): String = prefix + "/" + ttf_name def fonts_css(make_url: String => String = fonts_url()): String = { def font_face(entry: Isabelle_Fonts.Entry): String = cat_lines( List( "@font-face {", " font-family: '" + entry.family + "';", " src: url('" + make_url(entry.path.file_name) + "') format('truetype');") ::: (if (entry.is_bold) List(" font-weight: bold;") else Nil) ::: (if (entry.is_italic) List(" font-style: italic;") else Nil) ::: List("}")) ("/* Isabelle fonts */" :: Isabelle_Fonts.fonts(hidden = true).map(font_face)) .mkString("", "\n\n", "\n") } /* document directory */ def isabelle_css: Path = Path.explode("~~/etc/isabelle.css") def write_isabelle_css(dir: Path, make_url: String => String = fonts_dir("fonts")) { File.write(dir + isabelle_css.base, fonts_css(make_url) + "\n\n" + File.read(isabelle_css)) } def init_dir(dir: Path): Unit = write_isabelle_css(Isabelle_System.make_directory(dir)) def write_document(dir: Path, name: String, head: XML.Body, body: XML.Body, css: String = isabelle_css.file_name, hidden: Boolean = true, structural: Boolean = true) { init_dir(dir) File.write(dir + Path.basic(name), output_document(head, body, css = css, hidden = hidden, structural = structural)) } } diff --git a/src/Pure/Thy/present.ML b/src/Pure/Thy/present.ML --- a/src/Pure/Thy/present.ML +++ b/src/Pure/Thy/present.ML @@ -1,168 +1,75 @@ (* Title: Pure/Thy/present.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen + Author: Makarius -Theory presentation: HTML and PDF-LaTeX documents. +Theory presentation: HTML and LaTeX documents. *) signature PRESENT = sig - val init: bool -> Path.T -> string list -> string * string -> bool -> unit - val finish: unit -> unit - val begin_theory: int -> HTML.text -> theory -> theory + val document_html_name: theory -> Path.binding + val document_tex_name: theory -> Path.binding + val html_name: theory -> Path.T + val export_html: theory -> Command_Span.span list -> unit end; structure Present: PRESENT = struct - -(** paths **) - -val html_ext = Path.ext "html"; -val html_path = html_ext o Path.basic; -val index_path = Path.basic "index.html"; -val readme_html_path = Path.basic "README.html"; -val session_graph_path = Path.basic "session_graph.pdf"; -val document_path = Path.ext "pdf" o Path.basic; - -fun show_path path = Path.implode (Path.expand (File.full_path Path.current path)); - - - -(** global browser info state **) - -(* type browser_info *) - -type browser_info = - {html_theories: string Symtab.table, - html_index: (int * string) list}; - -fun make_browser_info (html_theories, html_index) : browser_info = - {html_theories = html_theories, html_index = html_index}; +(** artefact names **) -val empty_browser_info = make_browser_info (Symtab.empty, []); - -fun map_browser_info f {html_theories, html_index} = - make_browser_info (f (html_theories, html_index)); - - -(* state *) - -val browser_info = Synchronized.var "browser_info" empty_browser_info; -fun change_browser_info f = Synchronized.change browser_info (map_browser_info f); - -fun update_html name html = change_browser_info (apfst (Symtab.update (name, html))); +fun document_name ext thy = + Path.explode_binding0 ("document/" ^ Context.theory_name thy ^ "." 
^ ext); -fun add_html_index txt = change_browser_info (apsnd (cons txt)); - - - -(** global session state **) +val document_html_name = document_name "html"; +val document_tex_name = document_name "tex"; -(* session_info *) - -type session_info = - {name: string, chapter: string, info_path: Path.T, info: bool, - verbose: bool, readme: Path.T option}; - -fun make_session_info (name, chapter, info_path, info, verbose, readme) = - {name = name, chapter = chapter, info_path = info_path, info = info, - verbose = verbose, readme = readme}: session_info; +fun html_name thy = Path.basic (Context.theory_name thy ^ ".html"); -(* state *) - -val session_info = Unsynchronized.ref (NONE: session_info option); - -fun with_session_info x f = (case ! session_info of NONE => x | SOME info => f info); - -val theory_qualifier = Resources.theory_qualifier o Context.theory_long_name; - -fun is_session_theory thy = - (case ! session_info of - NONE => false - | SOME {name, ...} => name = theory_qualifier thy); - - -(** document preparation **) - -(* init session *) - -fun init info info_path documents (chapter, name) verbose = - let - val readme = if File.exists readme_html_path then SOME readme_html_path else NONE; - val symbols = Resources.html_symbols (); - val docs = - (case readme of NONE => [] | SOME p => [(Url.File p, "README")]) @ - map (fn name => (Url.File (document_path name), name)) documents; - in - session_info := SOME (make_session_info (name, chapter, info_path, info, verbose, readme)); - Synchronized.change browser_info (K empty_browser_info); - add_html_index (0, HTML.begin_session_index symbols name (Url.File session_graph_path) docs) - end; - +(* theory as HTML *) -(* finish session -- output all generated text *) - -fun sorted_index index = map snd (sort (int_ord o apply2 fst) (rev index)); -fun index_buffer index = Buffer.add (implode (sorted_index index)) Buffer.empty; - -fun finish () = with_session_info () (fn {name, chapter, info, info_path, verbose, readme, ...} => - let - val {html_theories, html_index} = Synchronized.value browser_info; - - val session_prefix = info_path + Path.basic chapter + Path.basic name; - - fun finish_html (a, html) = File.write (session_prefix + html_path a) html; +local - val _ = - if info then - (Isabelle_System.make_directory session_prefix; - File.write_buffer (session_prefix + index_path) - (index_buffer html_index |> Buffer.add HTML.end_document); - (case readme of NONE => () | SOME path => Isabelle_System.copy_file path session_prefix); - Symtab.fold (K o finish_html) html_theories (); - if verbose - then Output.physical_stderr ("Browser info at " ^ show_path session_prefix ^ "\n") - else ()) - else (); - in - Synchronized.change browser_info (K empty_browser_info); - session_info := NONE - end); - - -(* theory elements *) - -fun theory_link (curr_chapter, curr_session) thy = +fun get_session_chapter thy = let val session = Resources.theory_qualifier (Context.theory_long_name thy); val chapter = Resources.session_chapter session; - val link = html_path (Context.theory_name thy); + in (session, chapter) end; + +fun theory_link thy thy' = + let + val ((session, chapter), (session', chapter')) = apply2 get_session_chapter (thy, thy'); + val link = html_name thy'; in - if curr_session = session then SOME link - else if curr_chapter = chapter then - SOME (Path.parent + Path.basic session + link) + if session = session' then SOME link + else if chapter = chapter' then SOME (Path.parent + Path.basic session + link) else if chapter = Context.PureN then NONE 
else SOME (Path.parent + Path.parent + Path.basic chapter + Path.basic session + link) end; -fun begin_theory update_time html thy = - with_session_info thy (fn {name = session_name, chapter, ...} => - let - val name = Context.theory_name thy; +fun theory_document symbols A Bs body = + HTML.begin_document symbols ("Theory " ^ A) ^ "\n" ^ + HTML.command symbols "theory" ^ " " ^ HTML.name symbols A ^ "
\n" ^ + HTML.keyword symbols "imports" ^ " " ^ + space_implode " " (map (uncurry HTML.href_opt_path o apsnd (HTML.name symbols)) Bs) ^ + "
\n\n
\n
\n
" ::
+  body @ ["
\n", HTML.end_document]; - val parent_specs = - Theory.parents_of thy |> map (fn parent => - (Option.map Url.File (theory_link (chapter, session_name) parent), - (Context.theory_name parent))); +in - val symbols = Resources.html_symbols (); - val _ = update_html name (HTML.theory symbols name parent_specs html); +fun export_html thy spans = + let + val name = Context.theory_name thy; + val parents = + Theory.parents_of thy |> map (fn thy' => + (Option.map Url.File (theory_link thy thy'), Context.theory_name thy')); - val _ = - if is_session_theory thy then - add_html_index (update_time, HTML.theory_entry symbols (Url.File (html_path name), name)) - else (); - in thy end); + val symbols = Resources.html_symbols (); + val keywords = Thy_Header.get_keywords thy; + val body = map (HTML.present_span symbols keywords) spans; + val html = XML.blob (theory_document symbols name parents body); + in Export.export thy (document_html_name thy) html end end; + +end; diff --git a/src/Pure/Thy/present.scala b/src/Pure/Thy/present.scala --- a/src/Pure/Thy/present.scala +++ b/src/Pure/Thy/present.scala @@ -1,401 +1,445 @@ /* Title: Pure/Thy/present.scala Author: Makarius -Theory presentation: HTML. +Theory presentation: HTML and LaTeX documents. */ package isabelle import java.io.{File => JFile} import scala.collection.immutable.SortedMap object Present { /* maintain chapter index -- NOT thread-safe */ private val sessions_path = Path.basic(".sessions") private def read_sessions(dir: Path): List[(String, String)] = { val path = dir + sessions_path if (path.is_file) { import XML.Decode._ list(pair(string, string))(Symbol.decode_yxml(File.read(path))) } else Nil } private def write_sessions(dir: Path, sessions: List[(String, String)]) { import XML.Encode._ File.write(dir + sessions_path, YXML.string_of_body(list(pair(string, string))(sessions))) } def update_chapter_index(browser_info: Path, chapter: String, new_sessions: List[(String, String)]) { val dir = Isabelle_System.make_directory(browser_info + Path.basic(chapter)) val sessions0 = try { read_sessions(dir) } catch { case _: XML.Error => Nil } val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList write_sessions(dir, sessions) val title = "Isabelle/" + chapter + " sessions" HTML.write_document(dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), HTML.chapter(title) :: (if (sessions.isEmpty) Nil else List(HTML.div("sessions", List(HTML.description( sessions.map({ case (name, description) => val descr = Symbol.trim_blank_lines(description) (List(HTML.link(name + "/index.html", HTML.text(name))), if (descr == "") Nil else HTML.break ::: List(HTML.pre(HTML.text(descr)))) }))))))) } def make_global_index(browser_info: Path) { if (!(browser_info + Path.explode("index.html")).is_file) { Isabelle_System.make_directory(browser_info) File.copy(Path.explode("~~/lib/logo/isabelle.gif"), browser_info + Path.explode("isabelle.gif")) File.write(browser_info + Path.explode("index.html"), File.read(Path.explode("~~/lib/html/library_index_header.template")) + File.read(Path.explode("~~/lib/html/library_index_content.template")) + File.read(Path.explode("~~/lib/html/library_index_footer.template"))) } } - /* finish session */ + /* present session */ - def finish( - browser_info: Path, - graph_pdf: Bytes, - info: Sessions.Info, - name: String) + val session_graph_path = Path.explode("session_graph.pdf") + val readme_path = Path.basic("README.html") + + def html_name(name: Document.Node.Name): String = 
name.theory_base_name + ".html" + def document_html_name(name: Document.Node.Name): String = "document/" + html_name(name) + + def theory_link(name: Document.Node.Name): XML.Tree = + HTML.link(html_name(name), HTML.text(name.theory_base_name)) + + def session_html(session: String, deps: Sessions.Deps, store: Sessions.Store): Path = { - val session_prefix = browser_info + Path.basic(info.chapter) + Path.basic(name) - val session_fonts = session_prefix + Path.explode("fonts") - - if (info.options.bool("browser_info")) { - Isabelle_System.make_directory(session_fonts) + val info = deps.sessions_structure(session) + val options = info.options + val base = deps(session) - Bytes.write(session_prefix + session_graph_path, graph_pdf) + val session_dir = store.browser_info + info.chapter_session + val session_fonts = Isabelle_System.make_directory(session_dir + Path.explode("fonts")) + for (entry <- Isabelle_Fonts.fonts(hidden = true)) + File.copy(entry.path, session_fonts) - HTML.write_isabelle_css(session_prefix) + Bytes.write(session_dir + session_graph_path, + graphview.Graph_File.make_pdf(options, base.session_graph_display)) - for (entry <- Isabelle_Fonts.fonts(hidden = true)) - File.copy(entry.path, session_fonts) + val links = + { + val deps_link = + HTML.link(session_graph_path, HTML.text("theory dependencies")) + + val readme_links = + if ((info.dir + readme_path).is_file) { + File.copy(info.dir + readme_path, session_dir + readme_path) + List(HTML.link(readme_path, HTML.text("README"))) + } + else Nil + + val document_links = + for ((name, _) <- info.documents) + yield HTML.link(Path.basic(name).pdf, HTML.text(name)) + + Library.separate(HTML.break ::: HTML.nl, + (deps_link :: readme_links ::: document_links). + map(link => HTML.text("View ") ::: List(link))).flatten } + + val theories = + using(store.open_database(session))(db => + for { + name <- base.session_theories + entry <- Export.read_entry(db, session, name.theory, document_html_name(name)) + } yield name -> entry.uncompressed(cache = store.xz_cache)) + + val theories_body = + HTML.itemize(for ((name, _) <- theories) yield List(theory_link(name))) + + val title = "Session " + session + HTML.write_document(session_dir, "index.html", + List(HTML.title(title + " (" + Distribution.version + ")")), + HTML.div("head", List(HTML.chapter(title), HTML.par(links))) :: + (if (theories.isEmpty) Nil + else List(HTML.div("theories", List(HTML.section("Theories"), theories_body))))) + + for ((name, html) <- theories) Bytes.write(session_dir + Path.basic(html_name(name)), html) + + session_dir } /** preview **/ sealed case class Preview(title: String, content: String) def preview( resources: Resources, snapshot: Document.Snapshot, plain_text: Boolean = false, fonts_url: String => String = HTML.fonts_url()): Preview = { require(!snapshot.is_outdated) def output_document(title: String, body: XML.Body): String = HTML.output_document( List( HTML.style(HTML.fonts_css(fonts_url) + "\n\n" + File.read(HTML.isabelle_css)), HTML.title(title)), List(HTML.source(body)), css = "", structural = false) val name = snapshot.node_name if (plain_text) { val title = "File " + quote(name.path.file_name) val content = output_document(title, HTML.text(snapshot.node.source)) Preview(title, content) } else { resources.make_preview(snapshot) match { case Some(preview) => preview case None => val title = if (name.is_theory) "Theory " + quote(name.theory_base_name) else "File " + quote(name.path.file_name) val content = output_document(title, pide_document(snapshot)) 
Preview(title, content) } } } /* PIDE document */ private val document_elements = Rendering.foreground_elements ++ Rendering.text_color_elements ++ Rendering.markdown_elements + Markup.NUMERAL + Markup.COMMENT + Markup.LANGUAGE private val div_elements = Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name, HTML.descr.name) private def html_div(html: XML.Body): Boolean = html exists { case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body) case XML.Text(_) => false } private def html_class(c: String, html: XML.Body): XML.Tree = if (html.forall(_ == HTML.no_text)) HTML.no_text else if (html_div(html)) HTML.div(c, html) else HTML.span(c, html) private def make_html(xml: XML.Body): XML.Body = xml map { case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(Markup.Language.DOCUMENT)), body) => html_class(Markup.Language.DOCUMENT, make_html(body)) case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => HTML.par(make_html(body)) case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => HTML.item(make_html(body)) case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), _) => HTML.no_text case XML.Elem(Markup.Markdown_List(kind), body) => if (kind == Markup.ENUMERATE) HTML.enum(make_html(body)) else HTML.list(make_html(body)) case XML.Elem(markup, body) => val name = markup.name val html = markup.properties match { case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD => List(html_class(kind, make_html(body))) case _ => make_html(body) } Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match { case Some(c) => html_class(c.toString, html) case None => html_class(name, html) } case XML.Text(text) => XML.Text(Symbol.decode(text)) } def pide_document(snapshot: Document.Snapshot): XML.Body = make_html(snapshot.markup_to_XML(Text.Range.full, document_elements)) /** build documents **/ val session_tex_path = Path.explode("session.tex") - val session_graph_path = Path.explode("session_graph.pdf") def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" def document_tex_name(name: Document.Node.Name): String = "document/" + tex_name(name) def isabelletags(tags: List[String]): String = Library.terminate_lines( tags.map(tag => tag.toList match { case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}" case '-' :: cs => "\\isadroptag{" + cs.mkString + "}" case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}" case cs => "\\isakeeptag{" + cs.mkString + "}" })) def build_documents( session: String, deps: Sessions.Deps, store: Sessions.Store, progress: Progress = new Progress, verbose: Boolean = false, verbose_latex: Boolean = false): List[(String, Bytes)] = { /* session info */ val info = deps.sessions_structure(session) val options = info.options val base = deps(session) val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display) def find_tex(name: Document.Node.Name): Option[Bytes] = deps.sessions_structure.build_requirements(List(session)).reverse.view .map(session_name => using(store.open_database(session_name))(db => Export.read_entry(db, session_name, name.theory, document_tex_name(name)). 
map(_.uncompressed(cache = store.xz_cache)))) .collectFirst({ case Some(x) => x }) /* prepare document directory */ lazy val tex_files = for (name <- base.session_theories ::: base.document_theories) yield Path.basic(tex_name(name)) -> find_tex(name).getOrElse(Bytes.empty) def prepare_dir(dir: Path, doc_name: String, doc_tags: List[String]): (Path, String) = { val doc_dir = dir + Path.basic(doc_name) Isabelle_System.make_directory(doc_dir) Isabelle_System.bash("isabelle latex -o sty", cwd = doc_dir.file).check File.write(doc_dir + Path.explode("isabelletags.sty"), isabelletags(doc_tags)) for ((base_dir, src) <- info.document_files) File.copy_base(info.dir + base_dir, src, doc_dir) Bytes.write(doc_dir + session_graph_path, graph_pdf) File.write(doc_dir + session_tex_path, Library.terminate_lines( base.session_theories.map(name => "\\input{" + tex_name(name) + "}"))) for ((path, tex) <- tex_files) Bytes.write(doc_dir + path, tex) val root1 = "root_" + doc_name val root = if ((doc_dir + Path.explode(root1).tex).is_file) root1 else "root" (doc_dir, root) } /* produce documents */ val doc_output = info.document_output val docs = for ((doc_name, doc_tags) <- info.documents) yield { Isabelle_System.with_tmp_dir("document")(tmp_dir => { val (doc_dir, root) = prepare_dir(tmp_dir, doc_name, doc_tags) doc_output.foreach(prepare_dir(_, doc_name, doc_tags)) // bash scripts def root_bash(ext: String): String = Bash.string(root + "." + ext) def latex_bash(fmt: String = "pdf", ext: String = "tex"): String = "isabelle latex -o " + Bash.string(fmt) + " " + Bash.string(root + "." + ext) def bash(items: String*): Process_Result = progress.bash(items.mkString(" && "), cwd = doc_dir.file, echo = verbose_latex, watchdog = Time.seconds(0.5)) // prepare document val result = if ((doc_dir + Path.explode("build")).is_file) { bash("./build pdf " + Bash.string(doc_name)) } else { bash( latex_bash(), "{ [ ! -f " + root_bash("bib") + " ] || " + latex_bash("bbl") + "; }", "{ [ ! -f " + root_bash("idx") + " ] || " + latex_bash("idx") + "; }", latex_bash(), latex_bash()) } // result val root_pdf = Path.basic(root).pdf val result_path = doc_dir + root_pdf if (!result.ok) { cat_error( Library.trim_line(result.err), cat_lines(Latex.latex_errors(doc_dir, root) ::: Bibtex.bibtex_errors(doc_dir, root)), "Failed to build document " + quote(doc_name)) } else if (!result_path.is_file) { error("Bad document result: expected to find " + root_pdf) } else doc_name -> Bytes.read(result_path) }) } def output(dir: Path) { Isabelle_System.make_directory(dir) for ((name, pdf) <- docs) { val path = dir + Path.basic(name).pdf Bytes.write(path, pdf) progress.echo_document(path) } } if (info.options.bool("browser_info") || doc_output.isEmpty) { - output(store.browser_info + Path.basic(info.chapter) + Path.basic(session)) + output(store.browser_info + info.chapter_session) } doc_output.foreach(output) docs } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", args => { var verbose_latex = false var dirs: List[Path] = Nil var no_build = false var options = Options.init() var verbose_build = false val getopts = Getopts( """ Usage: isabelle document [OPTIONS] SESSION Options are: -O set option "document_output", relative to current directory -V verbose latex -d DIR include session directory -n no build of session -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. 
""", "O:" -> (arg => options += ("document_output=" + Path.explode(arg).absolute.implode)), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { if (!no_build) { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) } val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) build_documents(session, deps, store, progress = progress, verbose = true, verbose_latex = verbose_latex) } }) } diff --git a/src/Pure/Thy/sessions.scala b/src/Pure/Thy/sessions.scala --- a/src/Pure/Thy/sessions.scala +++ b/src/Pure/Thy/sessions.scala @@ -1,1395 +1,1397 @@ /* Title: Pure/Thy/sessions.scala Author: Makarius Cumulative session information. */ package isabelle import java.io.{File => JFile} import java.nio.ByteBuffer import java.nio.channels.FileChannel import java.nio.file.StandardOpenOption import scala.collection.{SortedSet, SortedMap} import scala.collection.mutable object Sessions { /* session and theory names */ val root_name: String = "ROOT" val theory_name: String = "Pure.Sessions" val UNSORTED = "Unsorted" val DRAFT = "Draft" def is_pure(name: String): Boolean = name == Thy_Header.PURE def exclude_session(name: String): Boolean = name == "" || name == DRAFT def exclude_theory(name: String): Boolean = name == root_name || name == "README" || name == "index" || name == "bib" /* base info and source dependencies */ object Base { def bootstrap( session_directories: Map[JFile, String], global_theories: Map[String, String]): Base = Base( session_directories = session_directories, global_theories = global_theories, overall_syntax = Thy_Header.bootstrap_syntax) } sealed case class Base( pos: Position.T = Position.none, doc_names: List[String] = Nil, session_directories: Map[JFile, String] = Map.empty, global_theories: Map[String, String] = Map.empty, session_theories: List[Document.Node.Name] = Nil, document_theories: List[Document.Node.Name] = Nil, loaded_theories: Graph[String, Outer_Syntax] = Graph.string, used_theories: List[(Document.Node.Name, Options)] = Nil, known_theories: Map[String, Document.Node.Entry] = Map.empty, known_loaded_files: Map[String, List[Path]] = Map.empty, overall_syntax: Outer_Syntax = Outer_Syntax.empty, imported_sources: List[(Path, SHA1.Digest)] = Nil, sources: List[(Path, SHA1.Digest)] = Nil, session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph, errors: List[String] = Nil) { override def toString: String = "Sessions.Base(loaded_theories = " + loaded_theories.size + ", used_theories = " + used_theories.length + ")" def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory) def loaded_theory(name: String): Boolean = loaded_theories.defined(name) def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory) def loaded_theory_syntax(name: String): Option[Outer_Syntax] = if (loaded_theory(name)) 
Some(loaded_theories.get_node(name)) else None def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] = loaded_theory_syntax(name.theory) def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax = nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax } sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base]) { override def toString: String = "Sessions.Deps(" + sessions_structure + ")" def is_empty: Boolean = session_bases.isEmpty def apply(name: String): Base = session_bases(name) def get(name: String): Option[Base] = session_bases.get(name) def imported_sources(name: String): List[SHA1.Digest] = session_bases(name).imported_sources.map(_._2) def sources(name: String): List[SHA1.Digest] = session_bases(name).sources.map(_._2) def errors: List[String] = (for { (name, base) <- session_bases.iterator if base.errors.nonEmpty } yield cat_lines(base.errors) + "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos) ).toList def check_errors: Deps = errors match { case Nil => this case errs => error(cat_lines(errs)) } } def deps(sessions_structure: Structure, progress: Progress = new Progress, inlined_files: Boolean = false, verbose: Boolean = false, list_files: Boolean = false, check_keywords: Set[String] = Set.empty): Deps = { var cache_sources = Map.empty[JFile, SHA1.Digest] def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = { for { path <- paths file = path.file if cache_sources.isDefinedAt(file) || file.isFile } yield { cache_sources.get(file) match { case Some(digest) => (path, digest) case None => val digest = SHA1.digest(file) cache_sources = cache_sources + (file -> digest) (path, digest) } } } val doc_names = Doc.doc_names() val bootstrap = Sessions.Base.bootstrap( sessions_structure.session_directories, sessions_structure.global_theories) val session_bases = (Map("" -> bootstrap) /: sessions_structure.imports_topological_order)({ case (session_bases, session_name) => progress.expose_interrupt() val info = sessions_structure(session_name) try { val deps_base = info.deps_base(session_bases) val resources = new Resources(sessions_structure, deps_base) if (verbose || list_files) { val groups = if (info.groups.isEmpty) "" else info.groups.mkString(" (", " ", ")") progress.echo("Session " + info.chapter + "/" + info.name + groups) } val dependencies = resources.session_dependencies(info) val overall_syntax = dependencies.overall_syntax val session_theories = dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name) val theory_files = dependencies.theories.map(_.path) val (loaded_files, loaded_files_errors) = try { if (inlined_files) (dependencies.loaded_files(Sessions.is_pure(info.name)), Nil) else (Nil, Nil) } catch { case ERROR(msg) => (Nil, List(msg)) } val session_files = (theory_files ::: loaded_files.flatMap(_._2) ::: info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand) val imported_files = if (inlined_files) dependencies.imported_files else Nil if (list_files) progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _))) if (check_keywords.nonEmpty) { Check_Keywords.check_keywords( progress, overall_syntax.keywords, check_keywords, theory_files) } val session_graph_display: Graph_Display.Graph = { def session_node(name: String): Graph_Display.Node = Graph_Display.Node("[" + name + "]", "session." 
+ name) def node(name: Document.Node.Name): Graph_Display.Node = { val qualifier = deps_base.theory_qualifier(name) if (qualifier == info.name) Graph_Display.Node(name.theory_base_name, "theory." + name.theory) else session_node(qualifier) } val required_sessions = dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory)) .map(theory => deps_base.theory_qualifier(theory)) .filter(name => name != info.name && sessions_structure.defined(name)) val required_subgraph = sessions_structure.imports_graph .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet) .transitive_closure .restrict(required_sessions.toSet) .transitive_reduction_acyclic val graph0 = (Graph_Display.empty_graph /: required_subgraph.topological_order)( { case (g, session) => val a = session_node(session) val bs = required_subgraph.imm_preds(session).toList.map(session_node) ((g /: (a :: bs))(_.default_node(_, Nil)) /: bs)(_.add_edge(_, a)) }) (graph0 /: dependencies.entries)( { case (g, entry) => val a = node(entry.name) val bs = entry.header.imports.map(node).filterNot(_ == a) ((g /: (a :: bs))(_.default_node(_, Nil)) /: bs)(_.add_edge(_, a)) }) } val known_theories = (deps_base.known_theories /: dependencies.entries.iterator.map(entry => entry.name.theory -> entry))(_ + _) val known_loaded_files = deps_base.known_loaded_files ++ loaded_files val import_errors = { val known_sessions = sessions_structure.imports_requirements(List(session_name)).toSet for { name <- dependencies.theories qualifier = deps_base.theory_qualifier(name) if !known_sessions(qualifier) } yield "Bad import of theory " + quote(name.toString) + ": need to include sessions " + quote(qualifier) + " in ROOT" } val document_errors = info.document_theories.flatMap( { case (thy, pos) => val parent_sessions = sessions_structure.build_requirements(List(session_name)) def err(msg: String): Option[String] = Some(msg + " " + quote(thy) + Position.here(pos)) known_theories.get(thy).map(_.name) match { case None => err("Unknown document theory") case Some(name) => val qualifier = deps_base.theory_qualifier(name) if (session_theories.contains(name)) { err("Redundant document theory from this session:") } else if (parent_sessions.contains(qualifier)) None else if (dependencies.theories.contains(name)) None else err("Document theory from other session not imported properly:") } }) val document_theories = info.document_theories.map({ case (thy, _) => known_theories(thy).name }) val dir_errors = { val ok = info.dirs.map(_.canonical_file).toSet val bad = (for { name <- session_theories.iterator path = name.master_dir_path if !ok(path.canonical_file) path1 = File.relative_path(info.dir.canonical, path).getOrElse(path) } yield (path1, name)).toList val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted val errs1 = for { (path1, name) <- bad } yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString) val errs2 = if (bad_dirs.isEmpty) Nil else List("Implicit use of session directories: " + commas(bad_dirs)) val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p val errs4 = (for { name <- session_theories.iterator name1 <- resources.find_theory_node(name.theory) if name.node != name1.node } yield "Incoherent theory file import:\n " + name.path + " vs. 
\n " + name1.path) .toList errs1 ::: errs2 ::: errs3 ::: errs4 } val sources_errors = for (p <- session_files if !p.is_file) yield "No such file: " + p val path_errors = try { Path.check_case_insensitive(session_files ::: imported_files); Nil } catch { case ERROR(msg) => List(msg) } val bibtex_errors = try { info.bibtex_entries; Nil } catch { case ERROR(msg) => List(msg) } val base = Base( pos = info.pos, doc_names = doc_names, session_directories = sessions_structure.session_directories, global_theories = sessions_structure.global_theories, session_theories = session_theories, document_theories = document_theories, loaded_theories = dependencies.loaded_theories, used_theories = dependencies.theories_adjunct, known_theories = known_theories, known_loaded_files = known_loaded_files, overall_syntax = overall_syntax, imported_sources = check_sources(imported_files), sources = check_sources(session_files), session_graph_display = session_graph_display, errors = dependencies.errors ::: loaded_files_errors ::: import_errors ::: document_errors ::: dir_errors ::: sources_errors ::: path_errors ::: bibtex_errors) session_bases + (info.name -> base) } catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred in session " + quote(info.name) + Position.here(info.pos)) } }) Deps(sessions_structure, session_bases) } /* base info */ sealed case class Base_Info( session: String, sessions_structure: Structure, errors: List[String], base: Base, infos: List[Info]) { def check_base: Base = if (errors.isEmpty) base else error(cat_lines(errors)) } def base_info(options: Options, session: String, progress: Progress = new Progress, dirs: List[Path] = Nil, include_sessions: List[String] = Nil, session_ancestor: Option[String] = None, session_requirements: Boolean = false): Base_Info = { val full_sessions = load_structure(options, dirs = dirs) val selected_sessions = full_sessions.selection(Selection(sessions = session :: session_ancestor.toList)) val info = selected_sessions(session) val ancestor = session_ancestor orElse info.parent val (session1, infos1) = if (session_requirements && ancestor.isDefined) { val deps = Sessions.deps(selected_sessions, progress = progress) val base = deps(session) val ancestor_loaded = deps.get(ancestor.get) match { case Some(ancestor_base) if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) => ancestor_base.loaded_theories.defined _ case _ => error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session)) } val required_theories = for { thy <- base.loaded_theories.keys if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session } yield thy if (required_theories.isEmpty) (ancestor.get, Nil) else { val other_name = info.name + "_requirements(" + ancestor.get + ")" Isabelle_System.isabelle_tmp_prefix() (other_name, List( make_info(info.options, dir_selected = false, dir = Path.explode("$ISABELLE_TMP_PREFIX"), chapter = info.chapter, Session_Entry( pos = info.pos, name = other_name, groups = info.groups, path = ".", parent = ancestor, description = "Required theory imports from other sessions", options = Nil, imports = info.deps, directories = Nil, theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))), document_theories = Nil, document_files = Nil, export_files = Nil)))) } } else (session, Nil) val full_sessions1 = if (infos1.isEmpty) full_sessions else load_structure(options, dirs = dirs, infos = infos1) val selected_sessions1 = full_sessions1.selection(Selection(sessions = session1 :: 
session :: include_sessions)) val deps1 = Sessions.deps(selected_sessions1, progress = progress) Base_Info(session1, full_sessions1, deps1.errors, deps1(session1), infos1) } /* cumulative session info */ sealed case class Info( name: String, chapter: String, dir_selected: Boolean, pos: Position.T, groups: List[String], dir: Path, parent: Option[String], description: String, directories: List[Path], options: Options, imports: List[String], theories: List[(Options, List[(String, Position.T)])], global_theories: List[String], document_theories: List[(String, Position.T)], document_files: List[(Path, Path)], export_files: List[(Path, Int, List[String])], meta_digest: SHA1.Digest) { + def chapter_session: Path = Path.basic(chapter) + Path.basic(name) + def deps: List[String] = parent.toList ::: imports def deps_base(session_bases: String => Base): Base = { val parent_base = session_bases(parent.getOrElse("")) val imports_bases = imports.map(session_bases) parent_base.copy( known_theories = (parent_base.known_theories /: (for { base <- imports_bases.iterator (_, entry) <- base.known_theories.iterator } yield (entry.name.theory -> entry)))(_ + _), known_loaded_files = (parent_base.known_loaded_files /: imports_bases.iterator.map(_.known_loaded_files))(_ ++ _)) } def dirs: List[Path] = dir :: directories def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale")) def document_enabled: Boolean = options.string("document") match { case "" | "false" => false case "pdf" => true case doc => error("Bad document specification " + quote(doc)) } def documents: List[(String, List[String])] = { val variants = for (s <- Library.space_explode(':', options.string("document_variants"))) yield { Library.space_explode('=', s) match { case List(name) => (name, Nil) case List(name, tags) => (name, Library.space_explode(',', tags)) case _ => error("Malformed document_variants: " + quote(s)) } } val dups = Library.duplicates(variants.map(_._1)) if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups)) if (!document_enabled || document_files.isEmpty) Nil else variants } def document_output: Option[Path] = options.string("document_output") match { case "" => None case s => Some(dir + Path.explode(s)) } lazy val bibtex_entries: List[Text.Info[String]] = (for { (document_dir, file) <- document_files.iterator if Bibtex.is_bibtex(file.file_name) info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator } yield info).toList def record_proofs: Boolean = options.int("record_proofs") >= 2 def is_afp: Boolean = chapter == AFP.chapter def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains) } def make_info(options: Options, dir_selected: Boolean, dir: Path, chapter: String, entry: Session_Entry): Info = { try { val name = entry.name if (exclude_session(name)) error("Bad session name") if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session") if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session") val session_path = dir + Path.explode(entry.path) val directories = entry.directories.map(dir => session_path + Path.explode(dir)) val session_options = options ++ entry.options val theories = entry.theories.map({ case (opts, thys) => (session_options ++ opts, thys.map({ case ((thy, pos), _) => if (exclude_theory(thy)) error("Bad theory name " + quote(thy) + Position.here(pos)) else (thy, pos) })) }) val global_theories = for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global } yield { val 
thy_name = Path.explode(thy).file_name if (Long_Name.is_qualified(thy_name)) error("Bad qualified name for global theory " + quote(thy_name) + Position.here(pos)) else thy_name } val conditions = theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted. map(x => (x, Isabelle_System.getenv(x) != "")) val document_files = entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) }) val export_files = entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) }) val meta_digest = SHA1.digest( (name, chapter, entry.parent, entry.directories, entry.options, entry.imports, entry.theories_no_position, conditions, entry.document_theories, entry.document_files) .toString) Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path, entry.parent, entry.description, directories, session_options, entry.imports, theories, global_theories, entry.document_theories, document_files, export_files, meta_digest) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session entry " + quote(entry.name) + Position.here(entry.pos)) } } object Selection { val empty: Selection = Selection() val all: Selection = Selection(all_sessions = true) def session(session: String): Selection = Selection(sessions = List(session)) } sealed case class Selection( requirements: Boolean = false, all_sessions: Boolean = false, base_sessions: List[String] = Nil, exclude_session_groups: List[String] = Nil, exclude_sessions: List[String] = Nil, session_groups: List[String] = Nil, sessions: List[String] = Nil) { def ++ (other: Selection): Selection = Selection( requirements = requirements || other.requirements, all_sessions = all_sessions || other.all_sessions, base_sessions = Library.merge(base_sessions, other.base_sessions), exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups), exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions), session_groups = Library.merge(session_groups, other.session_groups), sessions = Library.merge(sessions, other.sessions)) } def make(infos: List[Info]): Structure = { def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Traversable[String]) : Graph[String, Info] = { def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = { if (!g.defined(parent)) error("Bad " + kind + " session " + quote(parent) + " for " + quote(name) + Position.here(pos)) try { g.add_edge_acyclic(parent, name) } catch { case exn: Graph.Cycles[_] => error(cat_lines(exn.cycles.map(cycle => "Cyclic session dependency of " + cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos)) } } (graph /: graph.iterator) { case (g, (name, (info, _))) => (g /: edges(info))(add_edge(info.pos, name, _, _)) } } val info_graph = (Graph.string[Info] /: infos) { case (graph, info) => if (graph.defined(info.name)) error("Duplicate session " + quote(info.name) + Position.here(info.pos) + Position.here(graph.get_node(info.name).pos)) else graph.new_node(info.name, info) } val build_graph = add_edges(info_graph, "parent", _.parent) val imports_graph = add_edges(build_graph, "imports", _.imports) val session_positions: List[(String, Position.T)] = (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList val session_directories: Map[JFile, String] = (Map.empty[JFile, String] /: (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) dir <- info.dirs.iterator } yield 
(info, dir)))({ case (dirs, (info, dir)) => val session = info.name val canonical_dir = dir.canonical_file dirs.get(canonical_dir) match { case Some(session1) => val info1 = info_graph.get_node(session1) error("Duplicate use of directory " + dir + "\n for session " + quote(session1) + Position.here(info1.pos) + "\n vs. session " + quote(session) + Position.here(info.pos)) case None => dirs + (canonical_dir -> session) } }) val global_theories: Map[String, String] = (Thy_Header.bootstrap_global_theories.toMap /: (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) thy <- info.global_theories.iterator } yield (info, thy)))({ case (global, (info, thy)) => val qualifier = info.name global.get(thy) match { case Some(qualifier1) if qualifier != qualifier1 => error("Duplicate global theory " + quote(thy) + Position.here(info.pos)) case _ => global + (thy -> qualifier) } }) new Structure( session_positions, session_directories, global_theories, build_graph, imports_graph) } final class Structure private[Sessions]( val session_positions: List[(String, Position.T)], val session_directories: Map[JFile, String], val global_theories: Map[String, String], val build_graph: Graph[String, Info], val imports_graph: Graph[String, Info]) { sessions_structure => def dest_session_directories: List[(String, String)] = for ((file, session) <- session_directories.toList) yield (File.standard_path(file), session) lazy val chapters: SortedMap[String, List[Info]] = (SortedMap.empty[String, List[Info]] /: build_graph.iterator)( { case (chs, (_, (info, _))) => chs + (info.chapter -> (info :: chs.getOrElse(info.chapter, Nil))) }) def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph) def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph) def defined(name: String): Boolean = imports_graph.defined(name) def apply(name: String): Info = imports_graph.get_node(name) def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def check_sessions(names: List[String]) { val bad_sessions = SortedSet(names.filterNot(defined): _*).toList if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions)) } def check_sessions(sel: Selection): Unit = check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions) private def selected(graph: Graph[String, Info], sel: Selection): List[String] = { check_sessions(sel) val select_group = sel.session_groups.toSet val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions) val selected0 = if (sel.all_sessions) graph.keys else { (for { (name, (info, _)) <- graph.iterator if info.dir_selected || select_session(name) || graph.get_node(name).groups.exists(select_group) } yield name).toList } if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList else selected0 } def selection(sel: Selection): Structure = { check_sessions(sel) val excluded = { val exclude_group = sel.exclude_session_groups.toSet val exclude_group_sessions = (for { (name, (info, _)) <- imports_graph.iterator if imports_graph.get_node(name).groups.exists(exclude_group) } yield name).toList imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet } def restrict(graph: Graph[String, Info]): Graph[String, Info] = { val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded) 
graph.restrict(graph.all_preds(sessions).toSet) } new Structure( session_positions, session_directories, global_theories, restrict(build_graph), restrict(imports_graph)) } def selection(session: String): Structure = selection(Selection.session(session)) def selection_deps( selection: Selection, progress: Progress = new Progress, loading_sessions: Boolean = false, inlined_files: Boolean = false, verbose: Boolean = false): Deps = { val deps = Sessions.deps(sessions_structure.selection(selection), progress = progress, inlined_files = inlined_files, verbose = verbose) if (loading_sessions) { val selection_size = deps.sessions_structure.build_graph.size if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...") } deps } def build_selection(sel: Selection): List[String] = selected(build_graph, sel) def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss) def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss) def build_topological_order: List[String] = build_graph.topological_order def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel) def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss) def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss) def imports_topological_order: List[String] = imports_graph.topological_order def bibtex_entries: List[(String, List[String])] = build_topological_order.flatMap(name => apply(name).bibtex_entries match { case Nil => None case entries => Some(name -> entries.map(_.info)) }) def session_chapters: List[(String, String)] = build_topological_order.map(name => name -> apply(name).chapter) override def toString: String = imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")") } /* parser */ val ROOT: Path = Path.explode("ROOT") val ROOTS: Path = Path.explode("ROOTS") private val CHAPTER = "chapter" private val SESSION = "session" private val IN = "in" private val DESCRIPTION = "description" private val DIRECTORIES = "directories" private val OPTIONS = "options" private val SESSIONS = "sessions" private val THEORIES = "theories" private val GLOBAL = "global" private val DOCUMENT_THEORIES = "document_theories" private val DOCUMENT_FILES = "document_files" private val EXPORT_FILES = "export_files" val root_syntax: Outer_Syntax = Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN + (CHAPTER, Keyword.THY_DECL) + (SESSION, Keyword.THY_DECL) + (DESCRIPTION, Keyword.QUASI_COMMAND) + (DIRECTORIES, Keyword.QUASI_COMMAND) + (OPTIONS, Keyword.QUASI_COMMAND) + (SESSIONS, Keyword.QUASI_COMMAND) + (THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_FILES, Keyword.QUASI_COMMAND) + (EXPORT_FILES, Keyword.QUASI_COMMAND) abstract class Entry sealed case class Chapter(name: String) extends Entry sealed case class Session_Entry( pos: Position.T, name: String, groups: List[String], path: String, parent: Option[String], description: String, options: List[Options.Spec], imports: List[String], directories: List[String], theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])], document_theories: List[(String, Position.T)], document_files: List[(String, String)], export_files: List[(String, Int, List[String])]) extends Entry { def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] = theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) }) } private object Parser extends 
Options.Parser { private val chapter: Parser[Chapter] = { val chapter_name = atom("chapter name", _.is_name) command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) } } private val session_entry: Parser[Session_Entry] = { val option = option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) } val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]") val theory_entry = position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) } val theories = $$$(THEORIES) ~! ((options | success(Nil)) ~ rep1(theory_entry)) ^^ { case _ ~ (x ~ y) => (x, y) } val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x } val document_theories = $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x } val document_files = $$$(DOCUMENT_FILES) ~! ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) } val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0) val export_files = $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ prune ~ rep1(embedded)) ^^ { case _ ~ (x ~ y ~ z) => (x, y, z) } command(SESSION) ~! (position(session_name) ~ (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~ ($$$("=") ~! (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~ (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~ rep(theories) ~ (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~ (rep(document_files) ^^ (x => x.flatten)) ~ rep(export_files)))) ^^ { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l))) => Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l) } } def parse_root(path: Path): List[Entry] = { val toks = Token.explode(root_syntax.keywords, File.read(path)) val start = Token.Pos.file(path.implode) parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match { case Success(result, _) => result case bad => error(bad.toString) } } } def parse_root(path: Path): List[Entry] = Parser.parse_root(path) def parse_root_entries(path: Path): List[Session_Entry] = for (entry <- Parser.parse_root(path) if entry.isInstanceOf[Session_Entry]) yield entry.asInstanceOf[Session_Entry] def read_root(options: Options, select: Boolean, path: Path): List[Info] = { var entry_chapter = UNSORTED val infos = new mutable.ListBuffer[Info] parse_root(path).foreach { case Chapter(name) => entry_chapter = name case entry: Session_Entry => infos += make_info(options, select, path.dir, entry_chapter, entry) } infos.toList } def parse_roots(roots: Path): List[String] = { for { line <- split_lines(File.read(roots)) if !(line == "" || line.startsWith("#")) } yield line } /* load sessions from certain directories */ private def is_session_dir(dir: Path): Boolean = (dir + ROOT).is_file || (dir + ROOTS).is_file private def check_session_dir(dir: Path): Path = if (is_session_dir(dir)) File.pwd() + dir.expand else error("Bad session root directory: " + dir.toString) def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = { val default_dirs = Isabelle_System.components().filter(is_session_dir) for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) } yield (select, dir.canonical) 
}

  def load_structure(options: Options,
    dirs: List[Path] = Nil,
    select_dirs: List[Path] = Nil,
    infos: List[Info] = Nil): Structure =
  {
    def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] =
      load_root(select, dir) ::: load_roots(select, dir)

    def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] =
    {
      val root = dir + ROOT
      if (root.is_file) List((select, root)) else Nil
    }

    def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] =
    {
      val roots = dir + ROOTS
      if (roots.is_file) {
        for {
          entry <- parse_roots(roots)
          dir1 =
            try { check_session_dir(dir + Path.explode(entry)) }
            catch {
              case ERROR(msg) =>
                error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
            }
          res <- load_dir(select, dir1)
        } yield res
      }
      else Nil
    }

    val roots =
      for {
        (select, dir) <- directories(dirs, select_dirs)
        res <- load_dir(select, check_session_dir(dir))
      } yield res

    val unique_roots =
      ((Map.empty[JFile, (Boolean, Path)] /: roots) { case (m, (select, path)) =>
        val file = path.canonical_file
        m.get(file) match {
          case None => m + (file -> (select, path))
          case Some((select1, path1)) => m + (file -> (select1 || select, path1))
        }
      }).toList.map(_._2)

    make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos)
  }


  /* Isabelle tool wrapper */

  val isabelle_tool =
    Isabelle_Tool("sessions", "explore structure of Isabelle sessions", args =>
    {
      var base_sessions: List[String] = Nil
      var select_dirs: List[Path] = Nil
      var requirements = false
      var exclude_session_groups: List[String] = Nil
      var all_sessions = false
      var dirs: List[Path] = Nil
      var session_groups: List[String] = Nil
      var exclude_sessions: List[String] = Nil

      val getopts = Getopts("""
Usage: isabelle sessions [OPTIONS] [SESSIONS ...]

  Options are:
    -B NAME      include session NAME and all descendants
    -D DIR       include session directory and select its sessions
    -R           refer to requirements of selected sessions
    -X NAME      exclude sessions from group NAME and all descendants
    -a           select all sessions
    -d DIR       include session directory
    -g NAME      select session group NAME
    -x NAME      exclude session NAME and all descendants

  Explore the structure of Isabelle sessions and print result names in
  topological order (on stdout).
""", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "R" -> (_ => requirements = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val options = Options.init() val selection = Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions) val sessions_structure = load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection) for (name <- sessions_structure.imports_topological_order) { Output.writeln(name, stdout = true) } }) /** heap file with SHA1 digest **/ private val sha1_prefix = "SHA1:" def read_heap_digest(heap: Path): Option[String] = { if (heap.is_file) { using(FileChannel.open(heap.file.toPath, StandardOpenOption.READ))(file => { val len = file.size val n = sha1_prefix.length + SHA1.digest_length if (len >= n) { file.position(len - n) val buf = ByteBuffer.allocate(n) var i = 0 var m = 0 do { m = file.read(buf) if (m != -1) i += m } while (m != -1 && n > i) if (i == n) { val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset) val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset) if (prefix == sha1_prefix) Some(s) else None } else None } else None }) } else None } def write_heap_digest(heap: Path): String = read_heap_digest(heap) match { case None => val s = SHA1.digest(heap).rep File.append(heap, sha1_prefix + s) s case Some(s) => s } /** persistent store **/ object Session_Info { val session_name = SQL.Column.string("session_name").make_primary_key // Build_Log.Session_Info val session_timing = SQL.Column.bytes("session_timing") val command_timings = SQL.Column.bytes("command_timings") val theory_timings = SQL.Column.bytes("theory_timings") val ml_statistics = SQL.Column.bytes("ml_statistics") val task_statistics = SQL.Column.bytes("task_statistics") val errors = SQL.Column.bytes("errors") val build_log_columns = List(session_name, session_timing, command_timings, theory_timings, ml_statistics, task_statistics, errors) // Build.Session_Info val sources = SQL.Column.string("sources") val input_heaps = SQL.Column.string("input_heaps") val output_heap = SQL.Column.string("output_heap") val return_code = SQL.Column.int("return_code") val build_columns = List(sources, input_heaps, output_heap, return_code) val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns) } def store(options: Options): Store = new Store(options) class Store private[Sessions](val options: Options) { override def toString: String = "Store(output_dir = " + output_dir.expand + ")" val xml_cache: XML.Cache = XML.make_cache() val xz_cache: XZ.Cache = XZ.make_cache() /* directories */ val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER") val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER") def system_heaps: Boolean = options.bool("system_heaps") val output_dir: Path = if (system_heaps) system_output_dir else user_output_dir val input_dirs: List[Path] = if (system_heaps) List(system_output_dir) else List(user_output_dir, 
system_output_dir) val browser_info: Path = if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM") else Path.explode("$ISABELLE_BROWSER_INFO") /* file names */ def heap(name: String): Path = Path.basic(name) def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db") def log(name: String): Path = Path.basic("log") + Path.basic(name) def log_gz(name: String): Path = log(name).ext("gz") def output_heap(name: String): Path = output_dir + heap(name) def output_database(name: String): Path = output_dir + database(name) def output_log(name: String): Path = output_dir + log(name) def output_log_gz(name: String): Path = output_dir + log_gz(name) def prepare_output_dir() { Isabelle_System.make_directory(output_dir + Path.basic("log")) } /* heap */ def find_heap(name: String): Option[Path] = input_dirs.map(_ + heap(name)).find(_.is_file) def find_heap_digest(name: String): Option[String] = find_heap(name).flatMap(read_heap_digest) def the_heap(name: String): Path = find_heap(name) getOrElse error("Missing heap image for session " + quote(name) + " -- expected in:\n" + cat_lines(input_dirs.map(dir => " " + dir.expand.implode))) /* database */ private def database_server: Boolean = options.bool("build_database_server") def access_database(name: String, output: Boolean = false): Option[SQL.Database] = { if (database_server) { val db = PostgreSQL.open_database( user = options.string("build_database_user"), password = options.string("build_database_password"), database = options.string("build_database_name"), host = options.string("build_database_host"), port = options.int("build_database_port"), ssh = options.proper_string("build_database_ssh_host").map(ssh_host => SSH.open_session(options, host = ssh_host, user = options.string("build_database_ssh_user"), port = options.int("build_database_ssh_port"))), ssh_close = true) if (output || has_session_info(db, name)) Some(db) else { db.close; None } } else if (output) Some(SQLite.open_database(output_database(name))) else input_dirs.map(_ + database(name)).find(_.is_file).map(SQLite.open_database) } def open_database(name: String, output: Boolean = false): SQL.Database = access_database(name, output = output) getOrElse error("Missing build database for session " + quote(name)) def clean_output(name: String): (Boolean, Boolean) = { val relevant_db = database_server && { access_database(name) match { case Some(db) => try { db.transaction { val relevant_db = has_session_info(db, name) init_session_info(db, name) relevant_db } } finally { db.close } case None => false } } val del = for { dir <- (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir)) file <- List(heap(name), database(name), log(name), log_gz(name)) path = dir + file if path.is_file } yield path.file.delete val relevant = relevant_db || del.nonEmpty val ok = del.forall(b => b) (relevant, ok) } /* SQL database content */ def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes = db.using_statement(Session_Info.table.select(List(column), Session_Info.session_name.where_equal(name)))(stmt => { val res = stmt.execute_query() if (!res.next()) Bytes.empty else res.bytes(column) }) def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] = Properties.uncompress( read_bytes(db, name, column), cache = xz_cache, xml_cache = Some(xml_cache)) /* session info */ def init_session_info(db: SQL.Database, name: String) { db.transaction { db.create_table(Session_Info.table) db.using_statement( 
Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute) db.create_table(Export.Data.table) db.using_statement( Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute) } } def has_session_info(db: SQL.Database, name: String): Boolean = { db.transaction { db.tables.contains(Session_Info.table.name) && { db.using_statement( Session_Info.table.select(List(Session_Info.session_name), Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next()) } } } def write_session_info( db: SQL.Database, name: String, build_log: Build_Log.Session_Info, build: Build.Session_Info) { db.transaction { db.using_statement(Session_Info.table.insert())(stmt => { stmt.string(1) = name stmt.bytes(2) = Properties.encode(build_log.session_timing) stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = xz_cache) stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = xz_cache) stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = xz_cache) stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = xz_cache) stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = xz_cache) stmt.string(8) = build.sources stmt.string(9) = cat_lines(build.input_heaps) stmt.string(10) = build.output_heap getOrElse "" stmt.int(11) = build.return_code stmt.execute() }) } } def read_session_timing(db: SQL.Database, name: String): Properties.T = Properties.decode(read_bytes(db, name, Session_Info.session_timing), Some(xml_cache)) def read_command_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.command_timings) def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.theory_timings) def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.ml_statistics) def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.task_statistics) def read_errors(db: SQL.Database, name: String): List[String] = Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = xz_cache) def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] = { if (db.tables.contains(Session_Info.table.name)) { db.using_statement(Session_Info.table.select(Session_Info.build_columns, Session_Info.session_name.where_equal(name)))(stmt => { val res = stmt.execute_query() if (!res.next()) None else { Some( Build.Session_Info( res.string(Session_Info.sources), split_lines(res.string(Session_Info.input_heaps)), res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) }, res.int(Session_Info.return_code))) } }) } else None } } } diff --git a/src/Pure/Thy/thy_info.ML b/src/Pure/Thy/thy_info.ML --- a/src/Pure/Thy/thy_info.ML +++ b/src/Pure/Thy/thy_info.ML @@ -1,477 +1,472 @@ (* Title: Pure/Thy/thy_info.ML Author: Markus Wenzel, TU Muenchen Global theory info database, with auto-loading according to theory and file dependencies. 
*)

signature THY_INFO =
sig
  type presentation_context =
    {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
      segments: Thy_Output.segment list}
  val adjust_pos_properties: presentation_context -> Position.T -> Properties.T
  val apply_presentation: presentation_context -> theory -> unit
  val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory
  val get_names: unit -> string list
  val lookup_theory: string -> theory option
  val get_theory: string -> theory
  val master_directory: string -> Path.T
  val remove_thy: string -> unit
  type context = {options: Options.T, last_timing: Toplevel.transition -> Time.time}
  val use_theories: context -> string -> Path.T -> (string * Position.T) list -> unit
  val use_thy: string -> unit
  val script_thy: Position.T -> string -> theory -> theory
  val register_thy: theory -> unit
  val finish: unit -> unit
end;

structure Thy_Info: THY_INFO =
struct

(** presentation of consolidated theory **)

type presentation_context =
  {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
    segments: Thy_Output.segment list};

fun adjust_pos_properties (context: presentation_context) pos =
  Position.offset_properties_of (#adjust_pos context pos) @ Position.id_properties_of pos;

structure Presentation = Theory_Data
(
  type T = ((presentation_context -> theory -> unit) * stamp) list;
  val empty = [];
  val extend = I;
  fun merge data : T = Library.merge (eq_snd op =) data;
);

fun apply_presentation (context: presentation_context) thy =
  ignore (Presentation.get thy |> Par_List.map (fn (f, _) => f context thy));

fun add_presentation f = Presentation.map (cons (f, stamp ()));

val _ =
  Theory.setup (add_presentation (fn {options, file_pos, segments, ...} => fn thy =>
+   (Present.export_html thy (map #span segments);
    if exists (Toplevel.is_skipped_proof o #state) segments then ()
    else
      let val body = Thy_Output.present_thy options thy segments;
      in
        if Options.string options "document" = "false" then ()
        else
          let
            val thy_name = Context.theory_name thy;
-           val document_tex_name = Path.explode_binding0 ("document/" ^ thy_name ^ ".tex");
+           val document_tex_name = Present.document_tex_name thy;
            val latex = Latex.isabelle_body thy_name body;
            val output = [Latex.output_text latex, Latex.output_positions file_pos latex];
          in Export.export thy document_tex_name (XML.blob output) end
-     end));
+     end)));


(** thy database **)

(* messages *)

val show_path = space_implode " via " o map quote;

fun cycle_msg names = "Cyclic dependency of " ^ show_path names;


(* derived graph operations *)

fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G
  handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess));

fun new_entry name parents entry =
  String_Graph.new_node (name, entry) #> add_deps name parents;


(* global thys *)

type deps =
 {master: (Path.T * SHA1.digest),  (*master dependencies for thy file*)
  imports: (string * Position.T) list};  (*source specification of imports (partially qualified)*)

fun make_deps master imports : deps = {master = master, imports = imports};

fun master_dir_deps (d: deps option) =
  the_default Path.current (Option.map (Path.dir o #1 o #master) d);

local
  val global_thys =
    Synchronized.var "Thy_Info.thys"
      (String_Graph.empty: (deps option * theory option) String_Graph.T);
in
  fun get_thys () = Synchronized.value global_thys;
  fun change_thys f = Synchronized.change global_thys f;
end;

fun get_names () = String_Graph.topological_order (get_thys ());


(* access thy *)

fun lookup thys name = try
(String_Graph.get_node thys) name; fun lookup_thy name = lookup (get_thys ()) name; fun get thys name = (case lookup thys name of SOME thy => thy | NONE => error ("Theory loader: nothing known about theory " ^ quote name)); fun get_thy name = get (get_thys ()) name; (* access deps *) val lookup_deps = Option.map #1 o lookup_thy; val master_directory = master_dir_deps o #1 o get_thy; (* access theory *) fun lookup_theory name = (case lookup_thy name of SOME (_, SOME theory) => SOME theory | _ => NONE); fun get_theory name = (case lookup_theory name of SOME theory => theory | _ => error ("Theory loader: undefined entry for theory " ^ quote name)); val get_imports = Resources.imports_of o get_theory; (** thy operations **) (* remove *) fun remove name thys = (case lookup thys name of NONE => thys | SOME (NONE, _) => error ("Cannot update finished theory " ^ quote name) | SOME _ => let val succs = String_Graph.all_succs thys [name]; val _ = writeln ("Theory loader: removing " ^ commas_quote succs); in fold String_Graph.del_node succs thys end); val remove_thy = change_thys o remove; (* update *) fun update deps theory thys = let val name = Context.theory_long_name theory; val parents = map Context.theory_long_name (Theory.parents_of theory); val thys' = remove name thys; val _ = map (get thys') parents; in new_entry name parents (SOME deps, SOME theory) thys' end; fun update_thy deps theory = change_thys (update deps theory); (* context *) type context = {options: Options.T, last_timing: Toplevel.transition -> Time.time}; fun default_context (): context = {options = Options.default (), last_timing = K Time.zeroTime}; (* scheduling loader tasks *) datatype result = Result of {theory: theory, exec_id: Document_ID.exec, present: unit -> unit, commit: unit -> unit, weight: int}; fun theory_result theory = Result {theory = theory, exec_id = Document_ID.none, present = I, commit = I, weight = 0}; fun result_theory (Result {theory, ...}) = theory; fun result_present (Result {present, ...}) = present; fun result_commit (Result {commit, ...}) = commit; fun result_ord (Result {weight = i, ...}, Result {weight = j, ...}) = int_ord (j, i); fun join_theory (Result {theory, exec_id, ...}) = let val _ = Execution.join [exec_id]; val res = Exn.capture Thm.consolidate_theory theory; val exns = maps Task_Queue.group_status (Execution.peek exec_id); in res :: map Exn.Exn exns end; datatype task = Task of string list * (theory list -> result) | Finished of theory; fun task_finished (Task _) = false | task_finished (Finished _) = true; fun task_parents deps (parents: string list) = map (the o AList.lookup (op =) deps) parents; val schedule_seq = String_Graph.schedule (fn deps => fn (_, task) => (case task of Task (parents, body) => let val result = body (task_parents deps parents); val _ = Par_Exn.release_all (join_theory result); val _ = result_present result (); val _ = result_commit result (); in result_theory result end | Finished thy => thy)) #> ignore; val schedule_futures = Thread_Attributes.uninterruptible (fn _ => fn tasks => let val futures = tasks |> String_Graph.schedule (fn deps => fn (name, task) => (case task of Task (parents, body) => (singleton o Future.forks) {name = "theory:" ^ name, group = NONE, deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true} (fn () => (case filter (not o can Future.join o #2) deps of [] => body (map (result_theory o Future.join) (task_parents deps parents)) | bad => error ("Failed to load theory " ^ quote name ^ " (unresolved " ^ commas_quote (map #1 bad) ^ 
")"))) | Finished theory => Future.value (theory_result theory))); val results1 = futures |> maps (fn future => (case Future.join_result future of Exn.Res result => join_theory result | Exn.Exn exn => [Exn.Exn exn])); val results2 = futures |> map_filter (Exn.get_res o Future.join_result) |> sort result_ord |> Par_List.map (fn result => Exn.capture (result_present result) ()); (* FIXME more precise commit order (!?) *) val results3 = futures |> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ()); (* FIXME avoid global Execution.reset (!??) *) val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ())); val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4); in () end); (* eval theory *) fun excursion keywords master_dir last_timing init elements = let fun prepare_span st span = Command_Span.content span |> Command.read keywords (Command.read_thy st) master_dir init ([], ~1) |> (fn tr => Toplevel.timing (last_timing tr) tr); fun element_result span_elem (st, _) = let val elem = Thy_Element.map_element (prepare_span st) span_elem; val (results, st') = Toplevel.element_result keywords elem st; val pos' = Toplevel.pos_of (Thy_Element.last_element elem); in (results, (st', pos')) end; val (results, (end_state, end_pos)) = fold_map element_result elements (Toplevel.init_toplevel (), Position.none); val thy = Toplevel.end_theory end_pos end_state; in (results, thy) end; fun eval_thy (context: context) update_time master_dir header text_pos text parents = let val {options, last_timing} = context; val (name, _) = #name header; val keywords = fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents (Keyword.add_keywords (#keywords header) Keyword.empty_keywords); val spans = Outer_Syntax.parse_spans (Token.explode keywords text_pos text); val elements = Thy_Element.parse_elements keywords spans; - val symbols = Resources.html_symbols (); - val html = implode (map (HTML.present_span symbols keywords) spans); - - fun init () = - Resources.begin_theory master_dir header parents - |> Present.begin_theory update_time html; - + fun init () = Resources.begin_theory master_dir header parents; val (results, thy) = cond_timeit true ("theory " ^ quote name) (fn () => excursion keywords master_dir last_timing init elements); fun present () = let val segments = (spans ~~ maps Toplevel.join_results results) |> map (fn (span, (tr, st')) => {span = span, command = tr, state = st'}); val context: presentation_context = {options = options, file_pos = text_pos, adjust_pos = I, segments = segments}; in apply_presentation context thy end; in (thy, present, size text) end; (* require_thy -- checking database entries wrt. 
the file-system *) local fun required_by _ [] = "" | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")"; fun load_thy context initiators update_time deps text (name, pos) keywords parents = let val _ = remove_thy name; val _ = writeln ("Loading theory " ^ quote name ^ required_by " " initiators); val _ = Output.try_protocol_message (Markup.loading_theory name) []; val {master = (thy_path, _), imports} = deps; val dir = Path.dir thy_path; val header = Thy_Header.make (name, pos) imports keywords; val _ = (imports ~~ parents) |> List.app (fn ((_, pos), thy) => Context_Position.reports_global thy [(pos, Theory.get_markup thy)]); val exec_id = Document_ID.make (); val _ = Execution.running Document_ID.none exec_id [] orelse raise Fail ("Failed to register execution: " ^ Document_ID.print exec_id); val timing_start = Timing.start (); val text_pos = Position.put_id (Document_ID.print exec_id) (Path.position thy_path); val (theory, present, weight) = eval_thy context update_time dir header text_pos text (if name = Context.PureN then [Context.the_global_context ()] else parents); val timing_result = Timing.result timing_start; val timing_props = [Markup.theory_timing, (Markup.nameN, name)]; val _ = Output.try_protocol_message (timing_props @ Markup.timing_properties timing_result) [] fun commit () = update_thy deps theory; in Result {theory = theory, exec_id = exec_id, present = present, commit = commit, weight = weight} end; fun check_thy_deps dir name = (case lookup_deps name of SOME NONE => (true, NONE, Position.none, get_imports name, []) | NONE => let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end | SOME (SOME {master, ...}) => let val {master = master', text = text', theory_pos = theory_pos', imports = imports', keywords = keywords'} = Resources.check_thy dir name; val deps' = SOME (make_deps master' imports', text'); val current = #2 master = #2 master' andalso (case lookup_theory name of NONE => false | SOME theory => Resources.loaded_files_current theory); in (current, deps', theory_pos', imports', keywords') end); in fun require_thys context initiators qualifier dir strs tasks = fold_map (require_thy context initiators qualifier dir) strs tasks |>> forall I and require_thy context initiators qualifier dir (s, require_pos) tasks = let val {master_dir, theory_name, ...} = Resources.import_name qualifier dir s; in (case try (String_Graph.get_node tasks) theory_name of SOME task => (task_finished task, tasks) | NONE => let val _ = member (op =) initiators theory_name andalso error (cycle_msg initiators); val (current, deps, theory_pos, imports, keywords) = check_thy_deps master_dir theory_name handle ERROR msg => cat_error msg ("The error(s) above occurred for theory " ^ quote theory_name ^ Position.here require_pos ^ required_by "\n" initiators); val qualifier' = Resources.theory_qualifier theory_name; val dir' = dir + master_dir_deps (Option.map #1 deps); val parents = map (#theory_name o Resources.import_name qualifier' dir' o #1) imports; val (parents_current, tasks') = require_thys context (theory_name :: initiators) qualifier' dir' imports tasks; val all_current = current andalso parents_current; val task = if all_current then Finished (get_theory theory_name) else (case deps of NONE => raise Fail "Malformed deps" | SOME (dep, text) => let val update_time = serial (); val load = load_thy context initiators update_time dep text 
(theory_name, theory_pos) keywords; in Task (parents, load) end); val tasks'' = new_entry theory_name parents task tasks'; in (all_current, tasks'') end) end; end; (* use theories *) fun use_theories context qualifier master_dir imports = let val (_, tasks) = require_thys context [] qualifier master_dir imports String_Graph.empty in if Multithreading.max_threads () > 1 then schedule_futures tasks else schedule_seq tasks end; fun use_thy name = use_theories (default_context ()) Resources.default_qualifier Path.current [(name, Position.none)]; (* toplevel scripting -- without maintaining database *) fun script_thy pos txt thy = let val trs = Outer_Syntax.parse_text thy (K thy) pos txt; val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs); val end_state = fold (Toplevel.command_exception true) trs (Toplevel.init_toplevel ()); in Toplevel.end_theory end_pos end_state end; (* register theory *) fun register_thy theory = let val name = Context.theory_long_name theory; val {master, ...} = Resources.check_thy (Resources.master_directory theory) name; val imports = Resources.imports_of theory; in change_thys (fn thys => let val thys' = remove name thys; val _ = writeln ("Registering theory " ^ quote name); in update (make_deps master imports) theory thys' end) end; (* finish all theories *) fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry))); end; fun use_thy name = Runtime.toplevel_program (fn () => Thy_Info.use_thy name); diff --git a/src/Pure/Tools/build.ML b/src/Pure/Tools/build.ML --- a/src/Pure/Tools/build.ML +++ b/src/Pure/Tools/build.ML @@ -1,164 +1,155 @@ (* Title: Pure/Tools/build.ML Author: Makarius Build Isabelle sessions. *) structure Build: sig end = struct (* command timings *) type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session timing *) fun session_timing f x = let val start = Timing.start (); val y = f x; val timing = Timing.result start; val threads = string_of_int (Multithreading.max_threads ()); val props = [("threads", threads)] @ Markup.timing_properties timing; val _ = Output.protocol_message (Markup.session_timing :: props) []; in y end; (* build theories *) fun build_theories last_timing qualifier master_dir (options, thys) = let val context = {options = options, last_timing = last_timing}; val condition = space_explode "," (Options.string options "condition"); val conds = filter_out (can getenv_strict) condition; in if null conds then (Options.set_default options; Isabelle_Process.init_options (); Future.fork I; (Thy_Info.use_theories context qualifier master_dir |> (case Options.string options "profiling" of "" => I | 
"time" => profile_time | "allocations" => profile_allocations | bad => error ("Bad profiling option: " ^ quote bad)) |> Unsynchronized.setmp print_mode (space_explode "," (Options.string options "print_mode") @ print_mode_value ())) thys) else Output.physical_stderr ("Skipping theories " ^ commas_quote (map #1 thys) ^ " (undefined " ^ commas conds ^ ")\n") end; (* build session *) val _ = Isabelle_Process.protocol_command "build_session" (fn [args_yxml] => let - val (html_symbols, (command_timings, (verbose, (browser_info, - (documents, (parent_name, (chapter, (session_name, (master_dir, - (theories, (session_positions, (session_directories, (session_chapters, - (doc_names, (global_theories, (loaded_theories, bibtex_entries)))))))))))))))) = + val (html_symbols, (command_timings, (parent_name, (chapter, + (session_name, (master_dir, (theories, (session_positions, (session_directories, + (session_chapters, (doc_names, + (global_theories, (loaded_theories, bibtex_entries))))))))))))) = YXML.parse_body args_yxml |> let open XML.Decode; val position = Position.of_properties o properties; val path = Path.explode o string; in - pair (list (pair string int)) (pair (list properties) (pair bool (pair path - (pair (list string) (pair string (pair string (pair string - (pair path - (pair (((list (pair Options.decode (list (pair string position)))))) - (pair (list (pair string properties)) - (pair (list (pair string string)) - (pair (list (pair string string)) (pair (list string) - (pair (list (pair string string)) (pair (list string) - (list (pair string (list string)))))))))))))))))) + pair (list (pair string int)) (pair (list properties) (pair string (pair string + (pair string (pair path + (pair (((list (pair Options.decode (list (pair string position)))))) + (pair (list (pair string properties)) (pair (list (pair string string)) + (pair (list (pair string string)) (pair (list string) + (pair (list (pair string string)) (pair (list string) + (list (pair string (list string))))))))))))))) end; fun build () = let val _ = Resources.init_session {html_symbols = html_symbols, session_positions = session_positions, session_directories = session_directories, session_chapters = session_chapters, bibtex_entries = bibtex_entries, docs = doc_names, global_theories = global_theories, loaded_theories = loaded_theories}; - val _ = - Session.init - (Options.default_bool "browser_info") - browser_info - documents - parent_name - (chapter, session_name) - verbose; + val _ = Session.init parent_name (chapter, session_name); val last_timing = get_timings (fold update_timings command_timings empty_timings); val res1 = theories |> (List.app (build_theories last_timing session_name master_dir) |> session_timing |> Exn.capture); val res2 = Exn.capture Session.finish (); val _ = Resources.finish_session_base (); val _ = Par_Exn.release_all [res1, res2]; val _ = if session_name = Context.PureN then Theory.install_pure (Thy_Info.get_theory Context.PureN) else (); in () end; fun exec e = if can Theory.get_pure () then Isabelle_Thread.fork {name = "build_session", stack_limit = Isabelle_Thread.stack_limit (), interrupts = false} e |> ignore else e (); in exec (fn () => (Future.interruptible_task (fn () => (build (); (0, []))) () handle exn => ((1, Runtime.exn_message_list exn) handle _ (*sic!*) => (2, ["CRASHED"]))) |> let open XML.Encode in pair int (list string) end |> Output.protocol_message Markup.build_session_finished) end | _ => raise Match); end; diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala 
--- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,975 +1,976 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Build and manage Isabelle sessions. */ package isabelle import scala.collection.{SortedSet, mutable} import scala.annotation.tailrec object Build { /** auxiliary **/ /* persistent build info */ sealed case class Session_Info( sources: String, input_heaps: List[String], output_heap: Option[String], return_code: Int) { def ok: Boolean = return_code == 0 } /* queue with scheduling information */ private object Queue { type Timings = (List[Properties.T], Double) def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings = { val no_timings: Timings = (Nil, 0.0) store.access_database(session_name) match { case None => no_timings case Some(db) => def ignore_error(msg: String) = { progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg)) no_timings } try { val command_timings = store.read_command_timings(db, session_name) val session_timing = store.read_session_timing(db, session_name) match { case Markup.Elapsed(t) => t case _ => 0.0 } (command_timings, session_timing) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("") } finally { db.close } } } def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double]) : Map[String, Double] = { val maximals = sessions_structure.build_graph.maximals.toSet def desc_timing(session_name: String): Double = { if (maximals.contains(session_name)) timing(session_name) else { val descendants = sessions_structure.build_descendants(List(session_name)).toSet val g = sessions_structure.build_graph.restrict(descendants) (0.0 :: g.maximals.flatMap(desc => { val ps = g.all_preds(List(desc)) if (ps.exists(p => !timing.isDefinedAt(p))) None else Some(ps.map(timing(_)).sum) })).max } } timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0) } def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store) : Queue = { val graph = sessions_structure.build_graph val names = graph.keys val timings = names.map(name => (name, load_timings(progress, store, name))) val command_timings = timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil) val session_timing = make_session_timing(sessions_structure, timings.map({ case (name, (_, t)) => (name, t) }).toMap) object Ordering extends scala.math.Ordering[String] { def compare_timing(name1: String, name2: String): Int = { val t1 = session_timing(name1) val t2 = session_timing(name2) if (t1 == 0.0 || t2 == 0.0) 0 else t1 compare t2 } def compare(name1: String, name2: String): Int = compare_timing(name2, name1) match { case 0 => sessions_structure(name2).timeout compare sessions_structure(name1).timeout match { case 0 => name1 compare name2 case ord => ord } case ord => ord } } new Queue(graph, SortedSet(names: _*)(Ordering), command_timings) } } private class Queue( graph: Graph[String, Sessions.Info], order: SortedSet[String], val command_timings: String => List[Properties.T]) { def is_inner(name: String): Boolean = !graph.is_maximal(name) def is_empty: Boolean = graph.is_empty def - (name: String): Queue = new Queue(graph.del_node(name), order - name, // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? 
command_timings) def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] = { val it = order.iterator.dropWhile(name => skip(name) || !graph.defined(name) // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? || !graph.is_minimal(name)) if (it.hasNext) { val name = it.next; Some((name, graph.get_node(name))) } else None } } /* PIDE protocol handler */ /* job: running prover process */ private class Job(progress: Progress, session_name: String, val info: Sessions.Info, deps: Sessions.Deps, store: Sessions.Store, do_store: Boolean, verbose: Boolean, val numa_node: Option[Int], command_timings0: List[Properties.T]) { val options: Options = NUMA.policy_options(info.options, numa_node) private val sessions_structure = deps.sessions_structure - private val documents = info.documents.map(_._1) private val future_result: Future[Process_Result] = Future.thread("build", uninterruptible = true) { val parent = info.parent.getOrElse("") val base = deps(parent) val args_yxml = YXML.string_of_body( { import XML.Encode._ - pair(list(pair(string, int)), pair(list(properties), pair(bool, pair(Path.encode, - pair(list(string), pair(string, pair(string, pair(string, pair(Path.encode, + pair(list(pair(string, int)), pair(list(properties), pair(string, pair(string, + pair(string, pair(Path.encode, pair(list(pair(Options.encode, list(pair(string, properties)))), pair(list(pair(string, properties)), pair(list(pair(string, string)), pair(list(pair(string, string)), pair(list(string), pair(list(pair(string, string)), - pair(list(string), list(pair(string, list(string)))))))))))))))))))( - (Symbol.codes, (command_timings0, (verbose, (store.browser_info, - (documents, (parent, (info.chapter, (session_name, (Path.current, - (info.theories, + pair(list(string), list(pair(string, list(string))))))))))))))))( + (Symbol.codes, (command_timings0, (parent, (info.chapter, + (session_name, (Path.current, (info.theories, (sessions_structure.session_positions, (sessions_structure.dest_session_directories, (sessions_structure.session_chapters, (base.doc_names, (base.global_theories.toList, - (base.loaded_theories.keys, sessions_structure.bibtex_entries))))))))))))))))) + (base.loaded_theories.keys, sessions_structure.bibtex_entries)))))))))))))) }) val env = Isabelle_System.settings() + ("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString) val is_pure = Sessions.is_pure(session_name) val use_prelude = if (is_pure) Thy_Header.ml_roots.map(_._1) else Nil val eval_store = if (do_store) { (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) ::: List("ML_Heap.save_child " + ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name)))) } else Nil val resources = new Resources(sessions_structure, deps(parent)) val session = new Session(options, resources) { override val xml_cache: XML.Cache = store.xml_cache override val xz_cache: XZ.Cache = store.xz_cache } object Build_Session_Errors { private val promise: Promise[List[String]] = Future.promise def result: Exn.Result[List[String]] = promise.join_result def cancel: Unit = promise.cancel def apply(errs: List[String]) { try { promise.fulfill(errs) } catch { case _: IllegalStateException => } } } val export_consumer = Export.consumer(store.open_database(session_name, output = true), cache = store.xz_cache) val stdout = new StringBuilder(1000) val stderr = new StringBuilder(1000) val messages = new mutable.ListBuffer[XML.Elem] val command_timings = new mutable.ListBuffer[Properties.T] val theory_timings = new 
mutable.ListBuffer[Properties.T] val session_timings = new mutable.ListBuffer[Properties.T] val runtime_statistics = new mutable.ListBuffer[Properties.T] val task_statistics = new mutable.ListBuffer[Properties.T] val document_output = new mutable.ListBuffer[String] def fun( name: String, acc: mutable.ListBuffer[Properties.T], unapply: Properties.T => Option[Properties.T]): (String, Session.Protocol_Function) = { name -> ((msg: Prover.Protocol_Output) => unapply(msg.properties) match { case Some(props) => acc += props; true case _ => false }) } session.init_protocol_handler(new Session.Protocol_Handler { override def exit() { Build_Session_Errors.cancel } private def build_session_finished(msg: Prover.Protocol_Output): Boolean = { val (rc, errors) = try { val (rc, errs) = { import XML.Decode._ pair(int, list(x => x))(Symbol.decode_yxml(msg.text)) } val errors = for (err <- errs) yield { val prt = Protocol_Message.expose_no_reports(err) Pretty.string_of(prt, metric = Symbol.Metric) } (rc, errors) } catch { case ERROR(err) => (2, List(err)) } session.protocol_command("Prover.stop", rc.toString) Build_Session_Errors(errors) true } private def loading_theory(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Markup.Loading_Theory(name) => progress.theory(Progress.Theory(name, session = session_name)) true case _ => false } private def export(msg: Prover.Protocol_Output): Boolean = msg.properties match { case Protocol.Export(args) => export_consumer(session_name, args, msg.bytes) true case _ => false } private def command_timing(props: Properties.T): Option[Properties.T] = for { props1 <- Markup.Command_Timing.unapply(props) elapsed <- Markup.Elapsed.unapply(props1) elapsed_time = Time.seconds(elapsed) if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold") } yield props1.filter(p => Markup.command_timing_properties(p._1)) override val functions = List( Markup.Build_Session_Finished.name -> build_session_finished, Markup.Loading_Theory.name -> loading_theory, Markup.EXPORT -> export, fun(Markup.Command_Timing.name, command_timings, command_timing), fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply), fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply), fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply)) }) session.runtime_statistics += Session.Consumer("ML_statistics") { case Session.Runtime_Statistics(props) => runtime_statistics += props } session.all_messages += Session.Consumer[Any]("build_session_output") { case msg: Prover.Output => val message = msg.message if (msg.is_stdout) { stdout ++= Symbol.encode(XML.content(message)) } else if (msg.is_stderr) { stderr ++= Symbol.encode(XML.content(message)) } else if (Protocol.is_exported(message)) { messages += message } else if (msg.is_exit) { val err = "Prover terminated" + (msg.properties match { case Markup.Process_Result(result) => ": " + result.print_rc case _ => "" }) Build_Session_Errors(List(err)) } case _ => } val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store) val process = Isabelle_Process(session, options, sessions_structure, store, logic = parent, raw_ml_system = is_pure, use_prelude = use_prelude, eval_main = eval_main, cwd = info.dir.file, env = env) val build_errors = Isabelle_Thread.interrupt_handler(_ => process.terminate) { Exn.capture { process.await_startup } match { case Exn.Res(_) => session.protocol_command("build_session", args_yxml) 
Build_Session_Errors.result case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn))) } } val process_result = Isabelle_Thread.interrupt_handler(_ => process.terminate) { process.await_shutdown } val export_errors = export_consumer.shutdown(close = true).map(Output.error_message_text) val document_errors = try { - if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && documents.nonEmpty) { - val document_progress = - new Progress { - override def echo(msg: String): Unit = - document_output.synchronized { document_output += msg } - override def echo_document(path: Path): Unit = - progress.echo_document(path) - } - Present.build_documents(session_name, deps, store, verbose = verbose, - verbose_latex = true, progress = document_progress) + if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok) { + if (info.documents.nonEmpty) { + val document_progress = + new Progress { + override def echo(msg: String): Unit = + document_output.synchronized { document_output += msg } + override def echo_document(path: Path): Unit = + progress.echo_document(path) + } + Present.build_documents(session_name, deps, store, verbose = verbose, + verbose_latex = true, progress = document_progress) + } + if (info.options.bool("browser_info")) { + val dir = Present.session_html(session_name, deps, store) + if (verbose) progress.echo("Browser info at " + dir.absolute) + } } - val graph_pdf = - graphview.Graph_File.make_pdf(options, deps(session_name).session_graph_display) - Present.finish(store.browser_info, graph_pdf, info, session_name) Nil } catch { case Exn.Interrupt.ERROR(msg) => List(msg) } val result = { val more_output = Library.trim_line(stdout.toString) :: messages.toList.map(message => Symbol.encode(Protocol.message_text(List(message), metric = Symbol.Metric))) ::: command_timings.toList.map(Protocol.Command_Timing_Marker.apply) ::: theory_timings.toList.map(Protocol.Theory_Timing_Marker.apply) ::: session_timings.toList.map(Protocol.Session_Timing_Marker.apply) ::: runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) ::: task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) ::: document_output.toList val more_errors = Library.trim_line(stderr.toString) :: export_errors ::: document_errors process_result.output(more_output).errors(more_errors) } build_errors match { case Exn.Res(build_errs) => val errs = build_errs ::: document_errors if (errs.isEmpty) result else { result.error_rc.output( errs.flatMap(s => split_lines(Output.error_message_text(s))) ::: errs.map(Protocol.Error_Message_Marker.apply)) } case Exn.Exn(Exn.Interrupt()) => if (result.ok) result.copy(rc = Exn.Interrupt.return_code) else result case Exn.Exn(exn) => throw exn } } def terminate: Unit = future_result.cancel def is_finished: Boolean = future_result.is_finished private val timeout_request: Option[Event_Timer.Request] = { if (info.timeout > Time.zero) Some(Event_Timer.request(Time.now() + info.timeout) { terminate }) else None } def join: (Process_Result, Option[String]) = { val result1 = future_result.join val was_timeout = timeout_request match { case None => false case Some(request) => !request.cancel } val result2 = if (result1.interrupted) { if (was_timeout) result1.error(Output.error_message_text("Timeout")).was_timeout else result1.error(Output.error_message_text("Interrupt")) } else result1 val heap_digest = if (result2.ok && do_store && store.output_heap(session_name).is_file) Some(Sessions.write_heap_digest(store.output_heap(session_name))) else None (result2, heap_digest) } } /** build 
  /** build with results **/

  class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)])
  {
    def sessions: Set[String] = results.keySet
    def cancelled(name: String): Boolean = results(name)._1.isEmpty
    def apply(name: String): Process_Result = results(name)._1.getOrElse(Process_Result(1))
    def info(name: String): Sessions.Info = results(name)._2
    val rc: Int =
      (0 /: results.iterator.map(
        { case (_, (Some(r), _)) => r.rc
          case (_, (None, _)) => 1 }))(_ max _)
    def ok: Boolean = rc == 0

    override def toString: String = rc.toString
  }

  def session_finished(session_name: String, process_result: Process_Result): String =
    "Finished " + session_name + " (" + process_result.timing.message_resources + ")"

  def session_timing(session_name: String, build_log: Build_Log.Session_Info): String =
  {
    val props = build_log.session_timing
    val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1
    val timing = Markup.Timing_Properties.parse(props)
    "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")"
  }

  def build(
    options: Options,
    selection: Sessions.Selection = Sessions.Selection.empty,
    progress: Progress = new Progress,
    check_unknown_files: Boolean = false,
    build_heap: Boolean = false,
    clean_build: Boolean = false,
    dirs: List[Path] = Nil,
    select_dirs: List[Path] = Nil,
    infos: List[Sessions.Info] = Nil,
    numa_shuffling: Boolean = false,
    max_jobs: Int = 1,
    list_files: Boolean = false,
    check_keywords: Set[String] = Set.empty,
    fresh_build: Boolean = false,
    no_build: Boolean = false,
    soft_build: Boolean = false,
    verbose: Boolean = false,
    export_files: Boolean = false): Results =
  {
    val build_options =
      options + "completion_limit=0" + "editor_tracing_messages=0" + "pide_reports=false"
    val store = Sessions.store(build_options)

    Isabelle_Fonts.init()


    /* session selection and dependencies */

    val full_sessions =
      Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos)

    def sources_stamp(deps: Sessions.Deps, session_name: String): String =
    {
      val digests =
        full_sessions(session_name).meta_digest ::
        deps.sources(session_name) :::
        deps.imported_sources(session_name)
      SHA1.digest(cat_lines(digests.map(_.toString).sorted)).toString
    }

    val deps =
    {
      val deps0 =
        Sessions.deps(full_sessions.selection(selection),
          progress = progress, inlined_files = true, verbose = verbose,
          list_files = list_files, check_keywords = check_keywords).check_errors

      if (soft_build && !fresh_build) {
        val outdated =
          deps0.sessions_structure.build_topological_order.flatMap(name =>
            store.access_database(name) match {
              case Some(db) =>
                using(db)(store.read_build(_, name)) match {
                  case Some(build)
                  if build.ok && build.sources == sources_stamp(deps0, name) => None
                  case _ => Some(name)
                }
              case None => Some(name)
            })
        Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)),
          progress = progress, inlined_files = true).check_errors
      }
      else deps0
    }
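    /* Illustration (editorial sketch, not part of the patch): sources_stamp reduces the
       session's meta digest and all (imported) source digests to one stable SHA1 string;
       sorting makes it independent of traversal order, schematically:

         SHA1.digest(cat_lines(digests.map(_.toString).sorted)).toString

       The soft_build case above relies on exactly this stamp: a session counts as
       up-to-date only if its stored build record is ok and records the same stamp,
       otherwise it joins the "outdated" selection and gets rebuilt. */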
    /* check unknown files */

    if (check_unknown_files) {
      val source_files =
        (for {
          (_, base) <- deps.session_bases.iterator
          (path, _) <- base.sources.iterator
        } yield path).toList
      val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file)
      val unknown_files =
        Mercurial.check_files(source_files)._2.
          filterNot(path => exclude_files.contains(path.canonical_file))
      if (unknown_files.nonEmpty) {
        progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" +
          unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", ""))
      }
    }


    /* main build process */

    val queue = Queue(progress, deps.sessions_structure, store)

    store.prepare_output_dir()

    if (clean_build) {
      for (name <- full_sessions.imports_descendants(full_sessions.imports_selection(selection))) {
        val (relevant, ok) = store.clean_output(name)
        if (relevant) {
          if (ok) progress.echo("Cleaned " + name)
          else progress.echo(name + " FAILED to clean")
        }
      }
    }

    // scheduler loop
    case class Result(
      current: Boolean,
      heap_digest: Option[String],
      process: Option[Process_Result],
      info: Sessions.Info)
    {
      def ok: Boolean =
        process match {
          case None => false
          case Some(res) => res.rc == 0
        }
    }

    def sleep()
    {
      Isabelle_Thread.interrupt_handler(_ => progress.stop) { Time.seconds(0.5).sleep }
    }

    val numa_nodes = new NUMA.Nodes(numa_shuffling)

    @tailrec def loop(
      pending: Queue,
      running: Map[String, (List[String], Job)],
      results: Map[String, Result]): Map[String, Result] =
    {
      def used_node(i: Int): Boolean =
        running.iterator.exists(
          { case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i })

      if (pending.is_empty) results
      else {
        if (progress.stopped) {
          for ((_, (_, job)) <- running) job.terminate
        }

        running.find({ case (_, (_, job)) => job.is_finished }) match {
          case Some((session_name, (input_heaps, job))) =>
            //{{{ finish job

            val (process_result, heap_digest) = job.join

            val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
            val process_result_tail =
            {
              val tail = job.info.options.int("process_output_tail")
              process_result.copy(
                out_lines =
                  "(see also " + store.output_log(session_name).file.toString + ")" ::
                  (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
            }

            val build_log =
              Build_Log.Log_File(session_name, process_result.out_lines).
                parse_session_info(
                  command_timings = true,
                  theory_timings = true,
                  ml_statistics = true,
                  task_statistics = true)

            // write log file
            if (process_result.ok) {
              File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
            }
            else File.write(store.output_log(session_name), terminate_lines(log_lines))

            // write database
            using(store.open_database(session_name, output = true))(db =>
              store.write_session_info(db, session_name,
                build_log =
                  if (process_result.timeout) build_log.error("Timeout") else build_log,
                build =
                  Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest,
                    process_result.rc)))

            // messages
            process_result.err_lines.foreach(progress.echo)

            if (process_result.ok) {
              if (verbose) progress.echo(session_timing(session_name, build_log))
              progress.echo(session_finished(session_name, process_result))
            }
            else {
              progress.echo(session_name + " FAILED")
              if (!process_result.interrupted) progress.echo(process_result_tail.out)
            }

            loop(pending - session_name, running - session_name,
              results + (session_name ->
                Result(false, heap_digest, Some(process_result_tail), job.info)))
            //}}}
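          /* Illustration (editorial sketch, not part of the patch): finishing a job persists
             the (possibly gzipped) log file and a database record via store.write_session_info.
             The record can be read back with operations already used in this file, e.g.

               store.access_database(session_name) match {
                 case Some(db) => using(db)(store.read_build(_, session_name))
                 case None => None
               }

             and carries the sources stamp, input heaps, heap digest and return code that the
             "current" check in the next case compares against. */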
          case None if running.size < (max_jobs max 1) =>
            //{{{ check/start next job
            pending.dequeue(running.isDefinedAt) match {
              case Some((session_name, info)) =>
                val ancestor_results =
                  deps.sessions_structure.build_requirements(List(session_name)).
                    filterNot(_ == session_name).map(results(_))
                val ancestor_heaps = ancestor_results.flatMap(_.heap_digest)

                val do_store =
                  build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name)

                val (current, heap_digest) =
                {
                  store.access_database(session_name) match {
                    case Some(db) =>
                      using(db)(store.read_build(_, session_name)) match {
                        case Some(build) =>
                          val heap_digest = store.find_heap_digest(session_name)
                          val current =
                            !fresh_build &&
                            build.ok &&
                            build.sources == sources_stamp(deps, session_name) &&
                            build.input_heaps == ancestor_heaps &&
                            build.output_heap == heap_digest &&
                            !(do_store && heap_digest.isEmpty)
                          (current, heap_digest)
                        case None => (false, None)
                      }
                    case None => (false, None)
                  }
                }
                val all_current = current && ancestor_results.forall(_.current)

                if (all_current)
                  loop(pending - session_name, running,
                    results + (session_name ->
                      Result(true, heap_digest, Some(Process_Result(0)), info)))
                else if (no_build) {
                  progress.echo_if(verbose, "Skipping " + session_name + " ...")
                  loop(pending - session_name, running,
                    results + (session_name ->
                      Result(false, heap_digest, Some(Process_Result(1)), info)))
                }
                else if (ancestor_results.forall(_.ok) && !progress.stopped) {
                  progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...")

                  store.clean_output(session_name)
                  using(store.open_database(session_name, output = true))(
                    store.init_session_info(_, session_name))

                  val numa_node = numa_nodes.next(used_node)
                  val job =
                    new Job(progress, session_name, info, deps, store, do_store, verbose,
                      numa_node, queue.command_timings(session_name))
                  loop(pending, running + (session_name -> (ancestor_heaps, job)), results)
                }
                else {
                  progress.echo(session_name + " CANCELLED")
                  loop(pending - session_name, running,
                    results + (session_name -> Result(false, heap_digest, None, info)))
                }
              case None => sleep(); loop(pending, running, results)
            }
            ///}}}
          case None => sleep(); loop(pending, running, results)
        }
      }
    }


    /* build results */

    val results0 =
      if (deps.is_empty) {
        progress.echo_warning("Nothing to build")
        Map.empty[String, Result]
      }
      else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) }

    val results =
      new Results(
        (for ((name, result) <- results0.iterator)
          yield (name, (result.process, result.info))).toMap)

    if (export_files) {
      for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) {
        val info = results.info(name)
        if (info.export_files.nonEmpty) {
          progress.echo("Exporting " + info.name + " ...")
          for ((dir, prune, pats) <- info.export_files) {
            Export.export_files(store, name, info.dir + dir,
              progress = if (verbose) progress else new Progress,
              export_prune = prune,
              export_patterns = pats)
          }
        }
      }
    }

    if (results.rc != 0 && (verbose || !no_build)) {
      val unfinished =
        (for {
          name <- results.sessions.iterator
          if !results(name).ok
        } yield name).toList.sorted
      progress.echo("Unfinished session(s): " + commas(unfinished))
    }
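    /* Illustration (editorial sketch, not part of the patch): the browser_chapters value
       below groups successful sessions by chapter before updating the chapter indexes,
       e.g. (with made-up session names)

         List(("HOL", ("HOL-Library", "...")), ("HOL", ("HOL-Analysis", "...")))
           .groupBy(_._1)
         // Map("HOL" -> List(("HOL", ("HOL-Library", "...")), ("HOL", ("HOL-Analysis", "..."))))

       after which only the (name, description) pairs are kept per chapter, so each chapter
       index is written once with all of its finished sessions. */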
    /* global browser info */

    if (!no_build) {
      val browser_chapters =
        (for {
          (name, result) <- results0.iterator
          if result.ok
          info = full_sessions(name)
          if info.options.bool("browser_info")
        } yield (info.chapter, (name, info.description))).toList.groupBy(_._1).
          map({ case (chapter, es) => (chapter, es.map(_._2)) }).filterNot(_._2.isEmpty)

      for ((chapter, entries) <- browser_chapters)
        Present.update_chapter_index(store.browser_info, chapter, entries)

      if (browser_chapters.nonEmpty) Present.make_global_index(store.browser_info)
    }

    results
  }


  /* Isabelle tool wrapper */

  val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions", args =>
  {
    val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS"))

    var base_sessions: List[String] = Nil
    var select_dirs: List[Path] = Nil
    var numa_shuffling = false
    var requirements = false
    var soft_build = false
    var exclude_session_groups: List[String] = Nil
    var all_sessions = false
    var build_heap = false
    var clean_build = false
    var dirs: List[Path] = Nil
    var export_files = false
    var fresh_build = false
    var session_groups: List[String] = Nil
    var max_jobs = 1
    var check_keywords: Set[String] = Set.empty
    var list_files = false
    var no_build = false
    var options = Options.init(opts = build_options)
    var verbose = false
    var exclude_sessions: List[String] = Nil

    val getopts = Getopts("""
Usage: isabelle build [OPTIONS] [SESSIONS ...]

  Options are:
    -B NAME      include session NAME and all descendants
    -D DIR       include session directory and select its sessions
    -N           cyclic shuffling of NUMA CPU nodes (performance tuning)
    -R           refer to requirements of selected sessions
    -S           soft build: only observe changes of sources, not heap images
    -X NAME      exclude sessions from group NAME and all descendants
    -a           select all sessions
    -b           build heap images
    -c           clean build
    -d DIR       include session directory
    -e           export files from session specification into file-system
    -f           fresh build
    -g NAME      select session group NAME
    -j INT       maximum number of parallel jobs (default 1)
    -k KEYWORD   check theory sources for conflicts with proposed keywords
    -l           list session source files
    -n           no build -- test dependencies only
    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
    -v           verbose
    -x NAME      exclude session NAME and all descendants

  Build and manage Isabelle sessions, depending on implicit settings:

""" + Library.prefix_lines(" ", Build_Log.Settings.show()) + "\n",
      "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
      "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
      "N" -> (_ => numa_shuffling = true),
      "R" -> (_ => requirements = true),
      "S" -> (_ => soft_build = true),
      "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
      "a" -> (_ => all_sessions = true),
      "b" -> (_ => build_heap = true),
      "c" -> (_ => clean_build = true),
      "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
      "e" -> (_ => export_files = true),
      "f" -> (_ => fresh_build = true),
      "g:" -> (arg => session_groups = session_groups ::: List(arg)),
      "j:" -> (arg => max_jobs = Value.Int.parse(arg)),
      "k:" -> (arg => check_keywords = check_keywords + arg),
      "l" -> (_ => list_files = true),
      "n" -> (_ => no_build = true),
      "o:" -> (arg => options = options + arg),
      "v" -> (_ => verbose = true),
      "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))

    val sessions = getopts(args)
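    /* Illustration (editorial sketch, not part of the patch): in the Getopts specification
       above a trailing colon marks a switch that takes an argument, plain names are flags:

         "j:" -> (arg => max_jobs = Value.Int.parse(arg))   // -j INT
         "v" -> (_ => verbose = true)                       // -v

       getopts(args) consumes the recognised switches and returns the remaining positional
       arguments, which are treated as session names here. */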
    val progress = new Console_Progress(verbose = verbose)

    val start_date = Date.now()

    if (verbose) {
      progress.echo(
        "Started at " + Build_Log.print_date(start_date) +
          " (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")")
      progress.echo(Build_Log.Settings.show() + "\n")
    }

    val results =
      progress.interrupt_handler {
        build(options,
          selection = Sessions.Selection(
            requirements = requirements,
            all_sessions = all_sessions,
            base_sessions = base_sessions,
            exclude_session_groups = exclude_session_groups,
            exclude_sessions = exclude_sessions,
            session_groups = session_groups,
            sessions = sessions),
          progress = progress,
          check_unknown_files = Mercurial.is_repository(Path.explode("~~")),
          build_heap = build_heap,
          clean_build = clean_build,
          dirs = dirs,
          select_dirs = select_dirs,
          numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling),
          max_jobs = max_jobs,
          list_files = list_files,
          check_keywords = check_keywords,
          fresh_build = fresh_build,
          no_build = no_build,
          soft_build = soft_build,
          verbose = verbose,
          export_files = export_files)
      }
    val end_date = Date.now()
    val elapsed_time = end_date.time - start_date.time

    if (verbose) {
      progress.echo("\nFinished at " + Build_Log.print_date(end_date))
    }

    val total_timing =
      (Timing.zero /: results.sessions.iterator.map(a => results(a).timing))(_ + _).
        copy(elapsed = elapsed_time)
    progress.echo(total_timing.message_resources)

    sys.exit(results.rc)
  })


  /* build logic image */

  def build_logic(options: Options, logic: String,
    progress: Progress = new Progress,
    build_heap: Boolean = false,
    dirs: List[Path] = Nil,
    fresh: Boolean = false,
    strict: Boolean = false): Int =
  {
    val selection = Sessions.Selection.session(logic)
    val rc =
      if (!fresh && build(options, selection = selection,
          build_heap = build_heap, no_build = true, dirs = dirs).ok) 0
      else {
        progress.echo("Build started for Isabelle/" + logic + " ...")
        Build.build(options, selection = selection, progress = progress,
          build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc
      }
    if (strict && rc != 0) error("Failed to build Isabelle/" + logic) else rc
  }
}
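/* Illustration (editorial sketch, not part of the patch): a minimal way to drive the entry
   points above from other Isabelle/Scala code, assuming default options and a console
   progress (the value names are hypothetical):

     val options = Options.init()
     val progress = new Console_Progress()
     val rc = Build.build_logic(options, "HOL", progress = progress)
     // or, with an explicit selection and inspection of per-session results:
     val results = Build.build(options, selection = Sessions.Selection.session("HOL"), progress = progress)
     if (!results.ok) error("Build failed: rc = " + results.rc)

   build_logic first probes with no_build = true and rebuilds the logic image only when that
   probe fails (or fresh = true); strict = true turns a non-zero rc into an error. */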