diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala
--- a/src/Pure/Thy/presentation.scala
+++ b/src/Pure/Thy/presentation.scala
@@ -1,779 +1,793 @@
 /* Title: Pure/Thy/present.scala
    Author: Makarius

 HTML/PDF presentation of theory documents.
 */

 package isabelle


 import scala.collection.immutable.SortedMap


 object Presentation
 {
   /** HTML documents **/

   val fonts_path = Path.explode("fonts")

   sealed case class HTML_Document(title: String, content: String)

   def html_context(fonts_url: String => String = HTML.fonts_url()): HTML_Context =
     new HTML_Context(fonts_url)

   final class HTML_Context private[Presentation](fonts_url: String => String)
   {
     def init_fonts(dir: Path)
     {
       val fonts_dir = Isabelle_System.make_directory(dir + fonts_path)
       for (entry <- Isabelle_Fonts.fonts(hidden = true))
         File.copy(entry.path, fonts_dir)
     }

     def head(title: String, rest: XML.Body = Nil): XML.Tree =
       HTML.div("head", HTML.chapter(title) :: rest)

     def source(body: XML.Body): XML.Tree = HTML.pre("source", body)

     def contents(heading: String, items: List[XML.Body], css_class: String = "contents")
       : List[XML.Elem] =
     {
       if (items.isEmpty) Nil
       else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items))))
     }

     def output_document(title: String, body: XML.Body): String =
       HTML.output_document(
         List(
           HTML.style(HTML.fonts_css(fonts_url) + "\n\n" + File.read(HTML.isabelle_css)),
           HTML.title(title)),
         List(HTML.source(body)), css = "", structural = false)

     def html_document(title: String, body: XML.Body): HTML_Document =
       HTML_Document(title, output_document(title, body))
   }


-  /* HTML body */
+  /* presentation elements */

-  val html_elements1: Markup.Elements =
-    Rendering.foreground_elements ++ Rendering.text_color_elements +
-    Markup.NUMERAL + Markup.COMMENT
+  sealed case class Elements(
+    html: Markup.Elements = Markup.Elements.empty,
+    language: Markup.Elements = Markup.Elements.empty)

-  val html_elements2: Markup.Elements =
-    html_elements1 ++ Rendering.markdown_elements + Markup.LANGUAGE
+  val elements1: Elements =
+    Elements(
+      html =
+        Rendering.foreground_elements ++ Rendering.text_color_elements +
+        Markup.NUMERAL + Markup.COMMENT + Markup.LANGUAGE)
+
+  val elements2: Elements =
+    Elements(
+      html = elements1.html ++ Rendering.markdown_elements,
+      language = elements1.language + Markup.Language.DOCUMENT)
+
+
+  /* HTML */

   private val div_elements =
     Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name,
       HTML.descr.name)

-  def make_html_body(xml: XML.Body): XML.Body =
+  def make_html(elements: Elements, xml: XML.Body): XML.Body =
   {
     def html_div(html: XML.Body): Boolean =
       html exists {
         case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
         case XML.Text(_) => false
       }

-    def html_class(c: String, html: XML.Body): XML.Tree =
-      if (html.forall(_ == XML.no_text)) XML.no_text
-      else if (html_div(html)) HTML.div(c, html)
-      else HTML.span(c, html)
+    def html_class(c: String, html: XML.Body): XML.Body =
+      if (c == "") html
+      else if (html_div(html)) List(HTML.div(c, html))
+      else List(HTML.span(c, html))

     def html_body(xml_body: XML.Body): XML.Body =
-      xml_body map {
-        case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(Markup.Language.DOCUMENT)), body) =>
-          html_class(Markup.Language.DOCUMENT, html_body(body))
-        case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => HTML.par(html_body(body))
-        case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => HTML.item(html_body(body))
-        case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), _) => XML.no_text
+ xml_body flatMap { + case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) => + html_class(if (elements.language(name)) name else "", html_body(body)) + case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => + List(HTML.par(html_body(body))) + case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => + List(HTML.item(html_body(body))) + case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), _) => Nil case XML.Elem(Markup.Markdown_List(kind), body) => - if (kind == Markup.ENUMERATE) HTML.enum(html_body(body)) else HTML.list(html_body(body)) + if (kind == Markup.ENUMERATE) List(HTML.enum(html_body(body))) + else List(HTML.list(html_body(body))) case XML.Elem(markup, body) => val name = markup.name val html = markup.properties match { case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD => - List(html_class(kind, html_body(body))) + html_class(kind, html_body(body)) case _ => html_body(body) } Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match { case Some(c) => html_class(c.toString, html) case None => html_class(name, html) } case XML.Text(text) => - XML.Text(Symbol.decode(text)) + HTML.text(Symbol.decode(text)) } html_body(xml) } /* PIDE HTML document */ def html_document( resources: Resources, snapshot: Document.Snapshot, html_context: HTML_Context, - html_elements: Markup.Elements, + elements: Elements, plain_text: Boolean = false): HTML_Document = { require(!snapshot.is_outdated) val name = snapshot.node_name if (plain_text) { val title = "File " + Symbol.cartouche_decoded(name.path.file_name) val body = HTML.text(snapshot.node.source) html_context.html_document(title, body) } else { resources.html_document(snapshot) getOrElse { val title = if (name.is_theory) "Theory " + quote(name.theory_base_name) else "File " + Symbol.cartouche_decoded(name.path.file_name) - val body = make_html_body(snapshot.xml_markup(elements = html_elements)) + val body = make_html(elements, snapshot.xml_markup(elements = elements.html)) html_context.html_document(title, body) } } } /** PDF LaTeX documents **/ /* document info */ abstract class Document_Name { def name: String def path: Path = Path.basic(name) override def toString: String = name } object Document_Variant { def parse(name: String, tags: String): Document_Variant = Document_Variant(name, Library.space_explode(',', tags)) def parse(opt: String): Document_Variant = Library.space_explode('=', opt) match { case List(name) => Document_Variant(name, Nil) case List(name, tags) => parse(name, tags) case _ => error("Malformed document variant: " + quote(opt)) } } sealed case class Document_Variant(name: String, tags: List[String]) extends Document_Name { def print_tags: String = tags.mkString(",") def print: String = if (tags.isEmpty) name else name + "=" + print_tags def latex_sty: String = Library.terminate_lines( tags.map(tag => tag.toList match { case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}" case '-' :: cs => "\\isadroptag{" + cs.mkString + "}" case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}" case cs => "\\isakeeptag{" + cs.mkString + "}" })) } sealed case class Document_Input(name: String, sources: SHA1.Digest) extends Document_Name sealed case class Document_Output(name: String, sources: SHA1.Digest, log_xz: Bytes, pdf: Bytes) extends Document_Name { def log: String = log_xz.uncompress().text def log_lines: List[String] = split_lines(log) def write(db: SQL.Database, session_name: String): Unit = write_document(db, session_name, this) } /* SQL data model */ object Data { val 
session_name = SQL.Column.string("session_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val sources = SQL.Column.string("sources") val log_xz = SQL.Column.bytes("log_xz") val pdf = SQL.Column.bytes("pdf") val table = SQL.Table("isabelle_documents", List(session_name, name, sources, log_xz, pdf)) def where_equal(session_name: String, name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_documents(db: SQL.Database, session_name: String): List[Document_Input] = { val select = Data.table.select(List(Data.name, Data.sources), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => { val name = res.string(Data.name) val sources = res.string(Data.sources) Document_Input(name, SHA1.fake(sources)) }).toList) } def read_document(db: SQL.Database, session_name: String, name: String): Option[Document_Output] = { val select = Data.table.select(sql = Data.where_equal(session_name, name)) db.using_statement(select)(stmt => { val res = stmt.execute_query() if (res.next()) { val name = res.string(Data.name) val sources = res.string(Data.sources) val log_xz = res.bytes(Data.log_xz) val pdf = res.bytes(Data.pdf) Some(Document_Output(name, SHA1.fake(sources), log_xz, pdf)) } else None }) } def write_document(db: SQL.Database, session_name: String, doc: Document_Output) { db.using_statement(Data.table.insert())(stmt => { stmt.string(1) = session_name stmt.string(2) = doc.name stmt.string(3) = doc.sources.toString stmt.bytes(4) = doc.log_xz stmt.bytes(5) = doc.pdf stmt.execute() }) } /* presentation context */ object Context { val none: Context = new Context { def enabled: Boolean = false } val standard: Context = new Context { def enabled: Boolean = true } def dir(path: Path): Context = new Context { def enabled: Boolean = true override def dir(store: Sessions.Store): Path = path } def make(s: String): Context = if (s == ":") standard else dir(Path.explode(s)) } abstract class Context private { def enabled: Boolean def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info def dir(store: Sessions.Store): Path = store.presentation_dir def dir(store: Sessions.Store, info: Sessions.Info): Path = dir(store) + Path.explode(info.chapter_session) } /** HTML presentation **/ /* maintain chapter index */ private val sessions_path = Path.basic(".sessions") private def read_sessions(dir: Path): List[(String, String)] = { val path = dir + sessions_path if (path.is_file) { import XML.Decode._ list(pair(string, string))(Symbol.decode_yxml(File.read(path))) } else Nil } private def write_sessions(dir: Path, sessions: List[(String, String)]) { import XML.Encode._ File.write(dir + sessions_path, YXML.string_of_body(list(pair(string, string))(sessions))) } def update_chapter_index(browser_info: Path, chapter: String, new_sessions: List[(String, String)]) { val dir = Isabelle_System.make_directory(browser_info + Path.basic(chapter)) val sessions0 = try { read_sessions(dir) } catch { case _: XML.Error => Nil } val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList write_sessions(dir, sessions) val title = "Isabelle/" + chapter + " sessions" HTML.write_document(dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), HTML.chapter(title) :: (if (sessions.isEmpty) Nil else List(HTML.div("sessions", List(HTML.description( sessions.map({ case (name, description) => val descr = 
Symbol.trim_blank_lines(description) (List(HTML.link(name + "/index.html", HTML.text(name))), if (descr == "") Nil else HTML.break ::: List(HTML.pre(HTML.text(descr)))) }))))))) } def make_global_index(browser_info: Path) { if (!(browser_info + Path.explode("index.html")).is_file) { Isabelle_System.make_directory(browser_info) File.copy(Path.explode("~~/lib/logo/isabelle.gif"), browser_info + Path.explode("isabelle.gif")) File.write(browser_info + Path.explode("index.html"), File.read(Path.explode("~~/lib/html/library_index_header.template")) + File.read(Path.explode("~~/lib/html/library_index_content.template")) + File.read(Path.explode("~~/lib/html/library_index_footer.template"))) } } /* present session */ val session_graph_path = Path.explode("session_graph.pdf") val readme_path = Path.explode("README.html") val files_path = Path.explode("files") def html_name(name: Document.Node.Name): String = name.theory_base_name + ".html" def theory_link(deps: Sessions.Deps, session0: String, name: Document.Node.Name, body: XML.Body): XML.Tree = { val session1 = deps(session0).theory_qualifier(name) val info0 = deps.sessions_structure(session0) val info1 = deps.sessions_structure(session1) HTML.link(info0.relative_path(info1) + html_name(name), body) } def token_markups(keywords: Keyword.Keywords, tok: Token): List[String] = { if (keywords.is_command(tok, Keyword.theory_end)) List(Markup.KEYWORD2, Markup.KEYWORD) else if (keywords.is_command(tok, Keyword.proof_asm)) List(Markup.KEYWORD3, Markup.COMMAND) else if (keywords.is_command(tok, Keyword.improper)) List(Markup.KEYWORD1, Markup.IMPROPER, Markup.COMMAND) else if (tok.is_command) List(Markup.KEYWORD1, Markup.COMMAND) else if (tok.is_delimiter) List(Markup.DELIMITER, Markup.KEYWORD) else if (tok.is_keyword) List(Markup.KEYWORD2, Markup.KEYWORD) else if (tok.is_comment) List(Markup.COMMENT) else { tok.kind match { case Token.Kind.VAR => List(Markup.VAR) case Token.Kind.TYPE_IDENT => List(Markup.TFREE) case Token.Kind.TYPE_VAR => List(Markup.TVAR) case Token.Kind.STRING => List(Markup.STRING) case Token.Kind.ALT_STRING => List(Markup.ALT_STRING) case Token.Kind.VERBATIM => List(Markup.VERBATIM) case Token.Kind.CARTOUCHE => List(Markup.CARTOUCHE) case _ => Nil } } } def session_html( resources: Resources, session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress, verbose: Boolean = false, html_context: HTML_Context, - html_elements: Markup.Elements, + elements: Elements, presentation: Context) { val info = deps.sessions_structure(session) val options = info.options val base = deps(session) val session_dir = presentation.dir(db_context.store, info) html_context.init_fonts(session_dir) Bytes.write(session_dir + session_graph_path, graphview.Graph_File.make_pdf(options, base.session_graph_display)) val documents = for { doc <- info.document_variants document <- db_context.input_database(session)(read_document(_, _, doc.name)) } yield { if (verbose) progress.echo("Presenting document " + session + "/" + doc.name) Bytes.write(session_dir + doc.path.pdf, document.pdf) doc } val view_links = { val deps_link = HTML.link(session_graph_path, HTML.text("theory dependencies")) val readme_links = if ((info.dir + readme_path).is_file) { File.copy(info.dir + readme_path, session_dir + readme_path) List(HTML.link(readme_path, HTML.text("README"))) } else Nil val document_links = documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name))) Library.separate(HTML.break ::: HTML.nl, (deps_link :: readme_links 
::: document_links). map(link => HTML.text("View ") ::: List(link))).flatten } val theories: List[XML.Body] = { var seen_files = List.empty[(Path, String, Document.Node.Name)] for { thy_name <- base.session_theories thy_command <- Build_Job.read_theory(db_context, resources, session, thy_name.theory) } yield { progress.expose_interrupt() if (verbose) progress.echo("Presenting theory " + thy_name) val snapshot = Document.State.init.snippet(thy_command) val thy_body = { val syntax = base.theory_syntax(thy_name) val keywords = syntax.keywords val spans = syntax.parse_spans(Symbol.decode(File.read(thy_name.path))) val imports_offset = base.known_theories(thy_name.theory).header.imports_offset var token_offset = 1 spans.flatMap(span => { val is_init = span.is_kind(keywords, Keyword.theory_begin, false) span.content.flatMap(tok => { val text = HTML.text(tok.source) val item = if (is_init && imports_offset.isDefinedAt(token_offset)) { List(theory_link(deps, session, imports_offset(token_offset), text)) } else text token_offset += tok.symbol_length (token_markups(keywords, tok) :\ item)( { case (c, body) => List(HTML.span(c, body)) }) }) }) } val files = for { - (src_path, xml) <- snapshot.xml_markup_blobs(elements = html_elements) + (src_path, xml) <- snapshot.xml_markup_blobs(elements = elements.html) if xml.nonEmpty } yield { progress.expose_interrupt() if (verbose) progress.echo("Presenting file " + src_path) val file_name = (files_path + src_path.squash.html).implode seen_files.find(p => p._1 == src_path || p._2 == file_name) match { case None => seen_files ::= (src_path, file_name, thy_name) case Some((_, _, thy_name1)) => error("Incoherent use of file name " + src_path + " as " + quote(file_name) + " in theory " + thy_name1 + " vs. " + thy_name) } val file_path = session_dir + Path.explode(file_name) html_context.init_fonts(file_path.dir) val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short) HTML.write_document(file_path.dir, file_path.file_name, List(HTML.title(file_title)), - List(html_context.head(file_title), html_context.source(make_html_body(xml)))) + List(html_context.head(file_title), html_context.source(make_html(elements, xml)))) List(HTML.link(file_name, HTML.text(file_title))) } val thy_title = "Theory " + thy_name.theory_base_name HTML.write_document(session_dir, html_name(thy_name), List(HTML.title(thy_title)), List(html_context.head(thy_title), html_context.source(thy_body))) List(HTML.link(html_name(thy_name), HTML.text(thy_name.theory_base_name) ::: (if (files.isEmpty) Nil else List(HTML.itemize(files))))) } } val title = "Session " + session HTML.write_document(session_dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), html_context.head(title, List(HTML.par(view_links))) :: html_context.contents("Theories", theories)) } /** build documents **/ val session_tex_path = Path.explode("session.tex") def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" def document_tex_name(name: Document.Node.Name): String = "document/" + tex_name(name) class Build_Error(val log_lines: List[String], val message: String) extends Exn.User_Error(message) def build_documents( session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress, output_sources: Option[Path] = None, output_pdf: Option[Path] = None, verbose: Boolean = false, verbose_latex: Boolean = false): List[Document_Output] = { /* session info */ val info = deps.sessions_structure(session) val hierarchy = 
deps.sessions_structure.hierarchy(session) val options = info.options val base = deps(session) val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display) /* prepare document directory */ lazy val tex_files = for (name <- base.session_theories ::: base.document_theories) yield { val entry = db_context.get_export(hierarchy, name.theory, document_tex_name(name)) Path.basic(tex_name(name)) -> entry.uncompressed } def prepare_dir1(dir: Path, doc: Document_Variant): (Path, String) = { val doc_dir = dir + Path.basic(doc.name) Isabelle_System.make_directory(doc_dir) Isabelle_System.bash("isabelle latex -o sty", cwd = doc_dir.file).check File.write(doc_dir + Path.explode("isabelletags.sty"), doc.latex_sty) for ((base_dir, src) <- info.document_files) File.copy_base(info.dir + base_dir, src, doc_dir) File.write(doc_dir + session_tex_path, Library.terminate_lines( base.session_theories.map(name => "\\input{" + tex_name(name) + "}"))) for ((path, tex) <- tex_files) Bytes.write(doc_dir + path, tex) val root1 = "root_" + doc.name val root = if ((doc_dir + Path.explode(root1).tex).is_file) root1 else "root" (doc_dir, root) } def prepare_dir2(dir: Path, doc: Document_Variant): Unit = { val doc_dir = dir + Path.basic(doc.name) // non-deterministic, but irrelevant Bytes.write(doc_dir + session_graph_path, graph_pdf) } /* produce documents */ val documents = for (doc <- info.documents) yield { Isabelle_System.with_tmp_dir("document")(tmp_dir => { progress.echo("Preparing " + session + "/" + doc.name + " ...") val start = Time.now() // prepare sources val (doc_dir, root) = prepare_dir1(tmp_dir, doc) val digests = File.find_files(doc_dir.file, follow_links = true).map(SHA1.digest) val sources = SHA1.digest_set(digests) prepare_dir2(tmp_dir, doc) for (dir <- output_sources) { prepare_dir1(dir, doc) prepare_dir2(dir, doc) } // old document from database val old_document = for { old_doc <- db_context.input_database(session)(read_document(_, _, doc.name)) if old_doc.sources == sources } yield { Bytes.write(doc_dir + doc.path.pdf, old_doc.pdf) old_doc } old_document getOrElse { // bash scripts def root_bash(ext: String): String = Bash.string(root + "." + ext) def latex_bash(fmt: String = "pdf", ext: String = "tex"): String = "isabelle latex -o " + Bash.string(fmt) + " " + Bash.string(root + "." + ext) def bash(items: String*): Process_Result = progress.bash(items.mkString(" && "), cwd = doc_dir.file, echo = verbose_latex, watchdog = Time.seconds(0.5)) // prepare document val result = if ((doc_dir + Path.explode("build")).is_file) { bash("./build pdf " + Bash.string(doc.name)) } else { bash( latex_bash(), "{ [ ! -f " + root_bash("bib") + " ] || " + latex_bash("bbl") + "; }", "{ [ ! 
-f " + root_bash("idx") + " ] || " + latex_bash("idx") + "; }", latex_bash(), latex_bash()) } // result val root_pdf = Path.basic(root).pdf val result_path = doc_dir + root_pdf val log_lines = result.out_lines ::: result.err_lines if (!result.ok) { val message = Exn.cat_message( Library.trim_line(result.err), cat_lines(Latex.latex_errors(doc_dir, root) ::: Bibtex.bibtex_errors(doc_dir, root)), "Failed to build document " + quote(doc.name)) throw new Build_Error(log_lines, message) } else if (!result_path.is_file) { val message = "Bad document result: expected to find " + root_pdf throw new Build_Error(log_lines, message) } else { val stop = Time.now() val timing = stop - start progress.echo("Finished " + session + "/" + doc.name + " (" + timing.message_hms + " elapsed time)") val log_xz = Bytes(cat_lines(log_lines)).compress() val pdf = Bytes.read(result_path) Document_Output(doc.name, sources, log_xz, pdf) } } }) } for (dir <- output_pdf; doc <- documents) { Isabelle_System.make_directory(dir) val path = dir + doc.path.pdf Bytes.write(path, doc.pdf) progress.echo("Document at " + path.absolute) } documents } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", Scala_Project.here, args => { var output_sources: Option[Path] = None var output_pdf: Option[Path] = None var verbose_latex = false var dirs: List[Path] = Nil var options = Options.init() var verbose_build = false val getopts = Getopts( """ Usage: isabelle document [OPTIONS] SESSION Options are: -O DIR output directory for LaTeX sources and resulting PDF -P DIR output directory for resulting PDF -S DIR output directory for LaTeX sources -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. """, "O:" -> (arg => { val dir = Path.explode(arg) output_sources = Some(dir) output_pdf = Some(dir) }), "P:" -> (arg => { output_pdf = Some(Path.explode(arg)) }), "S:" -> (arg => { output_sources = Some(Path.explode(arg)) }), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) if (output_sources.isEmpty && output_pdf.isEmpty) { progress.echo_warning("No output directory") } using(store.open_database_context())(db_context => build_documents(session, deps, db_context, progress = progress, output_sources = output_sources, output_pdf = output_pdf, verbose = true, verbose_latex = verbose_latex)) } }) } diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,694 +1,694 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Build and manage Isabelle sessions. 
*/ package isabelle import scala.collection.SortedSet import scala.annotation.tailrec object Build { /** auxiliary **/ /* persistent build info */ sealed case class Session_Info( sources: String, input_heaps: List[String], output_heap: Option[String], return_code: Int) { def ok: Boolean = return_code == 0 } /* queue with scheduling information */ private object Queue { type Timings = (List[Properties.T], Double) def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings = { val no_timings: Timings = (Nil, 0.0) store.try_open_database(session_name) match { case None => no_timings case Some(db) => def ignore_error(msg: String) = { progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg)) no_timings } try { val command_timings = store.read_command_timings(db, session_name) val session_timing = store.read_session_timing(db, session_name) match { case Markup.Elapsed(t) => t case _ => 0.0 } (command_timings, session_timing) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("") } finally { db.close } } } def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double]) : Map[String, Double] = { val maximals = sessions_structure.build_graph.maximals.toSet def desc_timing(session_name: String): Double = { if (maximals.contains(session_name)) timing(session_name) else { val descendants = sessions_structure.build_descendants(List(session_name)).toSet val g = sessions_structure.build_graph.restrict(descendants) (0.0 :: g.maximals.flatMap(desc => { val ps = g.all_preds(List(desc)) if (ps.exists(p => !timing.isDefinedAt(p))) None else Some(ps.map(timing(_)).sum) })).max } } timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0) } def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store) : Queue = { val graph = sessions_structure.build_graph val names = graph.keys val timings = names.map(name => (name, load_timings(progress, store, name))) val command_timings = timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil) val session_timing = make_session_timing(sessions_structure, timings.map({ case (name, (_, t)) => (name, t) }).toMap) object Ordering extends scala.math.Ordering[String] { def compare_timing(name1: String, name2: String): Int = { val t1 = session_timing(name1) val t2 = session_timing(name2) if (t1 == 0.0 || t2 == 0.0) 0 else t1 compare t2 } def compare(name1: String, name2: String): Int = compare_timing(name2, name1) match { case 0 => sessions_structure(name2).timeout compare sessions_structure(name1).timeout match { case 0 => name1 compare name2 case ord => ord } case ord => ord } } new Queue(graph, SortedSet(names: _*)(Ordering), command_timings) } } private class Queue( graph: Graph[String, Sessions.Info], order: SortedSet[String], val command_timings: String => List[Properties.T]) { def is_inner(name: String): Boolean = !graph.is_maximal(name) def is_empty: Boolean = graph.is_empty def - (name: String): Queue = new Queue(graph.del_node(name), order - name, // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? command_timings) def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] = { val it = order.iterator.dropWhile(name => skip(name) || !graph.defined(name) // FIXME scala-2.10.0 .. 2.12.4 TreeSet problem!? 
|| !graph.is_minimal(name)) if (it.hasNext) { val name = it.next; Some((name, graph.get_node(name))) } else None } } /** build with results **/ class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)]) { def sessions: Set[String] = results.keySet def infos: List[Sessions.Info] = results.values.map(_._2).toList def cancelled(name: String): Boolean = results(name)._1.isEmpty def apply(name: String): Process_Result = results(name)._1.getOrElse(Process_Result(1)) def info(name: String): Sessions.Info = results(name)._2 val rc: Int = (0 /: results.iterator.map( { case (_, (Some(r), _)) => r.rc case (_, (None, _)) => 1 }))(_ max _) def ok: Boolean = rc == 0 override def toString: String = rc.toString } def session_finished(session_name: String, process_result: Process_Result): String = "Finished " + session_name + " (" + process_result.timing.message_resources + ")" def session_timing(session_name: String, build_log: Build_Log.Session_Info): String = { val props = build_log.session_timing val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1 val timing = Markup.Timing_Properties.parse(props) "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")" } def build( options: Options, selection: Sessions.Selection = Sessions.Selection.empty, presentation: Presentation.Context = Presentation.Context.none, progress: Progress = new Progress, check_unknown_files: Boolean = false, build_heap: Boolean = false, clean_build: Boolean = false, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Sessions.Info] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, list_files: Boolean = false, check_keywords: Set[String] = Set.empty, fresh_build: Boolean = false, no_build: Boolean = false, soft_build: Boolean = false, verbose: Boolean = false, export_files: Boolean = false): Results = { val build_options = options + "completion_limit=0" + "editor_tracing_messages=0" + "kodkod_scala=false" + ("pide_reports=" + options.bool("build_pide_reports")) val store = Sessions.store(build_options) Isabelle_Fonts.init() /* session selection and dependencies */ val full_sessions = Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos) val full_sessions_selection = full_sessions.imports_selection(selection) def sources_stamp(deps: Sessions.Deps, session_name: String): String = { val digests = full_sessions(session_name).meta_digest :: deps.sources(session_name) ::: deps.imported_sources(session_name) SHA1.digest_set(digests).toString } val deps = { val deps0 = Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true, verbose = verbose, list_files = list_files, check_keywords = check_keywords).check_errors if (soft_build && !fresh_build) { val outdated = deps0.sessions_structure.build_topological_order.flatMap(name => store.try_open_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) if build.ok && build.sources == sources_stamp(deps0, name) => None case _ => Some(name) } case None => Some(name) }) Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)), progress = progress, inlined_files = true).check_errors } else deps0 } /* check unknown files */ if (check_unknown_files) { val source_files = (for { (_, base) <- deps.session_bases.iterator (path, _) <- base.sources.iterator } yield path).toList val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file) val 
unknown_files = Mercurial.check_files(source_files)._2. filterNot(path => exclude_files.contains(path.canonical_file)) if (unknown_files.nonEmpty) { progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" + unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", "")) } } /* main build process */ val queue = Queue(progress, deps.sessions_structure, store) store.prepare_output_dir() if (clean_build) { for (name <- full_sessions.imports_descendants(full_sessions_selection)) { val (relevant, ok) = store.clean_output(name) if (relevant) { if (ok) progress.echo("Cleaned " + name) else progress.echo(name + " FAILED to clean") } } } // scheduler loop case class Result( current: Boolean, heap_digest: Option[String], process: Option[Process_Result], info: Sessions.Info) { def ok: Boolean = process match { case None => false case Some(res) => res.rc == 0 } } def sleep() { Isabelle_Thread.interrupt_handler(_ => progress.stop) { Time.seconds(0.5).sleep } } val numa_nodes = new NUMA.Nodes(numa_shuffling) @tailrec def loop( pending: Queue, running: Map[String, (List[String], Build_Job)], results: Map[String, Result]): Map[String, Result] = { def used_node(i: Int): Boolean = running.iterator.exists( { case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i }) if (pending.is_empty) results else { if (progress.stopped) { for ((_, (_, job)) <- running) job.terminate } running.find({ case (_, (_, job)) => job.is_finished }) match { case Some((session_name, (input_heaps, job))) => //{{{ finish job val (process_result, heap_digest) = job.join val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test) val process_result_tail = { val tail = job.info.options.int("process_output_tail") process_result.copy( out_lines = "(see also " + store.output_log(session_name).file.toString + ")" :: (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0))) } val build_log = Build_Log.Log_File(session_name, process_result.out_lines). parse_session_info( command_timings = true, theory_timings = true, ml_statistics = true, task_statistics = true) // write log file if (process_result.ok) { File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines)) } else File.write(store.output_log(session_name), terminate_lines(log_lines)) // write database using(store.open_database(session_name, output = true))(db => store.write_session_info(db, session_name, build_log = if (process_result.timeout) build_log.error("Timeout") else build_log, build = Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest, process_result.rc))) // messages process_result.err_lines.foreach(progress.echo) if (process_result.ok) { if (verbose) progress.echo(session_timing(session_name, build_log)) progress.echo(session_finished(session_name, process_result)) } else { progress.echo(session_name + " FAILED") if (!process_result.interrupted) progress.echo(process_result_tail.out) } loop(pending - session_name, running - session_name, results + (session_name -> Result(false, heap_digest, Some(process_result_tail), job.info))) //}}} case None if running.size < (max_jobs max 1) => //{{{ check/start next job pending.dequeue(running.isDefinedAt) match { case Some((session_name, info)) => val ancestor_results = deps.sessions_structure.build_requirements(List(session_name)). 
filterNot(_ == session_name).map(results(_)) val ancestor_heaps = ancestor_results.flatMap(_.heap_digest) val do_store = build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name) val (current, heap_digest) = { store.try_open_database(session_name) match { case Some(db) => using(db)(store.read_build(_, session_name)) match { case Some(build) => val heap_digest = store.find_heap_digest(session_name) val current = !fresh_build && build.ok && build.sources == sources_stamp(deps, session_name) && build.input_heaps == ancestor_heaps && build.output_heap == heap_digest && !(do_store && heap_digest.isEmpty) (current, heap_digest) case None => (false, None) } case None => (false, None) } } val all_current = current && ancestor_results.forall(_.current) if (all_current) loop(pending - session_name, running, results + (session_name -> Result(true, heap_digest, Some(Process_Result(0)), info))) else if (no_build) { progress.echo_if(verbose, "Skipping " + session_name + " ...") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, Some(Process_Result(1)), info))) } else if (ancestor_results.forall(_.ok) && !progress.stopped) { progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...") store.clean_output(session_name) using(store.open_database(session_name, output = true))( store.init_session_info(_, session_name)) val numa_node = numa_nodes.next(used_node) val job = new Build_Job(progress, session_name, info, deps, store, do_store, verbose, numa_node, queue.command_timings(session_name)) loop(pending, running + (session_name -> (ancestor_heaps, job)), results) } else { progress.echo(session_name + " CANCELLED") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, None, info))) } case None => sleep(); loop(pending, running, results) } ///}}} case None => sleep(); loop(pending, running, results) } } } /* build results */ val results = { val results0 = if (deps.is_empty) { progress.echo_warning("Nothing to build") Map.empty[String, Result] } else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) } new Results( (for ((name, result) <- results0.iterator) yield (name, (result.process, result.info))).toMap) } if (export_files) { for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) { val info = results.info(name) if (info.export_files.nonEmpty) { progress.echo("Exporting " + info.name + " ...") for ((dir, prune, pats) <- info.export_files) { Export.export_files(store, name, info.dir + dir, progress = if (verbose) progress else new Progress, export_prune = prune, export_patterns = pats) } } } } if (results.rc != 0 && (verbose || !no_build)) { val unfinished = (for { name <- results.sessions.iterator if !results(name).ok } yield name).toList.sorted progress.echo("Unfinished session(s): " + commas(unfinished)) } /* PDF/HTML presentation */ if (!no_build && !progress.stopped && results.ok) { val selected = full_sessions_selection.toSet val presentation_chapters = (for { session_name <- deps.sessions_structure.build_topological_order.iterator info = results.info(session_name) if selected(session_name) && presentation.enabled(info) && results(session_name).ok } yield (info.chapter, (session_name, info.description))).toList if (presentation_chapters.nonEmpty) { val presentation_dir = presentation.dir(store) progress.echo("Presentation in " + presentation_dir.absolute) val resources = Resources.empty val html_context = Presentation.html_context() 
using(store.open_database_context())(db_context => for ((_, (session_name, _)) <- presentation_chapters) { progress.expose_interrupt() progress.echo("Presenting " + session_name + " ...") Presentation.session_html( resources, session_name, deps, db_context, progress = progress, verbose = verbose, html_context = html_context, - html_elements = Presentation.html_elements1, presentation = presentation) + elements = Presentation.elements1, presentation = presentation) }) val browser_chapters = presentation_chapters.groupBy(_._1). map({ case (chapter, es) => (chapter, es.map(_._2)) }).filterNot(_._2.isEmpty) for ((chapter, entries) <- browser_chapters) Presentation.update_chapter_index(presentation_dir, chapter, entries) if (browser_chapters.nonEmpty) Presentation.make_global_index(presentation_dir) } } results } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions", Scala_Project.here, args => { val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS")) var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var numa_shuffling = false var presentation = Presentation.Context.none var requirements = false var soft_build = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_heap = false var clean_build = false var dirs: List[Path] = Nil var export_files = false var fresh_build = false var session_groups: List[String] = Nil var max_jobs = 1 var check_keywords: Set[String] = Set.empty var list_files = false var no_build = false var options = Options.init(opts = build_options) var verbose = false var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle build [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -N cyclic shuffling of NUMA CPU nodes (performance tuning) -P DIR enable HTML/PDF presentation in directory (":" for default) -R refer to requirements of selected sessions -S soft build: only observe changes of sources, not heap images -X NAME exclude sessions from group NAME and all descendants -a select all sessions -b build heap images -c clean build -d DIR include session directory -e export files from session specification into file-system -f fresh build -g NAME select session group NAME -j INT maximum number of parallel jobs (default 1) -k KEYWORD check theory sources for conflicts with proposed keywords -l list session source files -n no build -- test dependencies only -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose -x NAME exclude session NAME and all descendants Build and manage Isabelle sessions, depending on implicit settings: """ + Library.prefix_lines(" ", Build_Log.Settings.show()) + "\n", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "N" -> (_ => numa_shuffling = true), "P:" -> (arg => presentation = Presentation.Context.make(arg)), "R" -> (_ => requirements = true), "S" -> (_ => soft_build = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "b" -> (_ => build_heap = true), "c" -> (_ => clean_build = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "e" -> (_ => export_files = true), "f" -> (_ => fresh_build = true), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "k:" -> 
(arg => check_keywords = check_keywords + arg), "l" -> (_ => list_files = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) val start_date = Date.now() if (verbose) { progress.echo( "Started at " + Build_Log.print_date(start_date) + " (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")") progress.echo(Build_Log.Settings.show() + "\n") } val results = progress.interrupt_handler { build(options, selection = Sessions.Selection( requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions), presentation = presentation, progress = progress, check_unknown_files = Mercurial.is_repository(Path.explode("~~")), build_heap = build_heap, clean_build = clean_build, dirs = dirs, select_dirs = select_dirs, numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling), max_jobs = max_jobs, list_files = list_files, check_keywords = check_keywords, fresh_build = fresh_build, no_build = no_build, soft_build = soft_build, verbose = verbose, export_files = export_files) } val end_date = Date.now() val elapsed_time = end_date.time - start_date.time if (verbose) { progress.echo("\nFinished at " + Build_Log.print_date(end_date)) } val total_timing = (Timing.zero /: results.sessions.iterator.map(a => results(a).timing))(_ + _). copy(elapsed = elapsed_time) progress.echo(total_timing.message_resources) sys.exit(results.rc) }) /* build logic image */ def build_logic(options: Options, logic: String, progress: Progress = new Progress, build_heap: Boolean = false, dirs: List[Path] = Nil, fresh: Boolean = false, strict: Boolean = false): Int = { val selection = Sessions.Selection.session(logic) val rc = if (!fresh && build(options, selection = selection, build_heap = build_heap, no_build = true, dirs = dirs).ok) 0 else { progress.echo("Build started for Isabelle/" + logic + " ...") Build.build(options, selection = selection, progress = progress, build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc } if (strict && rc != 0) error("Failed to build Isabelle/" + logic) else rc } } diff --git a/src/Tools/VSCode/src/preview_panel.scala b/src/Tools/VSCode/src/preview_panel.scala --- a/src/Tools/VSCode/src/preview_panel.scala +++ b/src/Tools/VSCode/src/preview_panel.scala @@ -1,46 +1,46 @@ /* Title: Tools/VSCode/src/preview_panel.scala Author: Makarius HTML document preview. 
*/ package isabelle.vscode import isabelle._ import java.io.{File => JFile} class Preview_Panel(resources: VSCode_Resources) { private val pending = Synchronized(Map.empty[JFile, Int]) def request(file: JFile, column: Int): Unit = pending.change(map => map + (file -> column)) def flush(channel: Channel): Boolean = { pending.change_result(map => { val map1 = (map /: map.iterator)({ case (m, (file, column)) => resources.get_model(file) match { case Some(model) => val snapshot = model.snapshot() if (snapshot.is_outdated) m else { val html_context = Presentation.html_context() val document = Presentation.html_document( - resources, snapshot, html_context, Presentation.html_elements2) + resources, snapshot, html_context, Presentation.elements2) channel.write(LSP.Preview_Response(file, column, document.title, document.content)) m - file } case None => m - file } }) (map1.nonEmpty, map1) }) } } diff --git a/src/Tools/jEdit/src/document_model.scala b/src/Tools/jEdit/src/document_model.scala --- a/src/Tools/jEdit/src/document_model.scala +++ b/src/Tools/jEdit/src/document_model.scala @@ -1,697 +1,697 @@ /* Title: Tools/jEdit/src/document_model.scala Author: Fabian Immler, TU Munich Author: Makarius Document model connected to jEdit buffer or external file: content of theory node or auxiliary file (blob). */ package isabelle.jedit import isabelle._ import java.io.{File => JFile} import scala.collection.mutable import scala.annotation.tailrec import org.gjt.sp.jedit.View import org.gjt.sp.jedit.Buffer import org.gjt.sp.jedit.buffer.{BufferAdapter, BufferListener, JEditBuffer} object Document_Model { /* document models */ sealed case class State( models: Map[Document.Node.Name, Document_Model] = Map.empty, buffer_models: Map[JEditBuffer, Buffer_Model] = Map.empty, overlays: Document.Overlays = Document.Overlays.empty) { def file_models_iterator: Iterator[(Document.Node.Name, File_Model)] = for { (node_name, model) <- models.iterator if model.isInstanceOf[File_Model] } yield (node_name, model.asInstanceOf[File_Model]) def document_blobs: Document.Blobs = Document.Blobs( (for { (node_name, model) <- models.iterator blob <- model.get_blob } yield (node_name -> blob)).toMap) def open_buffer(session: Session, node_name: Document.Node.Name, buffer: Buffer) : (Buffer_Model, State) = { val old_model = models.get(node_name) match { case Some(file_model: File_Model) => Some(file_model) case Some(buffer_model: Buffer_Model) => Some(buffer_model.exit()) case _ => None } val buffer_model = Buffer_Model(session, node_name, buffer).init(old_model) (buffer_model, copy(models = models + (node_name -> buffer_model), buffer_models = buffer_models + (buffer -> buffer_model))) } def close_buffer(buffer: JEditBuffer): State = { buffer_models.get(buffer) match { case None => this case Some(buffer_model) => val file_model = buffer_model.exit() copy(models = models + (file_model.node_name -> file_model), buffer_models = buffer_models - buffer) } } def provide_file(session: Session, node_name: Document.Node.Name, text: String): State = if (models.isDefinedAt(node_name)) this else { val edit = Text.Edit.insert(0, text) val model = File_Model.init(session, node_name, text, pending_edits = List(edit)) copy(models = models + (node_name -> model)) } } private val state = Synchronized(State()) // owned by GUI thread def get_models(): Map[Document.Node.Name, Document_Model] = state.value.models def get(name: Document.Node.Name): Option[Document_Model] = get_models().get(name) def get(buffer: JEditBuffer): Option[Buffer_Model] = 
state.value.buffer_models.get(buffer) def document_blobs(): Document.Blobs = state.value.document_blobs /* bibtex */ def bibtex_entries_iterator(): Iterator[Text.Info[(String, Document_Model)]] = Bibtex.entries_iterator(state.value.models) def bibtex_completion(history: Completion.History, rendering: Rendering, caret: Text.Offset) : Option[Completion.Result] = Bibtex.completion(history, rendering, caret, state.value.models) /* overlays */ def node_overlays(name: Document.Node.Name): Document.Node.Overlays = state.value.overlays(name) def insert_overlay(command: Command, fn: String, args: List[String]): Unit = state.change(st => st.copy(overlays = st.overlays.insert(command, fn, args))) def remove_overlay(command: Command, fn: String, args: List[String]): Unit = state.change(st => st.copy(overlays = st.overlays.remove(command, fn, args))) /* sync external files */ def sync_files(changed_files: Set[JFile]): Boolean = { state.change_result(st => { val changed_models = (for { (node_name, model) <- st.file_models_iterator file <- model.file if changed_files(file) text <- PIDE.resources.read_file_content(node_name) if model.content.text != text } yield { val content = Document_Model.File_Content(text) val edits = Text.Edit.replace(0, model.content.text, text) (node_name, model.copy(content = content, pending_edits = model.pending_edits ::: edits)) }).toList if (changed_models.isEmpty) (false, st) else (true, st.copy(models = (st.models /: changed_models)(_ + _))) }) } /* syntax */ def syntax_changed(names: List[Document.Node.Name]) { GUI_Thread.require {} val models = state.value.models for (name <- names.iterator; model <- models.get(name)) { model match { case buffer_model: Buffer_Model => buffer_model.syntax_changed() case _ => } } } /* init and exit */ def init(session: Session, node_name: Document.Node.Name, buffer: Buffer): Buffer_Model = { GUI_Thread.require {} state.change_result(st => st.buffer_models.get(buffer) match { case Some(buffer_model) if buffer_model.node_name == node_name => buffer_model.init_token_marker() (buffer_model, st) case _ => val res = st.close_buffer(buffer).open_buffer(session, node_name, buffer) buffer.propertiesChanged() res }) } def exit(buffer: Buffer) { GUI_Thread.require {} state.change(st => if (st.buffer_models.isDefinedAt(buffer)) { val res = st.close_buffer(buffer) buffer.propertiesChanged() res } else st) } def provide_files(session: Session, files: List[(Document.Node.Name, String)]) { GUI_Thread.require {} state.change(st => (st /: files) { case (st1, (node_name, text)) => st1.provide_file(session, node_name, text) }) } /* required nodes */ def required_nodes(): Set[Document.Node.Name] = (for { (node_name, model) <- state.value.models.iterator if model.node_required } yield node_name).toSet def node_required(name: Document.Node.Name, toggle: Boolean = false, set: Boolean = false) { GUI_Thread.require {} val changed = state.change_result(st => st.models.get(name) match { case None => (false, st) case Some(model) => val required = if (toggle) !model.node_required else set model match { case model1: File_Model if required != model1.node_required => (true, st.copy(models = st.models + (name -> model1.copy(node_required = required)))) case model1: Buffer_Model if required != model1.node_required => model1.set_node_required(required); (true, st) case _ => (false, st) } }) if (changed) { PIDE.plugin.options_changed() PIDE.editor.flush() } } def view_node_required(view: View, toggle: Boolean = false, set: Boolean = false): Unit = 
Document_Model.get(view.getBuffer).foreach(model => node_required(model.node_name, toggle = toggle, set = set)) /* flushed edits */ def flush_edits(hidden: Boolean, purge: Boolean): (Document.Blobs, List[Document.Edit_Text]) = { GUI_Thread.require {} state.change_result(st => { val doc_blobs = st.document_blobs val buffer_edits = (for { (_, model) <- st.buffer_models.iterator edit <- model.flush_edits(doc_blobs, hidden).iterator } yield edit).toList val file_edits = (for { (node_name, model) <- st.file_models_iterator (edits, model1) <- model.flush_edits(doc_blobs, hidden) } yield (edits, node_name -> model1)).toList val model_edits = buffer_edits ::: file_edits.flatMap(_._1) val purge_edits = if (purge) { val purged = (for ((node_name, model) <- st.file_models_iterator) yield (node_name -> model.purge_edits(doc_blobs))).toList val imports = { val open_nodes = (for ((_, model) <- st.buffer_models.iterator) yield model.node_name).toList val touched_nodes = model_edits.map(_._1) val pending_nodes = for ((node_name, None) <- purged) yield node_name (open_nodes ::: touched_nodes ::: pending_nodes).map((_, Position.none)) } val retain = PIDE.resources.dependencies(imports).theories.toSet for ((node_name, Some(edits)) <- purged if !retain(node_name); edit <- edits) yield edit } else Nil val st1 = st.copy(models = st.models ++ file_edits.map(_._2) -- purge_edits.map(_._1)) PIDE.plugin.file_watcher.purge( (for { (_, model) <- st1.file_models_iterator file <- model.file } yield file.getParentFile).toSet) ((doc_blobs, model_edits ::: purge_edits), st1) }) } /* file content */ sealed case class File_Content(text: String) { lazy val bytes: Bytes = Bytes(Symbol.encode(text)) lazy val chunk: Symbol.Text_Chunk = Symbol.Text_Chunk(text) lazy val bibtex_entries: List[Text.Info[String]] = try { Bibtex.entries(text) } catch { case ERROR(_) => Nil } } /* HTTP preview */ private val plain_text_prefix = "plain_text=" def open_preview(view: View, plain_text: Boolean) { Document_Model.get(view.getBuffer) match { case Some(model) => val name = model.node_name val url = PIDE.plugin.http_server.url + PIDE.plugin.http_root + "/preview?" 
+ (if (plain_text) plain_text_prefix else "") + Url.encode(name.node) PIDE.editor.hyperlink_url(url).follow(view) case _ => } } def http_handlers(http_root: String): List[HTTP.Handler] = { val fonts_root = http_root + "/fonts" val preview_root = http_root + "/preview" val html = HTTP.get(preview_root, arg => for { query <- Library.try_unprefix(preview_root + "?", arg.uri.toString).map(Url.decode) name = Library.perhaps_unprefix(plain_text_prefix, query) model <- get(PIDE.resources.node_name(name)) } yield { val snapshot = model.await_stable_snapshot() val html_context = Presentation.html_context(fonts_url = HTML.fonts_dir(fonts_root)) val document = Presentation.html_document( - PIDE.resources, snapshot, html_context, Presentation.html_elements2, + PIDE.resources, snapshot, html_context, Presentation.elements2, plain_text = query.startsWith(plain_text_prefix)) HTTP.Response.html(document.content) }) List(HTTP.fonts(fonts_root), html) } } sealed abstract class Document_Model extends Document.Model { /* perspective */ def document_view_ranges(snapshot: Document.Snapshot): List[Text.Range] = Nil def node_perspective( doc_blobs: Document.Blobs, hidden: Boolean): (Boolean, Document.Node.Perspective_Text) = { GUI_Thread.require {} if (Isabelle.continuous_checking && is_theory) { val snapshot = this.snapshot() val reparse = snapshot.node.load_commands_changed(doc_blobs) val perspective = if (hidden) Text.Perspective.empty else { val view_ranges = document_view_ranges(snapshot) val load_ranges = snapshot.commands_loading_ranges(PIDE.editor.visible_node) Text.Perspective(view_ranges ::: load_ranges) } val overlays = PIDE.editor.node_overlays(node_name) (reparse, Document.Node.Perspective(node_required, perspective, overlays)) } else (false, Document.Node.no_perspective_text) } /* snapshot */ @tailrec final def await_stable_snapshot(): Document.Snapshot = { val snapshot = this.snapshot() if (snapshot.is_outdated) { PIDE.options.seconds("editor_output_delay").sleep await_stable_snapshot() } else snapshot } } object File_Model { def empty(session: Session): File_Model = File_Model(session, Document.Node.Name.empty, None, Document_Model.File_Content(""), false, Document.Node.no_perspective_text, Nil) def init(session: Session, node_name: Document.Node.Name, text: String, node_required: Boolean = false, last_perspective: Document.Node.Perspective_Text = Document.Node.no_perspective_text, pending_edits: List[Text.Edit] = Nil): File_Model = { val file = JEdit_Lib.check_file(node_name.node) file.foreach(PIDE.plugin.file_watcher.register_parent(_)) val content = Document_Model.File_Content(text) val node_required1 = node_required || File_Format.registry.is_theory(node_name) File_Model(session, node_name, file, content, node_required1, last_perspective, pending_edits) } } case class File_Model( session: Session, node_name: Document.Node.Name, file: Option[JFile], content: Document_Model.File_Content, node_required: Boolean, last_perspective: Document.Node.Perspective_Text, pending_edits: List[Text.Edit]) extends Document_Model { /* text */ def get_text(range: Text.Range): Option[String] = range.try_substring(content.text) /* header */ def node_header: Document.Node.Header = PIDE.resources.special_header(node_name) getOrElse PIDE.resources.check_thy(node_name, Scan.char_reader(content.text), strict = false) /* content */ def node_position(offset: Text.Offset): Line.Node_Position = Line.Node_Position(node_name.node, Line.Position.zero.advance(content.text.substring(0, offset))) def get_blob: 
Option[Document.Blob] = if (is_theory) None else Some(Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty)) def bibtex_entries: List[Text.Info[String]] = if (Bibtex.is_bibtex(node_name.node)) content.bibtex_entries else Nil /* edits */ def update_text(text: String): Option[File_Model] = Text.Edit.replace(0, content.text, text) match { case Nil => None case edits => val content1 = Document_Model.File_Content(text) val pending_edits1 = pending_edits ::: edits Some(copy(content = content1, pending_edits = pending_edits1)) } def flush_edits(doc_blobs: Document.Blobs, hidden: Boolean) : Option[(List[Document.Edit_Text], File_Model)] = { val (reparse, perspective) = node_perspective(doc_blobs, hidden) if (reparse || pending_edits.nonEmpty || last_perspective != perspective) { val edits = node_edits(node_header, pending_edits, perspective) Some((edits, copy(last_perspective = perspective, pending_edits = Nil))) } else None } def purge_edits(doc_blobs: Document.Blobs): Option[List[Document.Edit_Text]] = if (pending_edits.nonEmpty || !File_Format.registry.is_theory(node_name) && (node_required || !Document.Node.is_no_perspective_text(last_perspective))) None else { val text_edits = List(Text.Edit.remove(0, content.text)) Some(node_edits(Document.Node.no_header, text_edits, Document.Node.no_perspective_text)) } /* snapshot */ def is_stable: Boolean = pending_edits.isEmpty def snapshot(): Document.Snapshot = session.snapshot(node_name, pending_edits) } case class Buffer_Model(session: Session, node_name: Document.Node.Name, buffer: Buffer) extends Document_Model { /* text */ def get_text(range: Text.Range): Option[String] = JEdit_Lib.get_text(buffer, range) /* header */ def node_header(): Document.Node.Header = { GUI_Thread.require {} PIDE.resources.special_header(node_name) getOrElse JEdit_Lib.buffer_lock(buffer) { PIDE.resources.check_thy(node_name, JEdit_Lib.buffer_reader(buffer), strict = false) } } /* perspective */ // owned by GUI thread private var _node_required = false def node_required: Boolean = _node_required def set_node_required(b: Boolean) { GUI_Thread.require { _node_required = b } } def document_view_iterator: Iterator[Document_View] = for { text_area <- JEdit_Lib.jedit_text_areas(buffer) doc_view <- Document_View.get(text_area) } yield doc_view override def document_view_ranges(snapshot: Document.Snapshot): List[Text.Range] = { GUI_Thread.require {} (for { doc_view <- document_view_iterator range <- doc_view.perspective(snapshot).ranges.iterator } yield range).toList } /* blob */ // owned by GUI thread private var _blob: Option[(Bytes, String, Symbol.Text_Chunk)] = None private def reset_blob(): Unit = GUI_Thread.require { _blob = None } def get_blob: Option[Document.Blob] = GUI_Thread.require { if (is_theory) None else { val (bytes, text, chunk) = _blob match { case Some(x) => x case None => val bytes = PIDE.resources.make_file_content(buffer) val text = buffer.getText(0, buffer.getLength) val chunk = Symbol.Text_Chunk(text) val x = (bytes, text, chunk) _blob = Some(x) x } val changed = pending_edits.nonEmpty Some(Document.Blob(bytes, text, chunk, changed)) } } /* bibtex entries */ // owned by GUI thread private var _bibtex_entries: Option[List[Text.Info[String]]] = None private def reset_bibtex_entries(): Unit = GUI_Thread.require { _bibtex_entries = None } def bibtex_entries: List[Text.Info[String]] = GUI_Thread.require { if (Bibtex.is_bibtex(node_name.node)) { _bibtex_entries match { case Some(entries) => entries case None => val text = 
JEdit_Lib.buffer_text(buffer) val entries = try { Bibtex.entries(text) } catch { case ERROR(msg) => Output.warning(msg); Nil } _bibtex_entries = Some(entries) entries } } else Nil } /* pending edits */ private object pending_edits { private val pending = new mutable.ListBuffer[Text.Edit] private var last_perspective = Document.Node.no_perspective_text def nonEmpty: Boolean = synchronized { pending.nonEmpty } def get_edits: List[Text.Edit] = synchronized { pending.toList } def get_last_perspective: Document.Node.Perspective_Text = synchronized { last_perspective } def set_last_perspective(perspective: Document.Node.Perspective_Text): Unit = synchronized { last_perspective = perspective } def flush_edits(doc_blobs: Document.Blobs, hidden: Boolean): List[Document.Edit_Text] = synchronized { GUI_Thread.require {} val edits = get_edits val (reparse, perspective) = node_perspective(doc_blobs, hidden) if (reparse || edits.nonEmpty || last_perspective != perspective) { pending.clear last_perspective = perspective node_edits(node_header(), edits, perspective) } else Nil } def edit(edits: List[Text.Edit]): Unit = synchronized { GUI_Thread.require {} reset_blob() reset_bibtex_entries() for (doc_view <- document_view_iterator) doc_view.rich_text_area.active_reset() pending ++= edits PIDE.editor.invoke() } } def is_stable: Boolean = !pending_edits.nonEmpty def snapshot(): Document.Snapshot = session.snapshot(node_name, pending_edits.get_edits) def flush_edits(doc_blobs: Document.Blobs, hidden: Boolean): List[Document.Edit_Text] = pending_edits.flush_edits(doc_blobs, hidden) /* buffer listener */ private val buffer_listener: BufferListener = new BufferAdapter { override def contentInserted(buffer: JEditBuffer, start_line: Int, offset: Int, num_lines: Int, length: Int) { pending_edits.edit(List(Text.Edit.insert(offset, buffer.getText(offset, length)))) } override def preContentRemoved(buffer: JEditBuffer, start_line: Int, offset: Int, num_lines: Int, removed_length: Int) { pending_edits.edit(List(Text.Edit.remove(offset, buffer.getText(offset, removed_length)))) } } /* syntax */ def syntax_changed() { JEdit_Lib.buffer_line_manager(buffer).setFirstInvalidLineContext(0) for (text_area <- JEdit_Lib.jedit_text_areas(buffer)) Untyped.method(Class.forName("org.gjt.sp.jedit.textarea.TextArea"), "foldStructureChanged"). invoke(text_area) buffer.invalidateCachedFoldLevels() } def init_token_marker() { Isabelle.buffer_token_marker(buffer) match { case Some(marker) if marker != buffer.getTokenMarker => buffer.setTokenMarker(marker) syntax_changed() case _ => } } /* init */ def init(old_model: Option[File_Model]): Buffer_Model = { GUI_Thread.require {} old_model match { case None => pending_edits.edit(List(Text.Edit.insert(0, JEdit_Lib.buffer_text(buffer)))) case Some(file_model) => set_node_required(file_model.node_required) pending_edits.set_last_perspective(file_model.last_perspective) pending_edits.edit( file_model.pending_edits ::: Text.Edit.replace(0, file_model.content.text, JEdit_Lib.buffer_text(buffer))) } buffer.addBufferListener(buffer_listener) init_token_marker() this } /* exit */ def exit(): File_Model = { GUI_Thread.require {} buffer.removeBufferListener(buffer_listener) init_token_marker() File_Model.init(session, node_name, JEdit_Lib.buffer_text(buffer), node_required, pending_edits.get_last_perspective, pending_edits.get_edits) } }
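
Usage note (a minimal sketch, not taken from the changeset): the hunks above replace the bare Markup.Elements parameters (html_elements1/html_elements2 and make_html_body) by the record Presentation.Elements with the predefined values elements1/elements2, and make_html now takes such a record. A call site that previously passed Presentation.html_elements2 migrates as in the VSCode/jEdit preview code above; the helper below is hypothetical and assumes a stable (non-outdated) snapshot and a fonts_root URL prefix are available:

  import isabelle._

  // Hypothetical helper mirroring Document_Model.http_handlers above:
  // render a stable snapshot as a self-contained HTML page with the new API.
  def preview_html(
    resources: Resources, snapshot: Document.Snapshot, fonts_root: String): String =
  {
    val html_context = Presentation.html_context(fonts_url = HTML.fonts_dir(fonts_root))
    val document =
      Presentation.html_document(resources, snapshot, html_context, Presentation.elements2)
    document.content
  }

  // Before this change the same call site read:
  //   Presentation.html_document(resources, snapshot, html_context, Presentation.html_elements2)
  // Presentation.elements1 (no Markdown, used by the "isabelle build" presentation) and
  // Presentation.elements2 (with Markdown and document-language spans, used by the previews)
  // bundle the markup filter (html) and the language filter (language) in one record.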