diff --git a/src/Pure/Thy/export.scala b/src/Pure/Thy/export.scala
--- a/src/Pure/Thy/export.scala
+++ b/src/Pure/Thy/export.scala
@@ -1,594 +1,521 @@
/* Title: Pure/Thy/export.scala
Author: Makarius
Manage theory exports: compressed blobs.
*/
package isabelle
import scala.annotation.tailrec
import scala.util.matching.Regex
object Export {
/* artefact names */

// Well-known export artefact names produced by the prover process (PIDE).
val DOCUMENT_ID = "PIDE/document_id"
val FILES = "PIDE/files"
val MARKUP = "PIDE/markup"
val MESSAGES = "PIDE/messages"
// Document preparation artefacts.
val DOCUMENT_PREFIX = "document/"
val DOCUMENT_LATEX = DOCUMENT_PREFIX + "latex"
val DOCUMENT_CITATIONS = DOCUMENT_PREFIX + "citations"
// Foundational theory content and proof terms.
val THEORY_PREFIX: String = "theory/"
val PROOFS_PREFIX: String = "proofs/"
// Every exported theory provides this entry (see read_theory_names below).
val THEORY_PARENTS: String = THEORY_PREFIX + "parents"
// Export names form a "/"-separated hierarchy.
def explode_name(s: String): List[String] = space_explode('/', s)
def implode_name(elems: Iterable[String]): String = elems.mkString("/")
/* SQL data model */

// Column/table definitions for the "isabelle_exports" table, plus a helper
// to build WHERE clauses over its (session, theory, name) primary key.
object Data {
// primary key: (session_name, theory_name, name)
val session_name = SQL.Column.string("session_name").make_primary_key
val theory_name = SQL.Column.string("theory_name").make_primary_key
val name = SQL.Column.string("name").make_primary_key
val executable = SQL.Column.bool("executable")
// whether "body" is stored compressed (see Entry.uncompressed)
val compressed = SQL.Column.bool("compressed")
val body = SQL.Column.bytes("body")
val table =
SQL.Table("isabelle_exports",
List(session_name, theory_name, name, executable, compressed, body))
// Restrict by session, optionally also by theory and/or entry name;
// an empty string means "no restriction" for the optional arguments.
def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source =
"WHERE " + Data.session_name.equal(session_name) +
(if (theory_name == "") "" else " AND " + Data.theory_name.equal(theory_name)) +
(if (name == "") "" else " AND " + Data.name.equal(name))
}
// Compose the qualified name "a:b"; an empty qualifier yields just "b".
def compound_name(a: String, b: String): String =
  if (a.nonEmpty) a + ":" + b else b
// Identifies one export entry: session + theory + "/"-separated entry name.
// Empty-string defaults stand for "unspecified".
sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") {
// "theory:name", or just "name" if theory is empty (see Export.compound_name)
val compound_name: String = Export.compound_name(theory, name)
// Turn "theory/name-elements" into a file-system path, dropping the first
// `prune` elements; fails if pruning would consume the whole path.
def make_path(prune: Int = 0): Path = {
val elems = theory :: space_explode('/', name)
if (elems.length < prune + 1) {
error("Cannot prune path by " + prune + " element(s): " + Path.make(elems))
}
else Path.make(elems.drop(prune))
}
// Does a row for this (session, theory, name) exist in the database?
def readable(db: SQL.Database): Boolean = {
val select = Data.table.select(List(Data.name), Data.where_equal(session, theory, name))
db.using_statement(select)(stmt => stmt.execute_query().next())
}
// Read the entry from the database, if present.  The stored body is wrapped
// in an already-evaluated Future (see Entry.body).
def read(db: SQL.Database, cache: XML.Cache): Option[Entry] = {
val select =
Data.table.select(List(Data.executable, Data.compressed, Data.body),
Data.where_equal(session, theory, name))
db.using_statement(select) { stmt =>
val res = stmt.execute_query()
if (res.next()) {
val executable = res.bool(Data.executable)
val compressed = res.bool(Data.compressed)
val bytes = res.bytes(Data.body)
val body = Future.value(compressed, bytes)
Some(Entry(this, executable, body, cache))
}
else None
}
}
}
// All theory names of a session, alphabetically ordered: a theory is
// recognized by the presence of its THEORY_PARENTS entry.
def read_theory_names(db: SQL.Database, session_name: String): List[String] = {
val select =
Data.table.select(List(Data.theory_name),
Data.where_equal(session_name, name = THEORY_PARENTS)) +
" ORDER BY " + Data.theory_name
db.using_statement(select)(stmt =>
stmt.execute_query().iterator(_.string(Data.theory_name)).toList)
}
// All entry names of a session, sorted by (theory_name, name).
def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] = {
val select =
Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) +
" ORDER BY " + Data.theory_name + ", " + Data.name
db.using_statement(select)(stmt =>
stmt.execute_query().iterator(res =>
Entry_Name(session = session_name,
theory = res.string(Data.theory_name),
name = res.string(Data.name))).toList)
}
// Uniform error/log message for a named export within a theory.
def message(msg: String, theory_name: String, name: String): String =
msg + " " + quote(name) + " for theory " + quote(theory_name)
// Placeholder entry with empty, uncompressed body and no session; used by
// permissive lookups (see Session_Context.apply).
def empty_entry(theory_name: String, name: String): Entry =
Entry(Entry_Name(theory = theory_name, name = name),
false, Future.value(false, Bytes.empty), XML.Cache.none)
// One export entry: identification plus (possibly compressed) body.  The
// body is a Future because compression may still be running (see make_entry);
// the Boolean of the pair records whether the Bytes are compressed.
sealed case class Entry(
entry_name: Entry_Name,
executable: Boolean,
body: Future[(Boolean, Bytes)],
cache: XML.Cache
) {
def session_name: String = entry_name.session
def theory_name: String = entry_name.theory
def name: String = entry_name.name
override def toString: String = name
def compound_name: String = entry_name.compound_name
def name_has_prefix(s: String): Boolean = name.startsWith(s)
val name_elems: List[String] = explode_name(name)
// proper extension: name starts with elems but is strictly longer
def name_extends(elems: List[String]): Boolean =
name_elems.startsWith(elems) && name_elems != elems
def text: String = uncompressed.text
// Body bytes, decompressing on demand according to the stored flag.
def uncompressed: Bytes = {
val (compressed, bytes) = body.join
if (compressed) bytes.uncompress(cache = cache.xz) else bytes
}
def uncompressed_yxml: XML.Body =
YXML.parse_body(UTF8.decode_permissive(uncompressed), cache = cache)
// Insert this entry into the database; blocks on the body Future.
def write(db: SQL.Database): Unit = {
val (compressed, bytes) = body.join
db.using_statement(Data.table.insert()) { stmt =>
stmt.string(1) = session_name
stmt.string(2) = theory_name
stmt.string(3) = name
stmt.bool(4) = executable
stmt.bool(5) = compressed
stmt.bytes(6) = bytes
stmt.execute()
}
}
}
// Translate a glob-like pattern (see the "isabelle export" usage text) into
// a Regex: "?"/"*" match within one path element (excluding ':' and '/'),
// "**" also crosses '/' (still excluding ':'), "{a,b}" is alternation,
// "\c" escapes c.  The characters ".+()" are escaped for the regex engine;
// "[abc]"/"[^abc]" pass through unchanged as regex character classes.
// NOTE(review): other regex metacharacters (e.g. '|', '$') also pass through
// unescaped — presumably patterns never contain them; verify if hardening.
def make_regex(pattern: String): Regex = {
@tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex =
chs match {
// case order matters: "**" must be matched before single "*"
case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest)
case '*' :: rest => make("[^:/]*" :: result, depth, rest)
case '?' :: rest => make("[^:/]" :: result, depth, rest)
case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest)
case '{' :: rest => make("(" :: result, depth + 1, rest)
// ',' separates alternatives only inside "{...}" (depth tracks nesting)
case ',' :: rest if depth > 0 => make("|" :: result, depth, rest)
case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest)
case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest)
case c :: rest => make(c.toString :: result, depth, rest)
case Nil => result.reverse.mkString.r
}
make(Nil, 0, pattern.toList)
}
// Compile the glob patterns once and test an Entry_Name's compound name
// ("theory:name") against any of them.
def make_matcher(pats: List[String]): Entry_Name => Boolean = {
val regs = pats.map(make_regex)
(entry_name: Entry_Name) =>
regs.exists(_.pattern.matcher(entry_name.compound_name).matches)
}
// Build an Entry from prover protocol arguments; compression (if requested)
// runs asynchronously in a forked Future.
def make_entry(
session_name: String,
args: Protocol.Export.Args,
bytes: Bytes,
cache: XML.Cache
): Entry = {
val body =
if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.xz))
else Future.value((false, bytes))
val entry_name = Entry_Name(session = session_name, theory = args.theory_name, name = args.name)
Entry(entry_name, args.executable, body, cache)
}
/* specific entries */

// Parse the PIDE/document_id entry, if its text is a well-formed Long.
def read_document_id(read: String => Entry): Option[Long] =
read(DOCUMENT_ID).text match {
case Value.Long(id) => Some(id)
case _ => None
}
// Parse the PIDE/files entry: first line is the theory file, remaining
// lines are the blob files; None for an empty entry.
def read_files(read: String => Entry): Option[(String, List[String])] =
split_lines(read(FILES).text) match {
case thy_file :: blobs_files => Some((thy_file, blobs_files))
case Nil => None
}
/* database consumer thread */

def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer =
new Consumer(db, cache, progress)
// Background thread that collects export entries and writes them to the
// database in bulk transactions.  Errors (e.g. strict-mode duplicates) are
// accumulated and reported by shutdown().
class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) {
private val errors = Synchronized[List[String]](Nil)
private val consumer =
Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")(
// an entry joins the current bulk once its (compression) Future is done
bulk = { case (entry, _) => entry.body.is_finished },
consume =
{ (args: List[(Entry, Boolean)]) =>
val results =
db.transaction {
for ((entry, strict) <- args)
yield {
if (progress.stopped) {
entry.body.cancel()
Exn.Res(())
}
else if (entry.entry_name.readable(db)) {
// already present: only an error if the sender demanded strictness
if (strict) {
val msg = message("Duplicate export", entry.theory_name, entry.name)
errors.change(msg :: _)
}
Exn.Res(())
}
else Exn.capture { entry.write(db) }
}
}
(results, true)
})
// Enqueue a new entry; empty bodies and stopped progress are ignored.
def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
if (!progress.stopped && !body.is_empty) {
consumer.send(Export.make_entry(session_name, args, body, cache) -> args.strict)
}
}
// Terminate the thread and return accumulated errors in original order.
def shutdown(close: Boolean = false): List[String] = {
consumer.shutdown()
if (close) db.close()
errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil)
}
}
- /* abstract provider */
-
- object Provider {
- private def database_provider(
- db: SQL.Database,
- cache: XML.Cache,
- session: String,
- theory: String,
- _theory_names: Synchronized[Option[List[String]]]
- ): Provider = {
- new Provider {
- override def theory_names: List[String] =
- _theory_names.change_result { st =>
- val res = st.getOrElse(read_theory_names(db, session))
- (res, Some(res))
- }
-
- override def apply(export_name: String): Option[Entry] =
- if (theory.isEmpty) None
- else {
- Entry_Name(session = session, theory = theory, name = export_name)
- .read(db, cache)
- }
-
- override def focus(other_theory: String): Provider =
- if (other_theory == theory) this
- else database_provider(db, cache, session, theory, _theory_names)
-
- override def toString: String = db.toString
- }
- }
-
- def database(
- db: SQL.Database,
- cache: XML.Cache,
- session: String,
- theory: String = ""
- ): Provider = database_provider(db, cache, session, theory, Synchronized(None))
-
- def snapshot(
- resources: Resources,
- snapshot: Document.Snapshot
- ): Provider =
- new Provider {
- override def theory_names: List[String] =
- (for {
- (name, _) <- snapshot.version.nodes.iterator
- if name.is_theory && !resources.session_base.loaded_theory(name)
- } yield name.theory).toList
-
- override def apply(name: String): Option[Entry] =
- snapshot.all_exports.get(
- Entry_Name(session = Sessions.DRAFT, theory = snapshot.node_name.theory, name = name))
-
- override def focus(other_theory: String): Provider =
- if (other_theory == snapshot.node_name.theory) this
- else {
- val node_name =
- snapshot.version.nodes.theory_name(other_theory) getOrElse
- error("Bad theory " + quote(other_theory))
- Provider.snapshot(resources, snapshot.state.snapshot(node_name))
- }
-
- override def toString: String = snapshot.toString
- }
- }
-
- trait Provider {
- def theory_names: List[String]
-
- def apply(export_name: String): Option[Entry]
-
- def uncompressed_yxml(export_name: String): XML.Body =
- apply(export_name) match {
- case Some(entry) => entry.uncompressed_yxml
- case None => Nil
- }
-
- def focus(other_theory: String): Provider
- }
-
-
/* context for retrieval */

def context(db_context: Sessions.Database_Context): Context = new Context(db_context)
// Variant that owns its database context and closes it with the Context.
def open_context(store: Sessions.Store): Context =
new Context(store.open_database_context()) { override def close(): Unit = db_context.close() }
// Convenience: open a context and immediately descend into a session;
// closing the resulting Session_Context closes everything.
def open_session_context(
store: Sessions.Store,
session_base_info: Sessions.Base_Info,
document_snapshot: Option[Document.Snapshot] = None
): Session_Context = {
open_context(store).open_session(
session_base_info, document_snapshot = document_snapshot, close_context = true)
}
// One session's database with lazily-read theory/entry names.  close() is a
// no-op by default; overridden where the database is privately owned.
class Session_Database private[Export](val session: String, val db: SQL.Database) {
def close(): Unit = ()
lazy val theory_names: List[String] = read_theory_names(db, session)
lazy val entry_names: List[Entry_Name] = read_entry_names(db, session)
}
// Retrieval context over a database context; open_session() descends into a
// concrete session build hierarchy.
class Context private[Export](val db_context: Sessions.Database_Context) extends AutoCloseable {
context =>
override def toString: String = db_context.toString
def close(): Unit = ()
// Open all databases of the session's build hierarchy.  With a database
// server one shared connection serves all sessions; otherwise each session
// database file is opened individually (and closed via Session_Database).
def open_session(
session_base_info: Sessions.Base_Info,
document_snapshot: Option[Document.Snapshot] = None,
close_context: Boolean = false
): Session_Context = {
val session_base = session_base_info.check.base
val session_hierarchy =
session_base_info.sessions_structure.build_hierarchy(session_base.session_name)
val session_databases =
db_context.database_server match {
case Some(db) => session_hierarchy.map(name => new Session_Database(name, db))
case None =>
val store = db_context.store
val attempts = session_hierarchy.map(name => name -> store.try_open_database(name))
attempts.collectFirst({ case (name, None) => name }) match {
case Some(bad) =>
// one database missing: close those already opened, then fail
for ((_, Some(db)) <- attempts) db.close()
store.bad_database(bad)
case None =>
for ((name, Some(db)) <- attempts) yield {
new Session_Database(name, db) { override def close(): Unit = this.db.close() }
}
}
}
new Session_Context(db_context.cache, session_base, session_databases, document_snapshot) {
override def close(): Unit = {
session_databases.foreach(_.close())
if (close_context) context.close()
}
}
}
}
// Access to exports of a whole session stack, optionally overlaid with a
// live document snapshot (reported under the pseudo-session Sessions.DRAFT).
class Session_Context private[Export](
val cache: Term.Cache,
session_base: Sessions.Base,
db_hierarchy: List[Session_Database],
document_snapshot: Option[Document.Snapshot]
) extends AutoCloseable {
session_context =>
def close(): Unit = ()
def session_name: String =
if (document_snapshot.isDefined) Sessions.DRAFT
else session_base.session_name
// Sessions from the bottom of the build hierarchy upwards; the draft
// snapshot (if any) comes last.
def session_stack: List[String] =
((if (document_snapshot.isDefined) List(session_name) else Nil) :::
db_hierarchy.map(_.session)).reverse
// Generic selection: select1 filters snapshot entries, select2 reads from
// one session database.  An empty session name means "whole stack".
private def select[A](
session: String,
select1: Entry_Name => Option[A],
select2: Session_Database => List[A]
): List[A] = {
def sel(name: String): List[A] =
if (name == Sessions.DRAFT) {
(for {
snapshot <- document_snapshot.iterator
entry_name <- snapshot.all_exports.keysIterator
res <- select1(entry_name).iterator
} yield entry_name -> res).toList.sortBy(_._1.compound_name).map(_._2)
}
else { db_hierarchy.find(_.session == name).map(select2).getOrElse(Nil) }
if (session.nonEmpty) sel(session) else session_stack.flatMap(sel)
}
def entry_names(session: String = session_name): List[Entry_Name] =
select(session, Some(_), _.entry_names)
// theories are recognized via their THEORY_PARENTS entry
def theory_names(session: String = session_name): List[String] =
select(session, a => if(a.name == THEORY_PARENTS) Some(a.theory) else None, _.theory_names)
// Look up one export: the document snapshot (if present) takes precedence,
// then the database hierarchy is searched for the first hit.
def get(theory: String, name: String): Option[Entry] =
{
def snapshot_entry =
for {
snapshot <- document_snapshot
entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name)
entry <- snapshot.all_exports.get(entry_name)
} yield entry
def db_entry =
db_hierarchy.view.map(session_db =>
Export.Entry_Name(session = session_db.session, theory = theory, name = name)
.read(session_db.db, cache))
.collectFirst({ case Some(entry) => entry })
snapshot_entry orElse db_entry
}
// Strict lookup; permissive mode yields an empty placeholder entry instead
// of an error.
def apply(theory: String, name: String, permissive: Boolean = false): Entry =
get(theory, name) match {
case None if permissive => empty_entry(theory, name)
case None => error("Missing export entry " + quote(compound_name(theory, name)))
case Some(entry) => entry
}
def theory(theory: String): Theory_Context =
new Theory_Context(session_context, theory)
override def toString: String =
"Export.Session_Context(" + commas_quote(session_stack) + ")"
}
// Exports of one fixed theory within a Session_Context.
class Theory_Context private[Export](
  val session_context: Session_Context,
  val theory: String
) {
  // Optional lookup of one export of this theory.
  def get(name: String): Option[Entry] = session_context.get(theory, name)

  // Strict lookup; permissive mode yields an empty placeholder entry.
  def apply(name: String, permissive: Boolean = false): Entry =
    session_context.apply(theory, name, permissive = permissive)

  // Uncompressed YXML body of the given export, or Nil if absent.
  def uncompressed_yxml(name: String): XML.Body =
    get(name) match {
      case Some(entry) => entry.uncompressed_yxml
      case None => Nil
    }

  override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
}
/* export to file-system */

// Materialize exports of a session as files under export_dir.  Optionally
// list entry names and/or extract entries matching glob patterns; output
// paths may be pruned by a fixed number of leading elements.
def export_files(
store: Sessions.Store,
session_name: String,
export_dir: Path,
progress: Progress = new Progress,
export_prune: Int = 0,
export_list: Boolean = false,
export_patterns: List[String] = Nil
): Unit = {
using(store.open_database(session_name)) { db =>
val entry_names = read_entry_names(db, session_name)
// list
if (export_list) {
for (entry_name <- entry_names) progress.echo(entry_name.compound_name)
}
// export
if (export_patterns.nonEmpty) {
val matcher = make_matcher(export_patterns)
for {
entry_name <- entry_names if matcher(entry_name)
entry <- entry_name.read(db, store.cache)
} {
val path = export_dir + entry_name.make_path(prune = export_prune)
progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
Isabelle_System.make_directory(path.dir)
val bytes = entry.uncompressed
// avoid rewriting files whose content is unchanged
if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
File.set_executable(path, entry.executable)
}
}
}
}
/* Isabelle tool wrapper */

val default_export_dir: Path = Path.explode("export")
// Command-line tool "isabelle export": build (optional), then list and/or
// extract theory exports of one session via export_files above.
val isabelle_tool =
Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here,
{ args =>
/* arguments */
var export_dir = default_export_dir
var dirs: List[Path] = Nil
var export_list = false
var no_build = false
var options = Options.init()
var export_prune = 0
var export_patterns: List[String] = Nil
val getopts = Getopts("""
Usage: isabelle export [OPTIONS] SESSION
Options are:
-O DIR output directory for exported files (default: """ + default_export_dir + """)
-d DIR include session directory
-l list exports
-n no build of session
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-p NUM prune path of exported files by NUM elements
-x PATTERN extract files matching pattern (e.g. "*:**" for all)
List or export theory exports for SESSION: named blobs produced by
isabelle build. Option -l or -x is required; option -x may be repeated.
The PATTERN language resembles glob patterns in the shell, with ? and *
(both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
and variants {pattern1,pattern2,pattern3}.
""",
"O:" -> (arg => export_dir = Path.explode(arg)),
"d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
"l" -> (_ => export_list = true),
"n" -> (_ => no_build = true),
"o:" -> (arg => options = options + arg),
"p:" -> (arg => export_prune = Value.Int.parse(arg)),
"x:" -> (arg => export_patterns ::= arg))
val more_args = getopts(args)
// exactly one SESSION argument, and at least one of -l / -x
val session_name =
more_args match {
case List(session_name) if export_list || export_patterns.nonEmpty => session_name
case _ => getopts.usage()
}
val progress = new Console_Progress()
/* build */
if (!no_build) {
val rc =
progress.interrupt_handler {
Build.build_logic(options, session_name, progress = progress, dirs = dirs)
}
if (rc != Process_Result.RC.ok) sys.exit(rc)
}
/* export files */
val store = Sessions.store(options)
export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune,
export_list = export_list, export_patterns = export_patterns)
})
}
diff --git a/src/Pure/Thy/export_theory.scala b/src/Pure/Thy/export_theory.scala
--- a/src/Pure/Thy/export_theory.scala
+++ b/src/Pure/Thy/export_theory.scala
@@ -1,776 +1,751 @@
/* Title: Pure/Thy/export_theory.scala
Author: Makarius
Export foundational theory content.
*/
package isabelle
import scala.collection.immutable.SortedMap
object Export_Theory {
/** session content **/

// A session's theories as an acyclic graph; a None node marks a parent
// theory that belongs to an ancestor session (not read here).
sealed case class Session(name: String, theory_graph: Graph[String, Option[Theory]]) {
override def toString: String = name
def theory(theory_name: String): Option[Theory] =
if (theory_graph.defined(theory_name)) theory_graph.get_node(theory_name)
else None
// theories of this session in topological (dependency) order
def theories: List[Theory] =
theory_graph.topological_order.flatMap(theory)
}
// Read all theories available via the given session context and arrange
// them as a Session graph, with edges along the parent relation.
def read_session(
  session_context: Export.Session_Context,
  progress: Progress = new Progress,
  cache: Term.Cache = Term.Cache.make()
): Session = {
  val thys =
    for (theory <- session_context.theory_names()) yield {
      progress.echo("Reading theory " + theory)
      read_theory(session_context.theory(theory), cache = cache)
    }
  // nodes for the theories actually read here ...
  val graph0 =
    thys.foldLeft(Graph.string[Option[Theory]]) {
      case (g, thy) => g.default_node(thy.name, Some(thy))
    }
  // ... plus parent nodes and acyclic edges (outside parents remain None)
  val graph1 =
    thys.foldLeft(graph0) {
      case (g0, thy) =>
        thy.parents.foldLeft(g0) {
          case (g1, parent) => g1.default_node(parent, None).add_edge_acyclic(parent, thy.name)
        }
    }
  Session(session_context.session_name, graph1)
}
/** theory content **/

// Complete foundational content of one theory, as exported by the prover.
sealed case class Theory(name: String, parents: List[String],
types: List[Entity[Type]],
consts: List[Entity[Const]],
axioms: List[Entity[Axiom]],
thms: List[Entity[Thm]],
classes: List[Entity[Class]],
locales: List[Entity[Locale]],
locale_dependencies: List[Entity[Locale_Dependency]],
classrel: List[Classrel],
arities: List[Arity],
constdefs: List[Constdef],
typedefs: List[Typedef],
datatypes: List[Datatype],
spec_rules: List[Spec_Rule],
others: Map[String, List[Entity[Other]]]
) {
override def toString: String = name
// All entities with their content dropped (cheap summary view).
def entity_iterator: Iterator[Entity0] =
types.iterator.map(_.no_content) ++
consts.iterator.map(_.no_content) ++
axioms.iterator.map(_.no_content) ++
thms.iterator.map(_.no_content) ++
classes.iterator.map(_.no_content) ++
locales.iterator.map(_.no_content) ++
locale_dependencies.iterator.map(_.no_content) ++
(for { (_, xs) <- others; x <- xs.iterator } yield x.no_content)
// Intern all strings/terms of this theory via the given cache.
def cache(cache: Term.Cache): Theory =
Theory(cache.string(name),
parents.map(cache.string),
types.map(_.cache(cache)),
consts.map(_.cache(cache)),
axioms.map(_.cache(cache)),
thms.map(_.cache(cache)),
classes.map(_.cache(cache)),
locales.map(_.cache(cache)),
locale_dependencies.map(_.cache(cache)),
classrel.map(_.cache(cache)),
arities.map(_.cache(cache)),
constdefs.map(_.cache(cache)),
typedefs.map(_.cache(cache)),
datatypes.map(_.cache(cache)),
spec_rules.map(_.cache(cache)),
(for ((k, xs) <- others.iterator) yield cache.string(k) -> xs.map(_.cache(cache))).toMap)
}
// The "theory/parents" export: one parent theory name per line; None if
// the theory was not exported at all.
def read_theory_parents(theory_context: Export.Theory_Context): Option[List[String]] =
  theory_context.get(Export.THEORY_PARENTS)
    .map(entry => Library.trim_split_lines(entry.uncompressed.text))
// Placeholder for a theory without exports (see permissive read_theory).
def no_theory: Theory =
Theory("", Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Map.empty)
// Read one theory's full content from its Theory_Context.  Requires the
// THEORY_PARENTS export to be present; otherwise fail or — permissively —
// return the empty no_theory placeholder.
def read_theory(
  theory_context: Export.Theory_Context,
  permissive: Boolean = false,
  cache: Term.Cache = Term.Cache.none
): Theory = {
  val session_name = theory_context.session_context.session_name
  val theory_name = theory_context.theory
  read_theory_parents(theory_context) match {
    case None if permissive => no_theory
    case None =>
      error("Missing theory export in session " + quote(session_name) + ": " + quote(theory_name))
    case Some(parents) =>
      val theory =
        Theory(theory_name, parents,
          read_types(theory_context),
          read_consts(theory_context),
          read_axioms(theory_context),
          read_thms(theory_context),
          read_classes(theory_context),
          read_locales(theory_context),
          read_locale_dependencies(theory_context),
          read_classrel(theory_context),
          read_arities(theory_context),
          read_constdefs(theory_context),
          read_typedefs(theory_context),
          read_datatypes(theory_context),
          read_spec_rules(theory_context),
          read_others(theory_context))
      // intern via cache, unless caching is disabled
      if (cache.no_cache) theory else theory.cache(cache)
  }
}
- def read_pure[A](store: Sessions.Store, read: (Export.Provider, String, String) => A): A = {
- val session_name = Thy_Header.PURE
- val theory_name = Thy_Header.PURE
-
- using(store.open_database(session_name)) { db =>
- val provider = Export.Provider.database(db, store.cache, session_name)
- read(provider, session_name, theory_name)
- }
- }
-
- def read_pure_theory(store: Sessions.Store, cache: Term.Cache = Term.Cache.none): Theory =
- read_pure(store, read_theory(_, _, _, cache = cache))
-
- def read_pure_proof(
- store: Sessions.Store, id: Thm_Id, cache: Term.Cache = Term.Cache.none): Option[Proof] =
- read_pure(store, (provider, _, _) => read_proof(provider, id, cache = cache))
-
/* entities */

// Export-specific entity kinds, complementing the Markup kinds.
object Kind {
val TYPE = "type"
val CONST = "const"
val THM = "thm"
val PROOF = "proof"
val LOCALE_DEPENDENCY = "locale_dependency"
val DOCUMENT_HEADING = "document_heading"
val DOCUMENT_TEXT = "document_text"
val PROOF_TEXT = "proof_text"
}
// Normalize markup kinds (type_name/constant) to export kinds.
def export_kind(kind: String): String =
if (kind == Markup.TYPE_NAME) Kind.TYPE
else if (kind == Markup.CONSTANT) Kind.CONST
else kind
// Canonical "name|kind" key for an entity.
def export_kind_name(kind: String, name: String): String =
name + "|" + export_kind(kind)
// Content payload of a formal entity; cache() interns via Term.Cache.
abstract class Content[T] {
def cache(cache: Term.Cache): T
}
// Trivial content for entities whose payload was dropped (see no_content).
sealed case class No_Content() extends Content[No_Content] {
def cache(cache: Term.Cache): No_Content = this
}
// A named formal entity with position, optional PIDE id, unique serial and
// optional typed content.
sealed case class Entity[A <: Content[A]](
kind: String,
name: String,
xname: String,
pos: Position.T,
id: Option[Long],
serial: Long,
content: Option[A]
) {
// "name|kind" key (see export_kind_name)
val kname: String = export_kind_name(kind, name)
val range: Symbol.Range = Position.Range.unapply(pos).getOrElse(Text.Range.offside)
def export_kind: String = Export_Theory.export_kind(kind)
override def toString: String = export_kind + " " + quote(name)
// content, or error for entities that were stripped via no_content
def the_content: A =
if (content.isDefined) content.get else error("No content for " + toString)
def no_content: Entity0 = copy(content = None)
def cache(cache: Term.Cache): Entity[A] =
Entity(
cache.string(kind),
cache.string(name),
cache.string(xname),
cache.position(pos),
id,
serial,
content.map(_.cache(cache)))
}
type Entity0 = Entity[No_Content]
// Decode a list of entities from the named export of one theory: each
// XML.Tree must be a Markup.ENTITY element; malformed trees are reported
// by throwing the offending body.
def read_entities[A <: Content[A]](
  theory_context: Export.Theory_Context,
  export_name: String,
  kind: String,
  decode: XML.Decode.T[A]
): List[Entity[A]] = {
  def decode_entity(tree: XML.Tree): Entity[A] = {
    def err(): Nothing = throw new XML.XML_Body(List(tree))
    tree match {
      case XML.Elem(Markup(Markup.ENTITY, props), body) =>
        val name = Markup.Name.unapply(props) getOrElse err()
        val xname = Markup.XName.unapply(props) getOrElse err()
        // keep position properties, but drop the transient PIDE ID
        val pos = props.filter(p => Markup.position_property(p) && p._1 != Markup.ID)
        val id = Position.Id.unapply(props)
        val serial = Markup.Serial.unapply(props) getOrElse err()
        val content = if (body.isEmpty) None else Some(decode(body))
        Entity(kind, name, xname, pos, id, serial, content)
      case _ => err()
    }
  }
  theory_context.uncompressed_yxml(export_name).map(decode_entity)
}
/* approximative syntax */

object Assoc extends Enumeration {
val NO_ASSOC, LEFT_ASSOC, RIGHT_ASSOC = Value
}
// Concrete notation of a type/const: none, prefix with delimiter, or infix
// with associativity and priority.
sealed abstract class Syntax
case object No_Syntax extends Syntax
case class Prefix(delim: String) extends Syntax
case class Infix(assoc: Assoc.Value, delim: String, pri: Int) extends Syntax
// XML decoder for the three Syntax variants (selected by position).
def decode_syntax: XML.Decode.T[Syntax] =
XML.Decode.variant(List(
{ case (Nil, Nil) => No_Syntax },
{ case (List(delim), Nil) => Prefix(delim) },
{ case (Nil, body) =>
import XML.Decode._
val (ass, delim, pri) = triple(int, string, int)(body)
Infix(Assoc(ass), delim, pri) }))
/* types */

// Type constructor: notation, argument names, optional abbreviation (rhs).
sealed case class Type(syntax: Syntax, args: List[String], abbrev: Option[Term.Typ])
extends Content[Type] {
override def cache(cache: Term.Cache): Type =
Type(
syntax,
args.map(cache.string),
abbrev.map(cache.typ))
}
// Decode the "theory/types" entities of one theory.
def read_types(theory_context: Export.Theory_Context): List[Entity[Type]] =
  read_entities(theory_context, Export.THEORY_PREFIX + "types", Markup.TYPE_NAME,
    { body =>
      import XML.Decode._
      val (syntax, args, abbrev) =
        triple(decode_syntax, list(string), option(Term_XML.Decode.typ))(body)
      Type(syntax, args, abbrev)
    })
/* consts */

// Constant: notation, type arguments, type, optional abbreviation (rhs),
// and whether the constant is propositional.
sealed case class Const(
syntax: Syntax,
typargs: List[String],
typ: Term.Typ,
abbrev: Option[Term.Term],
propositional: Boolean
) extends Content[Const] {
override def cache(cache: Term.Cache): Const =
Const(
syntax,
typargs.map(cache.string),
cache.typ(typ),
abbrev.map(cache.term),
propositional)
}
// Decode the "theory/consts" entities of one theory.
def read_consts(theory_context: Export.Theory_Context): List[Entity[Const]] =
  read_entities(theory_context, Export.THEORY_PREFIX + "consts", Markup.CONSTANT,
    { body =>
      import XML.Decode._
      val (syntax, (typargs, (typ, (abbrev, propositional)))) =
        pair(decode_syntax,
          pair(list(string),
            pair(Term_XML.Decode.typ,
              pair(option(Term_XML.Decode.term), bool))))(body)
      Const(syntax, typargs, typ, abbrev, propositional)
    })
/* axioms */

// A proposition in context: type variables with sorts, term variables with
// types, and the statement itself.
sealed case class Prop(
typargs: List[(String, Term.Sort)],
args: List[(String, Term.Typ)],
term: Term.Term
) extends Content[Prop] {
override def cache(cache: Term.Cache): Prop =
Prop(
typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
cache.term(term))
}
// Decode a Prop from its XML triple (typargs, args, term).
def decode_prop(body: XML.Body): Prop = {
val (typargs, args, t) = {
import XML.Decode._
import Term_XML.Decode._
triple(list(pair(string, sort)), list(pair(string, typ)), term)(body)
}
Prop(typargs, args, t)
}
// Axiom: a bare proposition.
sealed case class Axiom(prop: Prop) extends Content[Axiom] {
override def cache(cache: Term.Cache): Axiom = Axiom(prop.cache(cache))
}
// Decode the "theory/axioms" entities of one theory.
def read_axioms(theory_context: Export.Theory_Context): List[Entity[Axiom]] =
  read_entities(theory_context, Export.THEORY_PREFIX + "axioms", Markup.AXIOM,
    body => Axiom(decode_prop(body)))
/* theorems */
// Global theorem identifier: proof serial plus defining theory.
sealed case class Thm_Id(serial: Long, theory_name: String)
// Theorem: statement, dependencies (names of theorems used), proof term.
sealed case class Thm(
prop: Prop,
deps: List[String],
proof: Term.Proof)
extends Content[Thm] {
override def cache(cache: Term.Cache): Thm =
Thm(
prop.cache(cache),
deps.map(cache.string),
cache.proof(proof))
}
// Decode the "theory/thms" entities of one theory.
def read_thms(theory_context: Export.Theory_Context): List[Entity[Thm]] =
  read_entities(theory_context, Export.THEORY_PREFIX + "thms", Kind.THM,
    { body =>
      import XML.Decode._
      import Term_XML.Decode._
      val (prop, deps, prf) = triple(decode_prop, list(string), proof)(body)
      Thm(prop, deps, prf)
    })
// Standalone proof box: a Prop's components plus the proof term itself.
sealed case class Proof(
typargs: List[(String, Term.Sort)],
args: List[(String, Term.Typ)],
term: Term.Term,
proof: Term.Proof
) {
def prop: Prop = Prop(typargs, args, term)
def cache(cache: Term.Cache): Proof =
Proof(
typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
cache.term(term),
cache.proof(proof))
}
// Read one proof box ("proofs/<serial>") from the theory that defines it.
// The variable environment from the argument list is used to decode the
// statement and the proof term.
def read_proof(
  session_context: Export.Session_Context,
  id: Thm_Id,
  cache: Term.Cache = Term.Cache.none
): Option[Proof] = {
  val theory_context = session_context.theory(id.theory_name)
  for { entry <- theory_context.get(Export.PROOFS_PREFIX + id.serial) }
  yield {
    val body = entry.uncompressed_yxml
    val (typargs, (args, (prop_body, proof_body))) = {
      import XML.Decode._
      import Term_XML.Decode._
      // keep prop/proof bodies raw here; decoded below with the variable env
      pair(list(pair(string, sort)), pair(list(pair(string, typ)), pair(x => x, x => x)))(body)
    }
    val env = args.toMap
    val prop = Term_XML.Decode.term_env(env)(prop_body)
    val proof = Term_XML.Decode.proof_env(env)(proof_body)
    val result = Proof(typargs, args, prop, proof)
    if (cache.no_cache) result else result.cache(cache)
  }
}
def read_proof_boxes(
- store: Sessions.Store,
- provider: Export.Provider,
+ session_context: Export.Session_Context,
proof: Term.Proof,
suppress: Thm_Id => Boolean = _ => false,
cache: Term.Cache = Term.Cache.none
): List[(Thm_Id, Proof)] = {
var seen = Set.empty[Long]
var result = SortedMap.empty[Long, (Thm_Id, Proof)]
def boxes(context: Option[(Long, Term.Proof)], prf: Term.Proof): Unit = {
prf match {
case Term.Abst(_, _, p) => boxes(context, p)
case Term.AbsP(_, _, p) => boxes(context, p)
case Term.Appt(p, _) => boxes(context, p)
case Term.AppP(p, q) => boxes(context, p); boxes(context, q)
case thm: Term.PThm if !seen(thm.serial) =>
seen += thm.serial
val id = Thm_Id(thm.serial, thm.theory_name)
if (!suppress(id)) {
- val read =
- if (id.pure) Export_Theory.read_pure_proof(store, id, cache = cache)
- else Export_Theory.read_proof(provider, id, cache = cache)
- read match {
+ Export_Theory.read_proof(session_context, id, cache = cache) match {
case Some(p) =>
result += (thm.serial -> (id -> p))
boxes(Some((thm.serial, p.proof)), p.proof)
case None =>
error("Missing proof " + thm.serial + " (theory " + quote (thm.theory_name) + ")" +
(context match {
case None => ""
case Some((i, p)) => " in proof " + i + ":\n" + p
}))
}
}
case _ =>
}
}
boxes(None, proof)
result.iterator.map(_._2).toList
}
/* type classes */
sealed case class Class(params: List[(String, Term.Typ)], axioms: List[Prop])
extends Content[Class] {
override def cache(cache: Term.Cache): Class =
Class(
params.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
axioms.map(_.cache(cache)))
}
- def read_classes(provider: Export.Provider): List[Entity[Class]] =
- read_entities(provider, Export.THEORY_PREFIX + "classes", Markup.CLASS,
+ def read_classes(theory_context: Export.Theory_Context): List[Entity[Class]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "classes", Markup.CLASS,
{ body =>
import XML.Decode._
import Term_XML.Decode._
val (params, axioms) = pair(list(pair(string, typ)), list(decode_prop))(body)
Class(params, axioms)
})
/* locales */
sealed case class Locale(
typargs: List[(String, Term.Sort)],
args: List[((String, Term.Typ), Syntax)],
axioms: List[Prop]
) extends Content[Locale] {
override def cache(cache: Term.Cache): Locale =
Locale(
typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
args.map({ case ((name, typ), syntax) => ((cache.string(name), cache.typ(typ)), syntax) }),
axioms.map(_.cache(cache)))
}
- def read_locales(provider: Export.Provider): List[Entity[Locale]] =
- read_entities(provider, Export.THEORY_PREFIX + "locales", Markup.LOCALE,
+ def read_locales(theory_context: Export.Theory_Context): List[Entity[Locale]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "locales", Markup.LOCALE,
{ body =>
import XML.Decode._
import Term_XML.Decode._
val (typargs, args, axioms) =
triple(list(pair(string, sort)), list(pair(pair(string, typ), decode_syntax)),
list(decode_prop))(body)
Locale(typargs, args, axioms)
})
/* locale dependencies */
sealed case class Locale_Dependency(
source: String,
target: String,
prefix: List[(String, Boolean)],
subst_types: List[((String, Term.Sort), Term.Typ)],
subst_terms: List[((String, Term.Typ), Term.Term)]
) extends Content[Locale_Dependency] {
override def cache(cache: Term.Cache): Locale_Dependency =
Locale_Dependency(
cache.string(source),
cache.string(target),
prefix.map({ case (name, mandatory) => (cache.string(name), mandatory) }),
subst_types.map({ case ((a, s), ty) => ((cache.string(a), cache.sort(s)), cache.typ(ty)) }),
subst_terms.map({ case ((x, ty), t) => ((cache.string(x), cache.typ(ty)), cache.term(t)) }))
def is_inclusion: Boolean =
subst_types.isEmpty && subst_terms.isEmpty
}
- def read_locale_dependencies(provider: Export.Provider): List[Entity[Locale_Dependency]] =
- read_entities(provider, Export.THEORY_PREFIX + "locale_dependencies", Kind.LOCALE_DEPENDENCY,
+ def read_locale_dependencies(
+ theory_context: Export.Theory_Context
+ ): List[Entity[Locale_Dependency]] = {
+ read_entities(theory_context, Export.THEORY_PREFIX + "locale_dependencies",
+ Kind.LOCALE_DEPENDENCY,
{ body =>
import XML.Decode._
import Term_XML.Decode._
val (source, (target, (prefix, (subst_types, subst_terms)))) =
pair(string, pair(string, pair(list(pair(string, bool)),
pair(list(pair(pair(string, sort), typ)), list(pair(pair(string, typ), term))))))(body)
Locale_Dependency(source, target, prefix, subst_types, subst_terms)
})
+ }
/* sort algebra */
sealed case class Classrel(class1: String, class2: String, prop: Prop) {
def cache(cache: Term.Cache): Classrel =
Classrel(cache.string(class1), cache.string(class2), prop.cache(cache))
}
- def read_classrel(provider: Export.Provider): List[Classrel] = {
- val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "classrel")
+ def read_classrel(theory_context: Export.Theory_Context): List[Classrel] = {
+ val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "classrel")
val classrel = {
import XML.Decode._
list(pair(decode_prop, pair(string, string)))(body)
}
for ((prop, (c1, c2)) <- classrel) yield Classrel(c1, c2, prop)
}
sealed case class Arity(
type_name: String,
domain: List[Term.Sort],
codomain: String,
prop: Prop
) {
def cache(cache: Term.Cache): Arity =
Arity(cache.string(type_name), domain.map(cache.sort), cache.string(codomain),
prop.cache(cache))
}
- def read_arities(provider: Export.Provider): List[Arity] = {
- val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "arities")
+ def read_arities(theory_context: Export.Theory_Context): List[Arity] = {
+ val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "arities")
val arities = {
import XML.Decode._
import Term_XML.Decode._
list(pair(decode_prop, triple(string, list(sort), string)))(body)
}
for ((prop, (a, b, c)) <- arities) yield Arity(a, b, c, prop)
}
/* Pure constdefs */
sealed case class Constdef(name: String, axiom_name: String) {
def cache(cache: Term.Cache): Constdef =
Constdef(cache.string(name), cache.string(axiom_name))
}
- def read_constdefs(provider: Export.Provider): List[Constdef] = {
- val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "constdefs")
+ def read_constdefs(theory_context: Export.Theory_Context): List[Constdef] = {
+ val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "constdefs")
val constdefs = {
import XML.Decode._
list(pair(string, string))(body)
}
for ((name, axiom_name) <- constdefs) yield Constdef(name, axiom_name)
}
/* HOL typedefs */
sealed case class Typedef(
name: String,
rep_type: Term.Typ,
abs_type: Term.Typ,
rep_name: String,
abs_name: String,
axiom_name: String
) {
def cache(cache: Term.Cache): Typedef =
Typedef(cache.string(name),
cache.typ(rep_type),
cache.typ(abs_type),
cache.string(rep_name),
cache.string(abs_name),
cache.string(axiom_name))
}
- def read_typedefs(provider: Export.Provider): List[Typedef] = {
- val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "typedefs")
+ def read_typedefs(theory_context: Export.Theory_Context): List[Typedef] = {
+ val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "typedefs")
val typedefs = {
import XML.Decode._
import Term_XML.Decode._
list(pair(string, pair(typ, pair(typ, pair(string, pair(string, string))))))(body)
}
for { (name, (rep_type, (abs_type, (rep_name, (abs_name, axiom_name))))) <- typedefs }
yield Typedef(name, rep_type, abs_type, rep_name, abs_name, axiom_name)
}
/* HOL datatypes */
sealed case class Datatype(
pos: Position.T,
name: String,
co: Boolean,
typargs: List[(String, Term.Sort)],
typ: Term.Typ,
constructors: List[(Term.Term, Term.Typ)]
) {
def id: Option[Long] = Position.Id.unapply(pos)
def cache(cache: Term.Cache): Datatype =
Datatype(
cache.position(pos),
cache.string(name),
co,
typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
cache.typ(typ),
constructors.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }))
}
- def read_datatypes(provider: Export.Provider): List[Datatype] = {
- val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "datatypes")
+ def read_datatypes(theory_context: Export.Theory_Context): List[Datatype] = {
+ val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "datatypes")
val datatypes = {
import XML.Decode._
import Term_XML.Decode._
list(pair(properties, pair(string, pair(bool, pair(list(pair(string, sort)),
pair(typ, list(pair(term, typ))))))))(body)
}
for ((pos, (name, (co, (typargs, (typ, constructors))))) <- datatypes)
yield Datatype(pos, name, co, typargs, typ, constructors)
}
/* Pure spec rules */
sealed abstract class Recursion {
def cache(cache: Term.Cache): Recursion =
this match {
case Primrec(types) => Primrec(types.map(cache.string))
case Primcorec(types) => Primcorec(types.map(cache.string))
case _ => this
}
}
case class Primrec(types: List[String]) extends Recursion
case object Recdef extends Recursion
case class Primcorec(types: List[String]) extends Recursion
case object Corec extends Recursion
case object Unknown_Recursion extends Recursion
val decode_recursion: XML.Decode.T[Recursion] = {
import XML.Decode._
variant(List(
{ case (Nil, a) => Primrec(list(string)(a)) },
{ case (Nil, Nil) => Recdef },
{ case (Nil, a) => Primcorec(list(string)(a)) },
{ case (Nil, Nil) => Corec },
{ case (Nil, Nil) => Unknown_Recursion }))
}
sealed abstract class Rough_Classification {
def is_equational: Boolean = this.isInstanceOf[Equational]
def is_inductive: Boolean = this == Inductive
def is_co_inductive: Boolean = this == Co_Inductive
def is_relational: Boolean = is_inductive || is_co_inductive
def is_unknown: Boolean = this == Unknown
def cache(cache: Term.Cache): Rough_Classification =
this match {
case Equational(recursion) => Equational(recursion.cache(cache))
case _ => this
}
}
case class Equational(recursion: Recursion) extends Rough_Classification
case object Inductive extends Rough_Classification
case object Co_Inductive extends Rough_Classification
case object Unknown extends Rough_Classification
val decode_rough_classification: XML.Decode.T[Rough_Classification] = {
import XML.Decode._
variant(List(
{ case (Nil, a) => Equational(decode_recursion(a)) },
{ case (Nil, Nil) => Inductive },
{ case (Nil, Nil) => Co_Inductive },
{ case (Nil, Nil) => Unknown }))
}
sealed case class Spec_Rule(
pos: Position.T,
name: String,
rough_classification: Rough_Classification,
typargs: List[(String, Term.Sort)],
args: List[(String, Term.Typ)],
terms: List[(Term.Term, Term.Typ)],
rules: List[Term.Term]
) {
def id: Option[Long] = Position.Id.unapply(pos)
def cache(cache: Term.Cache): Spec_Rule =
Spec_Rule(
cache.position(pos),
cache.string(name),
rough_classification.cache(cache),
typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
terms.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }),
rules.map(cache.term))
}
- def read_spec_rules(provider: Export.Provider): List[Spec_Rule] = {
- val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "spec_rules")
+ def read_spec_rules(theory_context: Export.Theory_Context): List[Spec_Rule] = {
+ val body = theory_context.uncompressed_yxml(Export.THEORY_PREFIX + "spec_rules")
val spec_rules = {
import XML.Decode._
import Term_XML.Decode._
list(
pair(properties, pair(string, pair(decode_rough_classification,
pair(list(pair(string, sort)), pair(list(pair(string, typ)),
pair(list(pair(term, typ)), list(term))))))))(body)
}
for ((pos, (name, (rough_classification, (typargs, (args, (terms, rules)))))) <- spec_rules)
yield Spec_Rule(pos, name, rough_classification, typargs, args, terms, rules)
}
/* other entities */
sealed case class Other() extends Content[Other] {
override def cache(cache: Term.Cache): Other = this
}
- def read_others(provider: Export.Provider): Map[String, List[Entity[Other]]] = {
+ def read_others(theory_context: Export.Theory_Context): Map[String, List[Entity[Other]]] = {
val kinds =
- provider(Export.THEORY_PREFIX + "other_kinds") match {
+ theory_context.get(Export.THEORY_PREFIX + "other_kinds") match {
case Some(entry) => split_lines(entry.uncompressed.text)
case None => Nil
}
val other = Other()
def read_other(kind: String): List[Entity[Other]] =
- read_entities(provider, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other)
+ read_entities(theory_context, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other)
kinds.map(kind => kind -> read_other(kind)).toMap
}
}
diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala
--- a/src/Pure/Thy/presentation.scala
+++ b/src/Pure/Thy/presentation.scala
@@ -1,676 +1,670 @@
/* Title: Pure/Thy/presentation.scala
Author: Makarius
HTML presentation of PIDE document content.
*/
package isabelle
import scala.annotation.tailrec
import scala.collection.immutable.SortedMap
import scala.collection.mutable
object Presentation {
/** HTML documents **/
/* HTML context */
sealed case class HTML_Document(title: String, content: String)
abstract class HTML_Context {
/* directory structure and resources */
def nodes: Nodes
def root_dir: Path
def theory_session(name: Document.Node.Name): Sessions.Info
def session_dir(info: Sessions.Info): Path =
root_dir + Path.explode(info.chapter_session)
def theory_dir(name: Document.Node.Name): Path =
session_dir(theory_session(name))
def files_path(name: Document.Node.Name, path: Path): Path =
theory_dir(name) + Path.explode("files") + path.squash.html
/* HTML content */
def head(title: String, rest: XML.Body = Nil): XML.Tree =
HTML.div("head", HTML.chapter(title) :: rest)
def source(body: XML.Body): XML.Tree = HTML.pre("source", body)
def contents(
heading: String,
items: List[XML.Body],
css_class: String = "contents"
) : List[XML.Elem] = {
if (items.isEmpty) Nil
else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items))))
}
val isabelle_css: String = File.read(HTML.isabelle_css)
def html_document(title: String, body: XML.Body, fonts_css: String): HTML_Document = {
val content =
HTML.output_document(
List(
HTML.style(fonts_css + "\n\n" + isabelle_css),
HTML.title(title)),
List(HTML.source(body)), css = "", structural = false)
HTML_Document(title, content)
}
}
/* presentation elements */
sealed case class Elements(
html: Markup.Elements = Markup.Elements.empty,
entity: Markup.Elements = Markup.Elements.empty,
language: Markup.Elements = Markup.Elements.empty)
val elements1: Elements =
Elements(
html = Rendering.foreground_elements ++ Rendering.text_color_elements +
Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE,
entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT,
Markup.CLASS, Markup.LOCALE, Markup.FREE))
val elements2: Elements =
Elements(
html = elements1.html ++ Rendering.markdown_elements,
language = Markup.Elements(Markup.Language.DOCUMENT))
/* per-session node info */
sealed case class File_Info(theory: String, is_theory: Boolean = false)
object Node_Info {
val empty: Node_Info = new Node_Info(Map.empty, Map.empty, Nil)
def make(theory: Export_Theory.Theory): Node_Info = {
val by_range = theory.entity_iterator.toList.groupBy(_.range)
val by_kind_name =
theory.entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap
val others = theory.others.keySet.toList
new Node_Info(by_range, by_kind_name, others)
}
}
class Node_Info private(
by_range: Map[Symbol.Range, List[Export_Theory.Entity0]],
by_kind_name: Map[(String, String), Export_Theory.Entity0],
val others: List[String]) {
def for_range(range: Symbol.Range): List[Export_Theory.Entity0] =
by_range.getOrElse(range, Nil)
def get_kind_name(kind: String, name: String): Option[String] =
by_kind_name.get((kind, name)).map(_.kname)
}
object Nodes {
val empty: Nodes = new Nodes(Map.empty, Map.empty)
def read(
presentation_sessions: List[String],
deps: Sessions.Deps,
db_context: Sessions.Database_Context
): Nodes = {
+ val export_context = Export.context(db_context)
+
type Batch = (String, List[String])
val batches =
presentation_sessions.foldLeft((Set.empty[String], List.empty[Batch]))(
{ case ((seen, batches), session) =>
val thys = deps(session).loaded_theories.keys.filterNot(seen)
(seen ++ thys, (session, thys) :: batches)
})._2
+
val theory_node_info =
Par_List.map[Batch, List[(String, Node_Info)]](
{ case (session, thys) =>
- db_context.database(session) { db =>
- val provider = Export.Provider.database(db, db_context.cache, session)
- val result =
- for (thy_name <- thys) yield {
- val theory =
- if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
- else {
- Export_Theory.read_theory(provider, session, thy_name,
- permissive = true, cache = db_context.cache)
- }
- thy_name -> Node_Info.make(theory)
- }
- Some(result)
- } getOrElse Nil
+ using(export_context.open_session(deps.base_info(session))) { session_context =>
+ for (thy_name <- thys) yield {
+ val theory_context = session_context.theory(thy_name)
+ val theory =
+ Export_Theory.read_theory(theory_context,
+ permissive = true, cache = db_context.cache)
+ thy_name -> Node_Info.make(theory)
+ }
+ }
}, batches).flatten.toMap
+
val files_info =
deps.sessions_structure.build_requirements(presentation_sessions).flatMap(session =>
- db_context.database(session) { db =>
- val res =
- Export.read_theory_names(db, session).flatMap { theory =>
- val files =
- Export.read_files(name =>
- Export.Entry_Name(session = session, theory = theory, name = name)
- .read(db, db_context.cache)
- .getOrElse(Export.empty_entry(theory, name)))
- files match {
- case None => Nil
- case Some((thy, other)) =>
- val thy_file_info = File_Info(theory, is_theory = true)
- (thy -> thy_file_info) :: other.map(_ -> File_Info(theory))
- }
+ using(export_context.open_session(deps.base_info(session))) { session_context =>
+ session_context.theory_names().flatMap { theory =>
+ val theory_context = session_context.theory(theory)
+ Export.read_files(theory_context(_, permissive = true)) match {
+ case None => Nil
+ case Some((thy, other)) =>
+ val thy_file_info = File_Info(theory, is_theory = true)
+ (thy -> thy_file_info) :: other.map(_ -> File_Info(theory))
}
- Some(res)
- }.getOrElse(Nil)).toMap
+ }
+ }).toMap
+
new Nodes(theory_node_info, files_info)
}
}
class Nodes private(
theory_node_info: Map[String, Node_Info],
val files_info: Map[String, File_Info]
) {
def apply(name: String): Node_Info = theory_node_info.getOrElse(name, Node_Info.empty)
def get(name: String): Option[Node_Info] = theory_node_info.get(name)
}
/* formal entities */
type Entity = Export_Theory.Entity[Export_Theory.No_Content]
object Entity_Context {
object Theory_Ref {
def unapply(props: Properties.T): Option[Document.Node.Name] =
(props, props, props) match {
case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file)) =>
Some(Resources.file_node(Path.explode(thy_file), theory = theory))
case _ => None
}
}
object Entity_Ref {
def unapply(props: Properties.T): Option[(Path, Option[String], String, String)] =
(props, props, props, props) match {
case (Markup.Entity.Ref.Prop(_), Position.Def_File(def_file),
Markup.Kind(kind), Markup.Name(name)) =>
val def_theory = Position.Def_Theory.unapply(props)
Some((Path.explode(def_file), def_theory, kind, name))
case _ => None
}
}
val empty: Entity_Context = new Entity_Context
def make(
session: String,
deps: Sessions.Deps,
node: Document.Node.Name,
html_context: HTML_Context): Entity_Context =
new Entity_Context {
private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = {
body match {
case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
case _ =>
Some {
val entities = html_context.nodes(node.theory).for_range(range)
val body1 =
if (seen_ranges.contains(range)) {
HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body))
}
else HTML.span(body)
entities.map(_.kname).foldLeft(body1) {
case (elem, id) => HTML.entity_def(HTML.span(HTML.id(id), List(elem)))
}
}
}
}
private def offset_id(range: Text.Range): String =
"offset_" + range.start + ".." + range.stop
private def physical_ref(thy_name: String, props: Properties.T): Option[String] = {
for {
range <- Position.Def_Range.unapply(props)
if thy_name == node.theory
} yield {
seen_ranges += range
offset_id(range)
}
}
private def logical_ref(thy_name: String, kind: String, name: String): Option[String] =
html_context.nodes.get(thy_name).flatMap(_.get_kind_name(kind, name))
override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = {
props match {
case Theory_Ref(node_name) =>
node_relative(deps, session, node_name).map(html_dir =>
HTML.link(html_dir + html_name(node_name), body))
case Entity_Ref(file_path, def_theory, kind, name) if file_path.get_ext == "thy" =>
for {
thy_name <-
def_theory orElse (if (File.eq(node.path, file_path)) Some(node.theory) else None)
node_name = Resources.file_node(file_path, theory = thy_name)
html_dir <- node_relative(deps, session, node_name)
html_file = node_file(node_name)
html_ref <-
logical_ref(thy_name, kind, name) orElse physical_ref(thy_name, props)
} yield {
HTML.entity_ref(HTML.link(html_dir + html_file + "#" + html_ref, body))
}
case _ => None
}
}
}
}
class Entity_Context {
def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = None
def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = None
}
/* HTML output */
private val div_elements =
Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.`enum`.name,
HTML.descr.name)
def make_html(
entity_context: Entity_Context,
elements: Elements,
xml: XML.Body
): XML.Body = {
def html_div(html: XML.Body): Boolean =
html exists {
case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
case XML.Text(_) => false
}
def html_class(c: String, html: XML.Body): XML.Body =
if (c == "") html
else if (html_div(html)) List(HTML.div(c, html))
else List(HTML.span(c, html))
def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
val (res1, offset) = html_body_single(tree, end_offset1)
(res1 ++ res, offset)
}
@tailrec
def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
xml_tree match {
case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
val (body1, offset) = html_body(body, end_offset)
if (elements.entity(kind)) {
entity_context.make_ref(props, body1) match {
case Some(link) => (List(link), offset)
case None => (body1, offset)
}
}
else (body1, offset)
case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
val (body1, offset) = html_body(body, end_offset)
(html_class(if (elements.language(name)) name else "", body1), offset)
case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
val (body1, offset) = html_body(body, end_offset)
(List(HTML.par(body1)), offset)
case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
val (body1, offset) = html_body(body, end_offset)
(List(HTML.item(body1)), offset)
case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
(Nil, end_offset - XML.symbol_length(text))
case XML.Elem(Markup.Markdown_List(kind), body) =>
val (body1, offset) = html_body(body, end_offset)
if (kind == Markup.ENUMERATE) (List(HTML.`enum`(body1)), offset)
else (List(HTML.list(body1)), offset)
case XML.Elem(markup, body) =>
val name = markup.name
val (body1, offset) = html_body(body, end_offset)
val html =
markup.properties match {
case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
html_class(kind, body1)
case _ =>
body1
}
Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
case Some(c) => (html_class(c.toString, html), offset)
case None => (html_class(name, html), offset)
}
case XML.Text(text) =>
val offset = end_offset - Symbol.length(text)
val body = HTML.text(Symbol.decode(text))
entity_context.make_def(Text.Range(offset, end_offset), body) match {
case Some(body1) => (List(body1), offset)
case None => (body, offset)
}
}
html_body(xml, XML.symbol_length(xml) + 1)._1
}
/* PIDE HTML document */
def html_document(
snapshot: Document.Snapshot,
html_context: HTML_Context,
elements: Elements,
plain_text: Boolean = false,
fonts_css: String = HTML.fonts_css()
): HTML_Document = {
require(!snapshot.is_outdated, "document snapshot outdated")
val name = snapshot.node_name
if (plain_text) {
val title = "File " + Symbol.cartouche_decoded(name.path.file_name)
val body = HTML.text(snapshot.node.source)
html_context.html_document(title, body, fonts_css)
}
else {
Resources.html_document(snapshot) getOrElse {
val title =
if (name.is_theory) "Theory " + quote(name.theory_base_name)
else "File " + Symbol.cartouche_decoded(name.path.file_name)
val xml = snapshot.xml_markup(elements = elements.html)
val body = make_html(Entity_Context.empty, elements, xml)
html_context.html_document(title, body, fonts_css)
}
}
}
/** HTML presentation **/
/* presentation context */
object Context {
val none: Context = new Context { def enabled: Boolean = false }
val standard: Context = new Context { def enabled: Boolean = true }
def dir(path: Path): Context =
new Context {
def enabled: Boolean = true
override def dir(store: Sessions.Store): Path = path
}
def make(s: String): Context =
if (s == ":") standard else dir(Path.explode(s))
}
abstract class Context private {
def enabled: Boolean
def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info
def dir(store: Sessions.Store): Path = store.presentation_dir
def dir(store: Sessions.Store, info: Sessions.Info): Path =
dir(store) + Path.explode(info.chapter_session)
}
/* maintain chapter index */
private val sessions_path = Path.basic(".sessions")
private def read_sessions(dir: Path): List[(String, String)] = {
val path = dir + sessions_path
if (path.is_file) {
import XML.Decode._
list(pair(string, string))(Symbol.decode_yxml(File.read(path)))
}
else Nil
}
def update_chapter(
presentation_dir: Path,
chapter: String,
new_sessions: List[(String, String)]
): Unit = {
val dir = Isabelle_System.make_directory(presentation_dir + Path.basic(chapter))
val sessions0 =
try { read_sessions(dir) }
catch { case _: XML.Error => Nil }
val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList
File.write(dir + sessions_path,
{
import XML.Encode._
YXML.string_of_body(list(pair(string, string))(sessions))
})
val title = "Isabelle/" + chapter + " sessions"
HTML.write_document(dir, "index.html",
List(HTML.title(title + Isabelle_System.isabelle_heading())),
HTML.chapter(title) ::
(if (sessions.isEmpty) Nil
else
List(HTML.div("sessions",
List(HTML.description(
sessions.map({ case (name, description) =>
val descr = Symbol.trim_blank_lines(description)
(List(HTML.link(name + "/index.html", HTML.text(name))),
if (descr == "") Nil
else HTML.break ::: List(HTML.pre(HTML.text(descr)))) })))))),
base = Some(presentation_dir))
}
def update_root(presentation_dir: Path): Unit = {
Isabelle_System.make_directory(presentation_dir)
HTML.init_fonts(presentation_dir)
Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"),
presentation_dir + Path.explode("isabelle.gif"))
val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library"
File.write(presentation_dir + Path.explode("index.html"),
HTML.header +
"""
""" + HTML.head_meta + """
""" + title + """
""" + File.read(Path.explode("~~/lib/html/library_index_content.template")) +
"""
""" + HTML.footer)
}
/* present session */
val session_graph_path = Path.explode("session_graph.pdf")
val readme_path = Path.explode("README.html")
def html_name(name: Document.Node.Name): String = Path.explode(name.theory_base_name).html.implode
def files_path(src_path: Path): String = (Path.explode("files") + src_path.squash.html).implode
private def node_file(name: Document.Node.Name): String =
if (name.node.endsWith(".thy")) html_name(name) else files_path(name.path)
private def session_relative(
deps: Sessions.Deps,
session0: String,
session1: String
): Option[String] = {
for {
info0 <- deps.sessions_structure.get(session0)
info1 <- deps.sessions_structure.get(session1)
} yield info0.relative_path(info1)
}
def node_relative(
deps: Sessions.Deps,
session0: String,
node_name: Document.Node.Name
): Option[String] = {
val session1 = deps(session0).theory_qualifier(node_name)
session_relative(deps, session0, session1)
}
def session_html(
session: String,
deps: Sessions.Deps,
db_context: Sessions.Database_Context,
progress: Progress = new Progress,
verbose: Boolean = false,
html_context: HTML_Context,
session_elements: Elements
): Unit = {
val info = deps.sessions_structure(session)
val options = info.options
val base = deps(session)
val session_hierarchy = deps.sessions_structure.build_hierarchy(session)
val session_dir = Isabelle_System.make_directory(html_context.session_dir(info))
Bytes.write(session_dir + session_graph_path,
graphview.Graph_File.make_pdf(options, base.session_graph_display))
val documents =
for {
doc <- info.document_variants
document <- db_context.database(session)(db =>
Document_Build.read_document(db, session, doc.name))
} yield {
val doc_path = (session_dir + doc.path.pdf).expand
if (verbose) progress.echo("Presenting document " + session + "/" + doc.name)
if (options.bool("document_echo")) progress.echo("Document at " + doc_path)
Bytes.write(doc_path, document.pdf)
doc
}
val view_links = {
val deps_link =
HTML.link(session_graph_path, HTML.text("theory dependencies"))
val readme_links =
if ((info.dir + readme_path).is_file) {
Isabelle_System.copy_file(info.dir + readme_path, session_dir + readme_path)
List(HTML.link(readme_path, HTML.text("README")))
}
else Nil
val document_links =
documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
Library.separate(HTML.break ::: HTML.nl,
(deps_link :: readme_links ::: document_links).
map(link => HTML.text("View ") ::: List(link))).flatten
}
def entity_context(name: Document.Node.Name): Entity_Context =
Entity_Context.make(session, deps, name, html_context)
sealed case class Seen_File(
src_path: Path,
thy_name: Document.Node.Name,
thy_session: String
) {
val files_path: Path = html_context.files_path(thy_name, src_path)
def check(src_path1: Path, thy_name1: Document.Node.Name, thy_session1: String): Boolean = {
val files_path1 = html_context.files_path(thy_name1, src_path1)
(src_path == src_path1 || files_path == files_path1) && thy_session == thy_session1
}
}
var seen_files = List.empty[Seen_File]
def present_theory(name: Document.Node.Name): Option[XML.Body] = {
progress.expose_interrupt()
Build_Job.read_theory(db_context, session_hierarchy, name.theory).flatMap { command =>
if (verbose) progress.echo("Presenting theory " + name)
val snapshot = Document.State.init.snippet(command)
val thy_elements =
session_elements.copy(entity =
html_context.nodes(name.theory).others.foldLeft(session_elements.entity)(_ + _))
val files_html =
for {
(src_path, xml) <- snapshot.xml_markup_blobs(elements = thy_elements.html)
if xml.nonEmpty
}
yield {
progress.expose_interrupt()
if (verbose) progress.echo("Presenting file " + src_path)
(src_path, html_context.source(
make_html(Entity_Context.empty, thy_elements, xml)))
}
val thy_html =
html_context.source(
make_html(entity_context(name), thy_elements,
snapshot.xml_markup(elements = thy_elements.html)))
val thy_session = html_context.theory_session(name).name
val thy_dir = Isabelle_System.make_directory(html_context.theory_dir(name))
val files =
for { (src_path, file_html) <- files_html }
yield {
seen_files.find(_.check(src_path, name, thy_session)) match {
case None => seen_files ::= Seen_File(src_path, name, thy_session)
case Some(seen_file) =>
error("Incoherent use of file name " + src_path + " as " + files_path(src_path) +
" in theory " + seen_file.thy_name + " vs. " + name)
}
val file_path = html_context.files_path(name, src_path)
val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
HTML.write_document(file_path.dir, file_path.file_name,
List(HTML.title(file_title)), List(html_context.head(file_title), file_html),
base = Some(html_context.root_dir))
List(HTML.link(files_path(src_path), HTML.text(file_title)))
}
val thy_title = "Theory " + name.theory_base_name
HTML.write_document(thy_dir, html_name(name),
List(HTML.title(thy_title)), List(html_context.head(thy_title), thy_html),
base = Some(html_context.root_dir))
if (thy_session == session) {
Some(
List(HTML.link(html_name(name),
HTML.text(name.theory_base_name) :::
(if (files.isEmpty) Nil else List(HTML.itemize(files))))))
}
else None
}
}
val theories = base.proper_session_theories.flatMap(present_theory)
val title = "Session " + session
HTML.write_document(session_dir, "index.html",
List(HTML.title(title + Isabelle_System.isabelle_heading())),
html_context.head(title, List(HTML.par(view_links))) ::
html_context.contents("Theories", theories),
base = Some(html_context.root_dir))
}
}
diff --git a/src/Pure/Thy/sessions.scala b/src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala
+++ b/src/Pure/Thy/sessions.scala
@@ -1,1519 +1,1522 @@
/* Title: Pure/Thy/sessions.scala
Author: Makarius
Cumulative session information.
*/
package isabelle
import java.io.{File => JFile}
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.StandardOpenOption
import scala.collection.immutable.{SortedSet, SortedMap}
import scala.collection.mutable
object Sessions {
/* session and theory names */
// canonical paths of session catalogue files
val ROOTS: Path = Path.explode("ROOTS")
val ROOT: Path = Path.explode("ROOT")

// plain file names, used e.g. by File_Format.detect below
val roots_name: String = "ROOTS"
val root_name: String = "ROOT"

// NOTE(review): presumably the theory context used when processing ROOT files — confirm against callers
val theory_import: String = "Pure.Sessions"

// reserved session names, excluded from regular use (see exclude_session)
val UNSORTED = "Unsorted"
val DRAFT = "Draft"
/** Is this the bootstrap session Pure? */
def is_pure(name: String): Boolean = Thy_Header.PURE == name
/** Session names that are never legal: empty or the reserved "Draft". */
def exclude_session(name: String): Boolean = name.isEmpty || name == DRAFT
/** Theory names that clash with conventional non-theory files in a session directory. */
def exclude_theory(name: String): Boolean =
  List(root_name, "README", "index", "bib").contains(name)
/* ROOTS file format */
/** File format for ROOTS session catalogues: a ROOTS file is presented to the
  * prover as a small generated theory wrapping a single ROOTS_file command. */
class File_Format extends isabelle.File_Format {
  val format_name: String = roots_name
  val file_ext = ""

  // a file belongs to this format iff its base name is literally "ROOTS"
  override def detect(name: String): Boolean =
    Thy_Header.split_file_name(name).exists({ case (_, file_name) => file_name == roots_name })

  override def theory_suffix: String = "ROOTS_file"
  override def theory_content(name: String): String =
    """theory "ROOTS" imports Pure begin ROOTS_file """ +
      Outer_Syntax.quote_string(name) + """ end"""
}
/* base info and source dependencies */
/** Accumulated name-space information of one session: theories, syntax,
  * sources, and errors collected while resolving its dependencies. */
sealed case class Base(
  session_name: String = "",
  session_pos: Position.T = Position.none,
  session_directories: Map[JFile, String] = Map.empty,
  global_theories: Map[String, String] = Map.empty,
  proper_session_theories: List[Document.Node.Name] = Nil,
  document_theories: List[Document.Node.Name] = Nil,
  loaded_theories: Graph[String, Outer_Syntax] = Graph.string, // cumulative imports
  used_theories: List[(Document.Node.Name, Options)] = Nil, // new imports
  load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty,
  known_theories: Map[String, Document.Node.Entry] = Map.empty,
  known_loaded_files: Map[String, List[Path]] = Map.empty,
  overall_syntax: Outer_Syntax = Outer_Syntax.empty,
  imported_sources: List[(Path, SHA1.Digest)] = Nil,
  session_sources: List[(Path, SHA1.Digest)] = Nil,
  session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
  errors: List[String] = Nil
) {
  override def toString: String =
    "Sessions.Base(session_name = " + quote(session_name) +
      ", loaded_theories = " + loaded_theories.size +
      ", used_theories = " + used_theories.length + ")"

  // session qualifier of a theory: global theories override the long-name prefix
  def theory_qualifier(name: String): String =
    global_theories.getOrElse(name, Long_Name.qualifier(name))
  def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)

  def loaded_theory(name: String): Boolean = loaded_theories.defined(name)
  def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory)

  // syntax of an already-loaded theory, if present in the cumulative graph
  def loaded_theory_syntax(name: String): Option[Outer_Syntax] =
    if (!loaded_theory(name)) None else Some(loaded_theories.get_node(name))
  def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] =
    loaded_theory_syntax(name.theory)

  // fall back on the overall session syntax for unknown theories
  def theory_syntax(name: Document.Node.Name): Outer_Syntax =
    loaded_theory_syntax(name).getOrElse(overall_syntax)

  // prefer the live document node's syntax, then loaded theory, then overall
  def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax =
    nodes(name).syntax.orElse(loaded_theory_syntax(name)).getOrElse(overall_syntax)
}
/** Dependency closure over a session structure: one Base per resolved session. */
sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base]) {
  override def toString: String = "Sessions.Deps(" + sessions_structure + ")"

  def is_empty: Boolean = session_bases.isEmpty
  // strict vs. optional lookup of a session base
  def apply(name: String): Base = session_bases(name)
  def get(name: String): Option[Base] = session_bases.get(name)

  // digests of the source files contributing to the given session
  def imported_sources(name: String): List[SHA1.Digest] =
    session_bases(name).imported_sources.map(_._2)
  def session_sources(name: String): List[SHA1.Digest] =
    session_bases(name).session_sources.map(_._2)

  // errors of all session bases, each tagged with its session name and position
  def errors: List[String] =
    (for {
      (name, base) <- session_bases.iterator
      if base.errors.nonEmpty
    } yield cat_lines(base.errors) +
      "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.session_pos)
    ).toList

  // identity if error-free, otherwise fail with all collected messages
  def check_errors: Deps =
    errors match {
      case Nil => this
      case errs => error(cat_lines(errs))
    }
+
+ def base_info(session: String): Base_Info =
+ Base_Info(sessions_structure, errors, apply(session), Nil)
}
/** Resolve source dependencies for every session of the given structure, in
  * imports-topological order, producing one Base per session.  Most problems
  * are collected into Base.errors rather than raised immediately.
  *
  * @param inlined_files  also process load commands (ML files etc.) and imported files
  * @param list_files     echo the sorted list of session files
  * @param check_keywords run keyword checks on all theory files */
def deps(sessions_structure: Structure,
  progress: Progress = new Progress,
  inlined_files: Boolean = false,
  verbose: Boolean = false,
  list_files: Boolean = false,
  check_keywords: Set[String] = Set.empty
): Deps = {
  // digest cache: avoid re-hashing files shared between sessions
  var cache_sources = Map.empty[JFile, SHA1.Digest]
  def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
    for {
      path <- paths
      file = path.file
      if cache_sources.isDefinedAt(file) || file.isFile
    }
    yield {
      cache_sources.get(file) match {
        case Some(digest) => (path, digest)
        case None =>
          val digest = SHA1.digest(file)
          cache_sources = cache_sources + (file -> digest)
          (path, digest)
      }
    }
  }

  // seed the fold with the bootstrap base (for Pure)
  val bootstrap_bases = {
    val base = sessions_structure.bootstrap
    Map(base.session_name -> base)
  }

  val session_bases =
    sessions_structure.imports_topological_order.foldLeft(bootstrap_bases) {
      case (session_bases, session_name) =>
        progress.expose_interrupt()

        val info = sessions_structure(session_name)
        try {
          // base of parent + imports, already present thanks to topological order
          val deps_base = info.deps_base(session_bases)
          val resources = new Resources(sessions_structure, deps_base)

          if (verbose || list_files) {
            val groups =
              if (info.groups.isEmpty) ""
              else info.groups.mkString(" (", " ", ")")
            progress.echo("Session " + info.chapter_session + groups)
          }

          val dependencies = resources.session_dependencies(info)

          val overall_syntax = dependencies.overall_syntax

          // theories that truly belong to this session (not merely imported)
          val proper_session_theories =
            dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name)

          val theory_files = dependencies.theories.map(_.path)

          // load commands may fail to parse: keep the message as a session error
          val (load_commands, load_commands_errors) =
            try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
            catch { case ERROR(msg) => (Nil, List(msg)) }

          val loaded_files =
            load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) })

          val session_files =
            (theory_files ::: loaded_files.flatMap(_._2) :::
              info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)

          val imported_files = if (inlined_files) dependencies.imported_files else Nil

          if (list_files)
            progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))

          if (check_keywords.nonEmpty) {
            Check_Keywords.check_keywords(
              progress, overall_syntax.keywords, check_keywords, theory_files)
          }

          // graph for display: this session's theories, plus collapsed nodes
          // for required ancestor sessions (transitively reduced)
          val session_graph_display: Graph_Display.Graph = {
            def session_node(name: String): Graph_Display.Node =
              Graph_Display.Node("[" + name + "]", "session." + name)

            def node(name: Document.Node.Name): Graph_Display.Node = {
              val qualifier = deps_base.theory_qualifier(name)
              if (qualifier == info.name)
                Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
              else session_node(qualifier)
            }

            val required_sessions =
              dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
                .map(theory => deps_base.theory_qualifier(theory))
                .filter(name => name != info.name && sessions_structure.defined(name))

            val required_subgraph =
              sessions_structure.imports_graph
                .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
                .transitive_closure
                .restrict(required_sessions.toSet)
                .transitive_reduction_acyclic

            val graph0 =
              required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
                case (g, session) =>
                  val a = session_node(session)
                  val bs = required_subgraph.imm_preds(session).toList.map(session_node)
                  bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
              }

            dependencies.entries.foldLeft(graph0) {
              case (g, entry) =>
                val a = node(entry.name)
                val bs = entry.header.imports.map(node).filterNot(_ == a)
                bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
            }
          }

          val known_theories =
            dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
              foldLeft(deps_base.known_theories)(_ + _)

          val known_loaded_files = deps_base.known_loaded_files ++ loaded_files

          // theories imported from sessions not reachable via ROOT "sessions"
          val import_errors = {
            val known_sessions =
              sessions_structure.imports_requirements(List(session_name)).toSet
            for {
              name <- dependencies.theories
              qualifier = deps_base.theory_qualifier(name)
              if !known_sessions(qualifier)
            } yield "Bad import of theory " + quote(name.toString) +
              ": need to include sessions " + quote(qualifier) + " in ROOT"
          }

          // validate the "document_theories" declarations of the ROOT entry
          val document_errors =
            info.document_theories.flatMap(
              {
                case (thy, pos) =>
                  val build_hierarchy =
                    if (sessions_structure.build_graph.defined(session_name)) {
                      sessions_structure.build_hierarchy(session_name)
                    }
                    else Nil

                  def err(msg: String): Option[String] =
                    Some(msg + " " + quote(thy) + Position.here(pos))

                  known_theories.get(thy).map(_.name) match {
                    case None => err("Unknown document theory")
                    case Some(name) =>
                      val qualifier = deps_base.theory_qualifier(name)
                      if (proper_session_theories.contains(name)) {
                        err("Redundant document theory from this session:")
                      }
                      else if (build_hierarchy.contains(qualifier)) None
                      else if (dependencies.theories.contains(name)) None
                      else err("Document theory from other session not imported properly:")
                  }
              })
          val document_theories =
            info.document_theories.map({ case (thy, _) => known_theories(thy).name })

          // theories living outside the session's declared directories
          val dir_errors = {
            val ok = info.dirs.map(_.canonical_file).toSet
            val bad =
              (for {
                name <- proper_session_theories.iterator
                path = name.master_dir_path
                if !ok(path.canonical_file)
                path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
              } yield (path1, name)).toList
            val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted

            val errs1 =
              for { (path1, name) <- bad }
              yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
            val errs2 =
              if (bad_dirs.isEmpty) Nil
              else List("Implicit use of session directories: " + commas(bad_dirs))
            val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
            val errs4 =
              (for {
                name <- proper_session_theories.iterator
                name1 <- resources.find_theory_node(name.theory)
                if name.node != name1.node
              } yield "Incoherent theory file import:\n " + name.path + " vs. \n " + name1.path)
                .toList

            errs1 ::: errs2 ::: errs3 ::: errs4
          }

          val sources_errors =
            for (p <- session_files if !p.is_file) yield "No such file: " + p

          val path_errors =
            try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
            catch { case ERROR(msg) => List(msg) }

          val bibtex_errors =
            try { info.bibtex_entries; Nil }
            catch { case ERROR(msg) => List(msg) }

          val base =
            Base(
              session_name = info.name,
              session_pos = info.pos,
              session_directories = sessions_structure.session_directories,
              global_theories = sessions_structure.global_theories,
              proper_session_theories = proper_session_theories,
              document_theories = document_theories,
              loaded_theories = dependencies.loaded_theories,
              used_theories = dependencies.theories_adjunct,
              load_commands = load_commands.toMap,
              known_theories = known_theories,
              known_loaded_files = known_loaded_files,
              overall_syntax = overall_syntax,
              imported_sources = check_sources(imported_files),
              session_sources = check_sources(session_files),
              session_graph_display = session_graph_display,
              errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
                document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
                bibtex_errors)

          session_bases + (info.name -> base)
        }
        catch {
          case ERROR(msg) =>
            cat_error(msg, "The error(s) above occurred in session " +
              quote(info.name) + Position.here(info.pos))
        }
    }

  Deps(sessions_structure, session_bases)
}
/* base info */
/** Result of base_info: the overall structure, accumulated errors, the base
  * of the requested session, and any synthetic session entries created. */
sealed case class Base_Info(
  sessions_structure: Structure,
  errors: List[String],
  base: Base,
  infos: List[Info]
) {
  // fail with all messages if anything went wrong, otherwise pass through
  def check: Base_Info = if (errors.nonEmpty) error(cat_lines(errors)) else this
  def session: String = base.session_name
}
/** Compute the Base_Info of one session, optionally relative to an ancestor.
  * With session_requirements, a synthetic "<name>_requirements(<ancestor>)"
  * session is fabricated that collects theory imports not already loaded by
  * the ancestor, so the session can be built on top of it. */
def base_info(options: Options,
  session: String,
  progress: Progress = new Progress,
  dirs: List[Path] = Nil,
  include_sessions: List[String] = Nil,
  session_ancestor: Option[String] = None,
  session_requirements: Boolean = false
): Base_Info = {
  val full_sessions = load_structure(options, dirs = dirs)

  val selected_sessions =
    full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
  val info = selected_sessions(session)
  // explicit ancestor wins, otherwise the declared parent
  val ancestor = session_ancestor orElse info.parent

  val (session1, infos1) =
    if (session_requirements && ancestor.isDefined) {
      val deps = Sessions.deps(selected_sessions, progress = progress)
      val base = deps(session)

      // membership test for theories the ancestor already provides;
      // the ancestor must not itself depend on this session
      val ancestor_loaded =
        deps.get(ancestor.get) match {
          case Some(ancestor_base)
          if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
            ancestor_base.loaded_theories.defined _
          case _ =>
            error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
        }

      // imported theories from other sessions, missing from the ancestor
      val required_theories =
        for {
          thy <- base.loaded_theories.keys
          if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session
        }
        yield thy

      if (required_theories.isEmpty) (ancestor.get, Nil)
      else {
        val other_name = info.name + "_requirements(" + ancestor.get + ")"
        Isabelle_System.isabelle_tmp_prefix()
        (other_name,
          List(
            make_info(info.options,
              dir_selected = false,
              dir = Path.explode("$ISABELLE_TMP_PREFIX"),
              chapter = info.chapter,
              Session_Entry(
                pos = info.pos,
                name = other_name,
                groups = info.groups,
                path = ".",
                parent = ancestor,
                description = "Required theory imports from other sessions",
                options = Nil,
                imports = info.deps,
                directories = Nil,
                theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
                document_theories = Nil,
                document_files = Nil,
                export_files = Nil,
                export_classpath = Nil))))
      }
    }
    else (session, Nil)

  // reload the structure only if a synthetic session was created
  val full_sessions1 =
    if (infos1.isEmpty) full_sessions
    else load_structure(options, dirs = dirs, infos = infos1)

  val selected_sessions1 =
    full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))

  val deps1 = Sessions.deps(selected_sessions1, progress = progress)

  Base_Info(full_sessions1, deps1.errors, deps1(session1), infos1)
}
/* cumulative session info */
/** Cumulative information about one session, as declared in its ROOT entry
  * plus derived data (digest, resolved paths, parsed options). */
sealed case class Info(
  name: String,
  chapter: String,
  dir_selected: Boolean,
  pos: Position.T,
  groups: List[String],
  dir: Path,
  parent: Option[String],
  description: String,
  directories: List[Path],
  options: Options,
  imports: List[String],
  theories: List[(Options, List[(String, Position.T)])],
  global_theories: List[String],
  document_theories: List[(String, Position.T)],
  document_files: List[(Path, Path)],
  export_files: List[(Path, Int, List[String])],
  export_classpath: List[String],
  meta_digest: SHA1.Digest
) {
  def chapter_session: String = chapter + "/" + name

  // relative path from this session's presentation dir to that of info1
  def relative_path(info1: Info): String =
    if (name == info1.name) ""
    else if (chapter == info1.chapter) "../" + info1.name + "/"
    else "../../" + info1.chapter_session + "/"

  // all direct session dependencies: parent first, then imports
  def deps: List[String] = parent.toList ::: imports

  // base for dependency resolution: the parent base enriched with the
  // known theories and loaded files of all imported sessions
  def deps_base(session_bases: String => Base): Base = {
    val parent_base = session_bases(parent.getOrElse(""))
    val imports_bases = imports.map(session_bases)
    parent_base.copy(
      known_theories =
        (for {
          base <- imports_bases.iterator
          (_, entry) <- base.known_theories.iterator
        } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _),
      known_loaded_files =
        imports_bases.iterator.map(_.known_loaded_files).
          foldLeft(parent_base.known_loaded_files)(_ ++ _))
  }

  def dirs: List[Path] = dir :: directories

  // a timeout is ignored when disabled or configured below 1ms
  def timeout_ignored: Boolean =
    !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1)

  def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))

  def document_enabled: Boolean =
    options.string("document") match {
      case "" | "false" => false
      case "pdf" | "true" => true
      case doc => error("Bad document specification " + quote(doc))
    }

  // parse the "document_variants" option; duplicate variant names are an error
  def document_variants: List[Document_Build.Document_Variant] = {
    val variants =
      Library.space_explode(':', options.string("document_variants")).
        map(Document_Build.Document_Variant.parse)

    val dups = Library.duplicates(variants.map(_.name))
    if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups))

    variants
  }

  // variants are only relevant when documents are enabled and files exist
  def documents: List[Document_Build.Document_Variant] = {
    val variants = document_variants
    if (!document_enabled || document_files.isEmpty) Nil else variants
  }

  def document_output: Option[Path] =
    options.string("document_output") match {
      case "" => None
      case s => Some(dir + Path.explode(s))
    }

  def browser_info: Boolean = options.bool("browser_info")

  // bibtex entries of all document files; lazy because it reads files from disk
  lazy val bibtex_entries: List[Text.Info[String]] =
    (for {
      (document_dir, file) <- document_files.iterator
      if Bibtex.is_bibtex(file.file_name)
      info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator
    } yield info).toList

  def record_proofs: Boolean = options.int("record_proofs") >= 2

  def is_afp: Boolean = chapter == AFP.chapter
  def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains)
}
/** Turn a parsed Session_Entry into a fully checked Info.
  * Validates session/theory names and parent constraints, resolves paths,
  * and computes the meta digest over all declaration-relevant fields.
  * Any error is wrapped with the session entry name and position. */
def make_info(
  options: Options,
  dir_selected: Boolean,
  dir: Path,
  chapter: String,
  entry: Session_Entry
): Info = {
  try {
    val name = entry.name

    // Pure is the only root: it must not have a parent, everything else must
    if (exclude_session(name)) error("Bad session name")
    if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
    if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")

    val session_path = dir + Path.explode(entry.path)
    val directories = entry.directories.map(dir => session_path + Path.explode(dir))

    val session_options = options ++ entry.options

    // per-group theory options, with reserved theory names rejected
    val theories =
      entry.theories.map({ case (opts, thys) =>
        (session_options ++ opts,
          thys.map({ case ((thy, pos), _) =>
            if (exclude_theory(thy))
              error("Bad theory name " + quote(thy) + Position.here(pos))
            else (thy, pos) })) })

    // theories marked (global): their base name must be unqualified
    val global_theories =
      for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
      yield {
        val thy_name = Path.explode(thy).file_name
        if (Long_Name.is_qualified(thy_name))
          error("Bad qualified name for global theory " +
            quote(thy_name) + Position.here(pos))
        else thy_name
      }

    // environment conditions from the "condition" option, with their status
    val conditions =
      theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted.
        map(x => (x, Isabelle_System.getenv(x) != ""))

    val document_files =
      entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })

    val export_files =
      entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })

    // digest over declaration content (positions stripped) for change detection
    val meta_digest =
      SHA1.digest(
        (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
          entry.theories_no_position, conditions, entry.document_theories_no_position,
          entry.document_files)
        .toString)

    Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path,
      entry.parent, entry.description, directories, session_options,
      entry.imports, theories, global_theories, entry.document_theories, document_files,
      export_files, entry.export_classpath, meta_digest)
  }
  catch {
    case ERROR(msg) =>
      error(msg + "\nThe error(s) above occurred in session entry " +
        quote(entry.name) + Position.here(entry.pos))
  }
}
/** Common selection values. */
object Selection {
  val empty: Selection = Selection()
  val all: Selection = Selection(all_sessions = true)

  // selection of exactly one session
  def session(session: String): Selection = Selection(sessions = session :: Nil)
}
/** Selection of sessions by name, group, and requirement flags. */
sealed case class Selection(
  requirements: Boolean = false,
  all_sessions: Boolean = false,
  base_sessions: List[String] = Nil,
  exclude_session_groups: List[String] = Nil,
  exclude_sessions: List[String] = Nil,
  session_groups: List[String] = Nil,
  sessions: List[String] = Nil
) {
  // pointwise union of two selections (booleans or-ed, lists merged)
  def ++ (other: Selection): Selection =
    Selection(
      requirements || other.requirements,
      all_sessions || other.all_sessions,
      Library.merge(base_sessions, other.base_sessions),
      Library.merge(exclude_session_groups, other.exclude_session_groups),
      Library.merge(exclude_sessions, other.exclude_sessions),
      Library.merge(session_groups, other.session_groups),
      Library.merge(sessions, other.sessions))
}
object Structure {
  val empty: Structure = make(Nil)

  /** Build the session structure: the parent ("build") and imports graphs,
    * plus the global tables of session directories and global theories.
    * Duplicates and dependency cycles are reported as errors. */
  def make(infos: List[Info]): Structure = {
    // add one kind of dependency edges to the graph, checking acyclicity
    def add_edges(
      graph: Graph[String, Info],
      kind: String,
      edges: Info => Iterable[String]
    ) : Graph[String, Info] = {
      def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = {
        if (!g.defined(parent))
          error("Bad " + kind + " session " + quote(parent) + " for " +
            quote(name) + Position.here(pos))

        try { g.add_edge_acyclic(parent, name) }
        catch {
          case exn: Graph.Cycles[_] =>
            error(cat_lines(exn.cycles.map(cycle =>
              "Cyclic session dependency of " +
                cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
        }
      }
      graph.iterator.foldLeft(graph) {
        case (g, (name, (info, _))) =>
          edges(info).foldLeft(g)(add_edge(info.pos, name, _, _))
      }
    }

    // nodes: one per session, duplicates rejected with both positions
    val info_graph =
      infos.foldLeft(Graph.string[Info]) {
        case (graph, info) =>
          if (graph.defined(info.name))
            error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
              Position.here(graph.get_node(info.name).pos))
          else graph.new_node(info.name, info)
      }
    val build_graph = add_edges(info_graph, "parent", _.parent)
    val imports_graph = add_edges(build_graph, "imports", _.imports)

    val session_positions: List[(String, Position.T)] =
      (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList

    // each canonical directory belongs to at most one session
    val session_directories: Map[JFile, String] =
      (for {
        session <- imports_graph.topological_order.iterator
        info = info_graph.get_node(session)
        dir <- info.dirs.iterator
      } yield (info, dir)).foldLeft(Map.empty[JFile, String]) {
        case (dirs, (info, dir)) =>
          val session = info.name
          val canonical_dir = dir.canonical_file
          dirs.get(canonical_dir) match {
            case Some(session1) =>
              val info1 = info_graph.get_node(session1)
              error("Duplicate use of directory " + dir +
                "\n for session " + quote(session1) + Position.here(info1.pos) +
                "\n vs. session " + quote(session) + Position.here(info.pos))
            case None => dirs + (canonical_dir -> session)
          }
      }

    // global theory name -> owning session, seeded with bootstrap theories;
    // the same theory may only be global in one session
    val global_theories: Map[String, String] =
      (for {
        session <- imports_graph.topological_order.iterator
        info = info_graph.get_node(session)
        thy <- info.global_theories.iterator }
        yield (info, thy)).foldLeft(Thy_Header.bootstrap_global_theories.toMap) {
        case (global, (info, thy)) =>
          val qualifier = info.name
          global.get(thy) match {
            case Some(qualifier1) if qualifier != qualifier1 =>
              error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
            case _ => global + (thy -> qualifier)
          }
      }

    new Structure(
      session_positions, session_directories, global_theories, build_graph, imports_graph)
  }
}
/** The overall session structure: two graphs over the same sessions —
  * build_graph (parent relation) and imports_graph (parent + imports) —
  * plus global lookup tables.  Construct via Structure.make. */
final class Structure private[Sessions](
  val session_positions: List[(String, Position.T)],
  val session_directories: Map[JFile, String],
  val global_theories: Map[String, String],
  val build_graph: Graph[String, Info],
  val imports_graph: Graph[String, Info]
) {
  sessions_structure =>

  // initial base for the bootstrap session (see deps above)
  def bootstrap: Base =
    Base(
      session_directories = session_directories,
      global_theories = global_theories,
      overall_syntax = Thy_Header.bootstrap_syntax)

  def dest_session_directories: List[(String, String)] =
    for ((file, session) <- session_directories.toList)
    yield (File.standard_path(file), session)

  // sessions grouped by chapter (prepended, so reverse topological per chapter)
  lazy val chapters: SortedMap[String, List[Info]] =
    build_graph.iterator.foldLeft(SortedMap.empty[String, List[Info]]) {
      case (chs, (_, (info, _))) =>
        chs + (info.chapter -> (info :: chs.getOrElse(info.chapter, Nil)))
    }

  def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph)
  def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph)

  // session lookup is via the (larger) imports graph
  def defined(name: String): Boolean = imports_graph.defined(name)
  def apply(name: String): Info = imports_graph.get_node(name)
  def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None

  def theory_qualifier(name: String): String =
    global_theories.getOrElse(name, Long_Name.qualifier(name))

  def check_sessions(names: List[String]): Unit = {
    val bad_sessions = SortedSet(names.filterNot(defined): _*).toList
    if (bad_sessions.nonEmpty)
      error("Undefined session(s): " + commas_quote(bad_sessions))
  }

  def check_sessions(sel: Selection): Unit =
    check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions)

  // names selected by sel within the given graph (before exclusion)
  private def selected(graph: Graph[String, Info], sel: Selection): List[String] = {
    check_sessions(sel)

    val select_group = sel.session_groups.toSet
    val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions)

    val selected0 =
      if (sel.all_sessions) graph.keys
      else {
        (for {
          (name, (info, _)) <- graph.iterator
          if info.dir_selected || select_session(name) || info.groups.exists(select_group)
        } yield name).toList
      }

    // requirements mode: the prerequisites of the selection, minus itself
    if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList
    else selected0
  }

  // restrict the structure to the selection closure, minus excluded sessions
  def selection(sel: Selection): Structure = {
    check_sessions(sel)

    val excluded = {
      val exclude_group = sel.exclude_session_groups.toSet
      val exclude_group_sessions =
        (for {
          (name, (info, _)) <- imports_graph.iterator
          if imports_graph.get_node(name).groups.exists(exclude_group)
        } yield name).toList
      imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet
    }

    def restrict(graph: Graph[String, Info]): Graph[String, Info] = {
      val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded)
      graph.restrict(graph.all_preds(sessions).toSet)
    }

    new Structure(
      session_positions, session_directories, global_theories,
      restrict(build_graph), restrict(imports_graph))
  }

  def selection(session: String): Structure = selection(Selection.session(session))

  // convenience: select and resolve dependencies in one step
  def selection_deps(
    selection: Selection,
    progress: Progress = new Progress,
    loading_sessions: Boolean = false,
    inlined_files: Boolean = false,
    verbose: Boolean = false
  ): Deps = {
    val deps =
      Sessions.deps(sessions_structure.selection(selection),
        progress = progress, inlined_files = inlined_files, verbose = verbose)

    if (loading_sessions) {
      val selection_size = deps.sessions_structure.build_graph.size
      if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...")
    }

    deps
  }

  // ancestors of a session in the build graph, including the session itself
  def build_hierarchy(session: String): List[String] =
    if (build_graph.defined(session)) build_graph.all_preds(List(session))
    else List(session)

  def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
  def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
  def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
  def build_topological_order: List[String] = build_graph.topological_order

  def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
  def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
  def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
  def imports_topological_order: List[String] = imports_graph.topological_order

  // (session, entry names) for sessions that actually have bibtex entries
  def bibtex_entries: List[(String, List[String])] =
    build_topological_order.flatMap(name =>
      apply(name).bibtex_entries match {
        case Nil => None
        case entries => Some(name -> entries.map(_.info))
      })

  override def toString: String =
    imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")")
}
/* parser */
// keyword literals of the ROOT file concrete syntax (see root_syntax below)
private val CHAPTER = "chapter"
private val SESSION = "session"
private val IN = "in"
private val DESCRIPTION = "description"
private val DIRECTORIES = "directories"
private val OPTIONS = "options"
private val SESSIONS = "sessions"
private val THEORIES = "theories"
private val GLOBAL = "global"
private val DOCUMENT_THEORIES = "document_theories"
private val DOCUMENT_FILES = "document_files"
private val EXPORT_FILES = "export_files"
private val EXPORT_CLASSPATH = "export_classpath"
// outer syntax for ROOT files: punctuation plus the keywords declared above
val root_syntax: Outer_Syntax =
  Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" +
    GLOBAL + IN +
    (CHAPTER, Keyword.THY_DECL) +
    (SESSION, Keyword.THY_DECL) +
    (DESCRIPTION, Keyword.QUASI_COMMAND) +
    (DIRECTORIES, Keyword.QUASI_COMMAND) +
    (OPTIONS, Keyword.QUASI_COMMAND) +
    (SESSIONS, Keyword.QUASI_COMMAND) +
    (THEORIES, Keyword.QUASI_COMMAND) +
    (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
    (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
    (EXPORT_FILES, Keyword.QUASI_COMMAND) +
    (EXPORT_CLASSPATH, Keyword.QUASI_COMMAND)
// a ROOT file entry: either a chapter declaration or a session definition
abstract class Entry
sealed case class Chapter(name: String) extends Entry
/** Raw parse result of one "session ..." entry of a ROOT file. */
sealed case class Session_Entry(
  pos: Position.T,
  name: String,
  groups: List[String],
  path: String,
  parent: Option[String],
  description: String,
  options: List[Options.Spec],
  imports: List[String],
  directories: List[String],
  theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
  document_theories: List[(String, Position.T)],
  document_files: List[(String, String)],
  export_files: List[(String, Int, List[String])],
  export_classpath: List[String]
) extends Entry {
  // the same data with source positions stripped, e.g. for stable digests
  def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
    for ((opts, thys) <- theories)
    yield (opts, for (((thy, _), global) <- thys) yield (thy, global))

  def document_theories_no_position: List[String] =
    for ((thy, _) <- document_theories) yield thy
}
private object Parsers extends Options.Parsers {
// parser for "chapter <name>"
private val chapter: Parser[Chapter] = {
  val chapter_name = atom("chapter name", _.is_name)

  command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) }
}
private val session_entry: Parser[Session_Entry] = {
val option =
option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^
{ case x ~ y => (x, y) }
val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")
val theory_entry =
position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) }
val theories =
$$$(THEORIES) ~!
((options | success(Nil)) ~ rep1(theory_entry)) ^^
{ case _ ~ (x ~ y) => (x, y) }
val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x }
val document_theories =
$$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }
val document_files =
$$$(DOCUMENT_FILES) ~!
((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }
val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0)
val export_files =
$$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ prune ~ rep1(embedded)) ^^
{ case _ ~ (x ~ y ~ z) => (x, y, z) }
val export_classpath =
$$$(EXPORT_CLASSPATH) ~! (rep1(embedded) | success(List("*:classpath/*.jar"))) ^^
{ case _ ~ x => x }
command(SESSION) ~!
(position(session_name) ~
(($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
(($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
($$$("=") ~!
(opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
(($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
(($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
(($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
(($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
rep(theories) ~
(opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
(rep(document_files) ^^ (x => x.flatten)) ~
rep(export_files) ~
opt(export_classpath)))) ^^
{ case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) =>
Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l, m.getOrElse(Nil)) }
}
def parse_root(path: Path): List[Entry] = {
val toks = Token.explode(root_syntax.keywords, File.read(path))
val start = Token.Pos.file(path.implode)
parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
case Success(result, _) => result
case bad => error(bad.toString)
}
}
}
/* Parse a ROOT file into its chapter headings and session entries. */
def parse_root(path: Path): List[Entry] = Parsers.parse_root(path)
/* Parse a ROOT file, keeping only the session entries (chapter headings
   are dropped).  A pattern-matching collect replaces the former
   isInstanceOf/asInstanceOf pair, which performed an unchecked downcast. */
def parse_root_entries(path: Path): List[Session_Entry] =
  Parsers.parse_root(path).collect({ case entry: Session_Entry => entry })
/* Read a ROOT file and turn its session entries into Info records.
   Chapter headings update the current chapter (initially UNSORTED),
   which is attached to every subsequent session entry. */
def read_root(options: Options, select: Boolean, path: Path): List[Info] = {
  val (_, rev_infos) =
    parse_root(path).foldLeft((UNSORTED, List.empty[Info])) {
      case ((_, acc), Chapter(name)) => (name, acc)
      case ((chapter, acc), entry: Session_Entry) =>
        (chapter, make_info(options, select, path.dir, chapter, entry) :: acc)
    }
  rev_infos.reverse
}
/* Read a ROOTS catalog file: one entry per line, with empty lines and
   "#" comment lines ignored. */
def parse_roots(roots: Path): List[String] =
  split_lines(File.read(roots))
    .filter(line => line.nonEmpty && !line.startsWith("#"))
/* load sessions from certain directories */
/* A session directory is one that contains a ROOT or ROOTS file. */
def is_session_dir(dir: Path): Boolean =
  List(ROOT, ROOTS).exists(file => (dir + file).is_file)
/* Check that dir is a session directory and return it resolved against the
   current working directory; fail with a user error otherwise. */
def check_session_dir(dir: Path): Path =
  if (!is_session_dir(dir)) {
    error("Bad session root directory (missing ROOT or ROOTS): " + dir.expand.toString)
  }
  else File.pwd() + dir.expand
/* Assemble the list of session directories to scan, canonicalized and
   tagged with their selection status: component defaults and -d dirs are
   unselected, -D dirs are selected. */
def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = {
  val default_dirs = Components.directories().filter(is_session_dir)
  val unselected = (default_dirs ::: dirs).map(dir => (false, dir))
  val selected = select_dirs.map(dir => (true, dir))
  (unselected ::: selected).map({ case (select, dir) => (select, dir.canonical) })
}
/* Load the overall session structure: scan the given directories for ROOT
   files (following ROOTS catalogs recursively), de-duplicate roots that are
   reached via several paths, and combine with any pre-built infos. */
def load_structure(
  options: Options,
  dirs: List[Path] = Nil,
  select_dirs: List[Path] = Nil,
  infos: List[Info] = Nil
): Structure = {
  // one directory contributes its own ROOT plus everything reachable via ROOTS
  def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] =
    load_root(select, dir) ::: load_roots(select, dir)

  def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] = {
    val root = dir + ROOT
    if (root.is_file) List((select, root)) else Nil
  }

  // each line of a ROOTS catalog names a sub-directory, which must itself
  // be a session directory; errors are annotated with the catalog location
  def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] = {
    val roots = dir + ROOTS
    if (roots.is_file) {
      for {
        entry <- parse_roots(roots)
        dir1 =
          try { check_session_dir(dir + Path.explode(entry)) }
          catch {
            case ERROR(msg) =>
              error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
          }
        res <- load_dir(select, dir1)
      } yield res
    }
    else Nil
  }

  val roots =
    for {
      (select, dir) <- directories(dirs, select_dirs)
      res <- load_dir(select, check_session_dir(dir))
    } yield res

  // de-duplicate by canonical file: keep the first path seen, OR-combine the
  // select flags.  NOTE(review): iteration over an immutable Map does not
  // guarantee insertion order for >4 entries — presumably Structure.make does
  // not depend on root order; confirm before relying on it.
  val unique_roots =
    roots.foldLeft(Map.empty[JFile, (Boolean, Path)]) {
      case (m, (select, path)) =>
        val file = path.canonical_file
        m.get(file) match {
          case None => m + (file -> (select, path))
          case Some((select1, path1)) => m + (file -> (select1 || select, path1))
        }
    }.toList.map(_._2)

  Structure.make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos)
}
/* Isabelle tool wrapper */
/* Command-line tool "isabelle sessions": load the session structure from the
   requested directories, apply the selection options, and print the selected
   session names in topological import order. */
val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions",
  Scala_Project.here,
  { args =>
    // accumulators for repeatable command-line options
    var base_sessions: List[String] = Nil
    var select_dirs: List[Path] = Nil
    var requirements = false
    var exclude_session_groups: List[String] = Nil
    var all_sessions = false
    var dirs: List[Path] = Nil
    var session_groups: List[String] = Nil
    var exclude_sessions: List[String] = Nil

    val getopts = Getopts("""
Usage: isabelle sessions [OPTIONS] [SESSIONS ...]

Options are:
-B NAME include session NAME and all descendants
-D DIR include session directory and select its sessions
-R refer to requirements of selected sessions
-X NAME exclude sessions from group NAME and all descendants
-a select all sessions
-d DIR include session directory
-g NAME select session group NAME
-x NAME exclude session NAME and all descendants

Explore the structure of Isabelle sessions and print result names in
topological order (on stdout).
""",
      "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
      "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
      "R" -> (_ => requirements = true),
      "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
      "a" -> (_ => all_sessions = true),
      "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
      "g:" -> (arg => session_groups = session_groups ::: List(arg)),
      "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))

    // remaining arguments are explicit session names
    val sessions = getopts(args)

    val options = Options.init()

    val selection =
      Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions,
        exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions,
        session_groups = session_groups, sessions = sessions)

    val sessions_structure =
      load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection)

    for (name <- sessions_structure.imports_topological_order) {
      Output.writeln(name, stdout = true)
    }
  })
/** heap file with SHA1 digest **/

// marker that precedes the digest appended at the very end of a heap file
private val sha1_prefix = "SHA1:"
/* If the heap file ends with the sha1_prefix marker followed by a hex digest,
   return that digest; otherwise None.  Only the trailing bytes of the file
   are read. */
def read_heap_digest(heap: Path): Option[String] = {
  if (!heap.is_file) None
  else {
    using(FileChannel.open(heap.java_path, StandardOpenOption.READ)) { file =>
      val n = sha1_prefix.length + SHA1.digest_length
      val len = file.size
      if (len < n) None
      else {
        // read the last n bytes completely (FileChannel.read may be partial)
        file.position(len - n)
        val buf = ByteBuffer.allocate(n)
        var count = 0
        var eof = false
        while (!eof && count < n) {
          val m = file.read(buf)
          if (m == -1) eof = true else count += m
        }
        if (count != n) None
        else {
          val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset)
          if (prefix != sha1_prefix) None
          else Some(new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset))
        }
      }
    }
  }
}
/* Return the heap's SHA1 digest, computing it and appending the
   "SHA1:<digest>" trailer to the file if not already present. */
def write_heap_digest(heap: Path): String =
  read_heap_digest(heap) getOrElse {
    val digest = SHA1.digest(heap).toString
    File.append(heap, sha1_prefix + digest)
    digest
  }
/** persistent store **/
/* Schema of SQL table "isabelle_session_info": one row per session,
   combining build-log data with build-result data. */
object Session_Info {
  val session_name = SQL.Column.string("session_name").make_primary_key

  // Build_Log.Session_Info: compressed properties and errors
  val session_timing = SQL.Column.bytes("session_timing")
  val command_timings = SQL.Column.bytes("command_timings")
  val theory_timings = SQL.Column.bytes("theory_timings")
  val ml_statistics = SQL.Column.bytes("ml_statistics")
  val task_statistics = SQL.Column.bytes("task_statistics")
  val errors = SQL.Column.bytes("errors")
  val build_log_columns =
    List(session_name, session_timing, command_timings, theory_timings,
      ml_statistics, task_statistics, errors)

  // Build.Session_Info: source digest, heap lineage, outcome
  val sources = SQL.Column.string("sources")
  val input_heaps = SQL.Column.string("input_heaps")
  val output_heap = SQL.Column.string("output_heap")
  val return_code = SQL.Column.int("return_code")
  val build_columns = List(sources, input_heaps, output_heap, return_code)

  val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
}
/* Access to session databases: either a single shared database server
   (held open for the lifetime of this context) or per-session SQLite files
   (opened and closed per operation). */
class Database_Context private[Sessions](
  val store: Sessions.Store,
  val database_server: Option[SQL.Database]
) extends AutoCloseable {
  def cache: Term.Cache = store.cache

  // only the shared server connection is owned here; SQLite handles are
  // opened and closed per call below
  def close(): Unit = database_server.foreach(_.close())

  // run f on the output database for the given session
  def database_output[A](session: String)(f: SQL.Database => A): A =
    database_server match {
      case Some(db) => f(db)
      case None => using(store.open_database(session, output = true))(f)
    }

  // run f on the input database for the given session, if one exists
  def database[A](session: String)(f: SQL.Database => Option[A]): Option[A] =
    database_server match {
      case Some(db) => f(db)
      case None =>
        store.try_open_database(session) match {
          case Some(db) => using(db)(f)
          case None => None
        }
    }

  /* Look up a theory export along the session hierarchy; the first session
     (in given order) that provides the entry wins.  Lazy views avoid opening
     databases beyond the first hit. */
  def read_export(
    session_hierarchy: List[String], theory: String, name: String
  ): Option[Export.Entry] = {
    def read(db: SQL.Database, session: String): Option[Export.Entry] =
      Export.Entry_Name(session = session, theory = theory, name = name).read(db, store.cache)

    val attempts =
      database_server match {
        case Some(db) => session_hierarchy.view.map(read(db, _))
        case None =>
          session_hierarchy.view.map(session =>
            store.try_open_database(session) match {
              case Some(db) => using(db) { _ => read(db, session) }
              case None => None
            })
      }
    attempts.collectFirst({ case Some(entry) => entry })
  }

  // like read_export, but falling back on an empty placeholder entry
  def get_export(session_hierarchy: List[String], theory: String, name: String): Export.Entry =
    read_export(session_hierarchy, theory, name) getOrElse
      Export.empty_entry(theory, name)

  /* Collect classpath exports (matching each session's export_classpath
     patterns) over the build requirements of the given session. */
  def get_classpath(structure: Structure, session: String): List[File.Content_Bytes] =
  {
    (for {
      name <- structure.build_requirements(List(session))
      patterns = structure(name).export_classpath if patterns.nonEmpty
    } yield {
      database(name) { db =>
        val matcher = Export.make_matcher(patterns)
        val res =
          for {
            entry_name <- Export.read_entry_names(db, name) if matcher(entry_name)
            entry <- entry_name.read(db, store.cache)
          } yield File.Content(entry.entry_name.make_path(), entry.uncompressed)
        Some(res)
      }.getOrElse(Nil)
    }).flatten
  }

  override def toString: String = {
    val s =
      database_server match {
        case Some(db) => db.toString
        case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ")
      }
    "Database_Context(" + s + ")"
  }
}
/* Factory for the persistent store (the Store constructor is private). */
def store(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
  new Store(options, cache)
/* Persistent store for session build artefacts: heap images, log files,
   and SQL databases (SQLite files or a PostgreSQL server). */
class Store private[Sessions](val options: Options, val cache: Term.Cache) {
  store =>

  override def toString: String = "Store(output_dir = " + output_dir.absolute + ")"


  /* directories */

  val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER")
  val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER")

  def system_heaps: Boolean = options.bool("system_heaps")

  // output goes to exactly one directory, chosen by the "system_heaps" option
  val output_dir: Path =
    if (system_heaps) system_output_dir else user_output_dir

  // input search order: user directory shadows the system directory
  val input_dirs: List[Path] =
    if (system_heaps) List(system_output_dir)
    else List(user_output_dir, system_output_dir)

  def presentation_dir: Path =
    if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
    else Path.explode("$ISABELLE_BROWSER_INFO")


  /* file names */

  // relative names: heap image at top level, database/log files below "log"
  def heap(name: String): Path = Path.basic(name)
  def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db")
  def log(name: String): Path = Path.basic("log") + Path.basic(name)
  def log_gz(name: String): Path = log(name).ext("gz")

  def output_heap(name: String): Path = output_dir + heap(name)
  def output_database(name: String): Path = output_dir + database(name)
  def output_log(name: String): Path = output_dir + log(name)
  def output_log_gz(name: String): Path = output_dir + log_gz(name)

  def prepare_output_dir(): Unit = Isabelle_System.make_directory(output_dir + Path.basic("log"))


  /* heap */

  // first existing heap file along the input search path
  def find_heap(name: String): Option[Path] =
    input_dirs.map(_ + heap(name)).find(_.is_file)

  def find_heap_digest(name: String): Option[String] =
    find_heap(name).flatMap(read_heap_digest)

  def the_heap(name: String): Path =
    find_heap(name) getOrElse
      error("Missing heap image for session " + quote(name) + " -- expected in:\n" +
        cat_lines(input_dirs.map(dir => " " + dir.expand.implode)))


  /* database */

  def find_database(name: String): Option[Path] =
    input_dirs.map(_ + database(name)).find(_.is_file)

  def database_server: Boolean = options.bool("build_database_server")

  // connect to the PostgreSQL build database, optionally tunnelled via SSH
  def open_database_server(): SQL.Database =
    PostgreSQL.open_database(
      user = options.string("build_database_user"),
      password = options.string("build_database_password"),
      database = options.string("build_database_name"),
      host = options.string("build_database_host"),
      port = options.int("build_database_port"),
      ssh =
        options.proper_string("build_database_ssh_host").map(ssh_host =>
          SSH.open_session(options,
            host = ssh_host,
            user = options.string("build_database_ssh_user"),
            port = options.int("build_database_ssh_port"))),
      ssh_close = true)

  def open_database_context(): Database_Context =
    new Database_Context(store, if (database_server) Some(open_database_server()) else None)

  /* Open the session database for reading (or writing, if output = true).
     A database that lacks session info is closed again and treated as
     absent.  Caller is responsible for closing the returned database. */
  def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] = {
    def check(db: SQL.Database): Option[SQL.Database] =
      if (output || session_info_exists(db)) Some(db) else { db.close(); None }

    if (database_server) check(open_database_server())
    else if (output) Some(SQLite.open_database(output_database(name)))
    else {
      // lazy view: stop at the first usable database along the search path
      (for {
        dir <- input_dirs.view
        path = dir + database(name) if path.is_file
        db <- check(SQLite.open_database(path))
      } yield db).headOption
    }
  }

  def bad_database(name: String): Nothing =
    error("Missing build database for session " + quote(name))

  def open_database(name: String, output: Boolean = false): SQL.Database =
    try_open_database(name, output = output) getOrElse bad_database(name)

  /* Remove all output artefacts of a session.  Returns (relevant, ok):
     "relevant" — something existed to be cleaned (database entries or
     files); "ok" — every file deletion succeeded. */
  def clean_output(name: String): (Boolean, Boolean) = {
    // on a database server: re-initialize the session's tables, noting
    // whether any data for this session was actually present
    val relevant_db =
      database_server && {
        try_open_database(name) match {
          case Some(db) =>
            try {
              db.transaction {
                val relevant_db = session_info_defined(db, name)
                init_session_info(db, name)
                relevant_db
              }
            } finally { db.close() }
          case None => false
        }
      }

    // with system_heaps, stale files may also linger in the user directory
    val del =
      for {
        dir <-
          (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir))
        file <- List(heap(name), database(name), log(name), log_gz(name))
        path = dir + file if path.is_file
      } yield path.file.delete

    val relevant = relevant_db || del.nonEmpty
    val ok = del.forall(b => b)
    (relevant, ok)
  }


  /* SQL database content */

  // raw bytes of one column for the given session (empty if no row exists)
  def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
    db.using_statement(Session_Info.table.select(List(column),
      Session_Info.session_name.where_equal(name))) { stmt =>
      val res = stmt.execute_query()
      if (!res.next()) Bytes.empty else res.bytes(column)
    }

  def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] =
    Properties.uncompress(read_bytes(db, name, column), cache = cache)


  /* session info */

  /* Create the session tables if necessary and delete any previous rows
     belonging to this session, all in one transaction. */
  def init_session_info(db: SQL.Database, name: String): Unit = {
    db.transaction {
      db.create_table(Session_Info.table)
      db.using_statement(
        Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute())

      db.create_table(Export.Data.table)
      db.using_statement(
        Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute())

      db.create_table(Document_Build.Data.table)
      db.using_statement(
        Document_Build.Data.table.delete(
          Document_Build.Data.session_name.where_equal(name)))(_.execute())
    }
  }

  // the schema is present (tables may still be empty)
  def session_info_exists(db: SQL.Database): Boolean = {
    val tables = db.tables
    tables.contains(Session_Info.table.name) &&
    tables.contains(Export.Data.table.name)
  }

  // the schema is present AND this session has a row
  def session_info_defined(db: SQL.Database, name: String): Boolean =
    session_info_exists(db) && {
      db.using_statement(
        Session_Info.table.select(List(Session_Info.session_name),
          Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next())
    }

  /* Insert the complete session-info row; statement parameter indices follow
     the column order of Session_Info.table. */
  def write_session_info(
    db: SQL.Database,
    name: String,
    build_log: Build_Log.Session_Info,
    build: Build.Session_Info
  ): Unit = {
    db.transaction {
      db.using_statement(Session_Info.table.insert()) { stmt =>
        stmt.string(1) = name
        stmt.bytes(2) = Properties.encode(build_log.session_timing)
        stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache.xz)
        stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache.xz)
        stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = cache.xz)
        stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = cache.xz)
        stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = cache.xz)
        stmt.string(8) = build.sources
        stmt.string(9) = cat_lines(build.input_heaps)
        stmt.string(10) = build.output_heap getOrElse ""
        stmt.int(11) = build.return_code
        stmt.execute()
      }
    }
  }

  def read_session_timing(db: SQL.Database, name: String): Properties.T =
    Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache)

  def read_command_timings(db: SQL.Database, name: String): List[Properties.T] =
    read_properties(db, name, Session_Info.command_timings)

  def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] =
    read_properties(db, name, Session_Info.theory_timings)

  def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] =
    read_properties(db, name, Session_Info.ml_statistics)

  def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] =
    read_properties(db, name, Session_Info.task_statistics)

  // theory names are recovered from the timing properties' Markup.Name
  def read_theories(db: SQL.Database, name: String): List[String] =
    read_theory_timings(db, name).flatMap(Markup.Name.unapply)

  def read_errors(db: SQL.Database, name: String): List[String] =
    Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache)

  /* Read the Build.Session_Info row, if the table and a row for this
     session exist; an empty output_heap string means None. */
  def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] = {
    if (db.tables.contains(Session_Info.table.name)) {
      db.using_statement(Session_Info.table.select(Session_Info.build_columns,
        Session_Info.session_name.where_equal(name))) { stmt =>
        val res = stmt.execute_query()
        if (!res.next()) None
        else {
          Some(
            Build.Session_Info(
              res.string(Session_Info.sources),
              split_lines(res.string(Session_Info.input_heaps)),
              res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) },
              res.int(Session_Info.return_code)))
        }
      }
    }
    else None
  }
}
}