diff --git a/src/Pure/Thy/export_theory.scala b/src/Pure/Thy/export_theory.scala
--- a/src/Pure/Thy/export_theory.scala
+++ b/src/Pure/Thy/export_theory.scala
@@ -1,814 +1,814 @@
/*  Title:      Pure/Thy/export_theory.scala
    Author:     Makarius

Export foundational theory content.
*/

package isabelle


import scala.collection.immutable.SortedMap


object Export_Theory
{
  /** session content **/

  sealed case class Session(name: String, theory_graph: Graph[String, Option[Theory]])
  {
    override def toString: String = name

    def theory(theory_name: String): Option[Theory] =
      if (theory_graph.defined(theory_name)) theory_graph.get_node(theory_name)
      else None

    def theories: List[Theory] =
      theory_graph.topological_order.flatMap(theory)
  }

  def read_session(
    store: Sessions.Store,
    sessions_structure: Sessions.Structure,
    session_name: String,
    progress: Progress = new Progress,
    cache: Term.Cache = Term.Cache.make()): Session =
  {
    val thys =
      sessions_structure.build_requirements(List(session_name)).flatMap(session =>
        using(store.open_database(session))(db =>
        {
          db.transaction {
            for (theory <- Export.read_theory_names(db, session))
            yield {
              progress.echo("Reading theory " + theory)
              val provider = Export.Provider.database(db, store.cache, session, theory)
              read_theory(provider, session, theory, cache = cache)
            }
          }
        }))

    val graph0 =
      thys.foldLeft(Graph.string[Option[Theory]]) {
        case (g, thy) => g.default_node(thy.name, Some(thy))
      }
    val graph1 =
      thys.foldLeft(graph0) {
        case (g0, thy) =>
          thy.parents.foldLeft(g0) {
            case (g1, parent) =>
              g1.default_node(parent, None).add_edge_acyclic(parent, thy.name)
          }
      }

    Session(session_name, graph1)
  }



  /** theory content **/

  sealed case class Theory(name: String, parents: List[String],
    types: List[Entity[Type]],
    consts: List[Entity[Const]],
    axioms: List[Entity[Axiom]],
    thms: List[Entity[Thm]],
    classes: List[Entity[Class]],
    locales: List[Entity[Locale]],
    locale_dependencies: List[Entity[Locale_Dependency]],
    classrel: List[Classrel],
    arities: List[Arity],
    constdefs: List[Constdef],
    typedefs: List[Typedef],
    datatypes: List[Datatype],
    spec_rules: List[Spec_Rule],
    others: Map[String, List[Entity[Other]]])
  {
    override def toString: String = name

    def entity_iterator: Iterator[Entity[No_Content]] =
      types.iterator.map(_.no_content) ++
      consts.iterator.map(_.no_content) ++
      axioms.iterator.map(_.no_content) ++
      thms.iterator.map(_.no_content) ++
      classes.iterator.map(_.no_content) ++
      locales.iterator.map(_.no_content) ++
      locale_dependencies.iterator.map(_.no_content) ++
      (for { (_, xs) <- others; x <- xs.iterator } yield x.no_content)

    lazy val entity_by_range: Map[Symbol.Range, List[Entity[No_Content]]] =
      entity_iterator.toList.groupBy(_.range)

-    lazy val entity_by_kname: Map[String, Entity[No_Content]] =
-      entity_iterator.map(entity => (entity.kname, entity)).toMap
+    lazy val entity_by_kind_name: Map[(String, String), Entity[No_Content]] =
+      entity_iterator.map(entity => ((entity.kind, entity.name), entity)).toMap

    lazy val entity_kinds: Set[String] =
      entity_iterator.map(_.kind).toSet

    def cache(cache: Term.Cache): Theory =
      Theory(cache.string(name),
        parents.map(cache.string),
        types.map(_.cache(cache)),
        consts.map(_.cache(cache)),
        axioms.map(_.cache(cache)),
        thms.map(_.cache(cache)),
        classes.map(_.cache(cache)),
        locales.map(_.cache(cache)),
        locale_dependencies.map(_.cache(cache)),
        classrel.map(_.cache(cache)),
        arities.map(_.cache(cache)),
        constdefs.map(_.cache(cache)),
        typedefs.map(_.cache(cache)),
        datatypes.map(_.cache(cache)),
        spec_rules.map(_.cache(cache)),
        (for ((k, xs) <- others.iterator) yield cache.string(k) ->
xs.map(_.cache(cache))).toMap) } def read_theory_parents(provider: Export.Provider, theory_name: String): Option[List[String]] = { if (theory_name == Thy_Header.PURE) Some(Nil) else { provider(Export.THEORY_PREFIX + "parents") .map(entry => split_lines(entry.uncompressed.text)) } } def no_theory: Theory = Theory("", Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Map.empty) def read_theory(provider: Export.Provider, session_name: String, theory_name: String, cache: Term.Cache = Term.Cache.none): Theory = { val parents = read_theory_parents(provider, theory_name) getOrElse error("Missing theory export in session " + quote(session_name) + ": " + quote(theory_name)) val theory = Theory(theory_name, parents, read_types(provider), read_consts(provider), read_axioms(provider), read_thms(provider), read_classes(provider), read_locales(provider), read_locale_dependencies(provider), read_classrel(provider), read_arities(provider), read_constdefs(provider), read_typedefs(provider), read_datatypes(provider), read_spec_rules(provider), read_others(provider)) if (cache.no_cache) theory else theory.cache(cache) } def read_pure[A](store: Sessions.Store, read: (Export.Provider, String, String) => A): A = { val session_name = Thy_Header.PURE val theory_name = Thy_Header.PURE using(store.open_database(session_name))(db => { db.transaction { val provider = Export.Provider.database(db, store.cache, session_name, theory_name) read(provider, session_name, theory_name) } }) } def read_pure_theory(store: Sessions.Store, cache: Term.Cache = Term.Cache.none): Theory = read_pure(store, read_theory(_, _, _, cache = cache)) def read_pure_proof( store: Sessions.Store, id: Thm_Id, cache: Term.Cache = Term.Cache.none): Option[Proof] = read_pure(store, (provider, _, _) => read_proof(provider, id, cache = cache)) /* entities */ object Kind { val TYPE = "type" val CONST = "const" val THM = "thm" val PROOF = "proof" val LOCALE_DEPENDENCY = "locale_dependency" val DOCUMENT_HEADING = "document_heading" val DOCUMENT_TEXT = "document_text" val PROOF_TEXT = "proof_text" } def export_kind(kind: String): String = if (kind == Markup.TYPE_NAME) Kind.TYPE else if (kind == Markup.CONSTANT) Kind.CONST else kind def export_kind_name(kind: String, name: String): String = name + "|" + export_kind(kind) abstract class Content[T] { def cache(cache: Term.Cache): T } sealed case class No_Content() extends Content[No_Content] { def cache(cache: Term.Cache): No_Content = this } sealed case class Entity[A <: Content[A]]( kind: String, name: String, xname: String, pos: Position.T, id: Option[Long], serial: Long, content: Option[A]) { val kname: String = export_kind_name(kind, name) val range: Symbol.Range = Position.Range.unapply(pos).getOrElse(Text.Range.offside) def export_kind: String = Export_Theory.export_kind(kind) override def toString: String = export_kind + " " + quote(name) def the_content: A = if (content.isDefined) content.get else error("No content for " + toString) def no_content: Entity[No_Content] = copy(content = None) def cache(cache: Term.Cache): Entity[A] = Entity( cache.string(kind), cache.string(name), cache.string(xname), cache.position(pos), id, serial, content.map(_.cache(cache))) } def read_entities[A <: Content[A]]( provider: Export.Provider, export_name: String, kind: String, decode: XML.Decode.T[A]): List[Entity[A]] = { def decode_entity(tree: XML.Tree): Entity[A] = { def err(): Nothing = throw new XML.XML_Body(List(tree)) tree match { case XML.Elem(Markup(Markup.ENTITY, props), body) => val name 
= Markup.Name.unapply(props) getOrElse err() val xname = Markup.XName.unapply(props) getOrElse err() val pos = props.filter(p => Markup.position_property(p) && p._1 != Markup.ID) val id = Position.Id.unapply(props) val serial = Markup.Serial.unapply(props) getOrElse err() val content = if (body.isEmpty) None else Some(decode(body)) Entity(kind, name, xname, pos, id, serial, content) case _ => err() } } provider.uncompressed_yxml(export_name).map(decode_entity) } /* approximative syntax */ object Assoc extends Enumeration { val NO_ASSOC, LEFT_ASSOC, RIGHT_ASSOC = Value } sealed abstract class Syntax case object No_Syntax extends Syntax case class Prefix(delim: String) extends Syntax case class Infix(assoc: Assoc.Value, delim: String, pri: Int) extends Syntax def decode_syntax: XML.Decode.T[Syntax] = XML.Decode.variant(List( { case (Nil, Nil) => No_Syntax }, { case (List(delim), Nil) => Prefix(delim) }, { case (Nil, body) => import XML.Decode._ val (ass, delim, pri) = triple(int, string, int)(body) Infix(Assoc(ass), delim, pri) })) /* types */ sealed case class Type(syntax: Syntax, args: List[String], abbrev: Option[Term.Typ]) extends Content[Type] { override def cache(cache: Term.Cache): Type = Type( syntax, args.map(cache.string), abbrev.map(cache.typ)) } def read_types(provider: Export.Provider): List[Entity[Type]] = read_entities(provider, Export.THEORY_PREFIX + "types", Markup.TYPE_NAME, body => { import XML.Decode._ val (syntax, args, abbrev) = triple(decode_syntax, list(string), option(Term_XML.Decode.typ))(body) Type(syntax, args, abbrev) }) /* consts */ sealed case class Const( syntax: Syntax, typargs: List[String], typ: Term.Typ, abbrev: Option[Term.Term], propositional: Boolean) extends Content[Const] { override def cache(cache: Term.Cache): Const = Const( syntax, typargs.map(cache.string), cache.typ(typ), abbrev.map(cache.term), propositional) } def read_consts(provider: Export.Provider): List[Entity[Const]] = read_entities(provider, Export.THEORY_PREFIX + "consts", Markup.CONSTANT, body => { import XML.Decode._ val (syntax, (typargs, (typ, (abbrev, propositional)))) = pair(decode_syntax, pair(list(string), pair(Term_XML.Decode.typ, pair(option(Term_XML.Decode.term), bool))))(body) Const(syntax, typargs, typ, abbrev, propositional) }) /* axioms */ sealed case class Prop( typargs: List[(String, Term.Sort)], args: List[(String, Term.Typ)], term: Term.Term) extends Content[Prop] { override def cache(cache: Term.Cache): Prop = Prop( typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }), cache.term(term)) } def decode_prop(body: XML.Body): Prop = { val (typargs, args, t) = { import XML.Decode._ import Term_XML.Decode._ triple(list(pair(string, sort)), list(pair(string, typ)), term)(body) } Prop(typargs, args, t) } sealed case class Axiom(prop: Prop) extends Content[Axiom] { override def cache(cache: Term.Cache): Axiom = Axiom(prop.cache(cache)) } def read_axioms(provider: Export.Provider): List[Entity[Axiom]] = read_entities(provider, Export.THEORY_PREFIX + "axioms", Markup.AXIOM, body => Axiom(decode_prop(body))) /* theorems */ sealed case class Thm_Id(serial: Long, theory_name: String) { def pure: Boolean = theory_name == Thy_Header.PURE } sealed case class Thm( prop: Prop, deps: List[String], proof: Term.Proof) extends Content[Thm] { override def cache(cache: Term.Cache): Thm = Thm( prop.cache(cache), deps.map(cache.string), cache.proof(proof)) } def read_thms(provider: Export.Provider): 
List[Entity[Thm]] = read_entities(provider, Export.THEORY_PREFIX + "thms", Kind.THM, body => { import XML.Decode._ import Term_XML.Decode._ val (prop, deps, prf) = triple(decode_prop, list(string), proof)(body) Thm(prop, deps, prf) }) sealed case class Proof( typargs: List[(String, Term.Sort)], args: List[(String, Term.Typ)], term: Term.Term, proof: Term.Proof) { def prop: Prop = Prop(typargs, args, term) def cache(cache: Term.Cache): Proof = Proof( typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }), cache.term(term), cache.proof(proof)) } def read_proof( provider: Export.Provider, id: Thm_Id, cache: Term.Cache = Term.Cache.none): Option[Proof] = { for { entry <- provider.focus(id.theory_name)(Export.PROOFS_PREFIX + id.serial) } yield { val body = entry.uncompressed_yxml val (typargs, (args, (prop_body, proof_body))) = { import XML.Decode._ import Term_XML.Decode._ pair(list(pair(string, sort)), pair(list(pair(string, typ)), pair(x => x, x => x)))(body) } val env = args.toMap val prop = Term_XML.Decode.term_env(env)(prop_body) val proof = Term_XML.Decode.proof_env(env)(proof_body) val result = Proof(typargs, args, prop, proof) if (cache.no_cache) result else result.cache(cache) } } def read_proof_boxes( store: Sessions.Store, provider: Export.Provider, proof: Term.Proof, suppress: Thm_Id => Boolean = _ => false, cache: Term.Cache = Term.Cache.none): List[(Thm_Id, Proof)] = { var seen = Set.empty[Long] var result = SortedMap.empty[Long, (Thm_Id, Proof)] def boxes(context: Option[(Long, Term.Proof)], prf: Term.Proof): Unit = { prf match { case Term.Abst(_, _, p) => boxes(context, p) case Term.AbsP(_, _, p) => boxes(context, p) case Term.Appt(p, _) => boxes(context, p) case Term.AppP(p, q) => boxes(context, p); boxes(context, q) case thm: Term.PThm if !seen(thm.serial) => seen += thm.serial val id = Thm_Id(thm.serial, thm.theory_name) if (!suppress(id)) { val read = if (id.pure) Export_Theory.read_pure_proof(store, id, cache = cache) else Export_Theory.read_proof(provider, id, cache = cache) read match { case Some(p) => result += (thm.serial -> (id -> p)) boxes(Some((thm.serial, p.proof)), p.proof) case None => error("Missing proof " + thm.serial + " (theory " + quote (thm.theory_name) + ")" + (context match { case None => "" case Some((i, p)) => " in proof " + i + ":\n" + p })) } } case _ => } } boxes(None, proof) result.iterator.map(_._2).toList } /* type classes */ sealed case class Class(params: List[(String, Term.Typ)], axioms: List[Prop]) extends Content[Class] { override def cache(cache: Term.Cache): Class = Class( params.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }), axioms.map(_.cache(cache))) } def read_classes(provider: Export.Provider): List[Entity[Class]] = read_entities(provider, Export.THEORY_PREFIX + "classes", Markup.CLASS, body => { import XML.Decode._ import Term_XML.Decode._ val (params, axioms) = pair(list(pair(string, typ)), list(decode_prop))(body) Class(params, axioms) }) /* locales */ sealed case class Locale( typargs: List[(String, Term.Sort)], args: List[((String, Term.Typ), Syntax)], axioms: List[Prop]) extends Content[Locale] { override def cache(cache: Term.Cache): Locale = Locale( typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case ((name, typ), syntax) => ((cache.string(name), cache.typ(typ)), syntax) }), axioms.map(_.cache(cache))) } def read_locales(provider: Export.Provider): List[Entity[Locale]] = 
read_entities(provider, Export.THEORY_PREFIX + "locales", Markup.LOCALE, body => { import XML.Decode._ import Term_XML.Decode._ val (typargs, args, axioms) = triple(list(pair(string, sort)), list(pair(pair(string, typ), decode_syntax)), list(decode_prop))(body) Locale(typargs, args, axioms) }) /* locale dependencies */ sealed case class Locale_Dependency( source: String, target: String, prefix: List[(String, Boolean)], subst_types: List[((String, Term.Sort), Term.Typ)], subst_terms: List[((String, Term.Typ), Term.Term)]) extends Content[Locale_Dependency] { override def cache(cache: Term.Cache): Locale_Dependency = Locale_Dependency( cache.string(source), cache.string(target), prefix.map({ case (name, mandatory) => (cache.string(name), mandatory) }), subst_types.map({ case ((a, s), ty) => ((cache.string(a), cache.sort(s)), cache.typ(ty)) }), subst_terms.map({ case ((x, ty), t) => ((cache.string(x), cache.typ(ty)), cache.term(t)) })) def is_inclusion: Boolean = subst_types.isEmpty && subst_terms.isEmpty } def read_locale_dependencies(provider: Export.Provider): List[Entity[Locale_Dependency]] = read_entities(provider, Export.THEORY_PREFIX + "locale_dependencies", Kind.LOCALE_DEPENDENCY, body => { import XML.Decode._ import Term_XML.Decode._ val (source, (target, (prefix, (subst_types, subst_terms)))) = pair(string, pair(string, pair(list(pair(string, bool)), pair(list(pair(pair(string, sort), typ)), list(pair(pair(string, typ), term))))))(body) Locale_Dependency(source, target, prefix, subst_types, subst_terms) }) /* sort algebra */ sealed case class Classrel(class1: String, class2: String, prop: Prop) { def cache(cache: Term.Cache): Classrel = Classrel(cache.string(class1), cache.string(class2), prop.cache(cache)) } def read_classrel(provider: Export.Provider): List[Classrel] = { val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "classrel") val classrel = { import XML.Decode._ list(pair(decode_prop, pair(string, string)))(body) } for ((prop, (c1, c2)) <- classrel) yield Classrel(c1, c2, prop) } sealed case class Arity(type_name: String, domain: List[Term.Sort], codomain: String, prop: Prop) { def cache(cache: Term.Cache): Arity = Arity(cache.string(type_name), domain.map(cache.sort), cache.string(codomain), prop.cache(cache)) } def read_arities(provider: Export.Provider): List[Arity] = { val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "arities") val arities = { import XML.Decode._ import Term_XML.Decode._ list(pair(decode_prop, triple(string, list(sort), string)))(body) } for ((prop, (a, b, c)) <- arities) yield Arity(a, b, c, prop) } /* Pure constdefs */ sealed case class Constdef(name: String, axiom_name: String) { def cache(cache: Term.Cache): Constdef = Constdef(cache.string(name), cache.string(axiom_name)) } def read_constdefs(provider: Export.Provider): List[Constdef] = { val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "constdefs") val constdefs = { import XML.Decode._ list(pair(string, string))(body) } for ((name, axiom_name) <- constdefs) yield Constdef(name, axiom_name) } /* HOL typedefs */ sealed case class Typedef(name: String, rep_type: Term.Typ, abs_type: Term.Typ, rep_name: String, abs_name: String, axiom_name: String) { def cache(cache: Term.Cache): Typedef = Typedef(cache.string(name), cache.typ(rep_type), cache.typ(abs_type), cache.string(rep_name), cache.string(abs_name), cache.string(axiom_name)) } def read_typedefs(provider: Export.Provider): List[Typedef] = { val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "typedefs") 
val typedefs = { import XML.Decode._ import Term_XML.Decode._ list(pair(string, pair(typ, pair(typ, pair(string, pair(string, string))))))(body) } for { (name, (rep_type, (abs_type, (rep_name, (abs_name, axiom_name))))) <- typedefs } yield Typedef(name, rep_type, abs_type, rep_name, abs_name, axiom_name) } /* HOL datatypes */ sealed case class Datatype( pos: Position.T, name: String, co: Boolean, typargs: List[(String, Term.Sort)], typ: Term.Typ, constructors: List[(Term.Term, Term.Typ)]) { def id: Option[Long] = Position.Id.unapply(pos) def cache(cache: Term.Cache): Datatype = Datatype( cache.position(pos), cache.string(name), co, typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), cache.typ(typ), constructors.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) })) } def read_datatypes(provider: Export.Provider): List[Datatype] = { val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "datatypes") val datatypes = { import XML.Decode._ import Term_XML.Decode._ list(pair(properties, pair(string, pair(bool, pair(list(pair(string, sort)), pair(typ, list(pair(term, typ))))))))(body) } for ((pos, (name, (co, (typargs, (typ, constructors))))) <- datatypes) yield Datatype(pos, name, co, typargs, typ, constructors) } /* Pure spec rules */ sealed abstract class Recursion { def cache(cache: Term.Cache): Recursion = this match { case Primrec(types) => Primrec(types.map(cache.string)) case Primcorec(types) => Primcorec(types.map(cache.string)) case _ => this } } case class Primrec(types: List[String]) extends Recursion case object Recdef extends Recursion case class Primcorec(types: List[String]) extends Recursion case object Corec extends Recursion case object Unknown_Recursion extends Recursion val decode_recursion: XML.Decode.T[Recursion] = { import XML.Decode._ variant(List( { case (Nil, a) => Primrec(list(string)(a)) }, { case (Nil, Nil) => Recdef }, { case (Nil, a) => Primcorec(list(string)(a)) }, { case (Nil, Nil) => Corec }, { case (Nil, Nil) => Unknown_Recursion })) } sealed abstract class Rough_Classification { def is_equational: Boolean = this.isInstanceOf[Equational] def is_inductive: Boolean = this == Inductive def is_co_inductive: Boolean = this == Co_Inductive def is_relational: Boolean = is_inductive || is_co_inductive def is_unknown: Boolean = this == Unknown def cache(cache: Term.Cache): Rough_Classification = this match { case Equational(recursion) => Equational(recursion.cache(cache)) case _ => this } } case class Equational(recursion: Recursion) extends Rough_Classification case object Inductive extends Rough_Classification case object Co_Inductive extends Rough_Classification case object Unknown extends Rough_Classification val decode_rough_classification: XML.Decode.T[Rough_Classification] = { import XML.Decode._ variant(List( { case (Nil, a) => Equational(decode_recursion(a)) }, { case (Nil, Nil) => Inductive }, { case (Nil, Nil) => Co_Inductive }, { case (Nil, Nil) => Unknown })) } sealed case class Spec_Rule( pos: Position.T, name: String, rough_classification: Rough_Classification, typargs: List[(String, Term.Sort)], args: List[(String, Term.Typ)], terms: List[(Term.Term, Term.Typ)], rules: List[Term.Term]) { def id: Option[Long] = Position.Id.unapply(pos) def cache(cache: Term.Cache): Spec_Rule = Spec_Rule( cache.position(pos), cache.string(name), rough_classification.cache(cache), typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }), 
terms.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }), rules.map(cache.term)) } def read_spec_rules(provider: Export.Provider): List[Spec_Rule] = { val body = provider.uncompressed_yxml(Export.THEORY_PREFIX + "spec_rules") val spec_rules = { import XML.Decode._ import Term_XML.Decode._ list( pair(properties, pair(string, pair(decode_rough_classification, pair(list(pair(string, sort)), pair(list(pair(string, typ)), pair(list(pair(term, typ)), list(term))))))))(body) } for ((pos, (name, (rough_classification, (typargs, (args, (terms, rules)))))) <- spec_rules) yield Spec_Rule(pos, name, rough_classification, typargs, args, terms, rules) } /* other entities */ sealed case class Other() extends Content[Other] { override def cache(cache: Term.Cache): Other = this } def read_others(provider: Export.Provider): Map[String, List[Entity[Other]]] = { val kinds = provider(Export.THEORY_PREFIX + "other_kinds") match { case Some(entry) => split_lines(entry.uncompressed.text) case None => Nil } val other = Other() def read_other(kind: String): List[Entity[Other]] = read_entities(provider, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other) kinds.map(kind => kind -> read_other(kind)).toMap } } diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala --- a/src/Pure/Thy/presentation.scala +++ b/src/Pure/Thy/presentation.scala @@ -1,600 +1,600 @@ /* Title: Pure/Thy/presentation.scala Author: Makarius HTML presentation of PIDE document content. */ package isabelle import scala.annotation.tailrec import scala.collection.immutable.SortedMap import scala.collection.mutable object Presentation { /** HTML documents **/ /* HTML context */ val fonts_path = Path.explode("fonts") sealed case class HTML_Document(title: String, content: String) def html_context(fonts_url: String => String = HTML.fonts_url()): HTML_Context = new HTML_Context(fonts_url) final class HTML_Context private[Presentation](fonts_url: String => String) { val term_cache: Term.Cache = Term.Cache.make() private val theory_cache = Synchronized(Map.empty[String, Export_Theory.Theory]) def cache_theory(thy_name: String, make_thy: => Export_Theory.Theory): Export_Theory.Theory = { theory_cache.change_result(thys => { thys.get(thy_name) match { case Some(thy) => (thy, thys) case None => val thy = make_thy (thy, thys + (thy_name -> thy)) } }) } def init_fonts(dir: Path): Unit = { val fonts_dir = Isabelle_System.make_directory(dir + fonts_path) for (entry <- Isabelle_Fonts.fonts(hidden = true)) Isabelle_System.copy_file(entry.path, fonts_dir) } def head(title: String, rest: XML.Body = Nil): XML.Tree = HTML.div("head", HTML.chapter(title) :: rest) def source(body: XML.Body): XML.Tree = HTML.pre("source", body) def offset_ref(offset: Text.Range): String = "offset/" + offset.start + ".." 
+ offset.stop def contents(heading: String, items: List[XML.Body], css_class: String = "contents") : List[XML.Elem] = { if (items.isEmpty) Nil else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items)))) } def output_document(title: String, body: XML.Body): String = HTML.output_document( List( HTML.style(HTML.fonts_css(fonts_url) + "\n\n" + File.read(HTML.isabelle_css)), HTML.title(title)), List(HTML.source(body)), css = "", structural = false) def html_document(title: String, body: XML.Body): HTML_Document = HTML_Document(title, output_document(title, body)) } /* presentation elements */ sealed case class Elements( html: Markup.Elements = Markup.Elements.empty, entity: Markup.Elements = Markup.Elements.empty, language: Markup.Elements = Markup.Elements.empty) val elements1: Elements = Elements( html = Rendering.foreground_elements ++ Rendering.text_color_elements + Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE, entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT, Markup.CLASS, Markup.LOCALE, Markup.FREE)) val elements2: Elements = Elements( html = elements1.html ++ Rendering.markdown_elements, language = Markup.Elements(Markup.Language.DOCUMENT)) /* formal entities */ type Entity = Export_Theory.Entity[Export_Theory.No_Content] case class Entity_Context(node: Document.Node.Name) { val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty } /* HTML output */ private val div_elements = Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name, HTML.descr.name) def make_html( name: Document.Node.Name, elements: Elements, entity_anchor: (Entity_Context, Symbol.Range, XML.Body) => Option[XML.Tree], entity_link: (Entity_Context, Properties.T, XML.Body) => Option[XML.Tree], xml: XML.Body): XML.Body = { val context = Entity_Context(name) def html_div(html: XML.Body): Boolean = html exists { case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body) case XML.Text(_) => false } def html_class(c: String, html: XML.Body): XML.Body = if (c == "") html else if (html_div(html)) List(HTML.div(c, html)) else List(HTML.span(c, html)) def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) = xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) => val (res1, offset) = html_body_single(tree, end_offset1) (res1 ++ res, offset) } @tailrec def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) = xml_tree match { case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset) case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) => val (body1, offset) = html_body(body, end_offset) if (elements.entity(kind)) { entity_link(context, props, body1) match { case Some(link) => (List(link), offset) case None => (body1, offset) } } else (body1, offset) case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) => val (body1, offset) = html_body(body, end_offset) (html_class(if (elements.language(name)) name else "", body1), offset) case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => val (body1, offset) = html_body(body, end_offset) (List(HTML.par(body1)), offset) case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => val (body1, offset) = html_body(body, end_offset) (List(HTML.item(body1)), offset) case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) => (Nil, end_offset - XML.symbol_length(text)) case 
XML.Elem(Markup.Markdown_List(kind), body) => val (body1, offset) = html_body(body, end_offset) if (kind == Markup.ENUMERATE) (List(HTML.enum(body1)), offset) else (List(HTML.list(body1)), offset) case XML.Elem(markup, body) => val name = markup.name val (body1, offset) = html_body(body, end_offset) val html = markup.properties match { case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD => html_class(kind, body1) case _ => body1 } Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match { case Some(c) => (html_class(c.toString, html), offset) case None => (html_class(name, html), offset) } case XML.Text(text) => val offset = end_offset - Symbol.length(text) val body = HTML.text(Symbol.decode(text)) entity_anchor(context, Text.Range(offset, end_offset), body) match { case Some(body1) => (List(body1), offset) case None => (body, offset) } } html_body(xml, XML.symbol_length(xml) + 1)._1 } /* PIDE HTML document */ def html_document( resources: Resources, snapshot: Document.Snapshot, html_context: HTML_Context, elements: Elements, plain_text: Boolean = false): HTML_Document = { require(!snapshot.is_outdated, "document snapshot outdated") val name = snapshot.node_name if (plain_text) { val title = "File " + Symbol.cartouche_decoded(name.path.file_name) val body = HTML.text(snapshot.node.source) html_context.html_document(title, body) } else { resources.html_document(snapshot) getOrElse { val title = if (name.is_theory) "Theory " + quote(name.theory_base_name) else "File " + Symbol.cartouche_decoded(name.path.file_name) val xml = snapshot.xml_markup(elements = elements.html) val body = make_html(name, elements, (_, _, _) => None, (_, _, _) => None, xml) html_context.html_document(title, body) } } } /** HTML presentation **/ /* presentation context */ object Context { val none: Context = new Context { def enabled: Boolean = false } val standard: Context = new Context { def enabled: Boolean = true } def dir(path: Path): Context = new Context { def enabled: Boolean = true override def dir(store: Sessions.Store): Path = path } def make(s: String): Context = if (s == ":") standard else dir(Path.explode(s)) } abstract class Context private { def enabled: Boolean def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info def dir(store: Sessions.Store): Path = store.presentation_dir def dir(store: Sessions.Store, info: Sessions.Info): Path = dir(store) + Path.explode(info.chapter_session) } /* maintain chapter index */ private val sessions_path = Path.basic(".sessions") private def read_sessions(dir: Path): List[(String, String)] = { val path = dir + sessions_path if (path.is_file) { import XML.Decode._ list(pair(string, string))(Symbol.decode_yxml(File.read(path))) } else Nil } def update_chapter_index( presentation_dir: Path, chapter: String, new_sessions: List[(String, String)]): Unit = { val dir = Isabelle_System.make_directory(presentation_dir + Path.basic(chapter)) val sessions0 = try { read_sessions(dir) } catch { case _: XML.Error => Nil } val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList File.write(dir + sessions_path, { import XML.Encode._ YXML.string_of_body(list(pair(string, string))(sessions)) }) val title = "Isabelle/" + chapter + " sessions" HTML.write_document(dir, "index.html", List(HTML.title(title + Isabelle_System.isabelle_heading())), HTML.chapter(title) :: (if (sessions.isEmpty) Nil else List(HTML.div("sessions", List(HTML.description( sessions.map({ case (name, description) => val descr = 
Symbol.trim_blank_lines(description) (List(HTML.link(name + "/index.html", HTML.text(name))), if (descr == "") Nil else HTML.break ::: List(HTML.pre(HTML.text(descr)))) }))))))) } def update_global_index(presentation_dir: Path): Unit = { Isabelle_System.make_directory(presentation_dir) Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"), presentation_dir + Path.explode("isabelle.gif")) val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library" File.write(presentation_dir + Path.explode("index.html"), HTML.header + """ """ + HTML.head_meta + """ """ + title + """
[Isabelle]
""" + title + """

""" + File.read(Path.explode("~~/lib/html/library_index_content.template")) + """ """ + HTML.footer) } /* present session */ val session_graph_path = Path.explode("session_graph.pdf") val readme_path = Path.explode("README.html") val files_path = Path.explode("files") def html_name(name: Document.Node.Name): String = Path.explode(name.theory_base_name).html.implode def html_path(path: Path): String = (files_path + path.squash.html).implode def session_relative(deps: Sessions.Deps, session0: String, session1: String): Option[String] = { for { info0 <- deps.sessions_structure.get(session0) info1 <- deps.sessions_structure.get(session1) } yield info0.relative_path(info1) } def node_relative( deps: Sessions.Deps, session0: String, node_name: Document.Node.Name): Option[String] = { val session1 = deps(session0).theory_qualifier(node_name) session_relative(deps, session0, session1) } def theory_link(deps: Sessions.Deps, session0: String, name: Document.Node.Name, body: XML.Body, anchor: Option[String] = None): Option[XML.Tree] = { val session1 = deps(session0).theory_qualifier(name) val info0 = deps.sessions_structure.get(session0) val info1 = deps.sessions_structure.get(session1) val fragment = if (anchor.isDefined) "#" + anchor.get else "" if (info0.isDefined && info1.isDefined) { Some(HTML.link(info0.get.relative_path(info1.get) + html_name(name) + fragment, body)) } else None } def session_html( resources: Resources, session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress, verbose: Boolean = false, html_context: HTML_Context, elements: Elements, presentation: Context): Unit = { val hierarchy = deps.sessions_structure.hierarchy(session) val info = deps.sessions_structure(session) val options = info.options val base = deps(session) val session_dir = presentation.dir(db_context.store, info) html_context.init_fonts(session_dir) Bytes.write(session_dir + session_graph_path, graphview.Graph_File.make_pdf(options, base.session_graph_display)) val documents = for { doc <- info.document_variants document <- db_context.input_database(session)(Document_Build.read_document(_, _, doc.name)) } yield { if (verbose) progress.echo("Presenting document " + session + "/" + doc.name) Bytes.write(session_dir + doc.path.pdf, document.pdf) doc } val view_links = { val deps_link = HTML.link(session_graph_path, HTML.text("theory dependencies")) val readme_links = if ((info.dir + readme_path).is_file) { Isabelle_System.copy_file(info.dir + readme_path, session_dir + readme_path) List(HTML.link(readme_path, HTML.text("README"))) } else Nil val document_links = documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name))) Library.separate(HTML.break ::: HTML.nl, (deps_link :: readme_links ::: document_links). 
          map(link => HTML.text("View ") ::: List(link))).flatten
    }

    val theory_exports: Map[String, Export_Theory.Theory] =
      (for ((_, entry) <- base.known_theories.iterator) yield {
        val thy_name = entry.name.theory
        val theory =
          if (thy_name == Thy_Header.PURE) Export_Theory.no_theory
          else {
            html_context.cache_theory(thy_name,
              {
                val provider = Export.Provider.database_context(db_context, hierarchy, thy_name)
                Export_Theory.read_theory(
                  provider, session, thy_name, cache = html_context.term_cache)
              })
          }
        thy_name -> theory
      }).toMap

    val theories: List[XML.Body] =
    {
      var seen_files = List.empty[(Path, String, Document.Node.Name)]

      def node_file(node: Document.Node.Name): String =
        if (node.node.endsWith(".thy")) html_name(node) else html_path(Path.explode(node.node))

      def entity_anchor(
        context: Entity_Context, range: Symbol.Range, body: XML.Body): Option[XML.Elem] =
      {
        body match {
          case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
          case _ =>
            Some {
              val entities =
                theory_exports.get(context.node.theory).flatMap(_.entity_by_range.get(range))
                  .getOrElse(Nil)
              val body1 =
                if (context.seen_ranges.contains(range)) {
                  HTML.entity_def(HTML.span(HTML.id(html_context.offset_ref(range)), body))
                }
                else HTML.span(body)
              entities.map(_.kname).foldLeft(body1) {
                case (elem, id) => HTML.entity_def(HTML.span(HTML.id(id), List(elem)))
              }
            }
        }
      }

      def entity_ref(thy_name: String, kind: String, name: String): Option[String] =
        for {
          thy <- theory_exports.get(thy_name)
-          entity <- thy.entity_by_kname.get(Export_Theory.export_kind_name(kind, name))
+          entity <- thy.entity_by_kind_name.get((kind, name))
        } yield entity.kname

      def offset_ref(context: Entity_Context, thy_name: String, props: Properties.T): Option[String] =
      {
        if (thy_name == context.node.theory) {
          for {
            offset <- Position.Def_Offset.unapply(props)
            end_offset <- Position.Def_End_Offset.unapply(props)
            range = Text.Range(offset, end_offset)
          } yield {
            context.seen_ranges += range
            html_context.offset_ref(range)
          }
        }
        else None
      }

      def entity_link(
        context: Entity_Context, props: Properties.T, body: XML.Body): Option[XML.Elem] =
      {
        (props, props, props, props, props) match {
          case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file), _, _) =>
            val node_name = resources.file_node(Path.explode(thy_file), theory = theory)
            node_relative(deps, session, node_name).map(html_dir =>
              HTML.link(html_dir + html_name(node_name), body))
          case (Markup.Ref(_), Position.Def_File(thy_file), Position.Def_Theory(def_theory),
              Markup.Kind(kind), Markup.Name(name)) =>
            val thy_name = if (def_theory.nonEmpty) def_theory else context.node.theory
            val node_name = resources.file_node(Path.explode(thy_file), theory = thy_name)
            for {
              html_dir <- node_relative(deps, session, node_name)
              html_file = node_file(node_name)
              html_ref <-
                entity_ref(thy_name, kind, name).orElse(offset_ref(context, thy_name, props))
            }
            yield {
              HTML.entity_ref(HTML.link(html_dir + html_file + "#" + html_ref, body))
            }
          case _ => None
        }
      }

      sealed case class Theory(
        name: Document.Node.Name,
        command: Command,
        files_html: List[(Path, XML.Tree)],
        html: XML.Tree)

      def read_theory(name: Document.Node.Name): Option[Theory] =
      {
        progress.expose_interrupt()
        if (verbose) progress.echo("Presenting theory " + name)

        for (command <- Build_Job.read_theory(db_context, resources, session, name.theory))
        yield {
          val snapshot = Document.State.init.snippet(command)

          val files_html =
            for {
              (src_path, xml) <- snapshot.xml_markup_blobs(elements = elements.html)
              if xml.nonEmpty
            }
            yield {
              progress.expose_interrupt()
              if (verbose) progress.echo("Presenting file " + src_path)

              (src_path, html_context.source(
                make_html(name, elements, entity_anchor, entity_link, xml)))
            }

          val html =
            html_context.source(
              make_html(name, elements, entity_anchor, entity_link,
                snapshot.xml_markup(elements = elements.html)))

          Theory(name, command, files_html, html)
        }
      }

      for (thy <- Par_List.map(read_theory, base.session_theories).flatten)
      yield {
        val files =
          for { (src_path, file_html) <- thy.files_html }
          yield {
            val file_name = html_path(src_path)

            seen_files.find(p => p._1 == src_path || p._2 == file_name) match {
              case None => seen_files ::= (src_path, file_name, thy.name)
              case Some((_, _, thy_name1)) =>
                error("Incoherent use of file name " + src_path + " as " + quote(file_name) +
                  " in theory " + thy_name1 + " vs. " + thy.name)
            }

            val file_path = session_dir + Path.explode(file_name)
            html_context.init_fonts(file_path.dir)

            val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
            HTML.write_document(file_path.dir, file_path.file_name,
              List(HTML.title(file_title)), List(html_context.head(file_title), file_html))

            List(HTML.link(file_name, HTML.text(file_title)))
          }

        val thy_title = "Theory " + thy.name.theory_base_name
        HTML.write_document(session_dir, html_name(thy.name),
          List(HTML.title(thy_title)), List(html_context.head(thy_title), thy.html))

        List(HTML.link(html_name(thy.name),
          HTML.text(thy.name.theory_base_name) :::
          (if (files.isEmpty) Nil else List(HTML.itemize(files)))))
      }
    }

    val title = "Session " + session
    HTML.write_document(session_dir, "index.html",
      List(HTML.title(title + Isabelle_System.isabelle_heading())),
      html_context.head(title, List(HTML.par(view_links))) ::
        html_context.contents("Theories", theories))
  }
}
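

/* Usage sketch (not part of the diff above): the patch replaces the string-encoded key of
   entity_by_kname with a plain (kind, name) tuple key in entity_by_kind_name.  The helper
   below is hypothetical and only illustrates the lookup as it appears in entity_ref;
   `thy`, `kind`, and `name` stand for values obtained as in Presentation.session_html. */

def lookup_entity_sketch(
  thy: Export_Theory.Theory,
  kind: String,
  name: String): Option[Export_Theory.Entity[Export_Theory.No_Content]] =
{
  // before:  thy.entity_by_kname.get(Export_Theory.export_kind_name(kind, name))
  // after:   direct lookup in the tuple-keyed map introduced by this change
  thy.entity_by_kind_name.get((kind, name))
}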