diff --git a/src/Pure/Thy/export_theory.ML b/src/Pure/Thy/export_theory.ML --- a/src/Pure/Thy/export_theory.ML +++ b/src/Pure/Thy/export_theory.ML @@ -1,410 +1,424 @@ (* Title: Pure/Thy/export_theory.ML Author: Makarius Export foundational theory content and locale/class structure. *) signature EXPORT_THEORY = sig val setup_presentation: (Thy_Info.presentation_context -> theory -> unit) -> unit val export_body: theory -> string -> XML.body -> unit end; structure Export_Theory: EXPORT_THEORY = struct (* approximative syntax *) val get_syntax = Syntax.get_approx o Proof_Context.syn_of; fun get_syntax_type ctxt = get_syntax ctxt o Lexicon.mark_type; fun get_syntax_const ctxt = get_syntax ctxt o Lexicon.mark_const; fun get_syntax_fixed ctxt = get_syntax ctxt o Lexicon.mark_fixed; fun get_syntax_param ctxt loc x = let val thy = Proof_Context.theory_of ctxt in if Class.is_class thy loc then (case AList.lookup (op =) (Class.these_params thy [loc]) x of NONE => NONE | SOME (_, (c, _)) => get_syntax_const ctxt c) else get_syntax_fixed ctxt x end; val encode_syntax = XML.Encode.variant [fn NONE => ([], []), fn SOME (Syntax.Prefix delim) => ([delim], []), fn SOME (Syntax.Infix {assoc, delim, pri}) => let val ass = (case assoc of Printer.No_Assoc => 0 | Printer.Left_Assoc => 1 | Printer.Right_Assoc => 2); open XML.Encode Term_XML.Encode; in ([], triple int string int (ass, delim, pri)) end]; (* free variables: not declared in the context *) val is_free = not oo Name.is_declared; fun add_frees used = fold_aterms (fn Free (x, T) => is_free used x ? insert (op =) (x, T) | _ => I); fun add_tfrees used = (fold_types o fold_atyps) (fn TFree (a, S) => is_free used a ? insert (op =) (a, S) | _ => I); (* spec rules *) fun primrec_types ctxt const = Spec_Rules.retrieve ctxt (Const const) |> get_first (fn (Spec_Rules.Equational (Spec_Rules.Primrec types), _) => SOME (types, false) | (Spec_Rules.Equational (Spec_Rules.Primcorec types), _) => SOME (types, true) | _ => NONE) |> the_default ([], false); (* locales content *) fun locale_content thy loc = let val ctxt = Locale.init loc thy; val args = Locale.params_of thy loc |> map (fn ((x, T), _) => ((x, T), get_syntax_param ctxt loc x)); val axioms = let val (asm, defs) = Locale.specification_of thy loc; val cprops = map (Thm.cterm_of ctxt) (the_list asm @ defs); val (intro1, intro2) = Locale.intros_of thy loc; val intros_tac = Method.try_intros_tac ctxt (the_list intro1 @ the_list intro2) []; val res = Goal.init (Conjunction.mk_conjunction_balanced cprops) |> (ALLGOALS Goal.conjunction_tac THEN intros_tac) |> try Seq.hd; in (case res of SOME goal => Thm.prems_of goal | NONE => raise Fail ("Cannot unfold locale " ^ quote loc)) end; val typargs = rev (fold Term.add_tfrees (map (Free o #1) args @ axioms) []); in {typargs = typargs, args = args, axioms = axioms} end; fun get_locales thy = Locale.get_locales thy |> map_filter (fn loc => if Experiment.is_experiment thy loc then NONE else SOME (loc, ())); fun get_dependencies prev_thys thy = Locale.dest_dependencies prev_thys thy |> map_filter (fn dep => if Experiment.is_experiment thy (#source dep) orelse Experiment.is_experiment thy (#target dep) then NONE else let val (type_params, params) = Locale.parameters_of thy (#source dep); val typargs = fold (Term.add_tfreesT o #2 o #1) params type_params; val substT = typargs |> map_filter (fn v => let val T = TFree v; val T' = Morphism.typ (#morphism dep) T; in if T = T' then NONE else SOME (v, T') end); val subst = params |> map_filter (fn (v, _) => let val t = Free v; val 
t' = Morphism.term (#morphism dep) t; in if t aconv t' then NONE else SOME (v, t') end); in SOME (dep, (substT, subst)) end); (* general setup *) fun setup_presentation f = Theory.setup (Thy_Info.add_presentation (fn context => fn thy => if Options.bool (#options context) "export_theory" then f context thy else ())); fun export_buffer thy name buffer = if Buffer.is_empty buffer then () else Export.export thy (Path.binding0 (Path.make ["theory", name])) (Buffer.chunks buffer); fun export_body thy name elems = export_buffer thy name (YXML.buffer_body elems Buffer.empty); (* presentation *) val _ = setup_presentation (fn {adjust_pos, ...} => fn thy => let val parents = Theory.parents_of thy; val rep_tsig = Type.rep_tsig (Sign.tsig_of thy); val thy_ctxt = Proof_Context.init_global thy; (* entities *) fun make_entity_markup name xname pos serial = let val props = Position.offset_properties_of (adjust_pos pos) @ Position.id_properties_of pos @ Markup.serial_properties serial; in (Markup.entityN, (Markup.nameN, name) :: (Markup.xnameN, xname) :: props) end; fun entity_markup space name = let val xname = Name_Space.extern_shortest thy_ctxt space name; val {serial, pos, ...} = Name_Space.the_entry space name; in make_entity_markup name xname pos serial end; fun export_entities export_name export get_space decls = let val parent_spaces = map get_space parents; val space = get_space thy; in (decls, []) |-> fold (fn (name, decl) => if exists (fn space => Name_Space.declared space name) parent_spaces then I else (case export name decl of NONE => I | SOME body => cons (#serial (Name_Space.the_entry space name), XML.Elem (entity_markup space name, body)))) |> sort (int_ord o apply2 #1) |> map #2 |> export_body thy export_name end; (* types *) val encode_type = let open XML.Encode Term_XML.Encode in triple encode_syntax (list string) (option typ) end; fun export_type c (Type.LogicalType n) = SOME (encode_type (get_syntax_type thy_ctxt c, Name.invent Name.context Name.aT n, NONE)) | export_type c (Type.Abbreviation (args, U, false)) = SOME (encode_type (get_syntax_type thy_ctxt c, args, SOME U)) | export_type _ _ = NONE; val _ = export_entities "types" export_type Sign.type_space (Name_Space.dest_table (#types rep_tsig)); (* consts *) val consts = Sign.consts_of thy; val encode_term = Term_XML.Encode.term consts; val encode_const = let open XML.Encode Term_XML.Encode in pair encode_syntax (pair (list string) (pair typ (pair (option encode_term) (pair bool (pair (list string) bool))))) end; fun export_const c (T, abbrev) = let val syntax = get_syntax_const thy_ctxt c; val U = Logic.unvarifyT_global T; val U0 = Type.strip_sorts U; val recursion = primrec_types thy_ctxt (c, U); val abbrev' = abbrev |> Option.map (Proofterm.standard_vars_term Name.context #> map_types Type.strip_sorts); val args = map (#1 o dest_TFree) (Consts.typargs consts (c, U0)); val propositional = Object_Logic.is_propositional thy_ctxt (Term.body_type U0); in encode_const (syntax, (args, (U0, (abbrev', (propositional, recursion))))) end; val _ = export_entities "consts" (SOME oo export_const) Sign.const_space (#constants (Consts.dest consts)); (* axioms *) fun standard_prop used extra_shyps raw_prop raw_proof = let val (prop, proof) = Proofterm.standard_vars used (raw_prop, raw_proof); val args = rev (add_frees used prop []); val typargs = rev (add_tfrees used prop []); val used_typargs = fold (Name.declare o #1) typargs used; val sorts = Name.invent used_typargs Name.aT (length extra_shyps) ~~ extra_shyps; in ((sorts @ typargs, args, 
prop), proof) end; val encode_prop = let open XML.Encode Term_XML.Encode in triple (list (pair string sort)) (list (pair string typ)) encode_term end; fun encode_axiom used prop = encode_prop (#1 (standard_prop used [] prop NONE)); val _ = export_entities "axioms" (fn _ => fn t => SOME (encode_axiom Name.context t)) Theory.axiom_space (Theory.axioms_of thy); (* theorems and proof terms *) val export_standard_proofs = Options.default_bool @{system_option export_standard_proofs}; val clean_thm = Thm.check_hyps (Context.Theory thy) #> Thm.strip_shyps; val lookup_thm_id = Global_Theory.lookup_thm_id thy; fun proof_boxes_of thm thm_id = let val dep_boxes = thm |> Thm_Deps.proof_boxes (fn thm_id' => if #serial thm_id = #serial thm_id' then false else is_some (lookup_thm_id thm_id')); in dep_boxes @ [thm_id] end; fun expand_name thm_id (header: Proofterm.thm_header) = if #serial header = #serial thm_id then "" else (case lookup_thm_id (Proofterm.thm_header_id header) of NONE => "" | SOME thm_name => Thm_Name.print thm_name); fun entity_markup_thm (serial, (name, i)) = let val space = Facts.space_of (Global_Theory.facts_of thy); val xname = Name_Space.extern_shortest thy_ctxt space name; val {pos, ...} = Name_Space.the_entry space name; in make_entity_markup (Thm_Name.print (name, i)) (Thm_Name.print (xname, i)) pos serial end; fun encode_thm thm_id raw_thm = let val deps = map (Thm_Name.print o #2) (Thm_Deps.thm_deps thy [raw_thm]); val thm = clean_thm (Thm.unconstrainT raw_thm); val boxes = proof_boxes_of thm thm_id; val proof0 = if export_standard_proofs then Thm.standard_proof_of {full = true, expand_name = SOME o expand_name thm_id} thm else if Proofterm.export_enabled () then Thm.reconstruct_proof_of thm else MinProof; val (prop, SOME proof) = standard_prop Name.context (Thm.extra_shyps thm) (Thm.full_prop_of thm) (SOME proof0); val _ = Proofterm.commit_proof proof; in (prop, (deps, (boxes, proof))) |> let open XML.Encode Term_XML.Encode; fun encode_box {serial, theory_name} = pair int string (serial, theory_name); val encode_proof = Proofterm.encode_standard_proof consts; in pair encode_prop (pair (list string) (pair (list encode_box) encode_proof)) end end; fun buffer_export_thm (thm_id, thm_name) = let val markup = entity_markup_thm (#serial thm_id, thm_name); val thm = Global_Theory.get_thm_name thy (thm_name, Position.none); val body = encode_thm thm_id thm; in YXML.buffer (XML.Elem (markup, body)) end; val _ = Buffer.empty |> fold buffer_export_thm (Global_Theory.dest_thm_names thy) |> export_buffer thy "thms"; (* type classes *) val encode_class = let open XML.Encode Term_XML.Encode in pair (list (pair string typ)) (list (encode_axiom Name.context)) end; fun export_class name = (case try (Axclass.get_info thy) name of NONE => ([], []) | SOME {params, axioms, ...} => (params, map (Thm.plain_prop_of o clean_thm) axioms)) |> encode_class |> SOME; val _ = export_entities "classes" (fn name => fn () => export_class name) Sign.class_space (map (rpair ()) (Graph.keys (Sorts.classes_of (#2 (#classes rep_tsig))))); (* sort algebra *) local val prop = encode_axiom Name.context o Logic.varify_global; val encode_classrel = let open XML.Encode in list (pair prop (pair string string)) end; val encode_arities = let open XML.Encode Term_XML.Encode in list (pair prop (triple string (list sort) string)) end; in val export_classrel = maps (fn (c, cs) => map (pair c) cs) #> map (`Logic.mk_classrel) #> encode_classrel; val export_arities = map (`Logic.mk_arity) #> encode_arities; val {classrel, arities} = 
        Sorts.dest_algebra (map (#2 o #classes o Type.rep_tsig o Sign.tsig_of) parents)
           (#2 (#classes rep_tsig));
     end;

     val _ = if null classrel then () else export_body thy "classrel" (export_classrel classrel);
     val _ = if null arities then () else export_body thy "arities" (export_arities arities);


     (* locales *)

     fun encode_locale used =
       let open XML.Encode Term_XML.Encode in
         triple (list (pair string sort)) (list (pair (pair string typ) encode_syntax))
           (list (encode_axiom used))
       end;

     fun export_locale loc =
       let
         val {typargs, args, axioms} = locale_content thy loc;
         val used = fold Name.declare (map #1 typargs @ map (#1 o #1) args) Name.context;
       in encode_locale used (typargs, args, axioms) end
       handle ERROR msg =>
         cat_error msg ("The error(s) above occurred in locale " ^
           quote (Locale.markup_name thy_ctxt loc));

     val _ =
       export_entities "locales" (fn loc => fn () => SOME (export_locale loc))
         Locale.locale_space (get_locales thy);


     (* locale dependencies *)

     fun encode_locale_dependency (dep: Locale.locale_dependency, subst) =
       (#source dep, (#target dep, (#prefix dep, subst))) |>
         let
           open XML.Encode Term_XML.Encode;
           val encode_subst =
             pair (list (pair (pair string sort) typ)) (list (pair (pair string typ) (term consts)));
         in pair string (pair string (pair (list (pair string bool)) encode_subst)) end;

     val _ =
       get_dependencies parents thy |> map_index (fn (i, dep) =>
         let
           val xname = string_of_int (i + 1);
           val name = Long_Name.implode [Context.theory_name thy, xname];
           val markup = make_entity_markup name xname (#pos (#1 dep)) (#serial (#1 dep));
           val body = encode_locale_dependency dep;
         in XML.Elem (markup, body) end)
       |> export_body thy "locale_dependencies";


+    (* constdefs *)
+
+    val constdefs =
+      Defs.dest_constdefs (map Theory.defs_of (Theory.parents_of thy)) (Theory.defs_of thy)
+      |> sort_by #1;
+
+    val encode_constdefs =
+      let open XML.Encode
+      in list (pair string string) end;
+
+    val _ =
+      if null constdefs then () else export_body thy "constdefs" (encode_constdefs constdefs);
+
+
     (* parents *)

     val _ =
       Export.export thy \<^path_binding>\<open>theory/parents\<close>
         [YXML.string_of_body (XML.Encode.string (cat_lines (map Context.theory_long_name parents)))];

   in () end);

 end;
diff --git a/src/Pure/Thy/export_theory.scala b/src/Pure/Thy/export_theory.scala
--- a/src/Pure/Thy/export_theory.scala
+++ b/src/Pure/Thy/export_theory.scala
@@ -1,584 +1,609 @@
 /*  Title:      Pure/Thy/export_theory.scala
     Author:     Makarius

 Export foundational theory content.
*/ package isabelle object Export_Theory { /** session content **/ sealed case class Session(name: String, theory_graph: Graph[String, Option[Theory]]) { override def toString: String = name def theory(theory_name: String): Option[Theory] = if (theory_graph.defined(theory_name)) theory_graph.get_node(theory_name) else None def theories: List[Theory] = theory_graph.topological_order.flatMap(theory(_)) } def read_session( store: Sessions.Store, sessions_structure: Sessions.Structure, session_name: String, progress: Progress = No_Progress, types: Boolean = true, consts: Boolean = true, axioms: Boolean = true, thms: Boolean = true, classes: Boolean = true, locales: Boolean = true, locale_dependencies: Boolean = true, classrel: Boolean = true, arities: Boolean = true, + constdefs: Boolean = true, typedefs: Boolean = true, cache: Term.Cache = Term.make_cache()): Session = { val thys = sessions_structure.build_requirements(List(session_name)).flatMap(session => using(store.open_database(session))(db => { db.transaction { for (theory <- Export.read_theory_names(db, session)) yield { progress.echo("Reading theory " + theory) read_theory(Export.Provider.database(db, session, theory), session, theory, types = types, consts = consts, axioms = axioms, thms = thms, classes = classes, locales = locales, locale_dependencies = locale_dependencies, classrel = classrel, arities = arities, - typedefs = typedefs, cache = Some(cache)) + constdefs = constdefs, typedefs = typedefs, cache = Some(cache)) } } })) val graph0 = (Graph.string[Option[Theory]] /: thys) { case (g, thy) => g.default_node(thy.name, Some(thy)) } val graph1 = (graph0 /: thys) { case (g0, thy) => (g0 /: thy.parents) { case (g1, parent) => g1.default_node(parent, None).add_edge_acyclic(parent, thy.name) } } Session(session_name, graph1) } /** theory content **/ val export_prefix: String = "theory/" val export_prefix_proofs: String = "proofs/" sealed case class Theory(name: String, parents: List[String], types: List[Type], consts: List[Const], axioms: List[Axiom], thms: List[Thm], classes: List[Class], locales: List[Locale], locale_dependencies: List[Locale_Dependency], classrel: List[Classrel], arities: List[Arity], + constdefs: List[Constdef], typedefs: List[Typedef]) { override def toString: String = name def entity_iterator: Iterator[Entity] = types.iterator.map(_.entity) ++ consts.iterator.map(_.entity) ++ axioms.iterator.map(_.entity) ++ thms.iterator.map(_.entity) ++ classes.iterator.map(_.entity) ++ locales.iterator.map(_.entity) ++ locale_dependencies.iterator.map(_.entity) def cache(cache: Term.Cache): Theory = Theory(cache.string(name), parents.map(cache.string(_)), types.map(_.cache(cache)), consts.map(_.cache(cache)), axioms.map(_.cache(cache)), thms.map(_.cache(cache)), classes.map(_.cache(cache)), locales.map(_.cache(cache)), locale_dependencies.map(_.cache(cache)), classrel.map(_.cache(cache)), arities.map(_.cache(cache)), + constdefs.map(_.cache(cache)), typedefs.map(_.cache(cache))) } def read_theory(provider: Export.Provider, session_name: String, theory_name: String, types: Boolean = true, consts: Boolean = true, axioms: Boolean = true, thms: Boolean = true, classes: Boolean = true, locales: Boolean = true, locale_dependencies: Boolean = true, classrel: Boolean = true, arities: Boolean = true, + constdefs: Boolean = true, typedefs: Boolean = true, cache: Option[Term.Cache] = None): Theory = { val parents = if (theory_name == Thy_Header.PURE) Nil else { provider(export_prefix + "parents") match { case Some(entry) => 
split_lines(entry.uncompressed().text) case None => error("Missing theory export in session " + quote(session_name) + ": " + quote(theory_name)) } } val theory = Theory(theory_name, parents, if (types) read_types(provider) else Nil, if (consts) read_consts(provider) else Nil, if (axioms) read_axioms(provider) else Nil, if (thms) read_thms(provider) else Nil, if (classes) read_classes(provider) else Nil, if (locales) read_locales(provider) else Nil, if (locale_dependencies) read_locale_dependencies(provider) else Nil, if (classrel) read_classrel(provider) else Nil, if (arities) read_arities(provider) else Nil, + if (constdefs) read_constdefs(provider) else Nil, if (typedefs) read_typedefs(provider) else Nil) if (cache.isDefined) theory.cache(cache.get) else theory } def read_pure[A](store: Sessions.Store, read: (Export.Provider, String, String) => A): A = { val session_name = Thy_Header.PURE val theory_name = Thy_Header.PURE using(store.open_database(session_name))(db => { db.transaction { read(Export.Provider.database(db, session_name, theory_name), session_name, theory_name) } }) } def read_pure_theory(store: Sessions.Store, cache: Option[Term.Cache] = None): Theory = read_pure(store, read_theory(_, _, _, cache = cache)) def read_pure_proof( store: Sessions.Store, id: Thm_Id, cache: Option[Term.Cache] = None): Option[Proof] = read_pure(store, (provider, _, _) => read_proof(provider, id, cache = cache)) /* entities */ object Kind extends Enumeration { val TYPE = Value("type") val CONST = Value("const") val AXIOM = Value("axiom") val THM = Value("thm") val PROOF = Value("proof") val CLASS = Value("class") val LOCALE = Value("locale") val LOCALE_DEPENDENCY = Value("locale_dependency") val DOCUMENT_HEADING = Value("document_heading") val DOCUMENT_TEXT = Value("document_text") val PROOF_TEXT = Value("proof_text") } sealed case class Entity( kind: Kind.Value, name: String, xname: String, pos: Position.T, id: Option[Long], serial: Long) { override def toString: String = kind.toString + " " + quote(name) def cache(cache: Term.Cache): Entity = Entity(kind, cache.string(name), cache.string(xname), cache.position(pos), id, serial) } def decode_entity(kind: Kind.Value, tree: XML.Tree): (Entity, XML.Body) = { def err(): Nothing = throw new XML.XML_Body(List(tree)) tree match { case XML.Elem(Markup(Markup.ENTITY, props), body) => val name = Markup.Name.unapply(props) getOrElse err() val xname = Markup.XName.unapply(props) getOrElse err() val pos = props.filter({ case (a, _) => Markup.POSITION_PROPERTIES(a) && a != Markup.ID }) val id = Position.Id.unapply(props) val serial = Markup.Serial.unapply(props) getOrElse err() (Entity(kind, name, xname, pos, id, serial), body) case _ => err() } } /* approximative syntax */ object Assoc extends Enumeration { val NO_ASSOC, LEFT_ASSOC, RIGHT_ASSOC = Value } sealed abstract class Syntax case object No_Syntax extends Syntax case class Prefix(delim: String) extends Syntax case class Infix(assoc: Assoc.Value, delim: String, pri: Int) extends Syntax def decode_syntax: XML.Decode.T[Syntax] = XML.Decode.variant(List( { case (Nil, Nil) => No_Syntax }, { case (List(delim), Nil) => Prefix(delim) }, { case (Nil, body) => import XML.Decode._ val (ass, delim, pri) = triple(int, string, int)(body) Infix(Assoc(ass), delim, pri) })) /* types */ sealed case class Type( entity: Entity, syntax: Syntax, args: List[String], abbrev: Option[Term.Typ]) { def cache(cache: Term.Cache): Type = Type(entity.cache(cache), syntax, args.map(cache.string(_)), abbrev.map(cache.typ(_))) } def 
read_types(provider: Export.Provider): List[Type] = provider.uncompressed_yxml(export_prefix + "types").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.TYPE, tree) val (syntax, args, abbrev) = { import XML.Decode._ triple(decode_syntax, list(string), option(Term_XML.Decode.typ))(body) } Type(entity, syntax, args, abbrev) }) /* consts */ sealed case class Const( entity: Entity, syntax: Syntax, typargs: List[String], typ: Term.Typ, abbrev: Option[Term.Term], propositional: Boolean, primrec_types: List[String], corecursive: Boolean) { def cache(cache: Term.Cache): Const = Const(entity.cache(cache), syntax, typargs.map(cache.string(_)), cache.typ(typ), abbrev.map(cache.term(_)), propositional, primrec_types.map(cache.string(_)), corecursive) } def read_consts(provider: Export.Provider): List[Const] = provider.uncompressed_yxml(export_prefix + "consts").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.CONST, tree) val (syntax, (args, (typ, (abbrev, (propositional, (primrec_types, corecursive)))))) = { import XML.Decode._ pair(decode_syntax, pair(list(string), pair(Term_XML.Decode.typ, pair(option(Term_XML.Decode.term), pair(bool, pair(list(string), bool))))))(body) } Const(entity, syntax, args, typ, abbrev, propositional, primrec_types, corecursive) }) /* axioms */ sealed case class Prop( typargs: List[(String, Term.Sort)], args: List[(String, Term.Typ)], term: Term.Term) { def cache(cache: Term.Cache): Prop = Prop( typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }), cache.term(term)) } def decode_prop(body: XML.Body): Prop = { val (typargs, args, t) = { import XML.Decode._ import Term_XML.Decode._ triple(list(pair(string, sort)), list(pair(string, typ)), term)(body) } Prop(typargs, args, t) } sealed case class Axiom(entity: Entity, prop: Prop) { def cache(cache: Term.Cache): Axiom = Axiom(entity.cache(cache), prop.cache(cache)) } def read_axioms(provider: Export.Provider): List[Axiom] = provider.uncompressed_yxml(export_prefix + "axioms").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.AXIOM, tree) val prop = decode_prop(body) Axiom(entity, prop) }) /* theorems */ sealed case class Thm_Id(serial: Long, theory_name: String) { def pure: Boolean = theory_name == Thy_Header.PURE def cache(cache: Term.Cache): Thm_Id = Thm_Id(serial, cache.string(theory_name)) } sealed case class Thm( entity: Entity, prop: Prop, deps: List[String], proof_boxes: List[Thm_Id], proof: Term.Proof) { def cache(cache: Term.Cache): Thm = Thm( entity.cache(cache), prop.cache(cache), deps.map(cache.string _), proof_boxes.map(_.cache(cache)), cache.proof(proof)) } def read_thms(provider: Export.Provider): List[Thm] = provider.uncompressed_yxml(export_prefix + "thms").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.THM, tree) val (prop, (deps, (prf_boxes, prf))) = { import XML.Decode._ import Term_XML.Decode._ def thm_id(body: XML.Body): Thm_Id = { val (serial, theory_name) = pair(long, string)(body) Thm_Id(serial, theory_name) } pair(decode_prop _, pair(list(string), pair(list(thm_id), proof)))(body) } Thm(entity, prop, deps, prf_boxes, prf) }) sealed case class Proof( typargs: List[(String, Term.Sort)], args: List[(String, Term.Typ)], term: Term.Term, proof: Term.Proof) { def prop: Prop = Prop(typargs, args, term) def cache(cache: Term.Cache): Proof = Proof( typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case (name, 
typ) => (cache.string(name), cache.typ(typ)) }), cache.term(term), cache.proof(proof)) } def read_proof( provider: Export.Provider, id: Thm_Id, cache: Option[Term.Cache] = None): Option[Proof] = { for { entry <- provider.focus(id.theory_name)(export_prefix_proofs + id.serial) } yield { val body = entry.uncompressed_yxml() val (typargs, (args, (prop_body, proof_body))) = { import XML.Decode._ import Term_XML.Decode._ pair(list(pair(string, sort)), pair(list(pair(string, typ)), pair(x => x, x => x)))(body) } val env = args.toMap val prop = Term_XML.Decode.term_env(env)(prop_body) val proof = Term_XML.Decode.proof_env(env)(proof_body) val result = Proof(typargs, args, prop, proof) cache.map(result.cache(_)) getOrElse result } } /* type classes */ sealed case class Class( entity: Entity, params: List[(String, Term.Typ)], axioms: List[Prop]) { def cache(cache: Term.Cache): Class = Class(entity.cache(cache), params.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }), axioms.map(_.cache(cache))) } def read_classes(provider: Export.Provider): List[Class] = provider.uncompressed_yxml(export_prefix + "classes").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.CLASS, tree) val (params, axioms) = { import XML.Decode._ import Term_XML.Decode._ pair(list(pair(string, typ)), list(decode_prop))(body) } Class(entity, params, axioms) }) /* locales */ sealed case class Locale( entity: Entity, typargs: List[(String, Term.Sort)], args: List[((String, Term.Typ), Syntax)], axioms: List[Prop]) { def cache(cache: Term.Cache): Locale = Locale(entity.cache(cache), typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }), args.map({ case ((name, typ), syntax) => ((cache.string(name), cache.typ(typ)), syntax) }), axioms.map(_.cache(cache))) } def read_locales(provider: Export.Provider): List[Locale] = provider.uncompressed_yxml(export_prefix + "locales").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.LOCALE, tree) val (typargs, args, axioms) = { import XML.Decode._ import Term_XML.Decode._ triple(list(pair(string, sort)), list(pair(pair(string, typ), decode_syntax)), list(decode_prop))(body) } Locale(entity, typargs, args, axioms) }) /* locale dependencies */ sealed case class Locale_Dependency( entity: Entity, source: String, target: String, prefix: List[(String, Boolean)], subst_types: List[((String, Term.Sort), Term.Typ)], subst_terms: List[((String, Term.Typ), Term.Term)]) { def cache(cache: Term.Cache): Locale_Dependency = Locale_Dependency(entity.cache(cache), cache.string(source), cache.string(target), prefix.map({ case (name, mandatory) => (cache.string(name), mandatory) }), subst_types.map({ case ((a, s), ty) => ((cache.string(a), cache.sort(s)), cache.typ(ty)) }), subst_terms.map({ case ((x, ty), t) => ((cache.string(x), cache.typ(ty)), cache.term(t)) })) def is_inclusion: Boolean = subst_types.isEmpty && subst_terms.isEmpty } def read_locale_dependencies(provider: Export.Provider): List[Locale_Dependency] = provider.uncompressed_yxml(export_prefix + "locale_dependencies").map((tree: XML.Tree) => { val (entity, body) = decode_entity(Kind.LOCALE_DEPENDENCY, tree) val (source, (target, (prefix, (subst_types, subst_terms)))) = { import XML.Decode._ import Term_XML.Decode._ pair(string, pair(string, pair(list(pair(string, bool)), pair(list(pair(pair(string, sort), typ)), list(pair(pair(string, typ), term))))))(body) } Locale_Dependency(entity, source, target, prefix, subst_types, subst_terms) }) /* sort algebra */ sealed case class 
   Classrel(class1: String, class2: String, prop: Prop)
   {
     def cache(cache: Term.Cache): Classrel =
       Classrel(cache.string(class1), cache.string(class2), prop.cache(cache))
   }

   def read_classrel(provider: Export.Provider): List[Classrel] =
   {
     val body = provider.uncompressed_yxml(export_prefix + "classrel")
     val classrel =
     {
       import XML.Decode._
       list(pair(decode_prop, pair(string, string)))(body)
     }
     for ((prop, (c1, c2)) <- classrel) yield Classrel(c1, c2, prop)
   }

   sealed case class Arity(type_name: String, domain: List[Term.Sort], codomain: String, prop: Prop)
   {
     def cache(cache: Term.Cache): Arity =
       Arity(cache.string(type_name), domain.map(cache.sort(_)), cache.string(codomain),
         prop.cache(cache))
   }

   def read_arities(provider: Export.Provider): List[Arity] =
   {
     val body = provider.uncompressed_yxml(export_prefix + "arities")
     val arities =
     {
       import XML.Decode._
       import Term_XML.Decode._
       list(pair(decode_prop, triple(string, list(sort), string)))(body)
     }
     for ((prop, (a, b, c)) <- arities) yield Arity(a, b, c, prop)
   }


+  /* Pure constdefs */
+
+  sealed case class Constdef(name: String, axiom_name: String)
+  {
+    def cache(cache: Term.Cache): Constdef =
+      Constdef(cache.string(name), cache.string(axiom_name))
+  }
+
+  def read_constdefs(provider: Export.Provider): List[Constdef] =
+  {
+    val body = provider.uncompressed_yxml(export_prefix + "constdefs")
+    val constdefs =
+    {
+      import XML.Decode._
+      list(pair(string, string))(body)
+    }
+    for ((name, axiom_name) <- constdefs) yield Constdef(name, axiom_name)
+  }
+
+
   /* HOL typedefs */

   sealed case class Typedef(name: String,
     rep_type: Term.Typ, abs_type: Term.Typ, rep_name: String, abs_name: String, axiom_name: String)
   {
     def cache(cache: Term.Cache): Typedef =
       Typedef(cache.string(name),
         cache.typ(rep_type),
         cache.typ(abs_type),
         cache.string(rep_name),
         cache.string(abs_name),
         cache.string(axiom_name))
   }

   def read_typedefs(provider: Export.Provider): List[Typedef] =
   {
     val body = provider.uncompressed_yxml(export_prefix + "typedefs")
     val typedefs =
     {
       import XML.Decode._
       import Term_XML.Decode._
       list(pair(string, pair(typ, pair(typ, pair(string, pair(string, string))))))(body)
     }
     for { (name, (rep_type, (abs_type, (rep_name, (abs_name, axiom_name))))) <- typedefs }
     yield Typedef(name, rep_type, abs_type, rep_name, abs_name, axiom_name)
   }
 }
diff --git a/src/Pure/defs.ML b/src/Pure/defs.ML
--- a/src/Pure/defs.ML
+++ b/src/Pure/defs.ML
@@ -1,269 +1,283 @@
 (*  Title:      Pure/defs.ML
     Author:     Makarius

 Global well-formedness checks for overloaded definitions (mixed constants and
 types). Recall that constant definitions may be explained syntactically within
 Pure, but type definitions require particular set-theoretic semantics.
*) signature DEFS = sig datatype item_kind = Const | Type type item = item_kind * string type entry = item * typ list val item_kind_ord: item_kind ord val plain_args: typ list -> bool type context = Proof.context * (Name_Space.T * Name_Space.T) val global_context: theory -> context val space: context -> item_kind -> Name_Space.T val pretty_item: context -> item -> Pretty.T val pretty_args: Proof.context -> typ list -> Pretty.T list val pretty_entry: context -> entry -> Pretty.T type T type spec = {def: string option, description: string, pos: Position.T, lhs: typ list, rhs: entry list} val all_specifications_of: T -> (item * spec list) list val specifications_of: T -> item -> spec list val dest: T -> {restricts: (entry * string) list, reducts: (entry * entry list) list} + val dest_constdefs: T list -> T -> (string * string) list val empty: T val merge: context -> T * T -> T val define: context -> bool -> string option -> string -> entry -> entry list -> T -> T val get_deps: T -> item -> (typ list * entry list) list end; structure Defs: DEFS = struct (* specification items *) datatype item_kind = Const | Type; type item = item_kind * string; type entry = item * typ list; fun item_kind_ord (Const, Type) = LESS | item_kind_ord (Type, Const) = GREATER | item_kind_ord _ = EQUAL; structure Itemtab = Table(type key = item val ord = prod_ord item_kind_ord fast_string_ord); (* pretty printing *) type context = Proof.context * (Name_Space.T * Name_Space.T); fun global_context thy = (Syntax.init_pretty_global thy, (Sign.const_space thy, Sign.type_space thy)); fun space ((_, spaces): context) kind = if kind = Const then #1 spaces else #2 spaces; fun pretty_item (context as (ctxt, _)) (kind, name) = let val prt_name = Name_Space.pretty ctxt (space context kind) name in if kind = Const then prt_name else Pretty.block [Pretty.keyword1 "type", Pretty.brk 1, prt_name] end; fun pretty_args ctxt args = if null args then [] else [Pretty.list "(" ")" (map (Syntax.pretty_typ ctxt o Logic.unvarifyT_global) args)]; fun pretty_entry context (c, args) = Pretty.block (pretty_item context c :: pretty_args (#1 context) args); (* type arguments *) fun plain_args args = forall Term.is_TVar args andalso not (has_duplicates (op =) args); fun disjoint_args (Ts, Us) = not (Type.could_unifys (Ts, Us)) orelse ((Type.raw_unifys (Ts, map (Logic.incr_tvar (maxidx_of_typs Ts + 1)) Us) Vartab.empty; false) handle Type.TUNIFY => true); fun match_args (Ts, Us) = if Type.could_matches (Ts, Us) then Option.map Envir.subst_type (SOME (Type.raw_matches (Ts, Us) Vartab.empty) handle Type.TYPE_MATCH => NONE) else NONE; (* datatype defs *) type spec = {def: string option, description: string, pos: Position.T, lhs: typ list, rhs: entry list}; type def = {specs: spec Inttab.table, (*source specifications*) restricts: (typ list * string) list, (*global restrictions imposed by incomplete patterns*) reducts: (typ list * entry list) list}; (*specifications as reduction system*) fun make_def (specs, restricts, reducts) = {specs = specs, restricts = restricts, reducts = reducts}: def; fun map_def c f = Itemtab.default (c, make_def (Inttab.empty, [], [])) #> Itemtab.map_entry c (fn {specs, restricts, reducts}: def => make_def (f (specs, restricts, reducts))); datatype T = Defs of def Itemtab.table; fun lookup_list which defs c = (case Itemtab.lookup defs c of SOME (def: def) => which def | NONE => []); fun all_specifications_of (Defs defs) = (map o apsnd) (map snd o Inttab.dest o #specs) (Itemtab.dest defs); fun specifications_of (Defs defs) = 
lookup_list (map snd o Inttab.dest o #specs) defs; val restricts_of = lookup_list #restricts; val reducts_of = lookup_list #reducts; fun dest (Defs defs) = let val restricts = Itemtab.fold (fn (c, {restricts, ...}) => fold (fn (args, description) => cons ((c, args), description)) restricts) defs []; val reducts = Itemtab.fold (fn (c, {reducts, ...}) => fold (fn (args, deps) => cons ((c, args), deps)) reducts) defs []; in {restricts = restricts, reducts = reducts} end; +fun dest_constdefs prevs (Defs defs) = + let + fun prev_spec c i = prevs |> exists (fn Defs prev_defs => + (case Itemtab.lookup prev_defs c of + NONE => false + | SOME {specs, ...} => Inttab.defined specs i)); + in + (defs, []) |-> Itemtab.fold (fn (c, {specs, ...}) => + specs |> Inttab.fold (fn (i, spec) => + if #1 c = Const andalso is_some (#def spec) andalso not (prev_spec c i) + then cons (#2 c, the (#def spec)) else I)) + end; + val empty = Defs Itemtab.empty; (* specifications *) fun disjoint_specs context c (i, {description = a, pos = pos_a, lhs = Ts, ...}: spec) = Inttab.forall (fn (j, {description = b, pos = pos_b, lhs = Us, ...}: spec) => i = j orelse disjoint_args (Ts, Us) orelse error ("Clash of specifications for " ^ Pretty.unformatted_string_of (pretty_item context c) ^ ":\n" ^ " " ^ quote a ^ Position.here pos_a ^ "\n" ^ " " ^ quote b ^ Position.here pos_b)); fun join_specs context c ({specs = specs1, restricts, reducts}, {specs = specs2, ...}: def) = let val specs' = Inttab.fold (fn spec2 => (disjoint_specs context c spec2 specs1; Inttab.update spec2)) specs2 specs1; in make_def (specs', restricts, reducts) end; fun update_specs context c spec = map_def c (fn (specs, restricts, reducts) => (disjoint_specs context c spec specs; (Inttab.update spec specs, restricts, reducts))); (* normalized dependencies: reduction with well-formedness check *) local val prt = Pretty.string_of oo pretty_entry; fun err context (c, Ts) (d, Us) s1 s2 = error (s1 ^ " dependency of " ^ prt context (c, Ts) ^ " -> " ^ prt context (d, Us) ^ s2); fun acyclic context (c, Ts) (d, Us) = c <> d orelse is_none (match_args (Ts, Us)) orelse err context (c, Ts) (d, Us) "Circular" ""; fun reduction context defs const deps = let fun reduct Us (Ts, rhs) = (case match_args (Ts, Us) of NONE => NONE | SOME subst => SOME (map (apsnd (map subst)) rhs)); fun reducts (d, Us) = get_first (reduct Us) (reducts_of defs d); val reds = map (`reducts) deps; val deps' = if forall (is_none o #1) reds then NONE else SOME (fold_rev (fn (NONE, dp) => insert (op =) dp | (SOME dps, _) => fold (insert (op =)) dps) reds []); val _ = forall (acyclic context const) (the_default deps deps'); in deps' end; fun restriction context defs (c, Ts) (d, Us) = plain_args Us orelse (case find_first (fn (Rs, _) => not (disjoint_args (Rs, Us))) (restricts_of defs d) of SOME (Rs, description) => err context (c, Ts) (d, Us) "Malformed" ("\n(restriction " ^ prt context (d, Rs) ^ " from " ^ quote description ^ ")") | NONE => true); in fun normalize context = let fun check_def defs (c, {reducts, ...}: def) = reducts |> forall (fn (Ts, deps) => forall (restriction context defs (c, Ts)) deps); fun check_defs defs = Itemtab.forall (check_def defs) defs; fun norm_update (c, {reducts, ...}: def) (changed, defs) = let val reducts' = reducts |> map (fn (Ts, deps) => (Ts, perhaps (reduction context defs (c, Ts)) deps)); in if reducts = reducts' then (changed, defs) else (true, defs |> map_def c (fn (specs, restricts, _) => (specs, restricts, reducts'))) end; fun norm_loop defs = (case Itemtab.fold 
           norm_update defs (false, defs) of
         (true, defs') => norm_loop defs'
       | (false, _) => defs);
   in norm_loop #> tap check_defs end;

 fun dependencies context (c, args) restr deps =
   map_def c (fn (specs, restricts, reducts) =>
     let
       val restricts' = Library.merge (op =) (restricts, restr);
       val reducts' = insert (op =) (args, deps) reducts;
     in (specs, restricts', reducts') end)
   #> normalize context;

 end;


 (* merge *)

 fun merge context (Defs defs1, Defs defs2) =
   let
     fun add_deps (c, args) restr deps defs =
       if AList.defined (op =) (reducts_of defs c) args then defs
       else dependencies context (c, args) restr deps defs;
     fun add_def (c, {restricts, reducts, ...}: def) =
       fold (fn (args, deps) => add_deps (c, args) restricts deps) reducts;
   in
     Defs (Itemtab.join (join_specs context) (defs1, defs2)
       |> normalize context
       |> Itemtab.fold add_def defs2)
   end;


 (* define *)

 fun define context unchecked def description (c, args) deps (Defs defs) =
   let
     val pos = Position.thread_data ();
     val restr =
       if plain_args args orelse
         (case args of [Term.Type (_, rec_args)] => plain_args rec_args | _ => false)
       then [] else [(args, description)];
     val spec =
       (serial (), {def = def, description = description, pos = pos, lhs = args, rhs = deps});
     val defs' = defs |> update_specs context c spec;
   in Defs (defs' |> (if unchecked then I else dependencies context (c, args) restr deps)) end;

 fun get_deps (Defs defs) c = reducts_of defs c;

 end;
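
Usage note (not part of the patch): the new "constdefs" export is a plain list of
(constant name, defining axiom name) pairs, encoded on the ML side with
XML.Encode list/pair/string and decoded on the Scala side by read_constdefs.
Below is a minimal Isabelle/Scala sketch of how a client might consume it,
assuming a Theory value obtained via Export_Theory.read_theory or read_session
with constdefs = true; the helper name print_constdefs is hypothetical.

  // Print each constant together with the axiom that defines it,
  // using the Constdef(name, axiom_name) records added by this patch.
  def print_constdefs(theory: isabelle.Export_Theory.Theory): Unit =
    for (constdef <- theory.constdefs)
      println(constdef.name + " is defined by axiom " + constdef.axiom_name)

Since dest_constdefs skips specifications already present in parent theories,
each theory's "constdefs" export only lists definitions introduced by that theory.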