diff --git a/thys/Dependent_SIFUM_Type_Systems/Examples/TypeSystemTactics.thy b/thys/Dependent_SIFUM_Type_Systems/Examples/TypeSystemTactics.thy
--- a/thys/Dependent_SIFUM_Type_Systems/Examples/TypeSystemTactics.thy
+++ b/thys/Dependent_SIFUM_Type_Systems/Examples/TypeSystemTactics.thy
@@ -1,171 +1,171 @@
theory TypeSystemTactics
imports "../Compositionality" "../TypeSystem" "HOL-Eisbach.Eisbach_Tools"
begin

(* Some Eisbach magic to get around Eisbach and locales not getting along *)
ML \
structure Data = Proof_Data
(
type T = morphism
fun init _ = Morphism.identity;
);
\

method_setup wrap = \Method.text_closure >> (fn text => fn ctxt =>
let
val morph = Data.get ctxt;
- fun safe_fact thm =
- perhaps (try (Morphism.thm morph)) thm;
+ fun safe_fact thms =
+ perhaps (try (Morphism.fact morph)) thms;
val morph' = Morphism.morphism "safe"
- {binding = [Morphism.binding morph],
- fact = [map safe_fact],
- term = [Morphism.term morph],
- typ = [Morphism.typ morph]}
+ {binding = [K (Morphism.binding morph)],
+ fact = [K safe_fact],
+ term = [K (Morphism.term morph)],
+ typ = [K (Morphism.typ morph)]}
val text' = Method.map_source (map (Token.transform morph')) text;
in Method.evaluate_runtime text' ctxt end)\

method_setup print_headgoal =
\Scan.succeed (fn ctxt =>
SIMPLE_METHOD (SUBGOAL (fn (t,_) =>
(Output.writeln (Pretty.string_of (Syntax.pretty_term ctxt t)); all_tac)) 1))\

named_theorems aexpr and bexpr and prog and custom_if

context sifum_types_assign begin

(* More Eisbach magic to get around Eisbach and mutual recursion not getting along *)
method_setup call_by_name =
\Scan.lift Parse.string >> (fn str => fn ctxt =>
case (try (Method.check_src ctxt) (Token.make_src (str, Position.none) [])) of
SOME src => Method.evaluate (Method.Source src) ctxt
| _ => Method.fail)\

(* Eisbach tactics for partially automating has_type proofs *)

method seq_tac methods tac = (wrap \rule seq_type\, tac, tac)

method anno_tac methods tac =
(wrap \rule anno_type[OF HOL.refl HOL.refl HOL.refl], clarsimp, tac, simp,
fastforce simp: add_anno_def subtype_def pred_entailment_def pred_def bot_Sec_def[symmetric],
simp add: add_anno_def, simp\)

method assign\<^sub>2_tac =
wrap \rule assign\<^sub>2\, simp, solves \rule aexpr; simp\, (fastforce), simp, simp

method assign\<^sub>1_tac =
wrap \rule assign\<^sub>1, simp, simp, solves \rule aexpr; simp\, simp,
clarsimp simp: subtype_def pred_def, simp\

method assign\<^sub>\_tac =
wrap \rule assign\<^sub>\\, simp, solves \rule aexpr; simp\, (solves \simp\),
(solves \simp\ | (clarsimp, fast)), (solves \simp\)?, simp

method if_tac methods tac =
wrap \rule if_type'\, solves \rule bexpr, simp\, solves \simp\,
solves \tac\, solves \tac\, solves \clarsimp, fastforce\

method has_type_no_if_tac' declares aexpr bexpr=
(seq_tac \call_by_name "has_type_no_if_tac'"\
| anno_tac \call_by_name "has_type_no_if_tac'"\
| wrap \rule skip_type\
| wrap \rule stop_type\
| assign\<^sub>1_tac| assign\<^sub>2_tac
| assign\<^sub>\_tac)?

method has_type_no_if_tac uses prog declares aexpr bexpr =
(intro exI, unfold prog, has_type_no_if_tac')

method has_type_tac' declares aexpr bexpr=
(seq_tac \call_by_name "has_type_tac'"\
| anno_tac \call_by_name "has_type_tac'"\
| wrap \rule skip_type\
| wrap \rule stop_type\
| assign\<^sub>2_tac
| if_tac \call_by_name "has_type_tac'"\
| assign\<^sub>1_tac
| assign\<^sub>\_tac)?
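(* Editorial sketch, not part of the original patch: the entry-point methods defined
   here (has_type_no_if_tac and the has_type_tac variant that follows) are typically
   invoked from a concrete example theory roughly as

     declare my_aexpr_rule [aexpr]
     declare my_bexpr_rule [bexpr]
     ... by (has_type_tac prog: prog_def)

   where prog_def, my_aexpr_rule and my_bexpr_rule are hypothetical names standing for
   the program definition and the expression typing rules of that example; they are
   used only for illustration. *)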
method has_type_tac uses prog declares aexpr bexpr = (intro exI, unfold prog, has_type_tac') method if_type_tac declares bexpr custom_if = (wrap \rule custom_if, rule bexpr, simp?, simp?, has_type_tac', has_type_tac', (* Check if this work for other cases? If so just give the rest of this method a time out. Otherwise, remove from tac and add as explicit lemma proof *) (clarsimp simp: context_equiv_def type_equiv_def subtype_def)?, (clarsimp simp: context_equiv_def type_equiv_def subtype_def)?, (simp add: subset_entailment)?, (simp add: subset_entailment)?, (clarsimp, (subst tyenv_wellformed_def) , (clarsimp simp: mds_consistent_def types_wellformed_def type_wellformed_def types_stable_def), (fastforce simp: tyenv_wellformed_def mds_consistent_def))?, (clarsimp, (subst tyenv_wellformed_def) , (clarsimp simp: mds_consistent_def types_wellformed_def type_wellformed_def types_stable_def), (fastforce simp: tyenv_wellformed_def mds_consistent_def))?\) declaration \fn phi => Context.mapping I (Data.put phi)\ end end diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy @@ -1,1413 +1,1413 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Annotation Language: Parsing Combinator\ theory C_Lexer_Annotation imports C_Lexer_Language begin ML \ \\<^file>\~~/src/Pure/Isar/keyword.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay Analogous to: (* Title: Pure/Isar/keyword.ML Author: Makarius Isar keyword classification. 
*)*) \ structure C_Keyword = struct (** keyword classification **) (* kinds *) val command_kinds = [Keyword.diag, Keyword.document_heading, Keyword.document_body, Keyword.document_raw, Keyword.thy_begin, Keyword.thy_end, Keyword.thy_load, Keyword.thy_decl, Keyword.thy_decl_block, Keyword.thy_defn, Keyword.thy_stmt, Keyword.thy_goal, Keyword.thy_goal_defn, Keyword.thy_goal_stmt, Keyword.qed, Keyword.qed_script, Keyword.qed_block, Keyword.qed_global, Keyword.prf_goal, Keyword.prf_block, Keyword.next_block, Keyword.prf_open, Keyword.prf_close, Keyword.prf_chain, Keyword.prf_decl, Keyword.prf_asm, Keyword.prf_asm_goal, Keyword.prf_script, Keyword.prf_script_goal, Keyword.prf_script_asm_goal]; (* specifications *) type spec = Keyword.spec; type entry = {pos: Position.T, id: serial, kind: string, tags: string list}; fun check_spec pos ({kind, tags, ...}: spec) : entry = if not (member (op =) command_kinds kind) then error ("Unknown annotation syntax keyword kind " ^ quote kind) else {pos = pos, id = serial (), kind = kind, tags = tags}; (** keyword tables **) (* type keywords *) datatype keywords = Keywords of {minor: Scan.lexicon, major: Scan.lexicon, commands: entry Symtab.table}; fun minor_keywords (Keywords {minor, ...}) = minor; fun major_keywords (Keywords {major, ...}) = major; fun make_keywords (minor, major, commands) = Keywords {minor = minor, major = major, commands = commands}; fun map_keywords f (Keywords {minor, major, commands}) = make_keywords (f (minor, major, commands)); (* build keywords *) val empty_keywords = make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty); fun empty_keywords' minor = make_keywords (minor, Scan.empty_lexicon, Symtab.empty); fun merge_keywords (Keywords {minor = minor1, major = major1, commands = commands1}, Keywords {minor = minor2, major = major2, commands = commands2}) = make_keywords (Scan.merge_lexicons (minor1, minor2), Scan.merge_lexicons (major1, major2), Symtab.merge (K true) (commands1, commands2)); val add_keywords0 = fold (fn ((name, pos), force_minor, spec as {kind, ...}: spec) => map_keywords (fn (minor, major, commands) => let val extend = Scan.extend_lexicon (Symbol.explode name) fun update spec = Symtab.update (name, spec) in if force_minor then (extend minor, major, update (check_spec pos spec) commands) else if kind = "" orelse kind = Keyword.before_command orelse kind = Keyword.quasi_command then (extend minor, major, commands) else (minor, extend major, update (check_spec pos spec) commands) end)); val add_keywords = add_keywords0 o map (fn (cmd, spec) => (cmd, false, spec)) val add_keywords_minor = add_keywords0 o map (fn (cmd, spec) => (cmd, true, spec)) (* keyword status *) fun is_command (Keywords {commands, ...}) = Symtab.defined commands; fun dest_commands (Keywords {commands, ...}) = Symtab.keys commands; (* command keywords *) fun lookup_command (Keywords {commands, ...}) = Symtab.lookup commands; fun command_markup keywords name = lookup_command keywords name |> Option.map (fn {pos, id, ...} => Position.make_entity_markup {def = false} id Markup.command_keywordN (name, pos)); (* command categories *) fun command_category ks = let val tab = Symtab.make_set ks; fun pred keywords name = (case lookup_command keywords name of NONE => false | SOME {kind, ...} => Symtab.defined tab kind); in pred end; val is_theory_end = command_category [Keyword.thy_end]; val is_proof_asm = command_category [Keyword.prf_asm, Keyword.prf_asm_goal]; val is_improper = command_category [ Keyword.qed_script , Keyword.prf_script , 
Keyword.prf_script_goal , Keyword.prf_script_asm_goal]; end; \ text \ Notes: \<^item> The next structure contains a duplicated copy of the type \<^ML_type>\Token.T\, since it is not possible to set an arbitrary \<^emph>\slot\ value in \<^ML_structure>\Token\. \<^item> Parsing priorities in C and HOL slightly differ, see for instance \<^ML>\Token.explode\. \ ML \ \\<^file>\~~/src/Pure/Isar/token.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay Analogous to: (* Title: Pure/Isar/token.ML Author: Markus Wenzel, TU Muenchen Outer token syntax for Isabelle/Isar. *)*) \ structure C_Token = struct (** tokens **) (* token kind *) fun equiv_kind kind kind' = (case (kind, kind') of (Token.Control _, Token.Control _) => true | (Token.Error _, Token.Error _) => true | _ => kind = kind'); val immediate_kinds' = fn Token.Command => 0 | Token.Keyword => 1 | Token.Ident => 2 | Token.Long_Ident => 3 | Token.Sym_Ident => 4 | Token.Var => 5 | Token.Type_Ident => 6 | Token.Type_Var => 7 | Token.Nat => 8 | Token.Float => 9 | Token.Space => 10 | _ => ~1 val delimited_kind = (fn Token.String => true | Token.Alt_String => true | Token.Cartouche => true | Token.Control _ => true | Token.Comment _ => true | _ => false); (* datatype token *) (*The value slot assigns an (optional) internal value to a token, usually as a side-effect of special scanner setup (see also args.ML). Note that an assignable ref designates an intermediate state of internalization -- it is NOT meant to persist.*) datatype T = Token of (Symbol_Pos.text * Position.range) * (Token.kind * string) * slot and slot = Slot | Value of value option | Assignable of value option Unsynchronized.ref and value = Source of T list | Literal of bool * Markup.T | Name of Token.name_value * morphism | Typ of typ | Term of term | Fact of string option * thm list | (*optional name for dynamic fact, i.e. 
fact "variable"*) Attribute of morphism -> attribute | - Declaration of declaration | + Declaration of Morphism.declaration | Files of Token.file Exn.result list | Output of XML.body option; type src = T list; (* position *) fun pos_of (Token ((_, (pos, _)), _, _)) = pos; fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos; fun adjust_offsets adjust (Token ((x, range), y, z)) = Token ((x, apply2 (Position.adjust_offsets adjust) range), y, z); (* stopper *) fun mk_eof pos = Token (("", (pos, Position.none)), (Token.EOF, ""), Slot); val eof = mk_eof Position.none; fun is_eof (Token (_, (Token.EOF, _), _)) = true | is_eof _ = false; val not_eof = not o is_eof; val stopper = Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; (* kind of token *) fun kind_of (Token (_, (k, _), _)) = k; fun is_kind k (Token (_, (k', _), _)) = equiv_kind k k'; fun get_control tok = (case kind_of tok of Token.Control control => SOME control | _ => NONE); val is_command = is_kind Token.Command; fun keyword_with pred (Token (_, (Token.Keyword, x), _)) = pred x | keyword_with _ _ = false; val is_command_modifier = keyword_with (fn x => x = "private" orelse x = "qualified"); fun ident_with pred (Token (_, (Token.Ident, x), _)) = pred x | ident_with _ _ = false; fun is_ignored (Token (_, (Token.Space, _), _)) = true | is_ignored (Token (_, (Token.Comment NONE, _), _)) = true | is_ignored _ = false; fun is_proper (Token (_, (Token.Space, _), _)) = false | is_proper (Token (_, (Token.Comment _, _), _)) = false | is_proper _ = true; fun is_comment (Token (_, (Token.Comment _, _), _)) = true | is_comment _ = false; fun is_informal_comment (Token (_, (Token.Comment NONE, _), _)) = true | is_informal_comment _ = false; fun is_formal_comment (Token (_, (Token.Comment (SOME _), _), _)) = true | is_formal_comment _ = false; fun is_document_marker (Token (_, (Token.Comment (SOME Comment.Marker), _), _)) = true | is_document_marker _ = false; fun is_begin_ignore (Token (_, (Token.Comment NONE, "<"), _)) = true | is_begin_ignore _ = false; fun is_end_ignore (Token (_, (Token.Comment NONE, ">"), _)) = true | is_end_ignore _ = false; fun is_error (Token (_, (Token.Error _, _), _)) = true | is_error _ = false; fun is_error' (Token (_, (Token.Error msg, _), _)) = SOME msg | is_error' _ = NONE; fun content_of (Token (_, (_, x), _)) = x; fun content_of' (Token (_, (_, _), Value (SOME (Source l)))) = map (fn Token ((_, (pos, _)), (_, x), _) => (x, pos)) l | content_of' _ = []; val is_stack1 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "+") l | _ => false; val is_stack2 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "@") l | _ => false; val is_stack3 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "&") l | _ => false; (* blanks and newlines -- space tokens obey lines *) fun is_space (Token (_, (Token.Space, _), _)) = true | is_space _ = false; fun is_blank (Token (_, (Token.Space, x), _)) = not (String.isSuffix "\n" x) | is_blank _ = false; fun is_newline (Token (_, (Token.Space, x), _)) = String.isSuffix "\n" x | is_newline _ = false; (* range of tokens *) fun range_of (toks as tok :: _) = let val pos' = end_pos_of (List.last toks) in Position.range (pos_of tok, pos') end | range_of [] = Position.no_range; val core_range_of = drop_prefix is_ignored #> drop_suffix is_ignored #> range_of; (* token content *) fun content_of (Token (_, (_, x), _)) = x; 
fun source_of (Token ((source, _), _, _)) = source; fun input_of (Token ((source, range), (kind, _), _)) = Input.source (delimited_kind kind) source range; fun inner_syntax_of tok = let val x = content_of tok in if YXML.detect x then x else Syntax.implode_input (input_of tok) end; (* markup reports *) local val token_kind_markup = fn Token.Var => (Markup.var, "") | Token.Type_Ident => (Markup.tfree, "") | Token.Type_Var => (Markup.tvar, "") | Token.String => (Markup.string, "") | Token.Alt_String => (Markup.alt_string, "") | Token.Cartouche => (Markup.cartouche, "") | Token.Control _ => (Markup.cartouche, "") | Token.Comment _ => (Markup.ML_comment, "") | Token.Error msg => (Markup.bad (), msg) | _ => (Markup.empty, ""); fun keyword_reports tok = map (fn markup => ((pos_of tok, markup), "")); fun command_markups keywords x = if C_Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties] else (if C_Keyword.is_proof_asm keywords x then [Markup.keyword3] else if C_Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else [Markup.keyword1]) |> map Markup.command_properties; fun keyword_markup (important, keyword) x = if important orelse Symbol.is_ascii_identifier x then keyword else Markup.delimiter; fun command_minor_markups keywords x = if C_Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties] else (if C_Keyword.is_proof_asm keywords x then [Markup.keyword3] else if C_Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else if C_Keyword.is_command keywords x then [Markup.keyword1] else [keyword_markup (false, Markup.keyword2 |> Markup.keyword_properties) x]); in fun completion_report tok = if is_kind Token.Keyword tok then map (fn m => ((pos_of tok, m), "")) (Completion.suppress_abbrevs (content_of tok)) else []; fun reports keywords tok = if is_command tok then keyword_reports tok (command_markups keywords (content_of tok)) else if is_stack1 tok orelse is_stack2 tok orelse is_stack3 tok then keyword_reports tok [Markup.keyword2 |> Markup.keyword_properties] else if is_kind Token.Keyword tok then keyword_reports tok (command_minor_markups keywords (content_of tok)) else let val pos = pos_of tok; val (m, text) = token_kind_markup (kind_of tok); val deleted = Symbol_Pos.explode_deleted (source_of tok, pos); in ((pos, m), text) :: map (fn p => ((p, Markup.delete), "")) deleted end; fun markups keywords = map (#2 o #1) o reports keywords; end; (* unparse *) fun unparse' (Token ((source0, _), (kind, x), _)) = let val source = \ \ We are computing a reverse function of \<^ML>\Symbol_Pos.implode_range\ taking into account consecutive \<^ML>\Symbol.DEL\ symbols potentially appearing at the beginning, or at the end of the string.\ case Symbol.explode source0 of x :: xs => if x = Symbol.DEL then case rev xs of x' :: xs => if x' = Symbol.DEL then implode (rev xs) else source0 | _ => source0 else source0 | _ => source0 in case kind of Token.String => Symbol_Pos.quote_string_qq source | Token.Alt_String => Symbol_Pos.quote_string_bq source | Token.Cartouche => cartouche source | Token.Control control => Symbol_Pos.content (Antiquote.control_symbols control) | Token.Comment NONE => enclose "(*" "*)" source | Token.EOF => "" | _ => x end; fun text_of tok = let val k = Token.str_of_kind (kind_of tok); val ms = markups C_Keyword.empty_keywords tok; val s = unparse' tok; in if s = "" then (k, "") else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) then (k ^ " " ^ Markup.markups ms s, "") else (k, 
Markup.markups ms s) end; (** associated values **) (* inlined file content *) fun file_source (file: Token.file) = let val text = cat_lines (#lines file); val end_pos = Position.symbol_explode text (#pos file); in Input.source true text (Position.range (#pos file, end_pos)) end; fun get_files (Token (_, _, Value (SOME (Files files)))) = files | get_files _ = []; fun put_files [] tok = tok | put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files))) | put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok)); (* access values *) (* reports of value *) (* name value *) (* maxidx *) (* fact values *) (* transform *) (* static binding *) (*1st stage: initialize assignable slots*) fun init_assignable tok = (case tok of Token (x, y, Slot) => Token (x, y, Assignable (Unsynchronized.ref NONE)) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := NONE; tok)); (*2nd stage: assign values as side-effect of scanning*) fun assign v tok = (case tok of Token (x, y, Slot) => Token (x, y, Value v) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := v; tok)); fun evaluate mk eval arg = let val x = eval arg in (assign (SOME (mk x)) arg; x) end; (*3rd stage: static closure of final values*) fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v) | closure tok = tok; (* pretty *) (* src *) (** scanners **) open Basic_Symbol_Pos; val err_prefix = "Annotation lexical error: "; fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); (* scan stack *) fun scan_stack is_stack = Scan.optional (Scan.one is_stack >> content_of') [] (* scan symbolic idents *) val scan_symid = Scan.many1 (Symbol.is_symbolic_char o Symbol_Pos.symbol) || Scan.one (Symbol.is_symbolic o Symbol_Pos.symbol) >> single; fun is_symid str = (case try Symbol.explode str of SOME [s] => Symbol.is_symbolic s orelse Symbol.is_symbolic_char s | SOME ss => forall Symbol.is_symbolic_char ss | _ => false); fun ident_or_symbolic "begin" = false | ident_or_symbolic ":" = true | ident_or_symbolic "::" = true | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s; (* scan cartouche *) val scan_cartouche = Symbol_Pos.scan_pos -- ((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos); (* scan space *) fun space_symbol (s, _) = Symbol.is_blank s andalso s <> "\n"; val scan_space = Scan.many1 space_symbol @@@ Scan.optional ($$$ "\n") [] || Scan.many space_symbol @@@ $$$ "\n"; (* scan comment *) val scan_comment = Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos); (** token sources **) local fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2; fun token k ss = Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.content ss), Slot); fun token' (mk_value, k) ss = if mk_value then Token ( (Symbol_Pos.implode ss, Symbol_Pos.range ss) , (k, Symbol_Pos.content ss) , Value (SOME (Source (map (fn (s, pos) => Token (("", (pos, Position.none)), (k, s), Slot)) ss)))) else token k ss; fun token_t k = token' (true, k) fun token_range k (pos1, (ss, pos2)) = Token (Symbol_Pos.implode_range (pos1, pos2) ss, (k, Symbol_Pos.content ss), Slot); fun scan_token keywords = !!! 
"bad input" (Symbol_Pos.scan_string_qq err_prefix >> token_range Token.String || Symbol_Pos.scan_string_bq err_prefix >> token_range Token.Alt_String || scan_comment >> token_range (Token.Comment NONE) || Comment.scan_outer >> (fn (k, ss) => token (Token.Comment (SOME k)) ss) || scan_cartouche >> token_range Token.Cartouche || Antiquote.scan_control err_prefix >> (fn control => token (Token.Control control) (Antiquote.control_symbols control)) || scan_space >> token Token.Space || Scan.repeats1 ($$$ "+") >> token_t Token.Sym_Ident || Scan.repeats1 ($$$ "@") >> token_t Token.Sym_Ident || Scan.repeats1 ($$$ "&") >> token_t Token.Sym_Ident || (Scan.max token_leq (Scan.max token_leq (Scan.literal (C_Keyword.major_keywords keywords) >> pair Token.Command) (Scan.literal (C_Keyword.minor_keywords keywords) >> pair Token.Keyword)) (Lexicon.scan_longid >> pair Token.Long_Ident || Scan.max token_leq (C_Lex.scan_ident' >> pair Token.Ident) (Lexicon.scan_id >> pair Token.Ident) || Lexicon.scan_var >> pair Token.Var || Lexicon.scan_tid >> pair Token.Type_Ident || Lexicon.scan_tvar >> pair Token.Type_Var || Symbol_Pos.scan_float >> pair Token.Float || Symbol_Pos.scan_nat >> pair Token.Nat || scan_symid >> pair Token.Sym_Ident)) >> uncurry (token' o pair false)); fun recover msg = (Symbol_Pos.recover_string_qq || Symbol_Pos.recover_string_bq || Symbol_Pos.recover_cartouche || Symbol_Pos.recover_comment || Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single) >> (single o token (Token.Error msg)); in fun make_source keywords {strict} = let val scan_strict = Scan.bulk (scan_token keywords); val scan = if strict then scan_strict else Scan.recover scan_strict recover; in Source.source Symbol_Pos.stopper scan end; end; (* explode *) fun tokenize keywords strict syms = Source.of_list syms |> make_source keywords strict |> Source.exhaust; fun explode keywords pos text = Symbol_Pos.explode (text, pos) |> tokenize keywords {strict = false}; fun explode0 keywords = explode keywords Position.none; (* print names in parsable form *) (* make *) (** parsers **) type 'a parser = T list -> 'a * T list; type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list); (* wrapped syntax *) local fun make src pos = Token.make src pos |> #1 fun make_default text pos = make ((~1, 0), text) pos fun explode keywords pos text = case Token.explode keywords pos text of [tok] => tok | _ => make_default text pos in fun syntax' f = I #> map (fn tok0 as Token ((source, (pos1, pos2)), (kind, x), _) => if is_stack1 tok0 orelse is_stack2 tok0 orelse is_stack3 tok0 then make_default source pos1 else if is_eof tok0 then Token.eof else if delimited_kind kind then explode Keyword.empty_keywords pos1 (unparse' tok0) else let val tok1 = explode ((case kind of Token.Keyword => Keyword.add_keywords [((x, Position.none), Keyword.no_spec)] | Token.Command => Keyword.add_keywords [( (x, Position.none) , Keyword.command_spec (Keyword.thy_decl, []))] | _ => I) Keyword.empty_keywords) pos1 source in if Token.kind_of tok1 = kind then tok1 else make ( ( immediate_kinds' kind , case Position.distance_of (pos1, pos2) of NONE => 0 | SOME i => i) , source) pos1 end) #> f #> apsnd (map (fn tok => Token ( (Token.source_of tok, Token.range_of [tok]) , (Token.kind_of tok, Token.content_of tok) , Slot))) end end; type 'a c_parser = 'a C_Token.parser; type 'a c_context_parser = 'a C_Token.context_parser; \ (* parsers for C syntax. A partial copy is unfortunately necessary due to signature restrictions. 
*) ML \ \\<^file>\~~/src/Pure/Isar/parse.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay Analogous to: (* Title: Pure/Isar/parse.ML Author: Markus Wenzel, TU Muenchen Generic parsers for Isabelle/Isar outer syntax. *)*) \ signature C_PARSE = sig type T type src = T list type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) (**) val C_source: Input.source parser val star: string parser (**) val group: (unit -> string) -> (T list -> 'a) -> T list -> 'a val !!! : (T list -> 'a) -> T list -> 'a val !!!! : (T list -> 'a) -> T list -> 'a val not_eof: T parser val token: 'a parser -> T parser val range: 'a parser -> ('a * Position.range) parser val position: 'a parser -> ('a * Position.T) parser val input: 'a parser -> Input.source parser val inner_syntax: 'a parser -> string parser val command: string parser val keyword: string parser val short_ident: string parser val long_ident: string parser val sym_ident: string parser val dots: string parser val minus: string parser val term_var: string parser val type_ident: string parser val type_var: string parser val number: string parser val float_number: string parser val string: string parser val string_position: (string * Position.T) parser val alt_string: string parser val cartouche: string parser val control: Antiquote.control parser val eof: string parser val command_name: string -> string parser val keyword_with: (string -> bool) -> string parser val keyword_markup: bool * Markup.T -> string -> string parser val keyword_improper: string -> string parser val $$$ : string -> string parser val reserved: string -> string parser val underscore: string parser val maybe: 'a parser -> 'a option parser val maybe_position: ('a * Position.T) parser -> ('a option * Position.T) parser val opt_keyword: string -> bool parser val opt_bang: bool parser val begin: string parser val opt_begin: bool parser val nat: int parser val int: int parser val real: real parser val enum_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum1_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum: string -> 'a parser -> 'a list parser val enum1: string -> 'a parser -> 'a list parser val and_list: 'a parser -> 'a list parser val and_list1: 'a parser -> 'a list parser val enum': string -> 'a context_parser -> 'a list context_parser val enum1': string -> 'a context_parser -> 'a list context_parser val and_list': 'a context_parser -> 'a list context_parser val and_list1': 'a context_parser -> 'a list context_parser val list: 'a parser -> 'a list parser val list1: 'a parser -> 'a list parser val name: string parser val name_range: (string * Position.range) parser val name_position: (string * Position.T) parser val binding: binding parser val embedded: string parser val embedded_inner_syntax: string parser val embedded_input: Input.source parser val embedded_position: (string * Position.T) parser val path_input: Input.source parser val path: string parser val path_binding: (string * Position.T) parser val session_name: (string * Position.T) parser val theory_name: (string * Position.T) parser val liberal_name: string parser val parname: string parser val parbinding: binding parser val class: string parser val sort: string parser val type_const: string parser val arity: (string * string list * string) parser val multi_arity: (string list * string list * string) parser val type_args: string list parser val type_args_constrained: (string * string option) list 
parser val typ: string parser val mixfix: mixfix parser val mixfix': mixfix parser val opt_mixfix: mixfix parser val opt_mixfix': mixfix parser val syntax_mode: Syntax.mode parser val where_: string parser val const_decl: (string * string * mixfix) parser val const_binding: (binding * string * mixfix) parser val params: (binding * string option * mixfix) list parser val vars: (binding * string option * mixfix) list parser val for_fixes: (binding * string option * mixfix) list parser val ML_source: Input.source parser val document_source: Input.source parser val document_marker: Input.source parser val const: string parser val term: string parser val prop: string parser val literal_fact: string parser val propp: (string * string list) parser val termp: (string * string list) parser val private: Position.T parser val qualified: Position.T parser val target: (string * Position.T) parser val opt_target: (string * Position.T) option parser val args: T list parser val args1: (string -> bool) -> T list parser val attribs: src list parser val opt_attribs: src list parser val thm_sel: Facts.interval list parser val thm: (Facts.ref * src list) parser val thms1: (Facts.ref * src list) list parser val options: ((string * Position.T) * (string * Position.T)) list parser val embedded_ml: ML_Lex.token Antiquote.antiquote list parser end; structure C_Parse: C_PARSE = struct type T = C_Token.T type src = T list type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) structure Token = struct open Token open C_Token end (** error handling **) (* group atomic parsers (no cuts!) *) fun group s scan = scan || Scan.fail_with (fn [] => (fn () => s () ^ " expected,\nbut end-of-input was found") | tok :: _ => (fn () => (case Token.text_of tok of (txt, "") => s () ^ " expected,\nbut " ^ txt ^ Position.here (Token.pos_of tok) ^ " was found" | (txt1, txt2) => s () ^ " expected,\nbut " ^ txt1 ^ Position.here (Token.pos_of tok) ^ " was found:\n" ^ txt2))); (* cut *) fun cut kind scan = let fun get_pos [] = " (end-of-input)" | get_pos (tok :: _) = Position.here (Token.pos_of tok); fun err (toks, NONE) = (fn () => kind ^ get_pos toks) | err (toks, SOME msg) = (fn () => let val s = msg () in if String.isPrefix kind s then s else kind ^ get_pos toks ^ ": " ^ s end); in Scan.!! err scan end; fun !!! scan = cut "Annotation syntax error" scan; fun !!!! 
scan = cut "Corrupted annotation syntax in presentation" scan; (** basic parsers **) (* tokens *) fun RESET_VALUE atom = (*required for all primitive parsers*) Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (Token.assign NONE arg; x)); val not_eof = RESET_VALUE (Scan.one Token.not_eof); fun token atom = Scan.ahead not_eof --| atom; fun range scan = (Scan.ahead not_eof >> (Token.range_of o single)) -- scan >> Library.swap; fun position scan = (Scan.ahead not_eof >> Token.pos_of) -- scan >> Library.swap; fun input atom = Scan.ahead atom |-- not_eof >> Token.input_of; fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.inner_syntax_of; fun kind k = group (fn () => Token.str_of_kind k) (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of)); val command = kind Token.Command; val keyword = kind Token.Keyword; val short_ident = kind Token.Ident; val long_ident = kind Token.Long_Ident; val sym_ident = kind Token.Sym_Ident; val term_var = kind Token.Var; val type_ident = kind Token.Type_Ident; val type_var = kind Token.Type_Var; val number = kind Token.Nat; val float_number = kind Token.Float; val string = kind Token.String; val alt_string = kind Token.Alt_String; val cartouche = kind Token.Cartouche; val control = token (kind Token.control_kind) >> (the o Token.get_control); val eof = kind Token.EOF; fun command_name x = group (fn () => Token.str_of_kind Token.Command ^ " " ^ quote x) (RESET_VALUE (Scan.one (fn tok => Token.is_command tok andalso Token.content_of tok = x))) >> Token.content_of; fun keyword_with pred = RESET_VALUE (Scan.one (Token.keyword_with pred) >> Token.content_of); fun keyword_markup markup x = group (fn () => Token.str_of_kind Token.Keyword ^ " " ^ quote x) (Scan.ahead not_eof -- keyword_with (fn y => x = y)) >> (fn (tok, x) => (Token.assign (SOME (Token.Literal markup)) tok; x)); val keyword_improper = keyword_markup (true, Markup.improper); val $$$ = keyword_markup (false, Markup.quasi_keyword); fun reserved x = group (fn () => "reserved identifier " ^ quote x) (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of)); val dots = sym_ident :-- (fn "\" => Scan.succeed () | _ => Scan.fail) >> #1; val minus = sym_ident :-- (fn "-" => Scan.succeed () | _ => Scan.fail) >> #1; val underscore = sym_ident :-- (fn "_" => Scan.succeed () | _ => Scan.fail) >> #1; fun maybe scan = underscore >> K NONE || scan >> SOME; fun maybe_position scan = position (underscore >> K NONE) || scan >> apfst SOME; val nat = number >> (#1 o Library.read_int o Symbol.explode); val int = Scan.optional (minus >> K ~1) 1 -- nat >> op *; val real = float_number >> Value.parse_real || int >> Real.fromInt; fun opt_keyword s = Scan.optional ($$$ "(" |-- !!! (($$$ s >> K true) --| $$$ ")")) false; val opt_bang = Scan.optional ($$$ "!" >> K true) false; val begin = $$$ "begin"; val opt_begin = Scan.optional (begin >> K true) false; (* enumerations *) fun enum1_positions sep scan = scan -- Scan.repeat (position ($$$ sep) -- !!! scan) >> (fn (x, ys) => (x :: map #2 ys, map (#2 o #1) ys)); fun enum_positions sep scan = enum1_positions sep scan || Scan.succeed ([], []); fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! 
scan); fun enum sep scan = enum1 sep scan || Scan.succeed []; fun enum1' sep scan = scan ::: Scan.repeat (Scan.lift ($$$ sep) |-- scan); fun enum' sep scan = enum1' sep scan || Scan.succeed []; fun and_list1 scan = enum1 "and" scan; fun and_list scan = enum "and" scan; fun and_list1' scan = enum1' "and" scan; fun and_list' scan = enum' "and" scan; fun list1 scan = enum1 "," scan; fun list scan = enum "," scan; (* names and embedded content *) val name = group (fn () => "name") (short_ident || long_ident || sym_ident || number || string); val name_range = input name >> Input.source_content_range; val name_position = input name >> Input.source_content; val string_position = input string >> Input.source_content; val binding = name_position >> Binding.make; val embedded = group (fn () => "embedded content") (cartouche || string || short_ident || long_ident || sym_ident || term_var || type_ident || type_var || number); val embedded_inner_syntax = inner_syntax embedded; val embedded_input = input embedded; val embedded_position = embedded_input >> Input.source_content; val path_input = group (fn () => "file name/path specification") embedded_input; val path = path_input >> Input.string_of; val path_binding = group (fn () => "path binding (strict file name)") (position embedded); val session_name = group (fn () => "session name") name_position; val theory_name = group (fn () => "theory name") name_position; val liberal_name = keyword_with Token.ident_or_symbolic || name; val parname = Scan.optional ($$$ "(" |-- name --| $$$ ")") ""; val parbinding = Scan.optional ($$$ "(" |-- binding --| $$$ ")") Binding.empty; (* type classes *) val class = group (fn () => "type class") (inner_syntax embedded); val sort = group (fn () => "sort") (inner_syntax embedded); val type_const = group (fn () => "type constructor") (inner_syntax embedded); val arity = type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; val multi_arity = and_list1 type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; (* types *) val typ = group (fn () => "type") (inner_syntax embedded); fun type_arguments arg = arg >> single || $$$ "(" |-- !!! (list1 arg --| $$$ ")") || Scan.succeed []; val type_args = type_arguments type_ident; val type_args_constrained = type_arguments (type_ident -- Scan.option ($$$ "::" |-- !!! sort)); (* mixfix annotations *) local val mfix = input (string || cartouche); val mixfix_ = mfix -- !!! (Scan.optional ($$$ "[" |-- !!! (list nat --| $$$ "]")) [] -- Scan.optional nat 1000) >> (fn (sy, (ps, p)) => fn range => Mixfix (sy, ps, p, range)); val structure_ = $$$ "structure" >> K Structure; val binder_ = $$$ "binder" |-- !!! (mfix -- ($$$ "[" |-- nat --| $$$ "]" -- nat || nat >> (fn n => (n, n)))) >> (fn (sy, (p, q)) => fn range => Binder (sy, p, q, range)); val infixl_ = $$$ "infixl" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixl (sy, p, range))); val infixr_ = $$$ "infixr" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixr (sy, p, range))); val infix_ = $$$ "infix" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infix (sy, p, range))); val mixfix_body = mixfix_ || structure_ || binder_ || infixl_ || infixr_ || infix_; fun annotation guard body = Scan.trace ($$$ "(" |-- guard (body --| $$$ ")")) >> (fn (mx, toks) => mx (Token.range_of toks)); fun opt_annotation guard body = Scan.optional (annotation guard body) NoSyn; in val mixfix = annotation !!! 
mixfix_body; val mixfix' = annotation I mixfix_body; val opt_mixfix = opt_annotation !!! mixfix_body; val opt_mixfix' = opt_annotation I mixfix_body; end; (* syntax mode *) val syntax_mode_spec = ($$$ "output" >> K ("", false)) || name -- Scan.optional ($$$ "output" >> K false) true; val syntax_mode = Scan.optional ($$$ "(" |-- !!! (syntax_mode_spec --| $$$ ")")) Syntax.mode_default; (* fixes *) val where_ = $$$ "where"; val const_decl = name -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val const_binding = binding -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val param_mixfix = binding -- Scan.option ($$$ "::" |-- typ) -- mixfix' >> (single o Scan.triple1); val params = (binding -- Scan.repeat binding) -- Scan.option ($$$ "::" |-- !!! (Scan.ahead typ -- embedded)) >> (fn ((x, ys), T) => (x, Option.map #1 T, NoSyn) :: map (fn y => (y, Option.map #2 T, NoSyn)) ys); val vars = and_list1 (param_mixfix || params) >> flat; val for_fixes = Scan.optional ($$$ "for" |-- !!! vars) []; (* embedded source text *) val ML_source = input (group (fn () => "ML source") embedded); val document_source = input (group (fn () => "document source") embedded); val document_marker = group (fn () => "document marker") (RESET_VALUE (Scan.one Token.is_document_marker >> Token.input_of)); (* terms *) val const = group (fn () => "constant") (inner_syntax embedded); val term = group (fn () => "term") (inner_syntax embedded); val prop = group (fn () => "proposition") (inner_syntax embedded); val literal_fact = inner_syntax (group (fn () => "literal fact") (alt_string || cartouche)); (* patterns *) val is_terms = Scan.repeat1 ($$$ "is" |-- term); val is_props = Scan.repeat1 ($$$ "is" |-- prop); val propp = prop -- Scan.optional ($$$ "(" |-- !!! (is_props --| $$$ ")")) []; val termp = term -- Scan.optional ($$$ "(" |-- !!! (is_terms --| $$$ ")")) []; (* target information *) val private = position ($$$ "private") >> #2; val qualified = position ($$$ "qualified") >> #2; val target = ($$$ "(" -- $$$ "in") |-- !!! (name_position --| $$$ ")"); val opt_target = Scan.option target; (* arguments within outer syntax *) local val argument_kinds = [Token.Ident, Token.Long_Ident, Token.Sym_Ident, Token.Var, Token.Type_Ident, Token.Type_Var, Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche]; fun arguments is_symid = let fun argument blk = group (fn () => "argument") (Scan.one (fn tok => let val kind = Token.kind_of tok in member (op =) argument_kinds kind orelse Token.keyword_with is_symid tok orelse (blk andalso Token.keyword_with (fn s => s = ",") tok) end)); fun args blk x = Scan.optional (args1 blk) [] x and args1 blk x = (Scan.repeats1 (Scan.repeat1 (argument blk) || argsp "(" ")" || argsp "[" "]")) x and argsp l r x = (token ($$$ l) ::: !!! (args true @@@ (token ($$$ r) >> single))) x; in (args, args1) end; in val args = #1 (arguments Token.ident_or_symbolic) false; fun args1 is_symid = #2 (arguments is_symid) false; end; (* attributes *) val attrib = token liberal_name ::: !!! 
args; val attribs = $$$ "[" |-- list attrib --| $$$ "]"; val opt_attribs = Scan.optional attribs []; (* theorem references *) val thm_sel = $$$ "(" |-- list1 (nat --| minus -- nat >> Facts.FromTo || nat --| minus >> Facts.From || nat >> Facts.Single) --| $$$ ")"; val thm = $$$ "[" |-- attribs --| $$$ "]" >> pair (Facts.named "") || (literal_fact >> Facts.Fact || name_position -- Scan.option thm_sel >> Facts.Named) -- opt_attribs; val thms1 = Scan.repeat1 thm; (* options *) val option_name = group (fn () => "option name") name_position; val option_value = group (fn () => "option value") ((token real || token name) >> Token.content_of); val option = option_name :-- (fn (_, pos) => Scan.optional ($$$ "=" |-- !!! (position option_value)) ("true", pos)); val options = $$$ "[" |-- list1 option --| $$$ "]"; (* embedded ML *) val embedded_ml = input underscore >> ML_Lex.read_source || embedded_input >> ML_Lex.read_source || control >> (ML_Lex.read_symbols o Antiquote.control_symbols); (* read embedded source, e.g. for antiquotations *) (** C basic parsers **) (* embedded source text *) val C_source = input (group (fn () => "C source") embedded); (* AutoCorres (MODIFIES) *) val star = sym_ident :-- (fn "*" => Scan.succeed () | _ => Scan.fail) >> #1; end; structure C_Parse_Native: C_PARSE = struct open Token open Parse (** C basic parsers **) (* embedded source text *) val C_source = input (group (fn () => "C source") embedded); (* AutoCorres (MODIFIES) *) val star = sym_ident :-- (fn "*" => Scan.succeed () | _ => Scan.fail) >> #1; end; structure C_Parse_Read = struct (* read embedded source, e.g. for antiquotations *) fun read_with_commands'0 keywords syms = Source.of_list syms |> C_Token.make_source keywords {strict = false} |> Source.filter (not o C_Token.is_proper) |> Source.exhaust fun read_with_commands' keywords scan syms = Source.of_list syms |> C_Token.make_source keywords {strict = false} |> Source.filter C_Token.is_proper |> Source.source C_Token.stopper (Scan.recover (Scan.bulk scan) (fn msg => Scan.one (not o C_Token.is_eof) >> (fn tok => [C_Scan.Right let val msg = case C_Token.is_error' tok of SOME msg0 => msg0 ^ " (" ^ msg ^ ")" | NONE => msg in ( msg , [((C_Token.pos_of tok, Markup.bad ()), msg)] , tok) end]))) |> Source.exhaust; fun read_antiq' keywords scan = read_with_commands' keywords (scan >> C_Scan.Left); end \ ML \ \\<^file>\~~/src/Pure/Thy/thy_header.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay Analogous to: (* Title: Pure/Thy/thy_header.ML Author: Makarius Static theory header information. 
*)*)
\
structure C_Thy_Header =
struct
val bootstrap_keywords =
C_Keyword.empty_keywords' (Keyword.minor_keywords (Thy_Header.get_keywords @{theory}))

(* theory data *)

structure Data = Theory_Data
(
type T = C_Keyword.keywords;
val empty = bootstrap_keywords;
val merge = C_Keyword.merge_keywords;
);

val add_keywords = Data.map o C_Keyword.add_keywords;
val add_keywords_minor = Data.map o C_Keyword.add_keywords_minor;

val get_keywords = Data.get;
val get_keywords' = get_keywords o Proof_Context.theory_of;
end
\

end
diff --git a/thys/Nominal2/nominal_function_common.ML b/thys/Nominal2/nominal_function_common.ML
--- a/thys/Nominal2/nominal_function_common.ML
+++ b/thys/Nominal2/nominal_function_common.ML
@@ -1,162 +1,163 @@
(* Nominal Function Common
Author: Christian Urban
heavily based on the code of Alexander Krauss
(code forked on 5 June 2011)

Redefinition of config datatype
*)

signature NOMINAL_FUNCTION_DATA =
sig
type nominal_info =
{is_partial : bool,
defname : string,
(* contains no logical entities: invariant under morphisms: *)
add_simps : (binding -> binding) -> string -> (binding -> binding) ->
Token.src list -> thm list -> local_theory -> thm list * local_theory,
case_names : string list,
fs : term list,
R : term,
psimps: thm list,
pinducts: thm list,
simps : thm list option,
inducts : thm list option,
termination: thm,
eqvts: thm list}
end

structure Nominal_Function_Common =
struct

type nominal_info =
{is_partial : bool,
defname : string,
(* contains no logical entities: invariant under morphisms: *)
add_simps : (binding -> binding) -> string -> (binding -> binding) ->
Token.src list -> thm list -> local_theory -> thm list * local_theory,
case_names : string list,
fs : term list,
R : term,
psimps: thm list,
pinducts: thm list,
simps : thm list option,
inducts : thm list option,
termination: thm,
eqvts: thm list}

fun morph_function_data ({add_simps, case_names, fs, R, psimps, pinducts, simps, inducts,
termination, defname, is_partial, eqvts} : nominal_info) phi =
let
val term = Morphism.term phi
val thm = Morphism.thm phi
val fact = Morphism.fact phi
val name = Binding.name_of o Morphism.binding phi o Binding.name
in
{ add_simps = add_simps, case_names = case_names,
fs = map term fs, R = term R, psimps = fact psimps,
pinducts = fact pinducts, simps = Option.map fact simps,
inducts = Option.map fact inducts, termination = thm termination,
defname = name defname, is_partial=is_partial, eqvts = fact eqvts }
end

structure NominalFunctionData = Generic_Data
(
type T = (term * nominal_info) Item_Net.T;
val empty : T = Item_Net.init (op aconv o apply2 fst) (single o fst);
fun merge tabs : T = Item_Net.merge tabs;
)

val get_function = NominalFunctionData.get o Context.Proof;

-fun lift_morphism ctxt f =
+fun lift_morphism f =
let
- fun term t = Thm.term_of (Drule.cterm_rule f (Thm.cterm_of ctxt t))
+ fun term thy t = Thm.term_of (Drule.cterm_rule f (Thm.global_cterm_of thy t))
+ fun typ thy t = Logic.type_map (term thy) t
in
Morphism.morphism "lift_morphism"
{binding = [],
- typ = [Logic.type_map term],
- term = [term],
- fact = [map f]}
+ typ = [typ o Morphism.the_theory],
+ term = [term o Morphism.the_theory],
+ fact = [fn _ => map f]}
end

fun import_function_data t ctxt =
let
val ct = Thm.cterm_of ctxt t
- val inst_morph = lift_morphism ctxt o Thm.instantiate
+ val inst_morph = Morphism.set_context' ctxt o lift_morphism o Thm.instantiate
fun match (trm, data) =
SOME (morph_function_data data (inst_morph (Thm.match (Thm.cterm_of ctxt trm, ct))))
handle Pattern.MATCH => NONE
in
get_first match (Item_Net.retrieve (get_function ctxt) t) end fun import_last_function ctxt = case Item_Net.content (get_function ctxt) of [] => NONE | (t, data) :: _ => let val ([t'], ctxt') = Variable.import_terms true [t] ctxt in import_function_data t' ctxt' end val all_function_data = Item_Net.content o get_function fun add_function_data (data : nominal_info as {fs, termination, ...}) = NominalFunctionData.map (fold (fn f => Item_Net.update (f, data)) fs) #> Function_Common.store_termination_rule termination (* Configuration management *) datatype nominal_function_opt = Sequential | Default of string | DomIntros | No_Partials | Invariant of string datatype nominal_function_config = NominalFunctionConfig of {sequential: bool, default: string option, domintros: bool, partials: bool, inv: string option} fun apply_opt Sequential (NominalFunctionConfig {sequential, default, domintros, partials, inv}) = NominalFunctionConfig {sequential=true, default=default, domintros=domintros, partials=partials, inv=inv} | apply_opt (Default d) (NominalFunctionConfig {sequential, default, domintros, partials, inv}) = NominalFunctionConfig {sequential=sequential, default=SOME d, domintros=domintros, partials=partials, inv=inv} | apply_opt DomIntros (NominalFunctionConfig {sequential, default, domintros, partials, inv}) = NominalFunctionConfig {sequential=sequential, default=default, domintros=true, partials=partials, inv=inv} | apply_opt No_Partials (NominalFunctionConfig {sequential, default, domintros, partials, inv}) = NominalFunctionConfig {sequential=sequential, default=default, domintros=domintros, partials=false, inv=inv} | apply_opt (Invariant s) (NominalFunctionConfig {sequential, default, domintros, partials, inv}) = NominalFunctionConfig {sequential=sequential, default=default, domintros=domintros, partials=partials, inv = SOME s} val nominal_default_config = NominalFunctionConfig { sequential=false, default=NONE, domintros=false, partials=true, inv=NONE} datatype nominal_function_result = NominalFunctionResult of {fs: term list, G: term, R: term, psimps : thm list, simple_pinducts : thm list, cases : thm, termination : thm, domintros : thm list option, eqvts : thm list} end diff --git a/thys/Partial_Function_MR/partial_function_mr.ML b/thys/Partial_Function_MR/partial_function_mr.ML --- a/thys/Partial_Function_MR/partial_function_mr.ML +++ b/thys/Partial_Function_MR/partial_function_mr.ML @@ -1,338 +1,338 @@ (* Author: Rene Thiemann, License: LGPL *) signature PARTIAL_FUNCTION_MR = sig val init: string -> (* make monad_map: monad term * funs * monad as typ * monad bs typ * a->b typs -> map_monad funs monad term *) (term * term list * typ * typ * typ list -> term) -> (* make monad type: fixed and flexible types *) (typ list * typ list -> typ) -> (* destruct monad type: fixed and flexible types *) (typ -> typ list * typ list) -> (* monad_map_compose thm: mapM f (mapM g x) = mapM (f o g) x *) thm list -> (* monad_map_ident thm: mapM (% y. 
y) x = x *) - thm list -> declaration + thm list -> Morphism.declaration val add_partial_function_mr: string -> (binding * typ option * mixfix) list -> Specification.multi_specs -> local_theory -> thm list * local_theory val add_partial_function_mr_cmd: string -> (binding * string option * mixfix) list -> Specification.multi_specs_cmd -> local_theory -> thm list * local_theory end; structure Partial_Function_MR: PARTIAL_FUNCTION_MR = struct val partial_function_mr_trace = Attrib.setup_config_bool @{binding partial_function_mr_trace} (K false); fun trace ctxt msg = if Config.get ctxt partial_function_mr_trace then tracing msg else () datatype setup_data = Setup_Data of {mk_monad_map: term * term list * typ * typ * typ list -> term, mk_monadT: typ list * typ list -> typ, dest_monadT: typ -> typ list * typ list, monad_map_comp: thm list, monad_map_id: thm list}; (* the following code has been copied from partial_function.ML *) structure Modes = Generic_Data ( type T = setup_data Symtab.table; val empty = Symtab.empty; fun merge data = Symtab.merge (K true) data; ) val known_modes = Symtab.keys o Modes.get o Context.Proof; val lookup_mode = Symtab.lookup o Modes.get o Context.Proof; fun curry_const (A, B, C) = Const (@{const_name Product_Type.curry}, [HOLogic.mk_prodT (A, B) --> C, A, B] ---> C); fun mk_curry f = case fastype_of f of Type ("fun", [Type (_, [S, T]), U]) => curry_const (S, T, U) $ f | T => raise TYPE ("mk_curry", [T], [f]); fun curry_n arity = funpow (arity - 1) mk_curry; fun uncurry_n arity = funpow (arity - 1) HOLogic.mk_case_prod; (* end copy of partial_function.ML *) fun init mode mk_monad_map mk_monadT dest_monadT monad_map_comp monad_map_id phi = let val thm = Morphism.thm phi; (* TODO: are there morphisms required on mk_monad_map???, ... 
*) val data' = Setup_Data {mk_monad_map=mk_monad_map, mk_monadT=mk_monadT, dest_monadT=dest_monadT, monad_map_comp=map thm monad_map_comp,monad_map_id=map thm monad_map_id}; in Modes.map (Symtab.update (mode, data')) end fun mk_sumT (T1,T2) = Type (@{type_name sum}, [T1,T2]) fun mk_choiceT [ty] = ty | mk_choiceT (ty :: more) = mk_sumT (ty,mk_choiceT more) | mk_choiceT _ = error "mk_choiceT []" fun mk_choice_resT mk_monadT dest_monadT mTs = let val (commonTs,argTs) = map dest_monadT mTs |> split_list |> apfst hd; val n = length (hd argTs); val new = map (fn i => mk_choiceT (map (fn xs => nth xs i) argTs)) (0 upto (n - 1)) in mk_monadT (commonTs,new) end; fun mk_inj [_] t _ = t | mk_inj (ty :: more) t n = let val moreT = mk_choiceT more; val allT = mk_sumT (ty,moreT) in if n = 0 then Const (@{const_name Inl}, ty --> allT) $ t else Const (@{const_name Inr}, moreT --> allT) $ mk_inj more t (n-1) end | mk_inj _ _ _ = error "mk_inj [] _ _" fun mk_proj [_] t _ = t | mk_proj (ty :: more) t n = let val moreT = mk_choiceT more; val allT = mk_sumT (ty,moreT) in if n = 0 then Const (@{const_name Sum_Type.projl}, allT --> ty) $ t else mk_proj more (Const (@{const_name Sum_Type.projr}, allT --> moreT) $ t) (n-1) end | mk_proj _ _ _ = error "mk_proj [] _ _" fun get_head ctxt (_,(_,eqn)) = let val ((_, plain_eqn), _) = Variable.focus NONE eqn ctxt; val lhs = HOLogic.dest_eq (HOLogic.dest_Trueprop plain_eqn) |> #1; val head = strip_comb lhs |> #1; in head end; fun get_infos lthy heads (fix,(_,eqn)) = let val ((_, plain_eqn), _) = Variable.focus NONE eqn lthy; val ((f_binding, fT), mixfix) = fix; val fname = Binding.name_of f_binding; val (lhs, rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop plain_eqn); val (_, args) = strip_comb lhs; val F = fold_rev lambda (heads @ args) rhs; val arity = length args; val (aTs, bTs) = chop arity (binder_types fT); val tupleT = foldl1 HOLogic.mk_prodT aTs; val fT_uc = tupleT :: bTs ---> body_type fT; val (inT,resT) = dest_funT fT_uc; val f_uc = Free (fname, fT_uc); val f_cuc = curry_n arity f_uc in (fname, f_cuc, f_uc, inT, resT, ((f_binding,mixfix),fT), F, arity, args) end; fun fresh_var ctxt name = Name.variant name (Variable.names_of ctxt) |> #1 (* partial_function_mr definition *) fun gen_add_partial_function_mr prep mode fixes_raw eqns_raw lthy = let val setup_data = the (lookup_mode lthy mode) handle Option.Option => error (cat_lines ["Unknown mode " ^ quote mode ^ ".", "Known modes are " ^ commas_quote (known_modes lthy) ^ "."]); val Setup_Data {mk_monad_map, mk_monadT, dest_monadT, monad_map_comp, monad_map_id} = setup_data; val _ = if length eqns_raw < 2 then error "require at least two function definitions" else (); val ((fixes, eq_abinding_eqns), _) = prep fixes_raw eqns_raw lthy; val _ = if length eqns_raw = length fixes then () else error "# of eqns does not match # of constants"; val fix_eq_abinding_eqns = fixes ~~ eq_abinding_eqns; val heads = map (get_head lthy) fix_eq_abinding_eqns; val fnames = map (Binding.name_of o #1 o #1) fixes val fnames' = map (#1 o Term.dest_Free) heads val f_f = fnames ~~ fnames' val _ = case find_first (fn (f,g) => not (f = g)) f_f of NONE => () | SOME _ => error ("list of function symbols does not match list of equations:\n" ^ @{make_string} fnames ^ "\nvs\n" ^ @{make_string} fnames') val all = map (get_infos lthy heads) fix_eq_abinding_eqns val f_cucs = map #2 all val f_ucs = map #3 all val inTs = map #4 all val resTs = map #5 all val bindings_types = map #6 all val Fs = map #7 all val arities = map #8 all val all_args = map #9 
all val glob_inT = mk_choiceT inTs val glob_resT = mk_choice_resT mk_monadT dest_monadT resTs val inj = mk_inj inTs val glob_fname = fresh_var lthy (foldl1 (fn (a,b) => a ^ "_" ^ b) (fnames @ [serial_string ()])) val glob_constT = glob_inT --> glob_resT; val glob_const = Free (glob_fname, glob_constT) val nums = 0 upto (length all - 1) fun mk_res_inj_proj n = let val resT = nth resTs n val glob_Targs = dest_monadT glob_resT |> #2 val res_Targs = dest_monadT resT |> #2 val m = length res_Targs fun inj_proj m = let val resTs_m = map (fn resT => nth (dest_monadT resT |> #2) m) resTs val resT_arg = nth resTs_m n val globT_arg = nth glob_Targs m val x = Free ("x",resT_arg) val y = Free ("x",globT_arg) val inj = lambda x (mk_inj resTs_m x n) val proj = lambda y (mk_proj resTs_m y n) in ((inj, resT_arg --> globT_arg), (proj, globT_arg --> resT_arg)) end; val (inj,proj) = map inj_proj (0 upto (m - 1)) |> split_list val (t_to_ss_inj,t_to_sTs_inj) = split_list inj; val (t_to_ss_proj,t_to_sTs_proj) = split_list proj; in (fn mt => mk_monad_map (mt, t_to_ss_inj, resT, glob_resT, t_to_sTs_inj), fn mt => mk_monad_map (mt, t_to_ss_proj, glob_resT, resT, t_to_sTs_proj)) end; val (res_inj, res_proj) = map mk_res_inj_proj nums |> split_list fun mk_global_fun n = let val fname = nth fnames n val inT = nth inTs n val xs = Free (fresh_var lthy ("x_" ^ fname), inT) val inj_xs = inj xs n val glob_inj_xs = glob_const $ inj_xs val glob_inj_xs_map = nth res_proj n glob_inj_xs val res = lambda xs glob_inj_xs_map in (xs,res) end val (xss,global_funs) = map mk_global_fun nums |> split_list fun mk_cases n = let val xs = nth xss n val F = nth Fs n; val arity = nth arities n; val F_uc = fold_rev lambda f_ucs (uncurry_n arity (list_comb (F, f_cucs))); val F_uc_inst = Term.betapplys (F_uc,global_funs) val res = lambda xs (nth res_inj n (F_uc_inst $ xs)) in res end; val all_cases = map mk_cases nums; fun combine_cases [cs] [_] = cs | combine_cases (cs :: more) (inT :: moreTy) = let val moreT = mk_choiceT moreTy val sumT = mk_sumT (inT, moreT) val case_const = Const (@{const_name case_sum}, (inT --> glob_resT) --> (moreT --> glob_resT) --> sumT --> glob_resT) in case_const $ cs $ combine_cases more moreTy end | combine_cases _ _ = error "combine_cases with incompatible argument lists"; val glob_x_name = fresh_var lthy ("x_" ^ glob_fname) val glob_x = Free (glob_x_name,glob_inT) val rhs = combine_cases all_cases inTs $ glob_x; val lhs = glob_const $ glob_x val eq = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs,rhs)) val glob_binding = Binding.name (glob_fname) |> Binding.concealed val glob_attrib_binding = Binding.empty_atts val _ = trace lthy "invoking partial_function on global function" val priv_lthy = lthy |> Proof_Context.private_scope (Binding.new_scope()) val ((glob_const, glob_simp_thm),priv_lthy') = priv_lthy |> Partial_Function.add_partial_function mode [(glob_binding,SOME glob_constT,NoSyn)] (glob_attrib_binding,eq) val glob_lthy = priv_lthy' |> Proof_Context.restore_naming lthy val _ = trace lthy "deriving simp rules for separate functions from global function" fun define_f n (fs, fdefs,rhss,lthy) = let val ((fbinding,mixfix),_) = nth bindings_types n val fname = nth fnames n val inT = nth inTs n; val arity = nth arities n; val x = Free (fresh_var lthy ("x_" ^ fname), inT) val inj_argsProd = inj x n val call = glob_const $ inj_argsProd val post = nth res_proj n call val rhs = curry_n arity (lambda x post) val ((f, (_, f_def)),lthy') = Local_Theory.define_internal ((fbinding,mixfix), (Binding.empty_atts, rhs)) lthy in (f 
:: fs, f_def :: fdefs,rhs :: rhss,lthy') end val (fs,fdefs,f_rhss,local_lthy) = fold_rev define_f nums ([],[],[],glob_lthy) val glob_simp_thm' = let fun mk_case_new n = let val F = nth Fs n val arity = nth arities n val Finst = uncurry_n arity (Term.betapplys (F,fs)) val xs = nth xss n val res = lambda xs (nth res_inj n (Finst $ xs)) in res end; val new_cases = map mk_case_new nums; val rhs = combine_cases new_cases inTs $ glob_x; val lhs = glob_const $ glob_x val eq = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs,rhs)) in Goal.prove local_lthy [glob_x_name] [] eq (fn {prems = _, context = ctxt} => Thm.instantiate' [] [SOME (Thm.cterm_of ctxt glob_x)] glob_simp_thm |> (fn simp_thm => unfold_tac ctxt [simp_thm] THEN unfold_tac ctxt fdefs)) end fun mk_simp_thm n = let val args = nth all_args n val arg_names = map (dest_Free #> fst) args val f = nth fs n val F = nth Fs n val fdef = nth fdefs n val lhs = list_comb (f,args); val mhs = Term.betapplys (nth f_rhss n, args) val rhs = list_comb (list_comb (F,fs), args); val eq1 = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs,mhs)) val eq2 = HOLogic.mk_Trueprop (HOLogic.mk_eq (mhs,rhs)) val simp_thm1 = Goal.prove local_lthy arg_names [] eq1 (fn {prems = _, context = ctxt} => unfold_tac ctxt [fdef]) val simp_thm2 = Goal.prove local_lthy arg_names [] eq2 (fn {prems = _, context = ctxt} => unfold_tac ctxt [glob_simp_thm'] THEN unfold_tac ctxt @{thms sum.simps curry_def split} THEN unfold_tac ctxt (@{thm o_def} :: monad_map_comp) THEN unfold_tac ctxt (monad_map_id @ @{thms sum.sel})) in @{thm trans} OF [simp_thm1,simp_thm2] end val simp_thms = map mk_simp_thm nums fun register n lthy = let val simp_thm = nth simp_thms n val eq_abinding = nth eq_abinding_eqns n |> fst val fname = nth fnames n val f = nth fs n in lthy |> Local_Theory.note (eq_abinding, [simp_thm]) |-> (fn (_, simps) => Spec_Rules.add Binding.empty Spec_Rules.equational_recdef [f] simps #> Local_Theory.note ((Binding.qualify true fname (Binding.name "simps"), @{attributes [code]}), simps) #>> snd #>> hd) end in fold (fn i => fn (simps, lthy) => case register i lthy of (simp, lthy') => (simps @ [simp], lthy')) nums ([], local_lthy) end; val add_partial_function_mr = gen_add_partial_function_mr Specification.check_multi_specs; val add_partial_function_mr_cmd = gen_add_partial_function_mr Specification.read_multi_specs; val mode = @{keyword "("} |-- Parse.name --| @{keyword ")"}; val _ = Outer_Syntax.local_theory @{command_keyword partial_function_mr} "define mutually recursive partial functions" (mode -- Parse_Spec.specification >> (fn (mode, (fixes, specs)) => add_partial_function_mr_cmd mode fixes specs #> #2)); end diff --git a/thys/Types_To_Sets_Extension/ETTS/ETTS_Lemmas.ML b/thys/Types_To_Sets_Extension/ETTS/ETTS_Lemmas.ML --- a/thys/Types_To_Sets_Extension/ETTS/ETTS_Lemmas.ML +++ b/thys/Types_To_Sets_Extension/ETTS/ETTS_Lemmas.ML @@ -1,370 +1,370 @@ (* Title: ETTS/ETTS_Lemmas.ML Author: Mihails Milehins Copyright 2021 (C) Mihails Milehins Implementation of the command tts_lemmas. 
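As a hedged sketch only: judging from the parser defined at the end of
this file (parse_output_mode, the keyword "in", and Parse_Spec.name_facts),
an invocation is expected to look roughly like the standard lemmas
command, issued inside a suitably initialised tts context and with
exactly one input fact per entry; the binding and fact names below are
illustrative and not taken from this patch:

  tts_lemmas in my_thm = some_fact
  tts_lemmas ! in my_thm = some_fact
  tts_lemmas ? in my_thm = some_fact

The optional "!" or "?" flag selects an alternative output mode
(verbose registration versus active sendback output, as dispatched in
tts_lemmas below); the exact mapping of flag to mode is determined by
etts_output_type_of_string.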
*) signature TTS_LEMMAS = sig val tts_lemmas : Proof.context -> ETTS_Algorithm.etts_output_type -> ((binding * Token.src list) * (thm list * (string * Token.src list))) list -> Proof.context * int list end; structure TTS_Lemmas : TTS_LEMMAS = struct (**** Prerequisites ****) open ETTS_Algorithm; open ETTS_Context; open ETTS_Active; (**** Implicit statement of theorems ****) fun register_output ctxt ab out_thms = let val ((thmc, out_thms), lthy) = let val facts' = (ab ||> map (Attrib.check_src ctxt), single (out_thms, [])) |> single |> Attrib.partial_evaluation ctxt in ctxt |> Local_Theory.notes facts' |>> the_single end val _ = CTR_Utilities.thm_printer lthy true thmc out_thms in (lthy, "") end (**** Output to an active area ****) local (*number of repeated premises*) fun num_rep_prem eq ts = let val premss = map Logic.strip_imp_prems ts val min_length = min_list (map length premss) val premss = map (take min_length) premss val template_prems = hd premss val num_eq_prems = premss |> tl |> map ( compare_each eq template_prems #> take_prefix (curry op= true) #> length ) val num_rep_prems = if length premss = 1 then length template_prems else min_list num_eq_prems in (num_rep_prems, premss) end; (*elimination of premises*) fun elim_assms assm_thms thm = fold (flip Thm.implies_elim) assm_thms thm; (*create a single theorem from a fact via Pure conjunction*) fun thm_of_fact ctxt thms = let val ts = map Thm.full_prop_of thms val (num_rep_prems, _) = num_rep_prem (op aconv) ts val rep_prems = thms |> hd |> Thm.full_prop_of |> Logic.strip_imp_prems |> take num_rep_prems |> map (Thm.cterm_of ctxt); val all_ftv_rels = let val subtract = swap #> uncurry (subtract op=) in rep_prems |> map ( Thm.term_of #> Logic.forall_elim_all #> apfst (fn t => Term.add_frees t []) #> apsnd dup #> reroute_sp_ps #> apfst (apfst dup) #> apfst reroute_ps_sp #> apfst (apsnd subtract) #> apfst subtract ) end val index_of_ftvs = all_ftv_rels |> map ( #1 #> map_index I #> map swap #> AList.lookup op= #> curry (swap #> op#>) the ) val all_indicess = (map #2 all_ftv_rels) ~~ index_of_ftvs |> map (fn (x, f) => map f x) val (assms, ctxt') = Assumption.add_assumes rep_prems ctxt val stvss = map ( Thm.full_prop_of #> (fn t => Term.add_vars t []) #> map Var #> map (Thm.cterm_of ctxt) ) assms val stvss = stvss ~~ all_indicess |> map (fn (stvs, indices) => map (nth stvs) indices) val assms = map2 forall_intr_list stvss assms val thm = thms |> map (elim_assms assms) |> Conjunction.intr_balanced - |> singleton (Proof_Context.goal_export ctxt' ctxt) + |> singleton (Proof_Context.export_goal ctxt' ctxt) in thm end; in (*output to an active area*) fun active_output ctxt thm_out_str attrs_out thm_in_str attrs_in thms = let val (thms, ctxt') = Thm.unvarify_local_fact ctxt thms val thmc = thms |> thm_of_fact ctxt' |> Thm.full_prop_of |> theorem_string_of_term ctxt tts_lemma thm_out_str attrs_out thm_in_str attrs_in in (ctxt, thmc) end; end; (**** Implementation ****) fun tts_lemmas ctxt tts_output_type thm_specs = let fun process_thm_out_str c = if Symbol_Pos.is_identifier c then c else quote c val { mpespc_opt = mpespc_opt, rispec = rispec, sbtspec = sbtspec, sbrr_opt = sbrr_opt, subst_thms = subst_thms, attrbs = attrbs } = get_tts_ctxt_data ctxt val writer = ETTS_Writer.initialize 4 fun folder ((b, attrs_out), (thms, (thm_in_str, attrs_in))) (ctxt, thmcs, writer) = let val ((out_thms, writer), ctxt) = ETTS_Algorithm.etts_fact ctxt tts_output_type writer rispec sbtspec sbrr_opt subst_thms mpespc_opt attrbs thms val writer = 
ETTS_Writer.increment_index 0 writer val (lthy, thmc) = if is_default tts_output_type orelse is_verbose tts_output_type then register_output ctxt (b, attrs_out) out_thms else active_output ctxt (b |> Name_Space.base_name |> process_thm_out_str) attrs_out thm_in_str attrs_in out_thms val thmcs = thmcs ^ thmc in (lthy, thmcs, writer) end val (ctxt'', thmcs, writer) = fold folder thm_specs (ctxt, "", writer) val _ = if is_active tts_output_type then thmcs |> Active.sendback_markup_command |> writeln else () in (ctxt'', writer) end; (**** Parser ****) local val parse_output_mode = Scan.optional (\<^keyword>\!\ || \<^keyword>\?\) ""; val parse_facts = \<^keyword>\in\ |-- Parse_Spec.name_facts; in val parse_tts_lemmas = parse_output_mode -- parse_facts; end; (**** User input analysis ****) fun mk_msg_tts_lemmas msg = "tts_lemmas: " ^ msg; fun thm_specs_raw_input thm_specs_raw = let val msg_multiple_facts = mk_msg_tts_lemmas "only one fact per entry is allowed" val _ = thm_specs_raw |> map (#2 #> length) |> List.all (fn n => n = 1) orelse error msg_multiple_facts in () end; (**** Evaluation ****) local fun mk_thm_specs ctxt thm_specs_raw = let (*auxiliary functions*) fun process_thm_in_name c = let val flag_identifier = c |> Long_Name.explode |> map Symbol_Pos.is_identifier |> List.all I in if flag_identifier then c else quote c end (*based on Facts.string_of_ref from Facts.ML in src/Pure*) fun process_thm_in_ref (Facts.Named ((name, _), sel)) = process_thm_in_name name ^ Facts.string_of_selection sel | process_thm_in_ref (Facts.Fact _) = raise Fail "Illegal literal fact"; fun binding_last c = let val binding_last_h = Long_Name.base_name #> Binding.qualified_name in if size c = 0 then Binding.empty else binding_last_h c end fun binding_of_binding_spec (b, (factb, thmbs)) = if Binding.is_empty b then if length thmbs = 1 then if Binding.is_empty (the_single thmbs) then factb else the_single thmbs else factb else b (*main*) val thm_specs = thm_specs_raw |> map (apsnd the_single) |> ( Facts.string_of_ref #> binding_last |> apdupl |> apfst #> reroute_ps_sp |> apsnd |> map ) |> map reroute_sp_ps |> map (reroute_ps_sp #> (swap |> apsnd) #> reroute_sp_ps |> apfst) |> map (apsnd dup) |> ( single #> (ctxt |> Attrib.eval_thms) #> (Thm.derivation_name #> binding_last |> map |> apdupl) |> apfst |> apsnd |> map ) |> map (reroute_sp_ps #> apfst reroute_sp_ps #> reroute_ps_sp) |> ( reroute_ps_sp #> (swap |> apsnd) #> reroute_sp_ps #> (reroute_ps_sp #> binding_of_binding_spec |> apfst) |> apfst |> map ) |> map (process_thm_in_ref |> apfst |> apsnd |> apsnd) in thm_specs end; in fun process_tts_lemmas args ctxt = let (*unpacking*) val tts_output_type = args |> #1 |> etts_output_type_of_string val thm_specs_raw = #2 args (*user input analysis*) val _ = thm_specs_raw_input thm_specs_raw (*pre-processing*) val thm_specs = mk_thm_specs ctxt thm_specs_raw in thm_specs |> tts_lemmas ctxt tts_output_type |> #1 end; end; (**** Interface ****) val _ = Outer_Syntax.local_theory \<^command_keyword>\tts_lemmas\ "automated relativization of facts" (parse_tts_lemmas >> process_tts_lemmas); end; \ No newline at end of file
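For orientation, the ML code earlier in this patch (before the ETTS_Lemmas.ML hunk) ends by registering the outer command partial_function_mr, "define mutually recursive partial functions", with the parser mode -- Parse_Spec.specification. The following is a minimal, hedged sketch of what an invocation could look like, assuming an option-style monad mode has been set up for mutual recursion; the mode name, function names and equations are illustrative and not taken from this patch:

  partial_function_mr (option) is_even :: "nat => bool option"
    and is_odd :: "nat => bool option"
  where
    "is_even n = (if n = 0 then Some True else is_odd (n - 1))"
  | "is_odd n = (if n = 0 then Some False else is_even (n - 1))"

As the ML code shows, the command fuses the mutually recursive equations into a single partial function over a sum of the argument types (glob_const), defines that function via Partial_Function.add_partial_function, and then derives per-function equations that are registered with Spec_Rules.add and noted as "simps" carrying the code attribute.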