diff --git a/src/Pure/General/name_space.ML b/src/Pure/General/name_space.ML --- a/src/Pure/General/name_space.ML +++ b/src/Pure/General/name_space.ML @@ -1,649 +1,649 @@ (* Title: Pure/General/name_space.ML Author: Markus Wenzel, TU Muenchen Generic name spaces with declared and hidden entries; no support for absolute addressing. *) type xstring = string; (*external names*) signature NAME_SPACE = sig type entry = {concealed: bool, group: serial option, theory_long_name: string, pos: Position.T, serial: serial} type T val empty: string -> T val kind_of: T -> string val markup: T -> string -> Markup.T val markup_def: T -> string -> Markup.T val get_names: T -> string list val the_entry: T -> string -> entry val the_entry_theory_name: T -> string -> string val entry_ord: T -> string ord val is_concealed: T -> string -> bool val intern: T -> xstring -> string val names_long: bool Config.T val names_short: bool Config.T val names_unique: bool Config.T val extern: Proof.context -> T -> string -> xstring val extern_ord: Proof.context -> T -> string ord val extern_shortest: Proof.context -> T -> string -> xstring val markup_extern: Proof.context -> T -> string -> Markup.T * xstring val pretty: Proof.context -> T -> string -> Pretty.T val completion: Context.generic -> T -> (string -> bool) -> xstring * Position.T -> Completion.T val merge: T * T -> T type naming val get_scopes: naming -> Binding.scope list val get_scope: naming -> Binding.scope option val new_scope: naming -> Binding.scope * naming val restricted: bool -> Position.T -> naming -> naming val private_scope: Binding.scope -> naming -> naming val private: Position.T -> naming -> naming val qualified_scope: Binding.scope -> naming -> naming val qualified: Position.T -> naming -> naming val concealed: naming -> naming val get_group: naming -> serial option val set_group: serial option -> naming -> naming val set_theory_long_name: string -> naming -> naming val new_group: naming -> naming val reset_group: naming -> naming val add_path: string -> naming -> naming val root_path: naming -> naming val parent_path: naming -> naming val mandatory_path: string -> naming -> naming val qualified_path: bool -> binding -> naming -> naming val global_naming: naming val local_naming: naming val transform_naming: naming -> naming -> naming val transform_binding: naming -> binding -> binding val full_name: naming -> binding -> string val base_name: binding -> string val hide: bool -> string -> T -> T val alias: naming -> binding -> string -> T -> T val naming_of: Context.generic -> naming val map_naming: (naming -> naming) -> Context.generic -> Context.generic val declared: T -> string -> bool val declare: Context.generic -> bool -> binding -> T -> string * T type 'a table val change_base: bool -> 'a table -> 'a table val change_ignore: 'a table -> 'a table val space_of_table: 'a table -> T val check_reports: Context.generic -> 'a table -> xstring * Position.T list -> (string * Position.report list) * 'a val check: Context.generic -> 'a table -> xstring * Position.T -> string * 'a val defined: 'a table -> string -> bool val lookup: 'a table -> string -> 'a option val lookup_key: 'a table -> string -> (string * 'a) option val get: 'a table -> string -> 'a val define: Context.generic -> bool -> binding * 'a -> 'a table -> string * 'a table val alias_table: naming -> binding -> string -> 'a table -> 'a table val hide_table: bool -> string -> 'a table -> 'a table val del_table: string -> 'a table -> 'a table val map_table_entry: string -> ('a -> 'a) -> 'a 
table -> 'a table val fold_table: (string * 'a -> 'b -> 'b) -> 'a table -> 'b -> 'b val dest_table: 'a table -> (string * 'a) list val empty_table: string -> 'a table val merge_tables: 'a table * 'a table -> 'a table val join_tables: (string -> 'a * 'a -> 'a) (*exception Change_Table.SAME*) -> 'a table * 'a table -> 'a table val extern_entries: bool -> Proof.context -> T -> (string * 'a) list -> ((string * xstring) * 'a) list val markup_entries: bool -> Proof.context -> T -> (string * 'a) list -> ((Markup.T * xstring) * 'a) list val extern_table: bool -> Proof.context -> 'a table -> ((string * xstring) * 'a) list val markup_table: bool -> Proof.context -> 'a table -> ((Markup.T * xstring) * 'a) list end; structure Name_Space: NAME_SPACE = struct (** name spaces **) (* datatype entry *) type entry = {concealed: bool, group: serial option, theory_long_name: string, pos: Position.T, serial: serial}; fun entry_markup def kind (name, {pos, serial, ...}: entry) = Position.make_entity_markup def serial kind (name, pos); fun print_entry_ref kind (name, entry) = - quote (Markup.markup (entry_markup false kind (name, entry)) name); + quote (Markup.markup (entry_markup {def = false} kind (name, entry)) name); fun err_dup kind entry1 entry2 pos = error ("Duplicate " ^ plain_words kind ^ " declaration " ^ print_entry_ref kind entry1 ^ " vs. " ^ print_entry_ref kind entry2 ^ Position.here pos); (* internal names *) type internals = (string list * string list) Change_Table.T; (*xname -> visible, hidden*) fun map_internals f xname : internals -> internals = Change_Table.map_default (xname, ([], [])) f; val del_name = map_internals o apfst o remove (op =); fun del_name_extra name = map_internals (apfst (fn [] => [] | x :: xs => x :: remove (op =) name xs)); val add_name = map_internals o apfst o update (op =); fun hide_name name = map_internals (apsnd (update (op =) name)) name; (* external accesses *) type accesses = (xstring list * xstring list); (*input / output fragments*) type entries = (accesses * entry) Change_Table.T; (*name -> accesses, entry*) (* datatype T *) datatype T = Name_Space of {kind: string, internals: internals, entries: entries}; fun make_name_space (kind, internals, entries) = Name_Space {kind = kind, internals = internals, entries = entries}; fun map_name_space f (Name_Space {kind = kind, internals = internals, entries = entries}) = make_name_space (f (kind, internals, entries)); fun change_base_space begin = map_name_space (fn (kind, internals, entries) => (kind, Change_Table.change_base begin internals, Change_Table.change_base begin entries)); val change_ignore_space = map_name_space (fn (kind, internals, entries) => (kind, Change_Table.change_ignore internals, Change_Table.change_ignore entries)); fun empty kind = make_name_space (kind, Change_Table.empty, Change_Table.empty); fun kind_of (Name_Space {kind, ...}) = kind; fun gen_markup def (Name_Space {kind, entries, ...}) name = (case Change_Table.lookup entries name of NONE => Markup.intensify | SOME (_, entry) => entry_markup def kind (name, entry)); -val markup = gen_markup false; -val markup_def = gen_markup true; +val markup = gen_markup {def = false}; +val markup_def = gen_markup {def = true}; fun undefined (space as Name_Space {kind, entries, ...}) bad = let val (prfx, sfx) = (case Long_Name.dest_hidden bad of SOME name => if Change_Table.defined entries name then ("Inaccessible", Markup.markup (markup space name) (quote name)) else ("Undefined", quote name) | NONE => ("Undefined", quote bad)); in prfx ^ " " ^ 
plain_words kind ^ ": " ^ sfx end; fun get_names (Name_Space {entries, ...}) = Change_Table.fold (cons o #1) entries []; fun the_entry (space as Name_Space {entries, ...}) name = (case Change_Table.lookup entries name of NONE => error (undefined space name) | SOME (_, entry) => entry); fun the_entry_theory_name space name = Long_Name.base_name (#theory_long_name (the_entry space name)); fun entry_ord space = int_ord o apply2 (#serial o the_entry space); fun is_concealed space name = #concealed (the_entry space name) handle ERROR _ => false; (* intern *) fun intern' (Name_Space {internals, ...}) xname = (case the_default ([], []) (Change_Table.lookup internals xname) of ([name], _) => (name, true) | (name :: _, _) => (name, false) | ([], []) => (Long_Name.hidden xname, true) | ([], name' :: _) => (Long_Name.hidden name', true)); val intern = #1 oo intern'; fun get_accesses (Name_Space {entries, ...}) name = (case Change_Table.lookup entries name of NONE => ([], []) | SOME (accesses, _) => accesses); fun is_valid_access (Name_Space {internals, ...}) name xname = (case Change_Table.lookup internals xname of SOME (name' :: _, _) => name = name' | _ => false); (* extern *) val names_long = Config.declare_option_bool ("names_long", \<^here>); val names_short = Config.declare_option_bool ("names_short", \<^here>); val names_unique = Config.declare_option_bool ("names_unique", \<^here>); fun extern ctxt space name = let val names_long = Config.get ctxt names_long; val names_short = Config.get ctxt names_short; val names_unique = Config.get ctxt names_unique; fun valid require_unique xname = let val (name', is_unique) = intern' space xname in name = name' andalso (not require_unique orelse is_unique) end; fun ext [] = if valid false name then name else Long_Name.hidden name | ext (nm :: nms) = if valid names_unique nm then nm else ext nms; in if names_long then name else if names_short then Long_Name.base_name name else ext (#2 (get_accesses space name)) end; fun extern_ord ctxt space = string_ord o apply2 (extern ctxt space); fun extern_shortest ctxt = extern (ctxt |> Config.put names_long false |> Config.put names_short false |> Config.put names_unique false); fun markup_extern ctxt space name = (markup space name, extern ctxt space name); fun pretty ctxt space name = Pretty.mark_str (markup_extern ctxt space name); (* completion *) fun completion context space pred (xname, pos) = Completion.make (xname, pos) (fn completed => let fun result_ord ((pri1, (xname1, (_, name1))), (pri2, (xname2, (_, name2)))) = (case int_ord (pri2, pri1) of EQUAL => (case bool_ord (apply2 (is_some o Long_Name.dest_local) (name2, name1)) of EQUAL => (case int_ord (apply2 Long_Name.qualification (xname1, xname2)) of EQUAL => string_ord (xname1, xname2) | ord => ord) | ord => ord) | ord => ord); val Name_Space {kind, internals, ...} = space; val ext = extern_shortest (Context.proof_of context) space; val full = Name.clean xname = ""; fun complete xname' name = if (completed xname' orelse exists completed (Long_Name.explode xname')) andalso not (is_concealed space name) andalso pred name then let val xname'' = ext name; val pri = (if xname' = xname'' then 1 else 0) + (if completed xname' then 1 else 0); in if xname' <> xname'' andalso full then I else cons (pri, (xname', (kind, name))) end else I; in Change_Table.fold (fn (xname', (name :: _, _)) => complete xname' name | _ => I) internals [] |> sort_distinct result_ord |> map #2 end); (* merge *) fun merge (Name_Space {kind = kind1, internals = internals1, entries = 
entries1}, Name_Space {kind = kind2, internals = internals2, entries = entries2}) = let val kind' = if kind1 = kind2 then kind1 else error ("Attempt to merge different kinds of name spaces " ^ quote kind1 ^ " vs. " ^ quote kind2); val internals' = (internals1, internals2) |> Change_Table.join (K (fn ((names1, names1'), (names2, names2')) => if pointer_eq (names1, names2) andalso pointer_eq (names1', names2') then raise Change_Table.SAME else (Library.merge (op =) (names1, names2), Library.merge (op =) (names1', names2')))); val entries' = (entries1, entries2) |> Change_Table.join (fn name => fn ((_, entry1), (_, entry2)) => if #serial entry1 = #serial entry2 then raise Change_Table.SAME else err_dup kind' (name, entry1) (name, entry2) Position.none); in make_name_space (kind', internals', entries') end; (** naming context **) (* datatype naming *) datatype naming = Naming of {scopes: Binding.scope list, restricted: (bool * Binding.scope) option, concealed: bool, group: serial option, theory_long_name: string, path: (string * bool) list}; fun make_naming (scopes, restricted, concealed, group, theory_long_name, path) = Naming {scopes = scopes, restricted = restricted, concealed = concealed, group = group, theory_long_name = theory_long_name, path = path}; fun map_naming f (Naming {scopes, restricted, concealed, group, theory_long_name, path}) = make_naming (f (scopes, restricted, concealed, group, theory_long_name, path)); (* scope and access restriction *) fun get_scopes (Naming {scopes, ...}) = scopes; val get_scope = try hd o get_scopes; fun new_scope naming = let val scope = Binding.new_scope (); val naming' = naming |> map_naming (fn (scopes, restricted, concealed, group, theory_long_name, path) => (scope :: scopes, restricted, concealed, group, theory_long_name, path)); in (scope, naming') end; fun restricted_scope strict scope = map_naming (fn (scopes, _, concealed, group, theory_long_name, path) => (scopes, SOME (strict, scope), concealed, group, theory_long_name, path)); fun restricted strict pos naming = (case get_scope naming of SOME scope => restricted_scope strict scope naming | NONE => error ("Missing local scope -- cannot restrict name space accesses" ^ Position.here pos)); val private_scope = restricted_scope true; val private = restricted true; val qualified_scope = restricted_scope false; val qualified = restricted false; val concealed = map_naming (fn (scopes, restricted, _, group, theory_long_name, path) => (scopes, restricted, true, group, theory_long_name, path)); (* additional structural info *) fun set_theory_long_name theory_long_name = map_naming (fn (scopes, restricted, concealed, group, _, path) => (scopes, restricted, concealed, group, theory_long_name, path)); fun get_group (Naming {group, ...}) = group; fun set_group group = map_naming (fn (scopes, restricted, concealed, _, theory_long_name, path) => (scopes, restricted, concealed, group, theory_long_name, path)); fun new_group naming = set_group (SOME (serial ())) naming; val reset_group = set_group NONE; (* name entry path *) fun get_path (Naming {path, ...}) = path; fun map_path f = map_naming (fn (scopes, restricted, concealed, group, theory_long_name, path) => (scopes, restricted, concealed, group, theory_long_name, f path)); fun add_path elems = map_path (fn path => path @ [(elems, false)]); val root_path = map_path (fn _ => []); val parent_path = map_path (perhaps (try (#1 o split_last))); fun mandatory_path elems = map_path (fn path => path @ [(elems, true)]); fun qualified_path mandatory binding = 
map_path (fn path => path @ Binding.path_of (Binding.qualify_name mandatory binding "")); val global_naming = make_naming ([], NONE, false, NONE, "", []); val local_naming = global_naming |> add_path Long_Name.localN; (* transform *) fun transform_naming (Naming {restricted = restricted', concealed = concealed', ...}) = (case restricted' of SOME (strict, scope) => restricted_scope strict scope | NONE => I) #> concealed' ? concealed; fun transform_binding (Naming {restricted, concealed, ...}) = Binding.restricted restricted #> concealed ? Binding.concealed; (* full name *) fun name_spec naming binding = Binding.name_spec (get_scopes naming) (get_path naming) (transform_binding naming binding); fun full_name naming = name_spec naming #> #spec #> map #1 #> Long_Name.implode; val base_name = full_name global_naming #> Long_Name.base_name; (* accesses *) fun mandatory xs = map_filter (fn (x, true) => SOME x | _ => NONE) xs; fun mandatory_prefixes xs = mandatory xs :: mandatory_prefixes1 xs and mandatory_prefixes1 [] = [] | mandatory_prefixes1 ((x, true) :: xs) = map (cons x) (mandatory_prefixes1 xs) | mandatory_prefixes1 ((x, false) :: xs) = map (cons x) (mandatory_prefixes xs); fun mandatory_suffixes xs = map rev (mandatory_prefixes (rev xs)); fun make_accesses naming binding = (case name_spec naming binding of {restriction = SOME true, ...} => ([], []) | {restriction, spec, ...} => let val restrict = is_some restriction ? filter (fn [_] => false | _ => true); val sfxs = restrict (mandatory_suffixes spec); val pfxs = restrict (mandatory_prefixes spec); in apply2 (map Long_Name.implode) (sfxs @ pfxs, sfxs) end); (* hide *) fun hide fully name space = space |> map_name_space (fn (kind, internals, entries) => let val _ = the_entry space name; val (accs, accs') = get_accesses space name; val xnames = filter (is_valid_access space name) accs; val internals' = internals |> hide_name name |> fold (del_name name) (if fully then xnames else inter (op =) [Long_Name.base_name name] xnames) |> fold (del_name_extra name) accs'; in (kind, internals', entries) end); (* alias *) fun alias naming binding name space = space |> map_name_space (fn (kind, internals, entries) => let val _ = the_entry space name; val (more_accs, more_accs') = make_accesses naming binding; val internals' = internals |> fold (add_name name) more_accs; val entries' = entries |> Change_Table.map_entry name (apfst (fn (accs, accs') => (fold_rev (update op =) more_accs accs, fold_rev (update op =) more_accs' accs'))) in (kind, internals', entries') end); (** context naming **) structure Data_Args = struct type T = naming; val empty = global_naming; fun init _ = local_naming; val extend = I; val merge = #1; end; structure Global_Naming = Theory_Data(Data_Args); structure Local_Naming = Proof_Data(Data_Args); fun naming_of (Context.Theory thy) = Global_Naming.get thy | naming_of (Context.Proof ctxt) = Local_Naming.get ctxt; fun map_naming f (Context.Theory thy) = Context.Theory (Global_Naming.map f thy) | map_naming f (Context.Proof ctxt) = Context.Proof (Local_Naming.map f ctxt); (** entry definition **) (* declaration *) fun declared (Name_Space {entries, ...}) = Change_Table.defined entries; fun declare context strict binding space = let val naming = naming_of context; val Naming {group, theory_long_name, ...} = naming; val {concealed, spec, ...} = name_spec naming binding; val accesses = make_accesses naming binding; val name = Long_Name.implode (map fst spec); val _ = name = "" andalso error (Binding.bad binding); val (proper_pos, pos) 
= Position.default (Binding.pos_of binding); val entry = {concealed = concealed, group = group, theory_long_name = theory_long_name, pos = pos, serial = serial ()}; val space' = space |> map_name_space (fn (kind, internals, entries) => let val internals' = internals |> fold (add_name name) (#1 accesses); val entries' = (if strict then Change_Table.update_new else Change_Table.update) (name, (accesses, entry)) entries handle Change_Table.DUP dup => err_dup kind (dup, #2 (the (Change_Table.lookup entries dup))) (name, entry) (#pos entry); in (kind, internals', entries') end); val _ = if proper_pos andalso Context_Position.is_reported_generic context pos then - Position.report pos (entry_markup true (kind_of space) (name, entry)) + Position.report pos (entry_markup {def = true} (kind_of space) (name, entry)) else (); in (name, space') end; (* definition in symbol table *) datatype 'a table = Table of T * 'a Change_Table.T; fun change_base begin (Table (space, tab)) = Table (change_base_space begin space, Change_Table.change_base begin tab); fun change_ignore (Table (space, tab)) = Table (change_ignore_space space, Change_Table.change_ignore tab); fun space_of_table (Table (space, _)) = space; fun check_reports context (Table (space, tab)) (xname, ps) = let val name = intern space xname in (case Change_Table.lookup tab name of SOME x => let val reports = filter (Context_Position.is_reported_generic context) ps |> map (fn pos => (pos, markup space name)); in ((name, reports), x) end | NONE => error (undefined space name ^ Position.here_list ps ^ Completion.markup_report (map (fn pos => completion context space (K true) (xname, pos)) ps))) end; fun check context table (xname, pos) = let val ((name, reports), x) = check_reports context table (xname, [pos]); val _ = Context_Position.reports_generic context reports; in (name, x) end; fun defined (Table (_, tab)) name = Change_Table.defined tab name; fun lookup (Table (_, tab)) name = Change_Table.lookup tab name; fun lookup_key (Table (_, tab)) name = Change_Table.lookup_key tab name; fun get table name = (case lookup_key table name of SOME (_, x) => x | NONE => error (undefined (space_of_table table) name)); fun define context strict (binding, x) (Table (space, tab)) = let val (name, space') = declare context strict binding space; val tab' = Change_Table.update (name, x) tab; in (name, Table (space', tab')) end; (* derived table operations *) fun alias_table naming binding name (Table (space, tab)) = Table (alias naming binding name space, tab); fun hide_table fully name (Table (space, tab)) = Table (hide fully name space, tab); fun del_table name (Table (space, tab)) = let val space' = hide true name space handle ERROR _ => space; val tab' = Change_Table.delete_safe name tab; in Table (space', tab') end; fun map_table_entry name f (Table (space, tab)) = Table (space, Change_Table.map_entry name f tab); fun fold_table f (Table (_, tab)) = Change_Table.fold f tab; fun dest_table (Table (_, tab)) = Change_Table.dest tab; fun empty_table kind = Table (empty kind, Change_Table.empty); fun merge_tables (Table (space1, tab1), Table (space2, tab2)) = Table (merge (space1, space2), Change_Table.merge (K true) (tab1, tab2)); fun join_tables f (Table (space1, tab1), Table (space2, tab2)) = Table (merge (space1, space2), Change_Table.join f (tab1, tab2)); (* present table content *) fun extern_entries verbose ctxt space entries = fold (fn (name, x) => (verbose orelse not (is_concealed space name)) ? 
cons ((name, extern ctxt space name), x)) entries [] |> sort_by (#2 o #1); fun markup_entries verbose ctxt space entries = extern_entries verbose ctxt space entries |> map (fn ((name, xname), x) => ((markup space name, xname), x)); fun extern_table verbose ctxt (Table (space, tab)) = extern_entries verbose ctxt space (Change_Table.dest tab); fun markup_table verbose ctxt (Table (space, tab)) = markup_entries verbose ctxt space (Change_Table.dest tab); end; diff --git a/src/Pure/General/position.ML b/src/Pure/General/position.ML --- a/src/Pure/General/position.ML +++ b/src/Pure/General/position.ML @@ -1,309 +1,309 @@ (* Title: Pure/General/position.ML Author: Makarius Source positions starting from 1; values <= 0 mean "absent". Count Isabelle symbols, not UTF8 bytes nor UTF16 characters. Position range specifies a right-open interval offset .. end_offset (exclusive). *) signature POSITION = sig eqtype T val ord: T ord val make: Thread_Position.T -> T val dest: T -> Thread_Position.T val line_of: T -> int option val offset_of: T -> int option val end_offset_of: T -> int option val file_of: T -> string option val id_of: T -> string option val symbol: Symbol.symbol -> T -> T val symbol_explode: string -> T -> T val distance_of: T * T -> int option val none: T val start: T val file_name: string -> Properties.T val file_only: string -> T val file: string -> T val line_file_only: int -> string -> T val line_file: int -> string -> T val line: int -> T val get_props: T -> Properties.T val id: string -> T val id_only: string -> T val put_id: string -> T -> T val copy_id: T -> T -> T val id_properties_of: T -> Properties.T val parse_id: T -> int option val advance_offsets: int -> T -> T val adjust_offsets: (int -> int option) -> T -> T val of_properties: Properties.T -> T val properties_of: T -> Properties.T val offset_properties_of: T -> Properties.T val def_properties_of: T -> Properties.T val entity_markup: string -> string * T -> Markup.T - val make_entity_markup: bool -> serial -> string -> string * T -> Markup.T + val make_entity_markup: {def: bool} -> serial -> string -> string * T -> Markup.T val markup: T -> Markup.T -> Markup.T val is_reported: T -> bool val is_reported_range: T -> bool val reported_text: T -> Markup.T -> string -> string val report_text: T -> Markup.T -> string -> unit val report: T -> Markup.T -> unit type report = T * Markup.T type report_text = report * string val reports_text: report_text list -> unit val reports: report list -> unit val store_reports: report_text list Unsynchronized.ref -> T list -> ('a -> Markup.T list) -> 'a -> unit val append_reports: report_text list Unsynchronized.ref -> report list -> unit val here_strs: T -> string * string val here: T -> string val here_list: T list -> string type range = T * T val no_range: range val no_range_position: T -> T val range_position: range -> T val range: T * T -> range val range_of_properties: Properties.T -> range val properties_of_range: range -> Properties.T val thread_data: unit -> T val setmp_thread_data: T -> ('a -> 'b) -> 'a -> 'b val default: T -> bool * T end; structure Position: POSITION = struct (* datatype position *) type count = int * int * int; datatype T = Pos of count * Properties.T; fun dest2 f = apply2 (fn Pos p => f p); val ord = pointer_eq_ord (int_ord o dest2 (#1 o #1) ||| int_ord o dest2 (#2 o #1) ||| int_ord o dest2 (#3 o #1) ||| Properties.ord o dest2 #2); fun norm_props (props: Properties.T) = maps (fn a => the_list (find_first (fn (b, _) => a = b) props)) [Markup.fileN, Markup.idN]; fun 
make {line = i, offset = j, end_offset = k, props} = Pos ((i, j, k), norm_props props); fun dest (Pos ((i, j, k), props)) = {line = i, offset = j, end_offset = k, props = props}; fun valid (i: int) = i > 0; val invalid = not o valid; fun maybe_valid i = if valid i then SOME i else NONE; fun if_valid i i' = if valid i then i' else i; (* fields *) fun line_of (Pos ((i, _, _), _)) = maybe_valid i; fun offset_of (Pos ((_, j, _), _)) = maybe_valid j; fun end_offset_of (Pos ((_, _, k), _)) = maybe_valid k; fun file_of (Pos (_, props)) = Properties.get props Markup.fileN; fun id_of (Pos (_, props)) = Properties.get props Markup.idN; (* count position *) fun count_symbol "\n" ((i, j, k): count) = (if_valid i (i + 1), if_valid j (j + 1), k) | count_symbol s (i, j, k) = if Symbol.not_eof s then (i, if_valid j (j + 1), k) else (i, j, k); fun count_invalid ((i, j, _): count) = invalid i andalso invalid j; fun symbol sym (pos as (Pos (count, props))) = if count_invalid count then pos else Pos (count_symbol sym count, props); val symbol_explode = fold symbol o Symbol.explode; (* distance of adjacent positions *) fun distance_of (Pos ((_, j, _), _), Pos ((_, j', _), _)) = if valid j andalso valid j' then SOME (j' - j) else NONE; (* make position *) val none = Pos ((0, 0, 0), []); val start = Pos ((1, 1, 0), []); fun file_name "" = [] | file_name name = [(Markup.fileN, name)]; fun file_only name = Pos ((0, 0, 0), file_name name); fun file name = Pos ((1, 1, 0), file_name name); fun line_file_only i name = Pos ((i, 0, 0), file_name name); fun line_file i name = Pos ((i, 1, 0), file_name name); fun line i = line_file i ""; fun get_props (Pos (_, props)) = props; fun id id = Pos ((0, 1, 0), [(Markup.idN, id)]); fun id_only id = Pos ((0, 0, 0), [(Markup.idN, id)]); fun put_id id (Pos (count, props)) = Pos (count, norm_props (Properties.put (Markup.idN, id) props)); fun copy_id pos = (case id_of pos of NONE => I | SOME id => put_id id); fun parse_id pos = Option.map Value.parse_int (id_of pos); fun id_properties_of pos = (case id_of pos of SOME id => [(Markup.idN, id)] | NONE => []); (* adjust offsets *) fun advance_offsets offset (pos as (Pos (count as (i, j, k), props))) = if offset = 0 orelse count_invalid count then pos else if offset < 0 then raise Fail "Illegal offset" else if valid i then raise Fail "Illegal line position" else Pos ((i, if_valid j (j + offset), if_valid k (k + offset)), props); fun adjust_offsets adjust (pos as Pos (_, props)) = if is_none (file_of pos) then (case parse_id pos of SOME id => (case adjust id of SOME offset => let val Pos (count, _) = advance_offsets offset pos in Pos (count, Properties.remove Markup.idN props) end | NONE => pos) | NONE => pos) else pos; (* markup properties *) fun get_int props name = (case Properties.get props name of NONE => 0 | SOME s => Value.parse_int s); fun of_properties props = make { line = get_int props Markup.lineN, offset = get_int props Markup.offsetN, end_offset = get_int props Markup.end_offsetN, props = props}; fun int_entry k i = if invalid i then [] else [(k, Value.print_int i)]; fun properties_of (Pos ((i, j, k), props)) = int_entry Markup.lineN i @ int_entry Markup.offsetN j @ int_entry Markup.end_offsetN k @ props; fun offset_properties_of (Pos ((_, j, k), _)) = int_entry Markup.offsetN j @ int_entry Markup.end_offsetN k; val def_properties_of = properties_of #> map (apfst Markup.def_name); fun entity_markup kind (name, pos) = Markup.entity kind name |> Markup.properties (def_properties_of pos); -fun make_entity_markup def serial kind 
(name, pos) = +fun make_entity_markup {def} serial kind (name, pos) = let val props = if def then (Markup.defN, Value.print_int serial) :: properties_of pos else (Markup.refN, Value.print_int serial) :: def_properties_of pos; in Markup.entity kind name |> Markup.properties props end; val markup = Markup.properties o properties_of; (* reports *) fun is_reported pos = is_some (offset_of pos) andalso is_some (id_of pos); fun is_reported_range pos = is_reported pos andalso is_some (end_offset_of pos); fun reported_text pos m txt = if is_reported pos then Markup.markup (markup pos m) txt else ""; fun report_text pos markup txt = Output.report [reported_text pos markup txt]; fun report pos markup = report_text pos markup ""; type report = T * Markup.T; type report_text = report * string; val reports_text = map (fn ((pos, m), txt) => if is_reported pos then Markup.markup (markup pos m) txt else "") #> Output.report; val reports = map (rpair "") #> reports_text; fun store_reports _ [] _ _ = () | store_reports (r: report_text list Unsynchronized.ref) ps markup x = let val ms = markup x in Unsynchronized.change r (fold (fn p => fold (fn m => cons ((p, m), "")) ms) ps) end; fun append_reports (r: report_text list Unsynchronized.ref) reports = Unsynchronized.change r (append (map (rpair "") reports)); (* here: user output *) fun here_strs pos = (case (line_of pos, file_of pos) of (SOME i, NONE) => (" ", "(line " ^ Value.print_int i ^ ")") | (SOME i, SOME name) => (" ", "(line " ^ Value.print_int i ^ " of " ^ quote name ^ ")") | (NONE, SOME name) => (" ", "(file " ^ quote name ^ ")") | _ => if is_reported pos then ("", "\092<^here>") else ("", "")); fun here pos = let val props = properties_of pos; val (s1, s2) = here_strs pos; in if s2 = "" then "" else s1 ^ Markup.markup (Markup.properties props Markup.position) s2 end; val here_list = map here #> distinct (op =) #> implode; (* range *) type range = T * T; val no_range = (none, none); fun no_range_position (Pos ((i, j, _), props)) = Pos ((i, j, 0), props); fun range_position (Pos ((i, j, _), props), Pos ((_, j', _), _)) = Pos ((i, j, j'), props); fun range (pos, pos') = (range_position (pos, pos'), no_range_position pos'); fun range_of_properties props = let val pos = of_properties props; val pos' = make {line = get_int props Markup.end_lineN, offset = get_int props Markup.end_offsetN, end_offset = 0, props = props}; in (pos, pos') end; fun properties_of_range (pos, pos') = properties_of pos @ int_entry Markup.end_lineN (the_default 0 (line_of pos')); (* thread data *) val thread_data = make o Thread_Position.get; fun setmp_thread_data pos = Thread_Position.setmp (dest pos); fun default pos = if pos = none then (false, thread_data ()) else (true, pos); end; diff --git a/src/Pure/Isar/calculation.ML b/src/Pure/Isar/calculation.ML --- a/src/Pure/Isar/calculation.ML +++ b/src/Pure/Isar/calculation.ML @@ -1,241 +1,241 @@ (* Title: Pure/Isar/calculation.ML Author: Markus Wenzel, TU Muenchen Generic calculational proofs. 
*) signature CALCULATION = sig val print_rules: Proof.context -> unit val check: Proof.state -> thm list option val trans_add: attribute val trans_del: attribute val sym_add: attribute val sym_del: attribute val symmetric: attribute val also: thm list option -> bool -> Proof.state -> Proof.state Seq.result Seq.seq val also_cmd: (Facts.ref * Token.src list) list option -> bool -> Proof.state -> Proof.state Seq.result Seq.seq val finally: thm list option -> bool -> Proof.state -> Proof.state Seq.result Seq.seq val finally_cmd: (Facts.ref * Token.src list) list option -> bool -> Proof.state -> Proof.state Seq.result Seq.seq val moreover: bool -> Proof.state -> Proof.state val ultimately: bool -> Proof.state -> Proof.state end; structure Calculation: CALCULATION = struct (** calculation data **) type calculation = {result: thm list, level: int, serial: serial, pos: Position.T}; structure Data = Generic_Data ( type T = (thm Item_Net.T * thm Item_Net.T) * calculation option; val empty = ((Thm.item_net_elim, Thm.item_net), NONE); val extend = I; fun merge (((trans1, sym1), _), ((trans2, sym2), _)) = ((Item_Net.merge (trans1, trans2), Item_Net.merge (sym1, sym2)), NONE); ); val get_rules = #1 o Data.get o Context.Proof; val get_trans_rules = Item_Net.content o #1 o get_rules; val get_sym_rules = Item_Net.content o #2 o get_rules; val get_calculation = #2 o Data.get o Context.Proof; fun print_rules ctxt = let val pretty_thm = Thm.pretty_thm_item ctxt in [Pretty.big_list "transitivity rules:" (map pretty_thm (get_trans_rules ctxt)), Pretty.big_list "symmetry rules:" (map pretty_thm (get_sym_rules ctxt))] end |> Pretty.writeln_chunks; (* access calculation *) fun check_calculation state = (case get_calculation (Proof.context_of state) of NONE => NONE | SOME calculation => if #level calculation = Proof.level state then SOME calculation else NONE); val check = Option.map #result o check_calculation; val calculationN = "calculation"; fun update_calculation calc state = let fun report def serial pos = Context_Position.report (Proof.context_of state) (Position.thread_data ()) (Position.make_entity_markup def serial calculationN ("", pos)); val calculation = (case calc of NONE => NONE | SOME result => (case check_calculation state of NONE => let val level = Proof.level state; val serial = serial (); val pos = Position.thread_data (); - val _ = report true serial pos; + val _ = report {def = true} serial pos; in SOME {result = result, level = level, serial = serial, pos = pos} end | SOME {level, serial, pos, ...} => - (report false serial pos; + (report {def = false} serial pos; SOME {result = result, level = level, serial = serial, pos = pos}))); in state |> (Proof.map_context o Context.proof_map o Data.map o apsnd) (K calculation) |> Proof.map_context (Proof_Context.put_thms false (calculationN, calc)) end; (** attributes **) (* add/del rules *) val trans_add = Thm.declaration_attribute (Data.map o apfst o apfst o Item_Net.update o Thm.trim_context); val trans_del = Thm.declaration_attribute (Data.map o apfst o apfst o Item_Net.remove); val sym_add = Thm.declaration_attribute (fn th => (Data.map o apfst o apsnd) (Item_Net.update (Thm.trim_context th)) #> Thm.attribute_declaration (Context_Rules.elim_query NONE) th); val sym_del = Thm.declaration_attribute (fn th => (Data.map o apfst o apsnd) (Item_Net.remove th) #> Thm.attribute_declaration Context_Rules.rule_del th); (* symmetric *) val symmetric = Thm.rule_attribute [] (fn context => fn th => let val ctxt = Context.proof_of context in (case Seq.chop 2 
(Drule.multi_resolves (SOME ctxt) [th] (get_sym_rules ctxt)) of ([th'], _) => Drule.zero_var_indexes th' | ([], _) => raise THM ("symmetric: no unifiers", 1, [th]) | _ => raise THM ("symmetric: multiple unifiers", 1, [th])) end); (* concrete syntax *) val _ = Theory.setup (Attrib.setup \<^binding>\trans\ (Attrib.add_del trans_add trans_del) "declaration of transitivity rule" #> Attrib.setup \<^binding>\sym\ (Attrib.add_del sym_add sym_del) "declaration of symmetry rule" #> Attrib.setup \<^binding>\symmetric\ (Scan.succeed symmetric) "resolution with symmetry rule" #> Global_Theory.add_thms [((Binding.empty, transitive_thm), [trans_add]), ((Binding.empty, symmetric_thm), [sym_add])] #> snd); (** proof commands **) fun assert_sane final = if final then Proof.assert_forward else Proof.assert_forward_or_chain #> tap (fn state => if can Proof.assert_chain state then Context_Position.report (Proof.context_of state) (Position.thread_data ()) Markup.improper else ()); fun maintain_calculation int final calc state = let val state' = state |> update_calculation (SOME calc) |> Proof.improper_reset_facts; val ctxt' = Proof.context_of state'; val _ = if int then Proof_Context.pretty_fact ctxt' (Proof_Context.full_name ctxt' (Binding.name calculationN), calc) |> Pretty.string_of |> writeln else (); in state' |> final ? (update_calculation NONE #> Proof.chain_facts calc) end; (* also and finally *) fun calculate prep_rules final raw_rules int state = let val ctxt = Proof.context_of state; val pretty_thm = Thm.pretty_thm ctxt; val pretty_thm_item = Thm.pretty_thm_item ctxt; val strip_assums_concl = Logic.strip_assums_concl o Thm.prop_of; val eq_prop = op aconv o apply2 (Envir.beta_eta_contract o strip_assums_concl); fun check_projection ths th = (case find_index (curry eq_prop th) ths of ~1 => Seq.Result [th] | i => Seq.Error (fn () => (Pretty.string_of o Pretty.chunks) [Pretty.block [Pretty.str "Vacuous calculation result:", Pretty.brk 1, pretty_thm th], (Pretty.block o Pretty.fbreaks) (Pretty.str ("derived as projection (" ^ string_of_int (i + 1) ^ ") from:") :: map pretty_thm_item ths)])); val opt_rules = Option.map (prep_rules ctxt) raw_rules; fun combine ths = Seq.append ((case opt_rules of SOME rules => rules | NONE => (case ths of [] => Item_Net.content (#1 (get_rules ctxt)) | th :: _ => Item_Net.retrieve (#1 (get_rules ctxt)) (strip_assums_concl th))) |> Seq.of_list |> Seq.maps (Drule.multi_resolve (SOME ctxt) ths) |> Seq.map (check_projection ths)) (Seq.single (Seq.Error (fn () => (Pretty.string_of o Pretty.block o Pretty.fbreaks) (Pretty.str "No matching trans rules for calculation:" :: map pretty_thm_item ths)))); val facts = Proof.the_facts (assert_sane final state); val (initial, calculations) = (case check state of NONE => (true, Seq.single (Seq.Result facts)) | SOME calc => (false, combine (calc @ facts))); val _ = initial andalso final andalso error "No calculation yet"; val _ = initial andalso is_some opt_rules andalso error "Initial calculation -- no rules to be given"; in calculations |> Seq.map_result (fn calc => maintain_calculation int final calc state) end; val also = calculate (K I) false; val also_cmd = calculate Attrib.eval_thms false; val finally = calculate (K I) true; val finally_cmd = calculate Attrib.eval_thms true; (* moreover and ultimately *) fun collect final int state = let val facts = Proof.the_facts (assert_sane final state); val (initial, thms) = (case check state of NONE => (true, []) | SOME thms => (false, thms)); val calc = thms @ facts; val _ = initial andalso 
final andalso error "No calculation yet"; in maintain_calculation int final calc state end; val moreover = collect false; val ultimately = collect true; end; diff --git a/src/Pure/Isar/keyword.ML b/src/Pure/Isar/keyword.ML --- a/src/Pure/Isar/keyword.ML +++ b/src/Pure/Isar/keyword.ML @@ -1,284 +1,284 @@ (* Title: Pure/Isar/keyword.ML Author: Makarius Isar keyword classification. *) signature KEYWORD = sig val diag: string val document_heading: string val document_body: string val document_raw: string val thy_begin: string val thy_end: string val thy_decl: string val thy_decl_block: string val thy_defn: string val thy_stmt: string val thy_load: string val thy_goal: string val thy_goal_defn: string val thy_goal_stmt: string val qed: string val qed_script: string val qed_block: string val qed_global: string val prf_goal: string val prf_block: string val next_block: string val prf_open: string val prf_close: string val prf_chain: string val prf_decl: string val prf_asm: string val prf_asm_goal: string val prf_script: string val prf_script_goal: string val prf_script_asm_goal: string val before_command: string val quasi_command: string type spec = string * string list val no_spec: spec val before_command_spec: spec val quasi_command_spec: spec val document_heading_spec: spec val document_body_spec: spec type keywords val minor_keywords: keywords -> Scan.lexicon val major_keywords: keywords -> Scan.lexicon val no_command_keywords: keywords -> keywords val empty_keywords: keywords val merge_keywords: keywords * keywords -> keywords val add_keywords: ((string * Position.T) * spec) list -> keywords -> keywords val is_keyword: keywords -> string -> bool val is_command: keywords -> string -> bool val is_literal: keywords -> string -> bool val dest_commands: keywords -> string list val command_markup: keywords -> string -> Markup.T option val command_kind: keywords -> string -> string option val command_tags: keywords -> string -> string list val is_vacuous: keywords -> string -> bool val is_diag: keywords -> string -> bool val is_document_heading: keywords -> string -> bool val is_document_body: keywords -> string -> bool val is_document_raw: keywords -> string -> bool val is_document: keywords -> string -> bool val is_theory_end: keywords -> string -> bool val is_theory_load: keywords -> string -> bool val is_theory: keywords -> string -> bool val is_theory_body: keywords -> string -> bool val is_proof: keywords -> string -> bool val is_proof_body: keywords -> string -> bool val is_theory_goal: keywords -> string -> bool val is_proof_goal: keywords -> string -> bool val is_qed: keywords -> string -> bool val is_qed_global: keywords -> string -> bool val is_proof_open: keywords -> string -> bool val is_proof_close: keywords -> string -> bool val is_proof_asm: keywords -> string -> bool val is_improper: keywords -> string -> bool val is_printed: keywords -> string -> bool end; structure Keyword: KEYWORD = struct (** keyword classification **) (* kinds *) val diag = "diag"; val document_heading = "document_heading"; val document_body = "document_body"; val document_raw = "document_raw"; val thy_begin = "thy_begin"; val thy_end = "thy_end"; val thy_decl = "thy_decl"; val thy_decl_block = "thy_decl_block"; val thy_defn = "thy_defn"; val thy_stmt = "thy_stmt"; val thy_load = "thy_load"; val thy_goal = "thy_goal"; val thy_goal_defn = "thy_goal_defn"; val thy_goal_stmt = "thy_goal_stmt"; val qed = "qed"; val qed_script = "qed_script"; val qed_block = "qed_block"; val qed_global = "qed_global"; val prf_goal 
= "prf_goal"; val prf_block = "prf_block"; val next_block = "next_block"; val prf_open = "prf_open"; val prf_close = "prf_close"; val prf_chain = "prf_chain"; val prf_decl = "prf_decl"; val prf_asm = "prf_asm"; val prf_asm_goal = "prf_asm_goal"; val prf_script = "prf_script"; val prf_script_goal = "prf_script_goal"; val prf_script_asm_goal = "prf_script_asm_goal"; val before_command = "before_command"; val quasi_command = "quasi_command"; val command_kinds = [diag, document_heading, document_body, document_raw, thy_begin, thy_end, thy_load, thy_decl, thy_decl_block, thy_defn, thy_stmt, thy_goal, thy_goal_defn, thy_goal_stmt, qed, qed_script, qed_block, qed_global, prf_goal, prf_block, next_block, prf_open, prf_close, prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_script, prf_script_goal, prf_script_asm_goal]; (* specifications *) type spec = string * string list; val no_spec: spec = ("", []); val before_command_spec: spec = (before_command, []); val quasi_command_spec: spec = (quasi_command, []); val document_heading_spec: spec = ("document_heading", ["document"]); val document_body_spec: spec = ("document_body", ["document"]); type entry = {pos: Position.T, id: serial, kind: string, tags: string list}; fun check_spec pos (kind, tags) : entry = if not (member (op =) command_kinds kind) then error ("Unknown outer syntax keyword kind " ^ quote kind) else {pos = pos, id = serial (), kind = kind, tags = tags}; (** keyword tables **) (* type keywords *) datatype keywords = Keywords of {minor: Scan.lexicon, major: Scan.lexicon, commands: entry Symtab.table}; fun minor_keywords (Keywords {minor, ...}) = minor; fun major_keywords (Keywords {major, ...}) = major; fun make_keywords (minor, major, commands) = Keywords {minor = minor, major = major, commands = commands}; fun map_keywords f (Keywords {minor, major, commands}) = make_keywords (f (minor, major, commands)); val no_command_keywords = map_keywords (fn (minor, _, _) => (minor, Scan.empty_lexicon, Symtab.empty)); (* build keywords *) val empty_keywords = make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty); fun merge_keywords (Keywords {minor = minor1, major = major1, commands = commands1}, Keywords {minor = minor2, major = major2, commands = commands2}) = make_keywords (Scan.merge_lexicons (minor1, minor2), Scan.merge_lexicons (major1, major2), Symtab.merge (K true) (commands1, commands2)); val add_keywords = fold (fn ((name, pos), spec as (kind, _)) => map_keywords (fn (minor, major, commands) => if kind = "" orelse kind = before_command orelse kind = quasi_command then let val minor' = Scan.extend_lexicon (Symbol.explode name) minor; in (minor', major, commands) end else let val entry = check_spec pos spec; val major' = Scan.extend_lexicon (Symbol.explode name) major; val commands' = Symtab.update (name, entry) commands; in (minor, major', commands') end)); (* keyword status *) fun is_keyword keywords s = Scan.is_literal (minor_keywords keywords) (Symbol.explode s); fun is_command (Keywords {commands, ...}) = Symtab.defined commands; fun is_literal keywords = is_keyword keywords orf is_command keywords; fun dest_commands (Keywords {commands, ...}) = Symtab.keys commands; (* command keywords *) fun lookup_command (Keywords {commands, ...}) = Symtab.lookup commands; fun command_markup keywords name = lookup_command keywords name |> Option.map (fn {pos, id, ...} => - Position.make_entity_markup false id Markup.command_keywordN (name, pos)); + Position.make_entity_markup {def = false} id Markup.command_keywordN (name, pos)); 
fun command_kind keywords = Option.map #kind o lookup_command keywords; fun command_tags keywords name = (case lookup_command keywords name of SOME {tags, ...} => tags | NONE => []); (* command categories *) fun command_category ks = let val tab = Symtab.make_set ks; fun pred keywords name = (case lookup_command keywords name of NONE => false | SOME {kind, ...} => Symtab.defined tab kind); in pred end; val is_vacuous = command_category [diag, document_heading, document_body, document_raw]; val is_diag = command_category [diag]; val is_document_heading = command_category [document_heading]; val is_document_body = command_category [document_body]; val is_document_raw = command_category [document_raw]; val is_document = command_category [document_heading, document_body, document_raw]; val is_theory_end = command_category [thy_end]; val is_theory_load = command_category [thy_load]; val is_theory = command_category [thy_begin, thy_end, thy_load, thy_decl, thy_decl_block, thy_defn, thy_stmt, thy_goal, thy_goal_defn, thy_goal_stmt]; val is_theory_body = command_category [thy_load, thy_decl, thy_decl_block, thy_defn, thy_stmt, thy_goal, thy_goal_defn, thy_goal_stmt]; val is_proof = command_category [qed, qed_script, qed_block, qed_global, prf_goal, prf_block, next_block, prf_open, prf_close, prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_script, prf_script_goal, prf_script_asm_goal]; val is_proof_body = command_category [diag, document_heading, document_body, document_raw, prf_block, next_block, prf_open, prf_close, prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_script, prf_script_goal, prf_script_asm_goal]; val is_theory_goal = command_category [thy_goal, thy_goal_defn, thy_goal_stmt]; val is_proof_goal = command_category [prf_goal, prf_asm_goal, prf_script_goal, prf_script_asm_goal]; val is_qed = command_category [qed, qed_script, qed_block]; val is_qed_global = command_category [qed_global]; val is_proof_open = command_category [prf_goal, prf_asm_goal, prf_script_goal, prf_script_asm_goal, prf_open]; val is_proof_close = command_category [qed, qed_script, qed_block, prf_close]; val is_proof_asm = command_category [prf_asm, prf_asm_goal]; val is_improper = command_category [qed_script, prf_script, prf_script_goal, prf_script_asm_goal]; fun is_printed keywords = is_theory_goal keywords orf is_proof keywords; end; diff --git a/src/Pure/Isar/outer_syntax.ML b/src/Pure/Isar/outer_syntax.ML --- a/src/Pure/Isar/outer_syntax.ML +++ b/src/Pure/Isar/outer_syntax.ML @@ -1,331 +1,332 @@ (* Title: Pure/Isar/outer_syntax.ML Author: Markus Wenzel, TU Muenchen Isabelle/Isar outer syntax. 
*) signature OUTER_SYNTAX = sig val help: theory -> string list -> unit val print_commands: theory -> unit type command_keyword = string * Position.T val command: command_keyword -> string -> (Toplevel.transition -> Toplevel.transition) parser -> unit val maybe_begin_local_theory: command_keyword -> string -> (local_theory -> local_theory) parser -> (theory -> local_theory) parser -> unit val local_theory': command_keyword -> string -> (bool -> local_theory -> local_theory) parser -> unit val local_theory: command_keyword -> string -> (local_theory -> local_theory) parser -> unit val local_theory_to_proof': command_keyword -> string -> (bool -> local_theory -> Proof.state) parser -> unit val local_theory_to_proof: command_keyword -> string -> (local_theory -> Proof.state) parser -> unit val bootstrap: bool Config.T val parse_spans: Token.T list -> Command_Span.span list val make_span: Token.T list -> Command_Span.span val parse_span: theory -> (unit -> theory) -> Token.T list -> Toplevel.transition val parse_text: theory -> (unit -> theory) -> Position.T -> string -> Toplevel.transition list val command_reports: theory -> Token.T -> Position.report_text list val check_command: Proof.context -> command_keyword -> string end; structure Outer_Syntax: OUTER_SYNTAX = struct (** outer syntax **) (* errors *) fun err_command msg name ps = error (msg ^ quote (Markup.markup Markup.keyword1 name) ^ Position.here_list ps); fun err_dup_command name ps = err_command "Duplicate outer syntax command " name ps; (* command parsers *) datatype command_parser = Parser of (Toplevel.transition -> Toplevel.transition) parser | Restricted_Parser of (bool * Position.T) option -> (Toplevel.transition -> Toplevel.transition) parser; datatype command = Command of {comment: string, command_parser: command_parser, pos: Position.T, id: serial}; fun eq_command (Command {id = id1, ...}, Command {id = id2, ...}) = id1 = id2; fun new_command comment command_parser pos = Command {comment = comment, command_parser = command_parser, pos = pos, id = serial ()}; fun command_pos (Command {pos, ...}) = pos; fun command_markup def (name, Command {pos, id, ...}) = Position.make_entity_markup def id Markup.commandN (name, pos); fun pretty_command (cmd as (name, Command {comment, ...})) = Pretty.block (Pretty.marks_str ([Active.make_markup Markup.sendbackN {implicit = true, properties = [Markup.padding_line]}, - command_markup false cmd], name) :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment); + command_markup {def = false} cmd], name) :: Pretty.str ":" :: + Pretty.brk 2 :: Pretty.text comment); (* theory data *) structure Data = Theory_Data ( type T = command Symtab.table; val empty = Symtab.empty; val extend = I; fun merge data : T = data |> Symtab.join (fn name => fn (cmd1, cmd2) => if eq_command (cmd1, cmd2) then raise Symtab.SAME else err_dup_command name [command_pos cmd1, command_pos cmd2]); ); val get_commands = Data.get; val dest_commands = get_commands #> Symtab.dest #> sort_by #1; val lookup_commands = Symtab.lookup o get_commands; fun help thy pats = dest_commands thy |> filter (fn (name, _) => forall (fn pat => match_string pat name) pats) |> map pretty_command |> Pretty.writeln_chunks; fun print_commands thy = let val keywords = Thy_Header.get_keywords thy; val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords); val commands = dest_commands thy; in [Pretty.strs ("keywords:" :: map quote minor), Pretty.big_list "commands:" (map pretty_command commands)] |> Pretty.writeln_chunks end; (* maintain commands 
*) fun add_command name cmd thy = if member (op =) Thy_Header.bootstrap_thys (Context.theory_name thy) then thy else let val _ = Keyword.is_command (Thy_Header.get_keywords thy) name orelse err_command "Undeclared outer syntax command " name [command_pos cmd]; val _ = (case lookup_commands thy name of NONE => () | SOME cmd' => err_dup_command name [command_pos cmd, command_pos cmd']); val _ = Context_Position.report_generic (Context.the_generic_context ()) - (command_pos cmd) (command_markup true (name, cmd)); + (command_pos cmd) (command_markup {def = true} (name, cmd)); in Data.map (Symtab.update (name, cmd)) thy end; val _ = Theory.setup (Theory.at_end (fn thy => let val command_keywords = Scan.dest_lexicon (Keyword.major_keywords (Thy_Header.get_keywords thy)); val _ = (case subtract (op =) (map #1 (dest_commands thy)) command_keywords of [] => () | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing)) in NONE end)); (* implicit theory setup *) type command_keyword = string * Position.T; fun raw_command (name, pos) comment command_parser = let val setup = add_command name (new_command comment command_parser pos) in Context.>> (Context.mapping setup (Local_Theory.background_theory setup)) end; fun command (name, pos) comment parse = raw_command (name, pos) comment (Parser parse); fun maybe_begin_local_theory command_keyword comment parse_local parse_global = raw_command command_keyword comment (Restricted_Parser (fn restricted => Parse.opt_target -- parse_local >> (fn (target, f) => Toplevel.local_theory restricted target f) || (if is_some restricted then Scan.fail else parse_global >> Toplevel.begin_main_target true))); fun local_theory_command trans command_keyword comment parse = raw_command command_keyword comment (Restricted_Parser (fn restricted => Parse.opt_target -- parse >> (fn (target, f) => trans restricted target f))); val local_theory' = local_theory_command Toplevel.local_theory'; val local_theory = local_theory_command Toplevel.local_theory; val local_theory_to_proof' = local_theory_command Toplevel.local_theory_to_proof'; val local_theory_to_proof = local_theory_command Toplevel.local_theory_to_proof; (** toplevel parsing **) (* parse spans *) local fun ship span = let val kind = if forall Token.is_ignored span then Command_Span.Ignored_Span else if exists Token.is_error span then Command_Span.Malformed_Span else (case find_first Token.is_command span of NONE => Command_Span.Malformed_Span | SOME cmd => Command_Span.Command_Span (Token.content_of cmd, Token.pos_of cmd)); in cons (Command_Span.Span (kind, span)) end; fun flush (result, content, ignored) = result |> not (null content) ? ship (rev content) |> not (null ignored) ? 
ship (rev ignored); fun parse tok (result, content, ignored) = if Token.is_ignored tok then (result, content, tok :: ignored) else if Token.is_command_modifier tok orelse Token.is_command tok andalso (not (exists Token.is_command_modifier content) orelse exists Token.is_command content) then (flush (result, content, ignored), [tok], []) else (result, tok :: (ignored @ content), []); in fun parse_spans toks = fold parse toks ([], [], []) |> flush |> rev; end; fun make_span toks = (case parse_spans toks of [span] => span | _ => Command_Span.Span (Command_Span.Malformed_Span, toks)); (* parse commands *) val bootstrap = Config.declare_bool ("Outer_Syntax.bootstrap", \<^here>) (K true); local val before_command = Scan.option (Parse.position (Parse.private >> K true || Parse.qualified >> K false)); fun parse_command thy markers = Scan.ahead (before_command |-- Parse.position Parse.command) :|-- (fn (name, pos) => let val keywords = Thy_Header.get_keywords thy; val tr0 = Toplevel.empty |> Toplevel.name name |> Toplevel.position pos |> Keyword.is_proof_open keywords name ? Toplevel.skip_proof_open |> Keyword.is_proof_close keywords name ? Toplevel.skip_proof_close; val command_markers = Parse.command |-- Document_Source.old_tags >> (fn tags => Toplevel.markers (map Document_Marker.legacy_tag tags @ markers) tr0); in (case lookup_commands thy name of SOME (Command {command_parser = Parser parse, ...}) => Parse.!!! (command_markers -- parse) >> (op |>) | SOME (Command {command_parser = Restricted_Parser parse, ...}) => before_command :|-- (fn restricted => Parse.!!! (command_markers -- parse restricted)) >> (op |>) | NONE => Scan.fail_with (fn _ => fn _ => let val msg = if Config.get_global thy bootstrap then "missing theory context for command " else "undefined command "; in msg ^ quote (Markup.markup Markup.keyword1 name) end)) end); in fun parse_span thy init span = let val full_range = Token.range_of span; val core_range = Token.core_range_of span; val markers = map Token.input_of (filter Token.is_document_marker span); fun parse () = filter Token.is_proper span |> Source.of_list |> Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! 
(parse_command thy markers) xs)) |> Source.exhaust; in (case parse () of [tr] => Toplevel.modify_init init tr | [] => Toplevel.ignored (#1 full_range) | _ => Toplevel.malformed (#1 core_range) "Exactly one command expected") handle ERROR msg => Toplevel.malformed (#1 core_range) msg end; fun parse_text thy init pos text = Symbol_Pos.explode (text, pos) |> Token.tokenize (Thy_Header.get_keywords thy) {strict = false} |> parse_spans |> map (Command_Span.content #> parse_span thy init); end; (* check commands *) fun command_reports thy tok = if Token.is_command tok then let val name = Token.content_of tok in (case lookup_commands thy name of NONE => [] - | SOME cmd => [((Token.pos_of tok, command_markup false (name, cmd)), "")]) + | SOME cmd => [((Token.pos_of tok, command_markup {def = false} (name, cmd)), "")]) end else []; fun check_command ctxt (name, pos) = let val thy = Proof_Context.theory_of ctxt; val keywords = Thy_Header.get_keywords thy; in if Keyword.is_command keywords name then let val markup = Token.explode0 keywords name |> maps (command_reports thy) |> map (#2 o #1); val _ = Context_Position.reports ctxt (map (pair pos) markup); in name end else let val completion_report = Completion.make_report (name, pos) (fn completed => Keyword.dest_commands keywords |> filter completed |> sort_strings |> map (fn a => (a, (Markup.commandN, a)))); in error ("Bad command " ^ quote name ^ Position.here pos ^ completion_report) end end; (* 'ML' command -- required for bootstrapping Isar *) val _ = command ("ML", \<^here>) "ML text within theory or local theory" (Parse.ML_source >> (fn source => Toplevel.generic_theory (ML_Context.exec (fn () => ML_Context.eval_source (ML_Compiler.verbose true ML_Compiler.flags) source) #> Local_Theory.propagate_ml_env))); end; diff --git a/src/Pure/PIDE/resources.ML b/src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML +++ b/src/Pure/PIDE/resources.ML @@ -1,441 +1,441 @@ (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. 
*) signature RESOURCES = sig val default_qualifier: string val init_session: {session_positions: (string * Properties.T) list, session_directories: (string * string) list, session_chapters: (string * string) list, bibtex_entries: (string * string list) list, command_timings: Properties.T list, scala_functions: (string * (bool * Position.T)) list, global_theories: (string * string) list, loaded_theories: string list} -> unit val init_session_yxml: string -> unit val init_session_file: Path.T -> unit val finish_session_base: unit -> unit val global_theory: string -> string option val loaded_theory: string -> bool val check_session: Proof.context -> string * Position.T -> string val session_chapter: string -> string val last_timing: Toplevel.transition -> Time.time val scala_functions: unit -> string list val check_scala_function: Proof.context -> string * Position.T -> string * bool val master_directory: theory -> Path.T val imports_of: theory -> (string * Position.T) list val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory val thy_path: Path.T -> Path.T val theory_qualifier: string -> string val theory_bibtex_entries: string -> string list val find_theory_file: string -> Path.T option val import_name: string -> Path.T -> string -> {node_name: Path.T, master_dir: Path.T, theory_name: string} val check_thy: Path.T -> string -> {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, imports: (string * Position.T) list, keywords: Thy_Header.keywords} val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser val parse_file: (theory -> Token.file) parser val provide: Path.T * SHA1.digest -> theory -> theory val provide_file: Token.file -> theory -> theory val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser val provide_parse_file: (theory -> Token.file * theory) parser val loaded_files_current: theory -> bool val check_path: Proof.context -> Path.T option -> Input.source -> Path.T val check_file: Proof.context -> Path.T option -> Input.source -> Path.T val check_dir: Proof.context -> Path.T option -> Input.source -> Path.T val check_session_dir: Proof.context -> Path.T option -> Input.source -> Path.T end; structure Resources: RESOURCES = struct (* command timings *) type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun make_timings command_timings = fold update_timings command_timings empty_timings; fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session base *) val default_qualifier = "Draft"; type entry = {pos: Position.T, serial: serial}; fun make_entry props : entry = {pos = Position.of_properties props, serial = serial ()}; val empty_session_base = 
({session_positions = []: (string * entry) list, session_directories = Symtab.empty: Path.T list Symtab.table, session_chapters = Symtab.empty: string Symtab.table, bibtex_entries = Symtab.empty: string list Symtab.table, timings = empty_timings, scala_functions = Symtab.empty: (bool * Position.T) Symtab.table}, {global_theories = Symtab.empty: string Symtab.table, loaded_theories = Symtab.empty: unit Symtab.table}); val global_session_base = Synchronized.var "Sessions.base" empty_session_base; fun init_session {session_positions, session_directories, session_chapters, bibtex_entries, command_timings, scala_functions, global_theories, loaded_theories} = Synchronized.change global_session_base (fn _ => ({session_positions = sort_by #1 (map (apsnd make_entry) session_positions), session_directories = Symtab.build (session_directories |> fold_rev (fn (dir, name) => Symtab.cons_list (name, Path.explode dir))), session_chapters = Symtab.make session_chapters, bibtex_entries = Symtab.make bibtex_entries, timings = make_timings command_timings, scala_functions = Symtab.make scala_functions}, {global_theories = Symtab.make global_theories, loaded_theories = Symtab.make_set loaded_theories})); fun init_session_yxml yxml = let val (session_positions, (session_directories, (session_chapters, (bibtex_entries, (command_timings, (scala_functions, (global_theories, loaded_theories))))))) = YXML.parse_body yxml |> let open XML.Decode in (pair (list (pair string properties)) (pair (list (pair string string)) (pair (list (pair string string)) (pair (list (pair string (list string))) (pair (list properties) (pair (list (pair string (pair bool properties))) (pair (list (pair string string)) (list string)))))))) end; in init_session {session_positions = session_positions, session_directories = session_directories, session_chapters = session_chapters, bibtex_entries = bibtex_entries, command_timings = command_timings, scala_functions = (map o apsnd o apsnd) Position.of_properties scala_functions, global_theories = global_theories, loaded_theories = loaded_theories} end; fun init_session_file path = init_session_yxml (File.read path) before File.rm path; fun finish_session_base () = Synchronized.change global_session_base (apfst (K (#1 empty_session_base))); fun get_session_base f = f (Synchronized.value global_session_base); fun get_session_base1 f = get_session_base (f o #1); fun get_session_base2 f = get_session_base (f o #2); fun global_theory a = Symtab.lookup (get_session_base2 #global_theories) a; fun loaded_theory a = Symtab.defined (get_session_base2 #loaded_theories) a; fun check_session ctxt arg = Completion.check_item "session" (fn (name, {pos, serial}) => - Position.make_entity_markup false serial Markup.sessionN (name, pos)) + Position.make_entity_markup {def = false} serial Markup.sessionN (name, pos)) (get_session_base1 #session_positions) ctxt arg; fun session_chapter name = the_default "Unsorted" (Symtab.lookup (get_session_base1 #session_chapters) name); fun last_timing tr = get_timings (get_session_base1 #timings) tr; (* Scala functions *) (*raw bootstrap environment*) fun scala_functions () = space_explode "," (getenv "ISABELLE_SCALA_FUNCTIONS"); (*regular resources*) fun scala_function a = (a, the_default (false, Position.none) (Symtab.lookup (get_session_base1 #scala_functions) a)); fun check_scala_function ctxt arg = let val funs = scala_functions () |> sort_strings |> map scala_function; val name = Completion.check_entity Markup.scala_functionN (map (apsnd #2) funs) ctxt arg; val multi = 
(case AList.lookup (op =) funs name of SOME (multi, _) => multi | NONE => false); in (name, multi) end; val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\scala_function\ (Scan.lift Parse.embedded_position) (#1 oo check_scala_function) #> ML_Antiquotation.inline_embedded \<^binding>\scala_function\ (Args.context -- Scan.lift Parse.embedded_position >> (uncurry check_scala_function #> #1 #> ML_Syntax.print_string)) #> ML_Antiquotation.value_embedded \<^binding>\scala\ (Args.context -- Scan.lift Args.embedded_position >> (fn (ctxt, arg) => let val (name, multi) = check_scala_function ctxt arg; val func = if multi then "Scala.function" else "Scala.function1"; in ML_Syntax.atomic (func ^ " " ^ ML_Syntax.print_string name) end))); (* manage source files *) type files = {master_dir: Path.T, (*master directory of theory source*) imports: (string * Position.T) list, (*source specification of imports*) provided: (Path.T * SHA1.digest) list}; (*source path, digest*) fun make_files (master_dir, imports, provided): files = {master_dir = master_dir, imports = imports, provided = provided}; structure Files = Theory_Data ( type T = files; val empty = make_files (Path.current, [], []); val extend = I; fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) = let val provided' = Library.merge (op =) (provided1, provided2) in make_files (master_dir, imports, provided') end ); fun map_files f = Files.map (fn {master_dir, imports, provided} => make_files (f (master_dir, imports, provided))); val master_directory = #master_dir o Files.get; val imports_of = #imports o Files.get; fun begin_theory master_dir {name, imports, keywords} parents = Theory.begin_theory name parents |> map_files (fn _ => (Path.explode (Path.implode_symbolic master_dir), imports, [])) |> Thy_Header.add_keywords keywords; (* theory files *) val thy_path = Path.ext "thy"; fun theory_qualifier theory = (case global_theory theory of SOME qualifier => qualifier | NONE => Long_Name.qualifier theory); fun theory_name qualifier theory = if Long_Name.is_qualified theory orelse is_some (global_theory theory) then theory else Long_Name.qualify qualifier theory; fun theory_bibtex_entries theory = Symtab.lookup_list (get_session_base1 #bibtex_entries) (theory_qualifier theory); fun find_theory_file thy_name = let val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name)); val session = theory_qualifier thy_name; val dirs = Symtab.lookup_list (get_session_base1 #session_directories) session; in dirs |> get_first (fn dir => let val path = dir + thy_file in if File.is_file path then SOME path else NONE end) end; fun make_theory_node node_name theory = {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory}; fun loaded_theory_node theory = {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory}; fun import_name qualifier dir s = let val theory = theory_name qualifier (Thy_Header.import_name s); fun theory_node () = make_theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))) theory; in if not (Thy_Header.is_base_name s) then theory_node () else if loaded_theory theory then loaded_theory_node theory else (case find_theory_file theory of SOME node_name => make_theory_node node_name theory | NONE => if Long_Name.is_qualified s then loaded_theory_node theory else theory_node ()) end; fun check_file dir file = File.check_file (File.full_path dir file); fun check_thy dir thy_name = let val thy_base_name = Long_Name.base_name 
thy_name; val master_file = (case find_theory_file thy_name of SOME path => check_file Path.current path | NONE => check_file dir (thy_path (Path.basic thy_base_name))); val text = File.read master_file; val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_base_name <> name andalso error ("Bad theory name " ^ quote name ^ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos); in {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} end; (* load files *) fun parse_files make_paths = Scan.ahead Parse.not_eof -- Parse.path_input >> (fn (tok, source) => fn thy => (case Token.get_files tok of [] => let val master_dir = master_directory thy; val name = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val src_paths = make_paths (Path.explode name); in map (Command.read_file master_dir pos delimited) src_paths end | files => map Exn.release files)); val parse_file = parse_files single >> (fn f => f #> the_single); fun provide (src_path, id) = map_files (fn (master_dir, imports, provided) => if AList.defined (op =) provided src_path then error ("Duplicate use of source file: " ^ Path.print src_path) else (master_dir, imports, (src_path, id) :: provided)); fun provide_file (file: Token.file) = provide (#src_path file, #digest file); fun provide_parse_files make_paths = parse_files make_paths >> (fn files => fn thy => let val fs = files thy; val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; in (fs, thy') end); val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single); fun load_file thy src_path = let val full_path = check_file (master_directory thy) src_path; val text = File.read full_path; val id = SHA1.digest text; in ((full_path, id), text) end; fun loaded_files_current thy = #provided (Files.get thy) |> forall (fn (src_path, id) => (case try (load_file thy) src_path of NONE => false | SOME ((_, id'), _) => id = id')); (* formal check *) fun formal_check check_file ctxt opt_dir source = let val name = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val _ = Context_Position.report ctxt pos (Markup.language_path delimited); fun err msg = error (msg ^ Position.here pos); val dir = (case opt_dir of SOME dir => dir | NONE => master_directory (Proof_Context.theory_of ctxt)); val path = dir + Path.explode name handle ERROR msg => err msg; val _ = Path.expand path handle ERROR msg => err msg; val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); val _ : Path.T = check_file path handle ERROR msg => err msg; in path end; val check_path = formal_check I; val check_file = formal_check File.check_file; val check_dir = formal_check File.check_dir; fun check_session_dir ctxt opt_dir s = let val dir = Path.expand (check_dir ctxt opt_dir s); val ok = File.is_file (dir + Path.explode("ROOT")) orelse File.is_file (dir + Path.explode("ROOTS")); in if ok then dir else error ("Bad session root directory (missing ROOT or ROOTS): " ^ Path.print dir ^ Position.here (Input.pos_of s)) end; (* antiquotations *) local fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) = Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => (check ctxt NONE source; Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source)) |> Latex.enclose_text "\\isatt{" "}")); fun 
ML_antiq check = Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => check ctxt (SOME Path.current) source |> ML_Syntax.print_path); in val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\session\ (Scan.lift Parse.embedded_position) check_session #> Document_Output.antiquotation_raw_embedded \<^binding>\path\ (document_antiq check_path) (K I) #> Document_Output.antiquotation_raw_embedded \<^binding>\file\ (document_antiq check_file) (K I) #> Document_Output.antiquotation_raw_embedded \<^binding>\dir\ (document_antiq check_dir) (K I) #> ML_Antiquotation.value_embedded \<^binding>\path\ (ML_antiq check_path) #> ML_Antiquotation.value_embedded \<^binding>\file\ (ML_antiq check_file) #> ML_Antiquotation.value_embedded \<^binding>\dir\ (ML_antiq check_dir) #> ML_Antiquotation.value_embedded \<^binding>\path_binding\ (Scan.lift (Parse.position Parse.path) >> (ML_Syntax.print_path_binding o Path.explode_binding)) #> ML_Antiquotation.value \<^binding>\master_dir\ (Args.theory >> (ML_Syntax.print_path o master_directory))); end; end; diff --git a/src/Pure/Syntax/syntax_phases.ML b/src/Pure/Syntax/syntax_phases.ML --- a/src/Pure/Syntax/syntax_phases.ML +++ b/src/Pure/Syntax/syntax_phases.ML @@ -1,1017 +1,1017 @@ (* Title: Pure/Syntax/syntax_phases.ML Author: Makarius Main phases of inner syntax processing, with standard implementations of parse/unparse operations. *) signature SYNTAX_PHASES = sig val reports_of_scope: Position.T list -> Position.report list val decode_sort: term -> sort val decode_typ: term -> typ val decode_term: Proof.context -> Position.report_text list * term Exn.result -> Position.report_text list * term Exn.result val parse_ast_pattern: Proof.context -> string * string -> Ast.ast val term_of_typ: Proof.context -> typ -> term val print_checks: Proof.context -> unit val typ_check: int -> string -> (Proof.context -> typ list -> typ list) -> Context.generic -> Context.generic val term_check: int -> string -> (Proof.context -> term list -> term list) -> Context.generic -> Context.generic val typ_uncheck: int -> string -> (Proof.context -> typ list -> typ list) -> Context.generic -> Context.generic val term_uncheck: int -> string -> (Proof.context -> term list -> term list) -> Context.generic -> Context.generic val typ_check': int -> string -> (typ list -> Proof.context -> (typ list * Proof.context) option) -> Context.generic -> Context.generic val term_check': int -> string -> (term list -> Proof.context -> (term list * Proof.context) option) -> Context.generic -> Context.generic val typ_uncheck': int -> string -> (typ list -> Proof.context -> (typ list * Proof.context) option) -> Context.generic -> Context.generic val term_uncheck': int -> string -> (term list -> Proof.context -> (term list * Proof.context) option) -> Context.generic -> Context.generic end structure Syntax_Phases: SYNTAX_PHASES = struct (** markup logical entities **) fun markup_class ctxt c = [Name_Space.markup (Type.class_space (Proof_Context.tsig_of ctxt)) c]; fun markup_type ctxt c = [Name_Space.markup (Type.type_space (Proof_Context.tsig_of ctxt)) c]; fun markup_const ctxt c = [Name_Space.markup (Consts.space_of (Proof_Context.consts_of ctxt)) c]; fun markup_free ctxt x = Variable.markup ctxt x :: (if Variable.is_body ctxt orelse Variable.is_fixed ctxt x then [Variable.markup_fixed ctxt x] else []); fun markup_var xi = [Markup.name (Term.string_of_vname xi) Markup.var]; fun markup_bound def ps (name, id) = Markup.bound :: map (fn pos => Position.make_entity_markup 
def id Markup.boundN (name, pos)) ps; fun markup_entity ctxt c = (case Syntax.lookup_const (Proof_Context.syn_of ctxt) c of SOME "" => [] | SOME b => markup_entity ctxt b | NONE => c |> Lexicon.unmark {case_class = markup_class ctxt, case_type = markup_type ctxt, case_const = markup_const ctxt, case_fixed = markup_free ctxt, case_default = K []}); (** reports of implicit variable scope **) fun reports_of_scope [] = [] | reports_of_scope (def_pos :: ps) = let val id = serial (); fun entity def = Position.make_entity_markup def id "" ("", def_pos); in map (rpair Markup.bound) (def_pos :: ps) @ - ((def_pos, entity true) :: map (rpair (entity false)) ps) + ((def_pos, entity {def = true}) :: map (rpair (entity {def = false})) ps) end; (** decode parse trees **) (* decode_sort *) fun decode_sort tm = let fun err () = raise TERM ("decode_sort: bad encoding of classes", [tm]); fun class s = Lexicon.unmark_class s handle Fail _ => err (); fun classes (Const (s, _)) = [class s] | classes (Const ("_classes", _) $ Const (s, _) $ cs) = class s :: classes cs | classes _ = err (); fun sort (Const ("_dummy_sort", _)) = dummyS | sort (Const ("_topsort", _)) = [] | sort (Const ("_sort", _) $ cs) = classes cs | sort (Const (s, _)) = [class s] | sort _ = err (); in sort tm end; (* decode_typ *) fun decode_pos (Free (s, _)) = if is_some (Term_Position.decode s) then SOME s else NONE | decode_pos _ = NONE; fun decode_typ tm = let fun err () = raise TERM ("decode_typ: bad encoding of type", [tm]); fun typ ps sort tm = (case tm of Const ("_tfree", _) $ t => typ ps sort t | Const ("_tvar", _) $ t => typ ps sort t | Const ("_ofsort", _) $ t $ s => (case decode_pos s of SOME p => typ (p :: ps) sort t | NONE => if is_none sort then typ ps (SOME (decode_sort s)) t else err ()) | Const ("_dummy_ofsort", _) $ s => TFree ("'_dummy_", decode_sort s) | Free (x, _) => TFree (x, ps @ the_default dummyS sort) | Var (xi, _) => TVar (xi, ps @ the_default dummyS sort) | _ => if null ps andalso is_none sort then let val (head, args) = Term.strip_comb tm; val a = (case head of Const (c, _) => (Lexicon.unmark_type c handle Fail _ => err ()) | _ => err ()); in Type (a, map (typ [] NONE) args) end else err ()); in typ [] NONE tm end; (* parsetree_to_ast *) fun parsetree_to_ast ctxt trf parsetree = let val reports = Unsynchronized.ref ([]: Position.report_text list); fun report pos = Position.store_reports reports [pos]; val append_reports = Position.append_reports reports; fun report_pos tok = if Lexicon.suppress_markup tok then Position.none else Lexicon.pos_of_token tok; fun trans a args = (case trf a of NONE => Ast.mk_appl (Ast.Constant a) args | SOME f => f ctxt args); fun asts_of_token tok = if Lexicon.valued_token tok then [Ast.Variable (Lexicon.str_of_token tok)] else []; fun ast_of_position tok = Ast.Variable (Term_Position.encode (report_pos tok)); fun ast_of_dummy a tok = Ast.Appl [Ast.Constant "_constrain", Ast.Constant a, ast_of_position tok]; fun asts_of_position c tok = [Ast.Appl [Ast.Constant c, ast_of (Parser.Tip tok), ast_of_position tok]] and asts_of (Parser.Node ("_class_name", [Parser.Tip tok])) = let val pos = report_pos tok; val (c, rs) = Proof_Context.check_class ctxt (Lexicon.str_of_token tok, pos); val _ = append_reports rs; in [Ast.Constant (Lexicon.mark_class c)] end | asts_of (Parser.Node ("_type_name", [Parser.Tip tok])) = let val pos = report_pos tok; val (Type (c, _), rs) = Proof_Context.check_type_name {proper = true, strict = false} ctxt (Lexicon.str_of_token tok, pos); val _ = append_reports rs; in 
[Ast.Constant (Lexicon.mark_type c)] end | asts_of (Parser.Node ("_position", [Parser.Tip tok])) = asts_of_position "_constrain" tok | asts_of (Parser.Node ("_position_sort", [Parser.Tip tok])) = asts_of_position "_ofsort" tok | asts_of (Parser.Node (a as "\<^const>Pure.dummy_pattern", [Parser.Tip tok])) = [ast_of_dummy a tok] | asts_of (Parser.Node (a as "_idtdummy", [Parser.Tip tok])) = [ast_of_dummy a tok] | asts_of (Parser.Node ("_idtypdummy", pts as [Parser.Tip tok, _, _])) = [Ast.Appl (Ast.Constant "_constrain" :: ast_of_dummy "_idtdummy" tok :: maps asts_of pts)] | asts_of (Parser.Node (a, pts)) = let val _ = pts |> List.app (fn Parser.Node _ => () | Parser.Tip tok => if Lexicon.valued_token tok then () else report (report_pos tok) (markup_entity ctxt) a); in [trans a (maps asts_of pts)] end | asts_of (Parser.Tip tok) = asts_of_token tok and ast_of pt = (case asts_of pt of [ast] => ast | asts => raise Ast.AST ("parsetree_to_ast: malformed parsetree", asts)); val ast = Exn.interruptible_capture ast_of parsetree; in (! reports, ast) end; (* ast_to_term *) fun ast_to_term ctxt trf = let fun trans a args = (case trf a of NONE => Term.list_comb (Syntax.const a, args) | SOME f => f ctxt args); fun term_of (Ast.Constant a) = trans a [] | term_of (Ast.Variable x) = Lexicon.read_var x | term_of (Ast.Appl (Ast.Constant a :: (asts as _ :: _))) = trans a (map term_of asts) | term_of (Ast.Appl (ast :: (asts as _ :: _))) = Term.list_comb (term_of ast, map term_of asts) | term_of (ast as Ast.Appl _) = raise Ast.AST ("ast_to_term: malformed ast", [ast]); in term_of end; (* decode_term -- transform parse tree into raw term *) fun decode_const ctxt (c, ps) = let val (Const (c', _), reports) = Proof_Context.check_const {proper = true, strict = false} ctxt (c, ps); in (c', reports) end; local fun get_free ctxt x = let val fixed = Variable.lookup_fixed ctxt x; val is_const = can (decode_const ctxt) (x, []) orelse Long_Name.is_qualified x; val is_declared = is_some (Variable.def_type ctxt false (x, ~1)); in if Variable.is_const ctxt x then NONE else if is_some fixed then fixed else if not is_const orelse is_declared then SOME x else NONE end; in fun decode_term _ (result as (_: Position.report_text list, Exn.Exn _)) = result | decode_term ctxt (reports0, Exn.Res tm) = let val reports = Unsynchronized.ref reports0; fun report ps = Position.store_reports reports ps; val append_reports = Position.append_reports reports; fun decode ps qs bs (Const ("_constrain", _) $ t $ typ) = (case Term_Position.decode_position typ of SOME (p, T) => Type.constraint T (decode (p :: ps) qs bs t) | NONE => Type.constraint (decode_typ typ) (decode ps qs bs t)) | decode ps qs bs (Const ("_constrainAbs", _) $ t $ typ) = (case Term_Position.decode_position typ of SOME (q, T) => Type.constraint (T --> dummyT) (decode ps (q :: qs) bs t) | NONE => Type.constraint (decode_typ typ --> dummyT) (decode ps qs bs t)) | decode _ qs bs (Abs (x, T, t)) = let val id = serial (); - val _ = report qs (markup_bound true qs) (x, id); + val _ = report qs (markup_bound {def = true} qs) (x, id); in Abs (x, T, decode [] [] ((qs, (x, id)) :: bs) t) end | decode _ _ bs (t $ u) = decode [] [] bs t $ decode [] [] bs u | decode ps _ _ (Const (a, T)) = (case try Lexicon.unmark_fixed a of SOME x => (report ps (markup_free ctxt) x; Free (x, T)) | NONE => let val c = (case try Lexicon.unmark_const a of SOME c => c | NONE => #1 (decode_const ctxt (a, []))); val _ = report ps (markup_const ctxt) c; in Const (c, T) end) | decode ps _ _ (Free (a, T)) = 
((Name.reject_internal (a, ps) handle ERROR msg => error (msg ^ Proof_Context.consts_completion_message ctxt (a, ps))); (case get_free ctxt a of SOME x => (report ps (markup_free ctxt) x; Free (x, T)) | NONE => let val (c, rs) = decode_const ctxt (a, ps); val _ = append_reports rs; in Const (c, T) end)) | decode ps _ _ (Var (xi, T)) = (report ps markup_var xi; Var (xi, T)) | decode ps _ bs (t as Bound i) = (case try (nth bs) i of - SOME (qs, (x, id)) => (report ps (markup_bound false qs) (x, id); t) + SOME (qs, (x, id)) => (report ps (markup_bound {def = false} qs) (x, id); t) | NONE => t); val tm' = Exn.interruptible_capture (fn () => decode [] [] [] tm) (); in (! reports, tm') end; end; (** parse **) (* results *) fun proper_results results = map_filter (fn (y, Exn.Res x) => SOME (y, x) | _ => NONE) results; fun failed_results results = map_filter (fn (y, Exn.Exn e) => SOME (y, e) | _ => NONE) results; fun report_result ctxt pos ambig_msgs results = (case (proper_results results, failed_results results) of ([], (reports, exn) :: _) => (Context_Position.reports_text ctxt reports; Exn.reraise exn) | ([(reports, x)], _) => (Context_Position.reports_text ctxt reports; x) | _ => if null ambig_msgs then error ("Parse error: ambiguous syntax" ^ Position.here pos) else error (cat_lines ambig_msgs)); (* parse raw asts *) fun parse_asts ctxt raw root (syms, pos) = let val syn = Proof_Context.syn_of ctxt; val ast_tr = Syntax.parse_ast_translation syn; val toks = Syntax.tokenize syn raw syms; val _ = Context_Position.reports ctxt (maps Lexicon.reports_of_token toks); val pts = Syntax.parse syn root (filter Lexicon.is_proper toks) handle ERROR msg => error (msg ^ Markup.markup_report (implode (map (Lexicon.reported_token_range ctxt) toks))); val len = length pts; val limit = Config.get ctxt Syntax.ambiguity_limit; val ambig_msgs = if len <= 1 then [] else [cat_lines (("Ambiguous input" ^ Position.here (Position.no_range_position pos) ^ " produces " ^ string_of_int len ^ " parse trees" ^ (if len <= limit then "" else " (" ^ string_of_int limit ^ " displayed)") ^ ":") :: map (Pretty.string_of o Pretty.item o single o Parser.pretty_parsetree) (take limit pts))]; in (ambig_msgs, map (parsetree_to_ast ctxt ast_tr) pts) end; fun parse_tree ctxt root input = let val syn = Proof_Context.syn_of ctxt; val tr = Syntax.parse_translation syn; val parse_rules = Syntax.parse_rules syn; val (ambig_msgs, asts) = parse_asts ctxt false root input; val results = (map o apsnd o Exn.maps_res) (Ast.normalize ctxt parse_rules #> Exn.interruptible_capture (ast_to_term ctxt tr)) asts; in (ambig_msgs, results) end; (* parse logical entities *) fun parse_failed ctxt pos msg kind = cat_error msg ("Failed to parse " ^ kind ^ Markup.markup_report (Context_Position.reported_text ctxt pos (Markup.bad ()) "")); fun parse_sort ctxt = Syntax.parse_input ctxt Term_XML.Decode.sort Markup.language_sort (fn (syms, pos) => parse_tree ctxt "sort" (syms, pos) |> uncurry (report_result ctxt pos) |> decode_sort |> Type.minimize_sort (Proof_Context.tsig_of ctxt) handle ERROR msg => parse_failed ctxt pos msg "sort"); fun parse_typ ctxt = Syntax.parse_input ctxt Term_XML.Decode.typ Markup.language_type (fn (syms, pos) => parse_tree ctxt "type" (syms, pos) |> uncurry (report_result ctxt pos) |> decode_typ handle ERROR msg => parse_failed ctxt pos msg "type"); fun parse_term is_prop ctxt = let val (markup, kind, root, constrain) = if is_prop then (Markup.language_prop, "prop", "prop", Type.constraint propT) else (Markup.language_term, "term", 
Config.get ctxt Syntax.root, I); val decode = constrain o Term_XML.Decode.term (Proof_Context.consts_of ctxt); in Syntax.parse_input ctxt decode markup (fn (syms, pos) => let val (ambig_msgs, results) = parse_tree ctxt root (syms, pos) ||> map (decode_term ctxt); val parsed_len = length (proper_results results); val ambiguity_warning = Config.get ctxt Syntax.ambiguity_warning; val limit = Config.get ctxt Syntax.ambiguity_limit; (*brute-force disambiguation via type-inference*) fun check t = (Syntax.check_term (Proof_Context.allow_dummies ctxt) (constrain t); Exn.Res t) handle exn as ERROR _ => Exn.Exn exn; fun par_map xs = Par_List.map' {name = "Syntax_Phases.parse_term", sequential = false} xs; val results' = if parsed_len > 1 then (grouped 10 par_map o apsnd o Exn.maps_res) check results else results; val reports' = fst (hd results'); val errs = map snd (failed_results results'); val checked = map snd (proper_results results'); val checked_len = length checked; val pretty_term = Syntax.pretty_term (Config.put Printer.show_brackets true ctxt); in if checked_len = 0 then report_result ctxt pos [] [(reports', Exn.Exn (Exn.EXCEPTIONS (map ERROR ambig_msgs @ errs)))] else if checked_len = 1 then (if not (null ambig_msgs) andalso ambiguity_warning andalso Context_Position.is_visible ctxt then warning (cat_lines (ambig_msgs @ ["Fortunately, only one parse tree is well-formed and type-correct,\n\ \but you may still want to disambiguate your grammar or your input."])) else (); report_result ctxt pos [] results') else report_result ctxt pos [] [(reports', Exn.Exn (ERROR (cat_lines (ambig_msgs @ (("Ambiguous input\n" ^ string_of_int checked_len ^ " terms are type correct" ^ (if checked_len <= limit then "" else " (" ^ string_of_int limit ^ " displayed)") ^ ":") :: map (Pretty.string_of o Pretty.item o single o pretty_term) (take limit checked))))))] end handle ERROR msg => parse_failed ctxt pos msg kind) end; (* parse_ast_pattern *) fun parse_ast_pattern ctxt (root, str) = let val syn = Proof_Context.syn_of ctxt; val reports = Unsynchronized.ref ([]: Position.report_text list); fun report ps = Position.store_reports reports ps; fun decode_const ps c = (report ps (markup_entity ctxt) c; Ast.Constant c); fun decode_var ps x = (report ps (fn () => [Markup.name x Markup.free]) (); Ast.Variable x); fun decode_appl ps asts = Ast.Appl (map (decode ps) asts) and decode ps (Ast.Constant c) = decode_const ps c | decode ps (Ast.Variable x) = if is_some (Syntax.lookup_const syn x) orelse Long_Name.is_qualified x then decode_const ps x else decode_var ps x | decode ps (Ast.Appl (asts as (Ast.Constant c :: ast :: Ast.Variable x :: args))) = if member (op =) Term_Position.markers c then (case Term_Position.decode x of SOME p => Ast.mk_appl (decode (p :: ps) ast) (map (decode ps) args) | NONE => decode_appl ps asts) else decode_appl ps asts | decode ps (Ast.Appl asts) = decode_appl ps asts; val source = Syntax.read_input str; val pos = Input.pos_of source; val syms = Input.source_explode source; val ast = parse_asts ctxt true root (syms, pos) |> uncurry (report_result ctxt pos) |> decode []; val _ = Context_Position.reports_text ctxt (! 
reports); in ast end; (** encode parse trees **) (* term_of_sort *) fun term_of_sort S = let val class = Syntax.const o Lexicon.mark_class; fun classes [c] = class c | classes (c :: cs) = Syntax.const "_classes" $ class c $ classes cs; in if S = dummyS then Syntax.const "_dummy_sort" else (case S of [] => Syntax.const "_topsort" | [c] => class c | cs => Syntax.const "_sort" $ classes cs) end; (* term_of_typ *) fun term_of_typ ctxt ty = let val show_sorts = Config.get ctxt show_sorts orelse Config.get ctxt show_markup; fun ofsort t raw_S = if show_sorts then let val S = #2 (Term_Position.decode_positionS raw_S) in if S = dummyS then t else Syntax.const "_ofsort" $ t $ term_of_sort S end else t; fun term_of (Type (a, Ts)) = Term.list_comb (Syntax.const (Lexicon.mark_type a), map term_of Ts) | term_of (TFree (x, S)) = if is_some (Term_Position.decode x) then Syntax.free x else ofsort (Syntax.const "_tfree" $ Syntax.free x) S | term_of (TVar (xi, S)) = ofsort (Syntax.const "_tvar" $ Syntax.var xi) S; in term_of ty end; (* simple_ast_of *) fun simple_ast_of ctxt = let val tune_var = if Config.get ctxt show_question_marks then I else unprefix "?"; fun ast_of (Const (c, _)) = Ast.Constant c | ast_of (Free (x, _)) = Ast.Variable x | ast_of (Var (xi, _)) = Ast.Variable (tune_var (Term.string_of_vname xi)) | ast_of (t as _ $ _) = let val (f, args) = strip_comb t in Ast.mk_appl (ast_of f) (map ast_of args) end | ast_of (Bound i) = Ast.Appl [Ast.Constant "_loose", Ast.Variable ("B." ^ string_of_int i)] | ast_of (Abs _) = raise Fail "simple_ast_of: Abs"; in ast_of end; (* sort_to_ast and typ_to_ast *) fun ast_of_termT ctxt trf tm = let val ctxt' = Config.put show_sorts false ctxt; fun ast_of (t as Const ("_tfree", _) $ Free _) = simple_ast_of ctxt t | ast_of (t as Const ("_tvar", _) $ Var _) = simple_ast_of ctxt t | ast_of (Const (a, _)) = trans a [] | ast_of (t as _ $ _) = (case strip_comb t of (Const (a, _), args) => trans a args | (f, args) => Ast.Appl (map ast_of (f :: args))) | ast_of t = simple_ast_of ctxt t and trans a args = ast_of (trf a ctxt' dummyT args) handle Match => Ast.mk_appl (Ast.Constant a) (map ast_of args); in ast_of tm end; fun sort_to_ast ctxt trf S = ast_of_termT ctxt trf (term_of_sort S); fun typ_to_ast ctxt trf T = ast_of_termT ctxt trf (term_of_typ ctxt T); (* term_to_ast *) local fun mark_aprop tm = let fun aprop t = Syntax.const "_aprop" $ t; fun is_prop Ts t = Type_Annotation.clean (Type_Annotation.fastype_of Ts t) = propT handle TERM _ => false; fun is_term (Const ("Pure.term", _) $ _) = true | is_term _ = false; fun mark _ (t as Const _) = t | mark Ts (t as Const ("_bound", _) $ u) = if is_prop Ts u then aprop t else t | mark Ts (t as Free _) = if is_prop Ts t then aprop t else t | mark Ts (t as Var _) = if is_prop Ts t then aprop t else t | mark Ts (t as Bound _) = if is_prop Ts t then aprop t else t | mark Ts (Abs (x, T, t)) = Abs (x, T, mark (T :: Ts) t) | mark Ts (t as t1 $ (t2 as Const ("Pure.type", Type ("itself", [T])))) = if is_prop Ts t andalso not (is_term t) then Const ("_type_prop", T) $ mark Ts t1 else mark Ts t1 $ mark Ts t2 | mark Ts (t as t1 $ t2) = (if is_Const (Term.head_of t) orelse not (is_prop Ts t) then I else aprop) (mark Ts t1 $ mark Ts t2); in mark [] tm end; fun prune_types tm = let fun regard t t' seen = if Type_Annotation.is_omitted (Type_Annotation.fastype_of [] t) then (t, seen) else if member (op aconv) seen t then (t', seen) else (t, t :: seen); fun prune (t as Const _, seen) = (t, seen) | prune (t as Free (x, T), seen) = regard t (Free 
(x, Type_Annotation.ignore_type T)) seen | prune (t as Var (xi, T), seen) = regard t (Var (xi, Type_Annotation.ignore_type T)) seen | prune (t as Bound _, seen) = (t, seen) | prune (Abs (x, T, t), seen) = let val (t', seen') = prune (t, seen); in (Abs (x, T, t'), seen') end | prune (t1 $ t2, seen) = let val (t1', seen') = prune (t1, seen); val (t2', seen'') = prune (t2, seen'); in (t1' $ t2', seen'') end; in #1 (prune (tm, [])) end; fun mark_atoms is_syntax_const ctxt tm = let val {structs, fixes} = Syntax_Trans.get_idents ctxt; val show_structs = Config.get ctxt show_structs; fun mark ((t as Const (c, _)) $ u) = if member (op =) Pure_Thy.token_markers c then t $ u else mark t $ mark u | mark (t $ u) = mark t $ mark u | mark (Abs (x, T, t)) = Abs (x, T, mark t) | mark (t as Const (c, T)) = if is_syntax_const c then t else Const (Lexicon.mark_const c, T) | mark (t as Free (x, T)) = let val i = find_index (fn s => s = x) structs + 1 in if i = 0 andalso member (op =) fixes x then Const (Lexicon.mark_fixed x, T) else if i = 1 andalso not show_structs then Syntax.const "_struct" $ Syntax.const "_indexdefault" else Syntax.const "_free" $ t end | mark (t as Var (xi, T)) = if xi = Auto_Bind.dddot then Const ("_DDDOT", T) else Syntax.const "_var" $ t | mark a = a; in mark tm end; in fun term_to_ast is_syntax_const ctxt trf tm = let val show_types = Config.get ctxt show_types orelse Config.get ctxt show_sorts; val show_markup = Config.get ctxt show_markup; fun ast_of tm = (case strip_comb tm of (t as Abs _, ts) => Ast.mk_appl (ast_of (Syntax_Trans.abs_tr' ctxt t)) (map ast_of ts) | ((c as Const ("_free", _)), Free (x, T) :: ts) => Ast.mk_appl (constrain (c $ Syntax.free x) T) (map ast_of ts) | ((c as Const ("_var", _)), Var (xi, T) :: ts) => Ast.mk_appl (constrain (c $ Syntax.var xi) T) (map ast_of ts) | ((c as Const ("_bound", B)), Free (x, T) :: ts) => let val X = if show_markup andalso not show_types orelse B <> dummyT then T else dummyT; in Ast.mk_appl (constrain (c $ Syntax.free x) X) (map ast_of ts) end | (Const ("_idtdummy", T), ts) => Ast.mk_appl (constrain (Syntax.const "_idtdummy") T) (map ast_of ts) | (Const (c, T), ts) => trans c (Type_Annotation.smash T) ts | (t, ts) => Ast.mk_appl (simple_ast_of ctxt t) (map ast_of ts)) and trans a T args = ast_of (trf a ctxt T args) handle Match => Ast.mk_appl (Ast.Constant a) (map ast_of args) and constrain t T0 = let val T = if show_markup andalso not show_types then Type_Annotation.clean T0 else Type_Annotation.smash T0; in if (show_types orelse show_markup) andalso T <> dummyT then Ast.Appl [Ast.Constant "_constrain", simple_ast_of ctxt t, ast_of_termT ctxt trf (term_of_typ ctxt T)] else simple_ast_of ctxt t end; in tm |> mark_aprop |> show_types ? 
prune_types |> Variable.revert_bounds ctxt |> mark_atoms is_syntax_const ctxt |> ast_of end; end; (** unparse **) local fun free_or_skolem ctxt x = let val m = if Variable.is_fixed ctxt x orelse Syntax.is_pretty_global ctxt then Markup.fixed x else Markup.intensify; in if Name.is_skolem x then ([m, Markup.skolem], Variable.revert_fixed ctxt x) else ([m, Markup.free], x) end; fun var_or_skolem s = (case Lexicon.read_variable s of SOME (x, i) => (case try Name.dest_skolem x of NONE => (Markup.var, s) | SOME x' => (Markup.skolem, Term.string_of_vname (x', i))) | NONE => (Markup.var, s)); val typing_elem = YXML.output_markup_elem Markup.typing; val sorting_elem = YXML.output_markup_elem Markup.sorting; fun unparse_t t_to_ast prt_t markup ctxt t = let val show_markup = Config.get ctxt show_markup; val show_sorts = Config.get ctxt show_sorts; val show_types = Config.get ctxt show_types orelse show_sorts; val syn = Proof_Context.syn_of ctxt; val prtabs = Syntax.prtabs syn; val trf = Syntax.print_ast_translation syn; fun markup_extern c = (case Syntax.lookup_const syn c of SOME "" => ([], c) | SOME b => markup_extern b | NONE => c |> Lexicon.unmark {case_class = fn x => (markup_class ctxt x, Proof_Context.extern_class ctxt x), case_type = fn x => (markup_type ctxt x, Proof_Context.extern_type ctxt x), case_const = fn x => (markup_const ctxt x, Proof_Context.extern_const ctxt x), case_fixed = fn x => free_or_skolem ctxt x, case_default = fn x => ([], x)}); fun token_trans "_tfree" x = SOME (Pretty.mark_str (Markup.tfree, x)) | token_trans "_tvar" x = SOME (Pretty.mark_str (Markup.tvar, x)) | token_trans "_free" x = SOME (Pretty.marks_str (free_or_skolem ctxt x)) | token_trans "_bound" x = SOME (Pretty.mark_str (Markup.bound, x)) | token_trans "_loose" x = SOME (Pretty.mark_str (Markup.bad (), x)) | token_trans "_var" x = SOME (Pretty.mark_str (var_or_skolem x)) | token_trans "_numeral" x = SOME (Pretty.mark_str (Markup.numeral, x)) | token_trans "_inner_string" x = SOME (Pretty.mark_str (Markup.inner_string, x)) | token_trans _ _ = NONE; fun markup_trans a [Ast.Variable x] = token_trans a x | markup_trans "_constrain" [t, ty] = constrain_trans t ty | markup_trans "_idtyp" [t, ty] = constrain_trans t ty | markup_trans "_ofsort" [ty, s] = ofsort_trans ty s | markup_trans _ _ = NONE and constrain_trans t ty = if show_markup andalso not show_types then let val ((bg1, bg2), en) = typing_elem; val bg = bg1 ^ Pretty.symbolic_output (pretty_typ_ast Markup.empty ty) ^ bg2; val info = {markup = (bg, en), consistent = false, indent = 0}; in SOME (Pretty.make_block info [pretty_ast Markup.empty t]) end else NONE and ofsort_trans ty s = if show_markup andalso not show_sorts then let val ((bg1, bg2), en) = sorting_elem; val bg = bg1 ^ Pretty.symbolic_output (pretty_typ_ast Markup.empty s) ^ bg2; val info = {markup = (bg, en), consistent = false, indent = 0}; in SOME (Pretty.make_block info [pretty_typ_ast Markup.empty ty]) end else NONE and pretty_typ_ast m ast = ast |> Printer.pretty_typ_ast ctxt prtabs trf markup_trans markup_extern |> Pretty.markup m and pretty_ast m ast = ast |> prt_t ctxt prtabs trf markup_trans markup_extern |> Pretty.markup m; in t_to_ast ctxt (Syntax.print_translation syn) t |> Ast.normalize ctxt (Syntax.print_rules syn) |> pretty_ast markup end; in val unparse_sort = unparse_t sort_to_ast Printer.pretty_typ_ast (Markup.language_sort false); val unparse_typ = unparse_t typ_to_ast Printer.pretty_typ_ast (Markup.language_type false); fun unparse_term ctxt = let val thy = 
Proof_Context.theory_of ctxt; val syn = Proof_Context.syn_of ctxt; in unparse_t (term_to_ast (is_some o Syntax.lookup_const syn)) (Printer.pretty_term_ast (not (Pure_Thy.old_appl_syntax thy))) (Markup.language_term false) ctxt end; end; (** translations **) (* type propositions *) fun type_prop_tr' ctxt T [Const ("\<^const>Pure.sort_constraint", _)] = Syntax.const "_sort_constraint" $ term_of_typ (Config.put show_sorts true ctxt) T | type_prop_tr' ctxt T [t] = Syntax.const "_ofclass" $ term_of_typ ctxt T $ t | type_prop_tr' _ T ts = raise TYPE ("type_prop_tr'", [T], ts); (* type reflection *) fun type_tr' ctxt (Type ("itself", [T])) ts = Term.list_comb (Syntax.const "_TYPE" $ term_of_typ ctxt T, ts) | type_tr' _ _ _ = raise Match; (* type constraints *) fun type_constraint_tr' ctxt (Type ("fun", [T, _])) (t :: ts) = Term.list_comb (Syntax.const "_constrain" $ t $ term_of_typ ctxt T, ts) | type_constraint_tr' _ _ _ = raise Match; (* authentic syntax *) fun const_ast_tr intern ctxt asts = (case asts of [Ast.Appl [Ast.Constant "_constrain", Ast.Variable c, T as Ast.Variable p]] => let val pos = the_default Position.none (Term_Position.decode p); val (c', _) = decode_const ctxt (c, [pos]); val d = if intern then Lexicon.mark_const c' else c; in Ast.Appl [Ast.Constant "_constrain", Ast.Constant d, T] end | _ => raise Ast.AST ("const_ast_tr", asts)); (* setup translations *) val _ = Theory.setup (Sign.parse_ast_translation [("_context_const", const_ast_tr true), ("_context_xconst", const_ast_tr false)] #> Sign.typed_print_translation [("_type_prop", type_prop_tr'), ("\<^const>Pure.type", type_tr'), ("_type_constraint_", type_constraint_tr')]); (** check/uncheck **) (* context-sensitive (un)checking *) type key = int * bool; structure Checks = Generic_Data ( type 'a check = 'a list -> Proof.context -> ('a list * Proof.context) option; type T = ((key * ((string * typ check) * stamp) list) list * (key * ((string * term check) * stamp) list) list); val empty = ([], []); val extend = I; fun merge ((typ_checks1, term_checks1), (typ_checks2, term_checks2)) : T = (AList.join (op =) (K (Library.merge (eq_snd (op =)))) (typ_checks1, typ_checks2), AList.join (op =) (K (Library.merge (eq_snd (op =)))) (term_checks1, term_checks2)); ); fun print_checks ctxt = let fun split_checks checks = List.partition (fn ((_, un), _) => not un) checks |> apply2 (map (fn ((i, _), fs) => (i, map (fst o fst) fs)) #> sort (int_ord o apply2 fst)); fun pretty_checks kind checks = checks |> map (fn (i, names) => Pretty.block [Pretty.str (kind ^ " (stage " ^ signed_string_of_int i ^ "):"), Pretty.brk 1, Pretty.strs names]); val (typs, terms) = Checks.get (Context.Proof ctxt); val (typ_checks, typ_unchecks) = split_checks typs; val (term_checks, term_unchecks) = split_checks terms; in pretty_checks "typ_checks" typ_checks @ pretty_checks "term_checks" term_checks @ pretty_checks "typ_unchecks" typ_unchecks @ pretty_checks "term_unchecks" term_unchecks end |> Pretty.writeln_chunks; local fun context_check which (key: key) name f = Checks.map (which (AList.map_default op = (key, []) (cons ((name, f), stamp ())))); fun simple_check eq f xs ctxt = let val xs' = f ctxt xs in if eq_list eq (xs, xs') then NONE else SOME (xs', ctxt) end; in fun typ_check' stage = context_check apfst (stage, false); fun term_check' stage = context_check apsnd (stage, false); fun typ_uncheck' stage = context_check apfst (stage, true); fun term_uncheck' stage = context_check apsnd (stage, true); fun typ_check key name f = typ_check' key name (simple_check 
(op =) f); fun term_check key name f = term_check' key name (simple_check (op aconv) f); fun typ_uncheck key name f = typ_uncheck' key name (simple_check (op =) f); fun term_uncheck key name f = term_uncheck' key name (simple_check (op aconv) f); end; local fun check_stage fs = perhaps_loop (perhaps_apply (map uncurry fs)); fun check_all fs = perhaps_apply (map check_stage fs); fun check which uncheck ctxt0 xs0 = let val funs = which (Checks.get (Context.Proof ctxt0)) |> map_filter (fn ((i, u), fs) => if uncheck = u then SOME (i, map (snd o fst) fs) else NONE) |> Library.sort (int_ord o apply2 fst) |> map snd |> not uncheck ? map rev; in #1 (perhaps (check_all funs) (xs0, ctxt0)) end; val apply_typ_check = check fst false; val apply_term_check = check snd false; val apply_typ_uncheck = check fst true; val apply_term_uncheck = check snd true; in fun check_typs ctxt raw_tys = let val (sorting_report, tys) = Proof_Context.prepare_sortsT ctxt raw_tys; val _ = if Context_Position.reports_enabled ctxt then Output.report sorting_report else (); in tys |> apply_typ_check ctxt |> Term_Sharing.typs (Proof_Context.theory_of ctxt) end; fun check_terms ctxt raw_ts = let val (sorting_report, raw_ts') = Proof_Context.prepare_sorts ctxt raw_ts; val (ts, ps) = Type_Infer_Context.prepare_positions ctxt raw_ts'; val tys = map (Logic.mk_type o snd) ps; val (ts', tys') = ts @ tys |> apply_term_check ctxt |> chop (length ts); val typing_report = fold2 (fn (pos, _) => fn ty => if Position.is_reported pos then cons (Position.reported_text pos Markup.typing (Syntax.string_of_typ ctxt (Logic.dest_type ty))) else I) ps tys' []; val _ = if Context_Position.reports_enabled ctxt then Output.report (sorting_report @ typing_report) else (); in Term_Sharing.terms (Proof_Context.theory_of ctxt) ts' end; fun check_props ctxt = map (Type.constraint propT) #> check_terms ctxt; val uncheck_typs = apply_typ_uncheck; val uncheck_terms = apply_term_uncheck; end; (* install operations *) val _ = Theory.setup (Syntax.install_operations {parse_sort = parse_sort, parse_typ = parse_typ, parse_term = parse_term false, parse_prop = parse_term true, unparse_sort = unparse_sort, unparse_typ = unparse_typ, unparse_term = unparse_term, check_typs = check_typs, check_terms = check_terms, check_props = check_props, uncheck_typs = uncheck_typs, uncheck_terms = uncheck_terms}); end; (* standard phases *) val _ = Context.>> (Syntax_Phases.typ_check 0 "standard" Proof_Context.standard_typ_check #> Syntax_Phases.term_check 0 "standard" (fn ctxt => Type_Infer_Context.infer_types ctxt #> map (Proof_Context.expand_abbrevs ctxt)) #> Syntax_Phases.term_check 100 "standard_finish" Proof_Context.standard_term_check_finish #> Syntax_Phases.term_uncheck 0 "standard" Proof_Context.standard_term_uncheck); diff --git a/src/Pure/theory.ML b/src/Pure/theory.ML --- a/src/Pure/theory.ML +++ b/src/Pure/theory.ML @@ -1,342 +1,342 @@ (* Title: Pure/theory.ML Author: Lawrence C Paulson and Markus Wenzel Logical theory content: axioms, definitions, and begin/end wrappers. 
*) signature THEORY = sig val parents_of: theory -> theory list val ancestors_of: theory -> theory list val nodes_of: theory -> theory list val setup: (theory -> theory) -> unit val local_setup: (Proof.context -> Proof.context) -> unit val install_pure: theory -> unit val get_pure: unit -> theory val get_pure_bootstrap: unit -> theory val get_markup: theory -> Markup.T val check: {long: bool} -> Proof.context -> string * Position.T -> theory val axiom_table: theory -> term Name_Space.table val axiom_space: theory -> Name_Space.T val all_axioms_of: theory -> (string * term) list val defs_of: theory -> Defs.T val at_begin: (theory -> theory option) -> theory -> theory val at_end: (theory -> theory option) -> theory -> theory val join_theory: theory list -> theory val begin_theory: string * Position.T -> theory list -> theory val end_theory: theory -> theory val add_axiom: Proof.context -> binding * term -> theory -> theory val const_dep: theory -> string * typ -> Defs.entry val type_dep: string * typ list -> Defs.entry val add_deps: Defs.context -> string -> Defs.entry -> Defs.entry list -> theory -> theory val add_deps_global: string -> Defs.entry -> Defs.entry list -> theory -> theory val add_def: Defs.context -> bool -> bool -> binding * term -> theory -> theory val specify_const: (binding * typ) * mixfix -> theory -> term * theory val check_overloading: Proof.context -> bool -> string * typ -> unit end structure Theory: THEORY = struct (** theory context operations **) val parents_of = Context.parents_of; val ancestors_of = Context.ancestors_of; fun nodes_of thy = thy :: ancestors_of thy; fun setup f = Context.>> (Context.map_theory f); fun local_setup f = Context.>> (Context.map_proof f); (* implicit theory Pure *) val pure: theory Single_Assignment.var = Single_Assignment.var "pure"; fun install_pure thy = Single_Assignment.assign pure thy; fun get_pure () = (case Single_Assignment.peek pure of SOME thy => thy | NONE => raise Fail "Theory Pure not present"); fun get_pure_bootstrap () = (case Single_Assignment.peek pure of SOME thy => thy | NONE => Context.the_global_context ()); (** datatype thy **) type wrapper = (theory -> theory option) * stamp; fun apply_wrappers (wrappers: wrapper list) = perhaps (perhaps_loop (perhaps_apply (map fst wrappers))); datatype thy = Thy of {pos: Position.T, id: serial, axioms: term Name_Space.table, defs: Defs.T, wrappers: wrapper list * wrapper list}; fun make_thy (pos, id, axioms, defs, wrappers) = Thy {pos = pos, id = id, axioms = axioms, defs = defs, wrappers = wrappers}; structure Thy = Theory_Data' ( type T = thy; val extend = I; val empty = make_thy (Position.none, 0, Name_Space.empty_table Markup.axiomN, Defs.empty, ([], [])); fun merge old_thys (thy1, thy2) = let val Thy {pos, id, axioms = axioms1, defs = defs1, wrappers = (bgs1, ens1)} = thy1; val Thy {pos = _, id = _, axioms = axioms2, defs = defs2, wrappers = (bgs2, ens2)} = thy2; val axioms' = Name_Space.merge_tables (axioms1, axioms2); val defs' = Defs.merge (Defs.global_context (fst old_thys)) (defs1, defs2); val bgs' = Library.merge (eq_snd op =) (bgs1, bgs2); val ens' = Library.merge (eq_snd op =) (ens1, ens2); in make_thy (pos, id, axioms', defs', (bgs', ens')) end; ); fun rep_theory thy = Thy.get thy |> (fn Thy args => args); fun map_thy f = Thy.map (fn (Thy {pos, id, axioms, defs, wrappers}) => make_thy (f (pos, id, axioms, defs, wrappers))); fun map_axioms f = map_thy (fn (pos, id, axioms, defs, wrappers) => (pos, id, f axioms, defs, wrappers)); fun map_defs f = map_thy (fn (pos, 
id, axioms, defs, wrappers) => (pos, id, axioms, f defs, wrappers)); fun map_wrappers f = map_thy (fn (pos, id, axioms, defs, wrappers) => (pos, id, axioms, defs, f wrappers)); (* entity markup *) fun theory_markup def name id pos = if id = 0 then Markup.empty else Position.make_entity_markup def id Markup.theoryN (name, pos); fun init_markup (name, pos) thy = let val id = serial (); - val _ = Context_Position.reports_global thy [(pos, theory_markup true name id pos)]; + val _ = Context_Position.reports_global thy [(pos, theory_markup {def = true} name id pos)]; in map_thy (fn (_, _, axioms, defs, wrappers) => (pos, id, axioms, defs, wrappers)) thy end; fun get_markup thy = let val {pos, id, ...} = rep_theory thy - in theory_markup false (Context.theory_long_name thy) id pos end; + in theory_markup {def = false} (Context.theory_long_name thy) id pos end; fun check long ctxt (name, pos) = let val thy = Proof_Context.theory_of ctxt; val thy' = Context.get_theory long thy name handle ERROR msg => let val completion_report = Completion.make_report (name, pos) (fn completed => map (Context.theory_name' long) (ancestors_of thy) |> filter (completed o Long_Name.base_name) |> sort_strings |> map (fn a => (a, (Markup.theoryN, a)))); in error (msg ^ Position.here pos ^ completion_report) end; val _ = Context_Position.report ctxt pos (get_markup thy'); in thy' end; (* basic operations *) val axiom_table = #axioms o rep_theory; val axiom_space = Name_Space.space_of_table o axiom_table; val all_axioms_of = Name_Space.dest_table o axiom_table; val defs_of = #defs o rep_theory; (* join theory *) fun join_theory [] = raise List.Empty | join_theory [thy] = thy | join_theory thys = foldl1 Context.join_thys thys; (* begin/end theory *) val begin_wrappers = rev o #1 o #wrappers o rep_theory; val end_wrappers = rev o #2 o #wrappers o rep_theory; fun at_begin f = map_wrappers (apfst (cons (f, stamp ()))); fun at_end f = map_wrappers (apsnd (cons (f, stamp ()))); fun begin_theory (name, pos) imports = if name = Context.PureN then (case imports of [thy] => init_markup (name, pos) thy | _ => error "Bad bootstrapping of theory Pure") else let val thy = Context.begin_thy name imports; val wrappers = begin_wrappers thy; in thy |> init_markup (name, pos) |> Sign.init_naming |> Sign.local_path |> apply_wrappers wrappers |> tap (Syntax.force_syntax o Sign.syn_of) end; fun end_theory thy = thy |> apply_wrappers (end_wrappers thy) |> Sign.change_check |> Context.finish_thy; (** primitive specifications **) (* raw axioms *) fun cert_axm ctxt (b, raw_tm) = let val thy = Proof_Context.theory_of ctxt; val t = Sign.cert_prop thy raw_tm handle TYPE (msg, _, _) => error msg | TERM (msg, _) => error msg; val _ = Term.no_dummy_patterns t handle TERM (msg, _) => error msg; val bad_sorts = rev ((fold_types o fold_atyps_sorts) (fn (_, []) => I | (T, _) => insert (op =) T) t []); val _ = null bad_sorts orelse error ("Illegal sort constraints in primitive specification: " ^ commas (map (Syntax.string_of_typ (Config.put show_sorts true ctxt)) bad_sorts)); in (b, Sign.no_vars ctxt t) end handle ERROR msg => cat_error msg ("The error(s) above occurred in axiom " ^ Binding.print b); fun add_axiom ctxt raw_axm thy = thy |> map_axioms (fn axioms => let val axm = apsnd Logic.varify_global (cert_axm ctxt raw_axm); val context = ctxt |> Sign.inherit_naming thy |> Context_Position.set_visible_generic false; val (_, axioms') = Name_Space.define context true axm axioms; in axioms' end); (* dependencies *) fun const_dep thy (c, T) = ((Defs.Const, 
c), Sign.const_typargs thy (c, T)); fun type_dep (c, args) = ((Defs.Type, c), args); fun dependencies (context as (ctxt, _)) unchecked def description lhs rhs = let fun prep (item, args) = (case fold Term.add_tvarsT args [] of [] => (item, map Logic.varifyT_global args) | vs => raise TYPE ("Illegal schematic type variable(s)", map TVar vs, [])); val lhs_vars = Term_Subst.TFrees.build (snd lhs |> fold Term_Subst.add_tfreesT); val rhs_extras = build (rhs |> fold (#2 #> (fold o Term.fold_atyps) (fn TFree v => not (Term_Subst.TFrees.defined lhs_vars v) ? insert (op =) v | _ => I))); val _ = if null rhs_extras then () else error ("Specification depends on extra type variables: " ^ commas_quote (map (Syntax.string_of_typ ctxt o TFree) rhs_extras) ^ "\nThe error(s) above occurred in " ^ quote description); in Defs.define context unchecked def description (prep lhs) (map prep rhs) end; fun cert_entry thy ((Defs.Const, c), args) = Sign.cert_term thy (Const (c, Sign.const_instance thy (c, args))) |> dest_Const |> const_dep thy | cert_entry thy ((Defs.Type, c), args) = Sign.certify_typ thy (Type (c, args)) |> dest_Type |> type_dep; fun add_deps context a raw_lhs raw_rhs thy = let val (lhs as ((_, lhs_name), _)) :: rhs = map (cert_entry thy) (raw_lhs :: raw_rhs); val description = if a = "" then lhs_name ^ " axiom" else a; in thy |> map_defs (dependencies context false NONE description lhs rhs) end; fun add_deps_global a x y thy = add_deps (Defs.global_context thy) a x y thy; fun specify_const decl thy = let val (t, thy') = Sign.declare_const_global decl thy; in (t, add_deps_global "" (const_dep thy' (dest_Const t)) [] thy') end; (* overloading *) fun check_overloading ctxt overloaded (c, T) = let val thy = Proof_Context.theory_of ctxt; val declT = Sign.the_const_constraint thy c handle TYPE (msg, _, _) => error msg; val T' = Logic.varifyT_global T; fun message sorts txt = [Pretty.block [Pretty.str "Specification of constant ", Pretty.str c, Pretty.str " ::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ (Config.put show_sorts sorts ctxt) T)], Pretty.str txt] |> Pretty.chunks |> Pretty.string_of; in if Sign.typ_instance thy (declT, T') then () else if Type.raw_instance (declT, T') then error (message true "imposes additional sort constraints on the constant declaration") else if overloaded then () else error (message false "is strictly less general than the declared type (overloading required)") end; (* definitional axioms *) local fun check_def (context as (ctxt, _)) thy unchecked overloaded (b, tm) defs = let val name = Sign.full_name thy b; val ((lhs, rhs), _, _) = Primitive_Defs.dest_def ctxt {check_head = Term.is_Const, check_free_lhs = K true, check_free_rhs = K false, check_tfree = K false} tm handle TERM (msg, _) => error msg; val lhs_const = Term.dest_Const (Term.head_of lhs); val rhs_consts = fold_aterms (fn Const const => insert (op =) (const_dep thy const) | _ => I) rhs []; val rhs_types = (fold_types o fold_subtypes) (fn Type t => insert (op =) (type_dep t) | _ => I) rhs []; val rhs_deps = rhs_consts @ rhs_types; val _ = check_overloading ctxt overloaded lhs_const; in defs |> dependencies context unchecked (SOME name) name (const_dep thy lhs_const) rhs_deps end handle ERROR msg => cat_error msg (Pretty.string_of (Pretty.block [Pretty.str ("The error(s) above occurred in definition " ^ Binding.print b ^ ":"), Pretty.fbrk, Pretty.quote (Syntax.pretty_term ctxt tm)])); in fun add_def (context as (ctxt, _)) unchecked overloaded raw_axm thy = let val axm = cert_axm ctxt raw_axm in thy |> 
map_defs (check_def context thy unchecked overloaded axm) |> add_axiom ctxt axm end; end; end;
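
Note on the API change applied throughout this patch: the hunks above replace a positional boolean that distinguished defining from referencing occurrences of an entity (command_markup, Position.make_entity_markup, markup_bound, theory_markup, ...) by a {def: bool} record argument, so each call site now reads {def = true} or {def = false}. The following is a minimal, self-contained Standard ML sketch of that refactoring pattern; the names entity_markup_old/entity_markup_new are illustrative only and not part of the patch.

(* before: the flag's meaning is invisible at the call site *)
fun entity_markup_old def name =
  (if def then "defining " else "referencing ") ^ name;

(* after: the record field documents the flag at every call site *)
fun entity_markup_new {def = def} name =
  (if def then "defining " else "referencing ") ^ name;

val s1 = entity_markup_old true "foo";           (* unclear what 'true' means *)
val s2 = entity_markup_new {def = true} "foo";   (* self-documenting *)

The diff hunks above (e.g. command_markup {def = false}, theory_markup {def = true}) are mechanical rewrites of this shape: function definitions keep threading the value through unchanged, while call sites gain a named, grep-able flag.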