diff --git a/src/HOL/Tools/Mirabelle/mirabelle.ML b/src/HOL/Tools/Mirabelle/mirabelle.ML --- a/src/HOL/Tools/Mirabelle/mirabelle.ML +++ b/src/HOL/Tools/Mirabelle/mirabelle.ML @@ -1,345 +1,345 @@ (* Title: HOL/Mirabelle/Tools/mirabelle.ML Author: Jasmin Blanchette, TU Munich Author: Sascha Boehme, TU Munich Author: Makarius Author: Martin Desharnais, UniBw Munich *) signature MIRABELLE = sig (*core*) type action_context = {index: int, label: string, name: string, arguments: Properties.T, timeout: Time.time, output_dir: Path.T} type command = {theory_index: int, name: string, pos: Position.T, pre: Proof.state, post: Toplevel.state} type action = {run_action: command -> string, finalize: unit -> string} val register_action: string -> (action_context -> action) -> unit (*utility functions*) val print_exn: exn -> string val can_apply : Time.time -> (Proof.context -> int -> tactic) -> Proof.state -> bool val theorems_in_proof_term : theory -> thm -> thm list val theorems_of_sucessful_proof: Toplevel.state -> thm list val get_argument : (string * string) list -> string * string -> string val get_int_argument : (string * string) list -> string * int -> int val get_bool_argument : (string * string) list -> string * bool -> bool val cpu_time : ('a -> 'b) -> 'a -> 'b * int end structure Mirabelle : MIRABELLE = struct (** Mirabelle core **) (* concrete syntax *) fun read_actions str = let val thy = \<^theory>; val ctxt = Proof_Context.init_global thy val keywords = Keyword.no_command_keywords (Thy_Header.get_keywords thy) - fun read_actions () = Token.read_embedded ctxt keywords + fun read_actions () = Parse.read_embedded ctxt keywords (Parse.enum ";" (Parse.name -- Sledgehammer_Commands.parse_params)) (Input.string str) fun split_name_label (name, args) labels = (case String.tokens (fn c => c = #".") name of [name] => (("", name, args), labels) | [label, name] => (if Symtab.defined labels label then error ("Action label '" ^ label ^ "' already defined.") else (); ((label, name, args), Symtab.insert_set label labels)) | _ => error "Cannot parse action") in try read_actions () |> Option.map (fn xs => fst (fold_map split_name_label xs Symtab.empty)) end (* actions *) type command = {theory_index: int, name: string, pos: Position.T, pre: Proof.state, post: Toplevel.state}; type action_context = {index: int, label: string, name: string, arguments: Properties.T, timeout: Time.time, output_dir: Path.T}; type action = {run_action: command -> string, finalize: unit -> string}; local val actions = Synchronized.var "Mirabelle.actions" (Symtab.empty : (action_context -> action) Symtab.table); in fun register_action name make_action = (if name = "" then error "Registering unnamed Mirabelle action" else (); Synchronized.change actions (Symtab.map_default (name, make_action) (fn f => (warning ("Redefining Mirabelle action: " ^ quote name); f)))); fun get_action name = Symtab.lookup (Synchronized.value actions) name; end (* apply actions *) fun print_exn exn = (case exn of Timeout.TIMEOUT _ => "timeout" | ERROR msg => "error: " ^ msg | exn => "exception: " ^ General.exnMessage exn); fun run_action_function f = f () handle exn => if Exn.is_interrupt exn then Exn.reraise exn else print_exn exn; fun make_action_string ({index, label, name, ...} : action_context) = if label = "" then string_of_int index ^ "." 
^ name else label; val make_action_path = Path.basic o make_action_string; fun finalize_action ({finalize, ...} : action) context = let val s = run_action_function finalize; val action_path = make_action_path context; val export_name = Path.binding0 (Path.basic "mirabelle" + action_path + Path.basic "finalize"); val () = File.append ((Path.dir (#output_dir context)) + Path.basic "mirabelle_ML.log") (prefix_lines (make_action_string context ^ " finalize ") (YXML.content_of s) ^ "\n") in if s <> "" then Export.export \<^theory> export_name [XML.Text s] else () end fun apply_action ({run_action, ...} : action) context (command as {pos, pre, ...} : command) = let val thy = Proof.theory_of pre; val action_path = make_action_path context; val goal_name_path = Path.basic (#name command) val line_path = Path.basic (string_of_int (the (Position.line_of pos))); val offset_path = Path.basic (string_of_int (the (Position.offset_of pos))); val export_name = Path.binding0 (Path.basic "mirabelle" + action_path + Path.basic "goal" + goal_name_path + line_path + offset_path); val s = run_action_function (fn () => run_action command); val prefix = make_action_string context ^ " goal." ^ StringCvt.padRight #" " 6 (#name command) ^ Context.theory_long_name thy ^ " " ^ string_of_int (the (Position.line_of pos)) ^ ":" ^ string_of_int (the (Position.offset_of pos)) ^ " " val _ = Substring.triml val () = File.append ((Path.dir (#output_dir context)) + Path.basic "mirabelle_ML.log") (prefix_lines prefix (YXML.content_of s) ^ "\n") in if s <> "" then Export.export thy export_name [XML.Text s] else () end; (* theory line range *) local val theory_name = Scan.many1 (Symbol_Pos.symbol #> (fn s => Symbol.not_eof s andalso s <> "[")) >> Symbol_Pos.content; val line = Symbol_Pos.scan_nat >> (Symbol_Pos.content #> Value.parse_nat); val end_line = Symbol_Pos.$$ ":" |-- line; val range = Symbol_Pos.$$ "[" |-- line -- Scan.option end_line --| Symbol_Pos.$$ "]"; in fun read_theory_range str = (case Scan.read Symbol_Pos.stopper (theory_name -- Scan.option range) (Symbol_Pos.explode0 str) of SOME res => res | NONE => error ("Malformed specification of theory line range: " ^ quote str)); end; fun check_theories strs = let fun theory_import_name s = #theory_name (Resources.import_name (Session.get_name ()) Path.current s); val theories = map read_theory_range strs |> map (apfst theory_import_name); fun get_theory name = if null theories then SOME NONE else get_first (fn (a, b) => if a = name then SOME b else NONE) theories; fun check_line NONE _ = false | check_line _ NONE = true | check_line (SOME NONE) _ = true | check_line (SOME (SOME (line, NONE))) (SOME i) = line <= i | check_line (SOME (SOME (line, SOME end_line))) (SOME i) = line <= i andalso i <= end_line; fun check_pos range = check_line range o Position.line_of; in check_pos o get_theory end; (* presentation hook *) val whitelist = ["apply", "by", "proof"]; val _ = Build.add_hook (fn qualifier => fn loaded_theories => let val mirabelle_actions = Options.default_string \<^system_option>\mirabelle_actions\; val actions = (case read_actions mirabelle_actions of SOME actions => actions | NONE => error ("Failed to parse mirabelle_actions: " ^ quote mirabelle_actions)); in if null actions then () else let val mirabelle_timeout = Options.default_seconds \<^system_option>\mirabelle_timeout\; val mirabelle_stride = Options.default_int \<^system_option>\mirabelle_stride\; val mirabelle_max_calls = Options.default_int \<^system_option>\mirabelle_max_calls\; val mirabelle_theories 
= Options.default_string \<^system_option>\mirabelle_theories\; val mirabelle_output_dir = Options.default_string \<^system_option>\mirabelle_output_dir\ |> Path.explode |> Isabelle_System.make_directory; val check_theory = check_theories (space_explode "," mirabelle_theories); fun make_commands (thy_index, (thy, segments)) = let val thy_long_name = Context.theory_long_name thy; val check_thy = check_theory thy_long_name; fun make_command {command = tr, prev_state = st, state = st', ...} = let val name = Toplevel.name_of tr; val pos = Toplevel.pos_of tr; in if Context.proper_subthy (\<^theory>, thy) andalso can (Proof.assert_backward o Toplevel.proof_of) st andalso member (op =) whitelist name andalso check_thy pos then SOME {theory_index = thy_index, name = name, pos = pos, pre = Toplevel.proof_of st, post = st'} else NONE end; in if Resources.theory_qualifier thy_long_name = qualifier then map_filter make_command segments else [] end; (* initialize actions *) val contexts = actions |> map_index (fn (n, (label, name, args)) => let val make_action = (case get_action name of SOME f => f | NONE => error "Unknown action"); val action_subdir = if label = "" then string_of_int n ^ "." ^ name else label; val output_dir = Path.append mirabelle_output_dir (Path.basic action_subdir); val context = {index = n, label = label, name = name, arguments = args, timeout = mirabelle_timeout, output_dir = output_dir}; in (make_action context, context) end); in (* run actions on all relevant goals *) loaded_theories |> map_index I |> maps make_commands |> (fn xs => fold_map (fn x => fn i => ((i, x), i + 1)) xs 0) |> (fn (indexed_commands, commands_length) => let val stride = if mirabelle_stride <= 0 then Integer.max 1 (commands_length div mirabelle_max_calls) else mirabelle_stride in maps (fn (n, command) => let val (m, k) = Integer.div_mod (n + 1) stride in if k = 0 andalso (mirabelle_max_calls <= 0 orelse m <= mirabelle_max_calls) then map (fn context => (context, command)) contexts else [] end) indexed_commands end) |> Par_List.map (fn ((action, context), command) => apply_action action context command); (* finalize actions *) List.app (uncurry finalize_action) contexts end end); (* Mirabelle utility functions *) fun can_apply time tac st = let val {context = ctxt, facts, goal} = Proof.goal st; val full_tac = HEADGOAL (Method.insert_tac ctxt facts THEN' tac ctxt); in (case try (Timeout.apply time (Seq.pull o full_tac)) goal of SOME (SOME _) => true | _ => false) end; local fun fold_body_thms f = let fun app n (PBody {thms, ...}) = thms |> fold (fn (i, thm_node) => fn (x, seen) => if Inttab.defined seen i then (x, seen) else let val name = Proofterm.thm_node_name thm_node; val prop = Proofterm.thm_node_prop thm_node; val body = Future.join (Proofterm.thm_node_body thm_node); val (x', seen') = app (n + (if name = "" then 0 else 1)) body (x, Inttab.update (i, ()) seen); in (x' |> n = 0 ? 
f (name, prop, body), seen') end); in fn bodies => fn x => #1 (fold (app 0) bodies (x, Inttab.empty)) end; in fun theorems_in_proof_term thy thm = let val all_thms = Global_Theory.all_thms_of thy true; fun collect (s, _, _) = if s <> "" then insert (op =) s else I; fun member_of xs (x, y) = if member (op =) xs x then SOME y else NONE; fun resolve_thms names = map_filter (member_of names) all_thms; in resolve_thms (fold_body_thms collect [Thm.proof_body_of thm] []) end; end; fun theorems_of_sucessful_proof st = (case try Toplevel.proof_of st of NONE => [] | SOME prf => theorems_in_proof_term (Proof.theory_of prf) (#goal (Proof.goal prf))); fun get_argument arguments (key, default) = the_default default (AList.lookup (op =) arguments key); fun get_int_argument arguments (key, default) = (case Option.map Int.fromString (AList.lookup (op =) arguments key) of SOME (SOME i) => i | SOME NONE => error ("bad option: " ^ key) | NONE => default); fun get_bool_argument arguments (key, default) = (case Option.map Bool.fromString (AList.lookup (op =) arguments key) of SOME (SOME i) => i | SOME NONE => error ("bad option: " ^ key) | NONE => default); fun cpu_time f x = let val ({cpu, ...}, y) = Timing.timing f x in (y, Time.toMilliseconds cpu) end; end diff --git a/src/Pure/Isar/method.ML b/src/Pure/Isar/method.ML --- a/src/Pure/Isar/method.ML +++ b/src/Pure/Isar/method.ML @@ -1,825 +1,825 @@ (* Title: Pure/Isar/method.ML Author: Markus Wenzel, TU Muenchen Isar proof methods. *) signature METHOD = sig type method = thm list -> context_tactic val CONTEXT_METHOD: (thm list -> context_tactic) -> method val METHOD: (thm list -> tactic) -> method val fail: method val succeed: method val insert_tac: Proof.context -> thm list -> int -> tactic val insert: thm list -> method val SIMPLE_METHOD: tactic -> method val SIMPLE_METHOD': (int -> tactic) -> method val SIMPLE_METHOD'': ((int -> tactic) -> tactic) -> (int -> tactic) -> method val goal_cases_tac: string list -> context_tactic val cheating: bool -> method val intro: Proof.context -> thm list -> method val elim: Proof.context -> thm list -> method val unfold: thm list -> Proof.context -> method val fold: thm list -> Proof.context -> method val atomize: bool -> Proof.context -> method val this: Proof.context -> method val fact: thm list -> Proof.context -> method val assm_tac: Proof.context -> int -> tactic val all_assm_tac: Proof.context -> tactic val assumption: Proof.context -> method val rule_trace: bool Config.T val trace: Proof.context -> thm list -> unit val rule_tac: Proof.context -> thm list -> thm list -> int -> tactic val some_rule_tac: Proof.context -> thm list -> thm list -> int -> tactic val intros_tac: Proof.context -> thm list -> thm list -> tactic val try_intros_tac: Proof.context -> thm list -> thm list -> tactic val rule: Proof.context -> thm list -> method val erule: Proof.context -> int -> thm list -> method val drule: Proof.context -> int -> thm list -> method val frule: Proof.context -> int -> thm list -> method val method_space: Context.generic -> Name_Space.T val set_tactic: (morphism -> thm list -> tactic) -> Context.generic -> Context.generic val clean_facts: thm list -> thm list val set_facts: thm list -> Proof.context -> Proof.context val get_facts: Proof.context -> thm list type combinator_info val no_combinator_info: combinator_info datatype combinator = Then | Then_All_New | Orelse | Try | Repeat1 | Select_Goals of int datatype text = Source of Token.src | Basic of Proof.context -> method | Combinator of combinator_info * 
combinator * text list val map_source: (Token.src -> Token.src) -> text -> text val primitive_text: (Proof.context -> thm -> thm) -> text val succeed_text: text val standard_text: text val this_text: text val done_text: text val sorry_text: bool -> text val finish_text: text option * bool -> text val print_methods: bool -> Proof.context -> unit val check_name: Proof.context -> xstring * Position.T -> string val check_src: Proof.context -> Token.src -> Token.src val check_text: Proof.context -> text -> text val checked_text: text -> bool val method_syntax: (Proof.context -> method) context_parser -> Token.src -> Proof.context -> method val setup: binding -> (Proof.context -> method) context_parser -> string -> theory -> theory val local_setup: binding -> (Proof.context -> method) context_parser -> string -> local_theory -> local_theory val method_setup: bstring * Position.T -> Input.source -> string -> local_theory -> local_theory val method: Proof.context -> Token.src -> Proof.context -> method val method_closure: Proof.context -> Token.src -> Token.src val closure: bool Config.T val method_cmd: Proof.context -> Token.src -> Proof.context -> method val detect_closure_state: thm -> bool val STATIC: (unit -> unit) -> context_tactic val RUNTIME: context_tactic -> context_tactic val sleep: Time.time -> context_tactic val evaluate: text -> Proof.context -> method val evaluate_runtime: text -> Proof.context -> method type modifier = {init: Proof.context -> Proof.context, attribute: attribute, pos: Position.T} val modifier: attribute -> Position.T -> modifier val old_section_parser: bool Config.T val sections: modifier parser list -> unit context_parser type text_range = text * Position.range val text: text_range option -> text option val position: text_range option -> Position.T val reports_of: text_range -> Position.report list val report: text_range -> unit val parser: int -> text_range parser val parse: text_range parser val read: Proof.context -> Token.src -> text val read_closure: Proof.context -> Token.src -> text * Token.src val read_closure_input: Proof.context -> Input.source -> text * Token.src val text_closure: text context_parser end; structure Method: METHOD = struct (** proof methods **) (* type method *) type method = thm list -> context_tactic; fun CONTEXT_METHOD tac : method = fn facts => CONTEXT_TACTIC (ALLGOALS Goal.conjunction_tac) #> Seq.maps_results (tac facts); fun METHOD tac : method = fn facts => CONTEXT_TACTIC (ALLGOALS Goal.conjunction_tac THEN tac facts); val fail = METHOD (K no_tac); val succeed = METHOD (K all_tac); (* insert facts *) fun insert_tac _ [] _ = all_tac | insert_tac ctxt facts i = EVERY (map (fn r => resolve_tac ctxt [Thm.forall_intr_vars r COMP_INCR revcut_rl] i) facts); fun insert thms = CONTEXT_METHOD (fn _ => fn (ctxt, st) => st |> ALLGOALS (insert_tac ctxt thms) |> TACTIC_CONTEXT ctxt); fun SIMPLE_METHOD tac = CONTEXT_METHOD (fn facts => fn (ctxt, st) => st |> (ALLGOALS (insert_tac ctxt facts) THEN tac) |> TACTIC_CONTEXT ctxt); fun SIMPLE_METHOD'' quant tac = CONTEXT_METHOD (fn facts => fn (ctxt, st) => st |> quant (insert_tac ctxt facts THEN' tac) |> TACTIC_CONTEXT ctxt); val SIMPLE_METHOD' = SIMPLE_METHOD'' HEADGOAL; (* goals as cases *) fun goal_cases_tac case_names : context_tactic = fn (ctxt, st) => let val cases = (if null case_names then map string_of_int (1 upto Thm.nprems_of st) else case_names) |> map (rpair [] o rpair []) |> Rule_Cases.make_common ctxt (Thm.prop_of (Rule_Cases.internalize_params st)); in CONTEXT_CASES cases all_tac 
(ctxt, st) end; (* cheating *) fun cheating int = CONTEXT_METHOD (fn _ => fn (ctxt, st) => if int orelse Config.get ctxt quick_and_dirty then TACTIC_CONTEXT ctxt (ALLGOALS (Skip_Proof.cheat_tac ctxt) st) else error "Cheating requires quick_and_dirty mode!"); (* unfold intro/elim rules *) fun intro ctxt ths = SIMPLE_METHOD' (CHANGED_PROP o REPEAT_ALL_NEW (match_tac ctxt ths)); fun elim ctxt ths = SIMPLE_METHOD' (CHANGED_PROP o REPEAT_ALL_NEW (ematch_tac ctxt ths)); (* unfold/fold definitions *) fun unfold_meth ths ctxt = SIMPLE_METHOD (CHANGED_PROP (Local_Defs.unfold_tac ctxt ths)); fun fold_meth ths ctxt = SIMPLE_METHOD (CHANGED_PROP (Local_Defs.fold_tac ctxt ths)); (* atomize rule statements *) fun atomize false ctxt = SIMPLE_METHOD' (CHANGED_PROP o Object_Logic.atomize_prems_tac ctxt) | atomize true ctxt = Context_Tactic.CONTEXT_TACTIC o K (HEADGOAL (CHANGED_PROP o Object_Logic.full_atomize_tac ctxt)); (* this -- resolve facts directly *) fun this ctxt = METHOD (EVERY o map (HEADGOAL o resolve_tac ctxt o single)); (* fact -- composition by facts from context *) fun fact [] ctxt = SIMPLE_METHOD' (Proof_Context.some_fact_tac ctxt) | fact rules ctxt = SIMPLE_METHOD' (Proof_Context.fact_tac ctxt rules); (* assumption *) local fun cond_rtac ctxt cond rule = SUBGOAL (fn (prop, i) => if cond (Logic.strip_assums_concl prop) then resolve_tac ctxt [rule] i else no_tac); in fun assm_tac ctxt = assume_tac ctxt APPEND' Goal.assume_rule_tac ctxt APPEND' cond_rtac ctxt (can Logic.dest_equals) Drule.reflexive_thm APPEND' cond_rtac ctxt (can Logic.dest_term) Drule.termI; fun all_assm_tac ctxt = let fun tac i st = if i > Thm.nprems_of st then all_tac st else ((assm_tac ctxt i THEN tac i) ORELSE tac (i + 1)) st; in tac 1 end; fun assumption ctxt = METHOD (HEADGOAL o (fn [] => assm_tac ctxt | [fact] => solve_tac ctxt [fact] | _ => K no_tac)); fun finish immed ctxt = METHOD (K ((if immed then all_assm_tac ctxt else all_tac) THEN flexflex_tac ctxt)); end; (* rule etc. 
-- single-step refinements *) val rule_trace = Attrib.setup_config_bool \<^binding>\rule_trace\ (fn _ => false); fun trace ctxt rules = if Config.get ctxt rule_trace andalso not (null rules) then Pretty.big_list "rules:" (map (Thm.pretty_thm_item ctxt) rules) |> Pretty.string_of |> tracing else (); local fun gen_rule_tac tac ctxt rules facts = (fn i => fn st => if null facts then tac ctxt rules i st else Seq.maps (fn rule => (tac ctxt o single) rule i st) (Drule.multi_resolves (SOME ctxt) facts rules)) THEN_ALL_NEW Goal.norm_hhf_tac ctxt; fun gen_arule_tac tac ctxt j rules facts = EVERY' (gen_rule_tac tac ctxt rules facts :: replicate j (assume_tac ctxt)); fun gen_some_rule_tac tac ctxt arg_rules facts = SUBGOAL (fn (goal, i) => let val rules = if not (null arg_rules) then arg_rules else flat (Context_Rules.find_rules ctxt false facts goal); in trace ctxt rules; tac ctxt rules facts i end); fun meth tac x y = METHOD (HEADGOAL o tac x y); fun meth' tac x y z = METHOD (HEADGOAL o tac x y z); in val rule_tac = gen_rule_tac resolve_tac; val rule = meth rule_tac; val some_rule_tac = gen_some_rule_tac rule_tac; val some_rule = meth some_rule_tac; val erule = meth' (gen_arule_tac eresolve_tac); val drule = meth' (gen_arule_tac dresolve_tac); val frule = meth' (gen_arule_tac forward_tac); end; (* intros_tac -- pervasive search spanned by intro rules *) fun gen_intros_tac goals ctxt intros facts = goals (insert_tac ctxt facts THEN' REPEAT_ALL_NEW (resolve_tac ctxt intros)) THEN Tactic.distinct_subgoals_tac; val intros_tac = gen_intros_tac ALLGOALS; val try_intros_tac = gen_intros_tac TRYALL; (** method syntax **) (* context data *) structure Data = Generic_Data ( type T = {methods: ((Token.src -> Proof.context -> method) * string) Name_Space.table, ml_tactic: (morphism -> thm list -> tactic) option, facts: thm list option}; val empty : T = {methods = Name_Space.empty_table Markup.methodN, ml_tactic = NONE, facts = NONE}; fun merge ({methods = methods1, ml_tactic = ml_tactic1, facts = facts1}, {methods = methods2, ml_tactic = ml_tactic2, facts = facts2}) : T = {methods = Name_Space.merge_tables (methods1, methods2), ml_tactic = merge_options (ml_tactic1, ml_tactic2), facts = merge_options (facts1, facts2)}; ); fun map_data f = Data.map (fn {methods, ml_tactic, facts} => let val (methods', ml_tactic', facts') = f (methods, ml_tactic, facts) in {methods = methods', ml_tactic = ml_tactic', facts = facts'} end); val get_methods = #methods o Data.get; val ops_methods = {get_data = get_methods, put_data = fn methods => map_data (fn (_, ml_tactic, facts) => (methods, ml_tactic, facts))}; val method_space = Name_Space.space_of_table o get_methods; (* ML tactic *) fun set_tactic ml_tactic = map_data (fn (methods, _, facts) => (methods, SOME ml_tactic, facts)); fun the_tactic context = (case #ml_tactic (Data.get context) of SOME tac => tac | NONE => raise Fail "Undefined ML tactic"); val parse_tactic = Scan.state :|-- (fn context => Scan.lift (Args.text_declaration (fn source => let val tac = context |> ML_Context.expression (Input.pos_of source) (ML_Lex.read "Context.>> (Method.set_tactic (fn morphism: Morphism.morphism => fn facts: thm list => (" @ ML_Lex.read_source source @ ML_Lex.read ")))") |> the_tactic; in fn phi => set_tactic (fn _ => Context.setmp_generic_context (SOME context) (tac phi)) end)) >> (fn decl => Morphism.form (the_tactic (Morphism.form decl context)))); (* method facts *) val clean_facts = filter_out Thm.is_dummy; fun set_facts facts = (Context.proof_map o map_data) (fn (methods, 
ml_tactic, _) => (methods, ml_tactic, SOME (clean_facts facts))); val get_facts_generic = these o #facts o Data.get; val get_facts = get_facts_generic o Context.Proof; val _ = Theory.setup (Global_Theory.add_thms_dynamic (Binding.make ("method_facts", \<^here>), get_facts_generic)); (* method text *) datatype combinator_info = Combinator_Info of {keywords: Position.T list}; fun combinator_info keywords = Combinator_Info {keywords = keywords}; val no_combinator_info = combinator_info []; datatype combinator = Then | Then_All_New | Orelse | Try | Repeat1 | Select_Goals of int; datatype text = Source of Token.src | Basic of Proof.context -> method | Combinator of combinator_info * combinator * text list; fun map_source f (Source src) = Source (f src) | map_source _ (Basic meth) = Basic meth | map_source f (Combinator (info, comb, txts)) = Combinator (info, comb, map (map_source f) txts); fun primitive_text r = Basic (SIMPLE_METHOD o PRIMITIVE o r); val succeed_text = Basic (K succeed); val standard_text = Source (Token.make_src ("standard", Position.none) []); val this_text = Basic this; val done_text = Basic (K (SIMPLE_METHOD all_tac)); fun sorry_text int = Basic (fn _ => cheating int); fun finish_text (NONE, immed) = Basic (finish immed) | finish_text (SOME txt, immed) = Combinator (no_combinator_info, Then, [txt, Basic (finish immed)]); (* method definitions *) fun print_methods verbose ctxt = let val meths = get_methods (Context.Proof ctxt); fun prt_meth (name, (_, "")) = Pretty.mark_str name | prt_meth (name, (_, comment)) = Pretty.block (Pretty.mark_str name :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment); in [Pretty.big_list "methods:" (map prt_meth (Name_Space.markup_table verbose ctxt meths))] |> Pretty.writeln_chunks end; (* define *) fun define_global binding meth comment = Entity.define_global ops_methods binding (meth, comment); fun define binding meth comment = Entity.define ops_methods binding (meth, comment); (* check *) fun check_name ctxt = let val context = Context.Proof ctxt in #1 o Name_Space.check context (get_methods context) end; fun check_src ctxt = #1 o Token.check_src ctxt (get_methods o Context.Proof); fun check_text ctxt (Source src) = Source (check_src ctxt src) | check_text _ (Basic m) = Basic m | check_text ctxt (Combinator (x, y, body)) = Combinator (x, y, map (check_text ctxt) body); fun checked_text (Source src) = Token.checked_src src | checked_text (Basic _) = true | checked_text (Combinator (_, _, body)) = forall checked_text body; val _ = Theory.setup (ML_Antiquotation.inline_embedded \<^binding>\method\ (Args.context -- Scan.lift Parse.embedded_position >> (ML_Syntax.print_string o uncurry check_name))); (* method setup *) fun method_syntax scan src ctxt : method = let val (m, ctxt') = Token.syntax scan src ctxt in m ctxt' end; fun setup binding scan comment = define_global binding (method_syntax scan) comment #> snd; fun local_setup binding scan comment = define binding (method_syntax scan) comment #> snd; fun method_setup binding source comment = ML_Context.expression (Input.pos_of source) (ML_Lex.read ("Theory.local_setup (Method.local_setup (" ^ ML_Syntax.make_binding binding ^ ") (") @ ML_Lex.read_source source @ ML_Lex.read (")" ^ ML_Syntax.print_string comment ^ ")")) |> Context.proof_map; (* prepare methods *) fun method ctxt = let val table = get_methods (Context.Proof ctxt) in fn src => #1 (Name_Space.get table (#1 (Token.name_of_src src))) src end; fun method_closure ctxt src = let val src' = map Token.init_assignable src; val ctxt' 
= Context_Position.not_really ctxt; val _ = Seq.pull (method ctxt' src' ctxt' [] (ctxt', Goal.protect 0 Drule.dummy_thm)); in map Token.closure src' end; val closure = Config.declare_bool ("Method.closure", \<^here>) (K true); fun method_cmd ctxt = check_src ctxt #> Config.get ctxt closure ? method_closure ctxt #> method ctxt; (* static vs. runtime state *) fun detect_closure_state st = (case try Logic.dest_term (Thm.concl_of (perhaps (try Goal.conclude) st)) of NONE => false | SOME t => Term.is_dummy_pattern t); fun STATIC test : context_tactic = fn (ctxt, st) => if detect_closure_state st then (test (); Seq.single (Seq.Result (ctxt, st))) else Seq.empty; fun RUNTIME (tac: context_tactic) (ctxt, st) = if detect_closure_state st then Seq.empty else tac (ctxt, st); fun sleep t = RUNTIME (fn ctxt_st => (OS.Process.sleep t; Seq.single (Seq.Result ctxt_st))); (* evaluate method text *) local val op THEN = Seq.THEN; fun BYPASS_CONTEXT (tac: tactic) = fn result => (case result of Seq.Error _ => Seq.single result | Seq.Result (ctxt, st) => tac st |> TACTIC_CONTEXT ctxt); val preparation = BYPASS_CONTEXT (ALLGOALS Goal.conjunction_tac); fun RESTRICT_GOAL i n method = BYPASS_CONTEXT (PRIMITIVE (Goal.restrict i n)) THEN method THEN BYPASS_CONTEXT (PRIMITIVE (Goal.unrestrict i)); fun SELECT_GOAL method i = RESTRICT_GOAL i 1 method; fun (method1 THEN_ALL_NEW method2) i (result : context_state Seq.result) = (case result of Seq.Error _ => Seq.single result | Seq.Result (_, st) => result |> method1 i |> Seq.maps (fn result' => (case result' of Seq.Error _ => Seq.single result' | Seq.Result (_, st') => result' |> Seq.INTERVAL method2 i (i + Thm.nprems_of st' - Thm.nprems_of st)))) fun COMBINATOR1 comb [meth] = comb meth | COMBINATOR1 _ _ = raise Fail "Method combinator requires exactly one argument"; fun combinator Then = Seq.EVERY | combinator Then_All_New = (fn [] => Seq.single | methods => preparation THEN (foldl1 (op THEN_ALL_NEW) (map SELECT_GOAL methods) 1)) | combinator Orelse = Seq.FIRST | combinator Try = COMBINATOR1 Seq.TRY | combinator Repeat1 = COMBINATOR1 Seq.REPEAT1 | combinator (Select_Goals n) = COMBINATOR1 (fn method => preparation THEN RESTRICT_GOAL 1 n method); in fun evaluate text ctxt0 facts = let val ctxt = set_facts facts ctxt0; fun eval0 m = Seq.single #> Seq.maps_results (m facts); fun eval (Basic m) = eval0 (m ctxt) | eval (Source src) = eval0 (method_cmd ctxt src ctxt) | eval (Combinator (_, c, txts)) = combinator c (map eval txts); in eval text o Seq.Result end; end; fun evaluate_runtime text ctxt = let val text' = text |> (map_source o map o Token.map_facts) (fn SOME name => (case Proof_Context.lookup_fact ctxt name of SOME {dynamic = true, thms} => K thms | _ => I) | NONE => I); val ctxt' = Config.put closure false ctxt; in fn facts => RUNTIME (fn st => evaluate text' ctxt' facts st) end; (** concrete syntax **) (* type modifier *) type modifier = {init: Proof.context -> Proof.context, attribute: attribute, pos: Position.T}; fun modifier attribute pos : modifier = {init = I, attribute = attribute, pos = pos}; (* sections *) val old_section_parser = Config.declare_bool ("Method.old_section_parser", \<^here>) (K false); local fun thms ss = Scan.repeats (Scan.unless (Scan.lift (Scan.first ss)) Attrib.multi_thm); fun app {init, attribute, pos = _} ths context = fold_map (Thm.apply_attribute attribute) ths (Context.map_proof init context); fun section ss = Scan.depend (fn context => (Scan.first ss -- Scan.pass context (thms ss)) :|-- (fn (m, ths) => Scan.succeed (swap (app m ths 
context)))); in fun old_sections ss = Scan.repeat (section ss) >> K (); end; local fun sect (modifier : modifier parser) = Scan.depend (fn context => Scan.ahead Parse.not_eof -- Scan.trace modifier -- Scan.repeat (Scan.unless modifier Parse.thm) >> (fn ((tok0, ({init, attribute, pos}, modifier_toks)), xthms) => let val decl = (case Token.get_value tok0 of SOME (Token.Declaration decl) => decl | _ => let val ctxt = Context.proof_of context; val prep_att = Attrib.check_src ctxt #> map (Token.assign NONE); val thms = map (fn (a, bs) => (Proof_Context.get_fact ctxt a, map prep_att bs)) xthms; val facts = Attrib.partial_evaluation ctxt [((Binding.name "dummy", []), thms)] |> map (fn (_, bs) => ((Binding.empty, [Attrib.internal (K attribute)]), bs)); fun decl phi = Context.mapping I init #> Attrib.generic_notes "" (Attrib.transform_facts phi facts) #> snd; val modifier_report = (#1 (Token.range_of modifier_toks), Position.entity_markup Markup.method_modifierN ("", pos)); val _ = Context_Position.reports ctxt (modifier_report :: Token.reports_of_value tok0); val _ = Token.assign (SOME (Token.Declaration decl)) tok0; in decl end); in (Morphism.form decl context, decl) end)); in fun sections ss = Args.context :|-- (fn ctxt => if Config.get ctxt old_section_parser then old_sections ss else Scan.repeat (sect (Scan.first ss)) >> K ()); end; (* extra rule methods *) fun xrule_meth meth = Scan.lift (Scan.optional (Args.parens Parse.nat) 0) -- Attrib.thms >> (fn (n, ths) => fn ctxt => meth ctxt n ths); (* text range *) type text_range = text * Position.range; fun text NONE = NONE | text (SOME (txt, _)) = SOME txt; fun position NONE = Position.none | position (SOME (_, (pos, _))) = pos; (* reports *) local fun keyword_positions (Source _) = [] | keyword_positions (Basic _) = [] | keyword_positions (Combinator (Combinator_Info {keywords}, _, texts)) = keywords @ maps keyword_positions texts; in fun reports_of ((text, (pos, _)): text_range) = (pos, Markup.language_method) :: maps (fn p => map (pair p) (Markup.keyword3 :: Completion.suppress_abbrevs "")) (keyword_positions text); fun report text_range = if Context_Position.pide_reports () then Position.reports (reports_of text_range) else (); end; (* parser *) local fun is_symid_meth s = s <> "|" andalso s <> "?" andalso s <> "+" andalso Token.ident_or_symbolic s; in fun parser pri = let val meth_name = Parse.token Parse.name; fun meth5 x = (meth_name >> (Source o single) || Scan.ahead Parse.cartouche |-- Parse.not_eof >> (fn tok => Source (Token.make_src ("cartouche", Position.none) [tok])) || Parse.$$$ "(" |-- Parse.!!! 
(meth0 --| Parse.$$$ ")")) x and meth4 x = (meth5 -- Parse.position (Parse.$$$ "?") >> (fn (m, (_, pos)) => Combinator (combinator_info [pos], Try, [m])) || meth5 -- Parse.position (Parse.$$$ "+") >> (fn (m, (_, pos)) => Combinator (combinator_info [pos], Repeat1, [m])) || meth5 -- (Parse.position (Parse.$$$ "[") -- Scan.optional Parse.nat 1 -- Parse.position (Parse.$$$ "]")) >> (fn (m, (((_, pos1), n), (_, pos2))) => Combinator (combinator_info [pos1, pos2], Select_Goals n, [m])) || meth5) x and meth3 x = (meth_name ::: Parse.args1 is_symid_meth >> Source || meth4) x and meth2 x = (Parse.enum1_positions "," meth3 >> (fn ([m], _) => m | (ms, ps) => Combinator (combinator_info ps, Then, ms))) x and meth1 x = (Parse.enum1_positions ";" meth2 >> (fn ([m], _) => m | (ms, ps) => Combinator (combinator_info ps, Then_All_New, ms))) x and meth0 x = (Parse.enum1_positions "|" meth1 >> (fn ([m], _) => m | (ms, ps) => Combinator (combinator_info ps, Orelse, ms))) x; val meth = nth [meth0, meth1, meth2, meth3, meth4, meth5] pri handle General.Subscript => raise Fail ("Bad method parser priority " ^ string_of_int pri); in Scan.trace meth >> (fn (m, toks) => (m, Token.range_of toks)) end; val parse = parser 4; end; (* read method text *) fun read ctxt src = (case Scan.read Token.stopper (Parse.!!! (parser 0 --| Scan.ahead Parse.eof)) src of SOME (text, range) => if checked_text text then text else (report (text, range); check_text ctxt text) | NONE => error ("Failed to parse method" ^ Position.here (#1 (Token.range_of src)))); fun read_closure ctxt src0 = let val src1 = map Token.init_assignable src0; val text = read ctxt src1 |> map_source (method_closure ctxt); val src2 = map Token.closure src1; in (text, src2) end; fun read_closure_input ctxt = let val keywords = Keyword.no_command_keywords (Thy_Header.get_keywords' ctxt) - in Token.read_embedded ctxt keywords (Scan.many Token.not_eof) #> read_closure ctxt end; + in Parse.read_embedded ctxt keywords (Scan.many Token.not_eof) #> read_closure ctxt end; val text_closure = Args.context -- Scan.lift (Parse.token Parse.text) >> (fn (ctxt, tok) => (case Token.get_value tok of SOME (Token.Source src) => read ctxt src | _ => let val (text, src) = read_closure_input ctxt (Token.input_of tok); val _ = Token.assign (SOME (Token.Source src)) tok; in text end)); (* theory setup *) val _ = Theory.setup (setup \<^binding>\fail\ (Scan.succeed (K fail)) "force failure" #> setup \<^binding>\succeed\ (Scan.succeed (K succeed)) "succeed" #> setup \<^binding>\sleep\ (Scan.lift Parse.real >> (fn s => fn _ => fn _ => sleep (seconds s))) "succeed after delay (in seconds)" #> setup \<^binding>\-\ (Scan.succeed (K (SIMPLE_METHOD all_tac))) "insert current facts, nothing else" #> setup \<^binding>\goal_cases\ (Scan.lift (Scan.repeat Args.name_token) >> (fn names => fn _ => CONTEXT_METHOD (fn _ => fn (ctxt, st) => (case drop (Thm.nprems_of st) names of [] => NONE | bad => if detect_closure_state st then NONE else SOME (fn () => ("Excessive case name(s): " ^ commas_quote (map Token.content_of bad) ^ Position.here (#1 (Token.range_of bad))))) |> (fn SOME msg => Seq.single (Seq.Error msg) | NONE => goal_cases_tac (map Token.content_of names) (ctxt, st))))) "bind cases for goals" #> setup \<^binding>\subproofs\ (text_closure >> (Context_Tactic.SUBPROOFS ooo evaluate_runtime)) "apply proof method to subproofs with closed derivation" #> setup \<^binding>\insert\ (Attrib.thms >> (K o insert)) "insert theorems, ignoring facts" #> setup \<^binding>\intro\ (Attrib.thms >> (fn ths => fn 
ctxt => intro ctxt ths)) "repeatedly apply introduction rules" #> setup \<^binding>\elim\ (Attrib.thms >> (fn ths => fn ctxt => elim ctxt ths)) "repeatedly apply elimination rules" #> setup \<^binding>\unfold\ (Attrib.thms >> unfold_meth) "unfold definitions" #> setup \<^binding>\fold\ (Attrib.thms >> fold_meth) "fold definitions" #> setup \<^binding>\atomize\ (Scan.lift (Args.mode "full") >> atomize) "present local premises as object-level statements" #> setup \<^binding>\rule\ (Attrib.thms >> (fn ths => fn ctxt => some_rule ctxt ths)) "apply some intro/elim rule" #> setup \<^binding>\erule\ (xrule_meth erule) "apply rule in elimination manner (improper)" #> setup \<^binding>\drule\ (xrule_meth drule) "apply rule in destruct manner (improper)" #> setup \<^binding>\frule\ (xrule_meth frule) "apply rule in forward manner (improper)" #> setup \<^binding>\this\ (Scan.succeed this) "apply current facts as rules" #> setup \<^binding>\fact\ (Attrib.thms >> fact) "composition by facts from context" #> setup \<^binding>\assumption\ (Scan.succeed assumption) "proof by assumption, preferring facts" #> setup \<^binding>\rename_tac\ (Args.goal_spec -- Scan.lift (Scan.repeat1 Args.name) >> (fn (quant, xs) => K (SIMPLE_METHOD'' quant (rename_tac xs)))) "rename parameters of goal" #> setup \<^binding>\rotate_tac\ (Args.goal_spec -- Scan.lift (Scan.optional Parse.int 1) >> (fn (quant, i) => K (SIMPLE_METHOD'' quant (rotate_tac i)))) "rotate assumptions of goal" #> setup \<^binding>\tactic\ (parse_tactic >> (K o METHOD)) "ML tactic as proof method" #> setup \<^binding>\raw_tactic\ (parse_tactic >> (fn tac => fn _ => Context_Tactic.CONTEXT_TACTIC o tac)) "ML tactic as raw proof method" #> setup \<^binding>\use\ (Attrib.thms -- (Scan.lift (Parse.$$$ "in") |-- text_closure) >> (fn (thms, text) => fn ctxt => fn _ => evaluate_runtime text ctxt thms)) "indicate method facts and context for method expression"); (*final declarations of this structure!*) val unfold = unfold_meth; val fold = fold_meth; end; val CONTEXT_METHOD = Method.CONTEXT_METHOD; val METHOD = Method.METHOD; val SIMPLE_METHOD = Method.SIMPLE_METHOD; val SIMPLE_METHOD' = Method.SIMPLE_METHOD'; val SIMPLE_METHOD'' = Method.SIMPLE_METHOD''; diff --git a/src/Pure/Isar/parse.ML b/src/Pure/Isar/parse.ML --- a/src/Pure/Isar/parse.ML +++ b/src/Pure/Isar/parse.ML @@ -1,511 +1,545 @@ (* Title: Pure/Isar/parse.ML Author: Markus Wenzel, TU Muenchen Generic parsers for Isabelle/Isar outer syntax. *) signature PARSE = sig val group: (unit -> string) -> (Token.T list -> 'a) -> Token.T list -> 'a val !!! : (Token.T list -> 'a) -> Token.T list -> 'a val !!!! 
: (Token.T list -> 'a) -> Token.T list -> 'a val not_eof: Token.T parser val token: 'a parser -> Token.T parser val range: 'a parser -> ('a * Position.range) parser val position: 'a parser -> ('a * Position.T) parser val input: 'a parser -> Input.source parser val inner_syntax: 'a parser -> string parser val command: string parser val keyword: string parser val short_ident: string parser val long_ident: string parser val sym_ident: string parser val dots: string parser val minus: string parser val term_var: string parser val type_ident: string parser val type_var: string parser val number: string parser val float_number: string parser val string: string parser val string_position: (string * Position.T) parser val alt_string: string parser val verbatim: string parser val cartouche: string parser val control: Antiquote.control parser val eof: string parser val command_name: string -> string parser val keyword_with: (string -> bool) -> string parser val keyword_markup: bool * Markup.T -> string -> string parser val keyword_improper: string -> string parser val $$$ : string -> string parser val reserved: string -> string parser val underscore: string parser val maybe: 'a parser -> 'a option parser val maybe_position: ('a * Position.T) parser -> ('a option * Position.T) parser val opt_keyword: string -> bool parser val opt_bang: bool parser val begin: string parser val opt_begin: bool parser val nat: int parser val int: int parser val real: real parser val enum_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum1_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum: string -> 'a parser -> 'a list parser val enum1: string -> 'a parser -> 'a list parser val and_list: 'a parser -> 'a list parser val and_list1: 'a parser -> 'a list parser val enum': string -> 'a context_parser -> 'a list context_parser val enum1': string -> 'a context_parser -> 'a list context_parser val and_list': 'a context_parser -> 'a list context_parser val and_list1': 'a context_parser -> 'a list context_parser val list: 'a parser -> 'a list parser val list1: 'a parser -> 'a list parser val name: string parser val name_range: (string * Position.range) parser val name_position: (string * Position.T) parser val binding: binding parser val embedded: string parser val embedded_inner_syntax: string parser val embedded_input: Input.source parser val embedded_position: (string * Position.T) parser val text: string parser val path_input: Input.source parser val path: string parser val path_binding: (string * Position.T) parser val session_name: (string * Position.T) parser val theory_name: (string * Position.T) parser val liberal_name: string parser val parname: string parser val parbinding: binding parser val class: string parser val sort: string parser val type_const: string parser val arity: (string * string list * string) parser val multi_arity: (string list * string list * string) parser val type_args: string list parser val type_args_constrained: (string * string option) list parser val typ: string parser val mixfix: mixfix parser val mixfix': mixfix parser val opt_mixfix: mixfix parser val opt_mixfix': mixfix parser val syntax_mode: Syntax.mode parser val where_: string parser val const_decl: (string * string * mixfix) parser val const_binding: (binding * string * mixfix) parser val params: (binding * string option * mixfix) list parser val vars: (binding * string option * mixfix) list parser val for_fixes: (binding * string option * mixfix) list parser val ML_source: 
Input.source parser val document_source: Input.source parser val document_marker: Input.source parser val const: string parser val term: string parser val prop: string parser val literal_fact: string parser val propp: (string * string list) parser val termp: (string * string list) parser val private: Position.T parser val qualified: Position.T parser val target: (string * Position.T) parser val opt_target: (string * Position.T) option parser val args: Token.T list parser val args1: (string -> bool) -> Token.T list parser val attribs: Token.src list parser val opt_attribs: Token.src list parser val thm_sel: Facts.interval list parser val thm: (Facts.ref * Token.src list) parser val thms1: (Facts.ref * Token.src list) list parser val options: ((string * Position.T) * (string * Position.T)) list parser val embedded_ml: ML_Lex.token Antiquote.antiquote list parser val embedded_ml_underscore: ML_Lex.token Antiquote.antiquote list parser + val read_antiq: Keyword.keywords -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a + val read_embedded: Proof.context -> Keyword.keywords -> 'a parser -> Input.source -> 'a + val read_embedded_src: Proof.context -> Keyword.keywords -> 'a parser -> Token.src -> 'a end; structure Parse: PARSE = struct (** error handling **) (* group atomic parsers (no cuts!) *) fun group s scan = scan || Scan.fail_with (fn [] => (fn () => s () ^ " expected,\nbut end-of-input was found") | tok :: _ => (fn () => (case Token.text_of tok of (txt, "") => s () ^ " expected,\nbut " ^ txt ^ Position.here (Token.pos_of tok) ^ " was found" | (txt1, txt2) => s () ^ " expected,\nbut " ^ txt1 ^ Position.here (Token.pos_of tok) ^ " was found:\n" ^ txt2))); (* cut *) fun cut kind scan = let fun get_pos [] = " (end-of-input)" | get_pos (tok :: _) = Position.here (Token.pos_of tok); fun err (toks, NONE) = (fn () => kind ^ get_pos toks) | err (toks, SOME msg) = (fn () => let val s = msg () in if String.isPrefix kind s then s else kind ^ get_pos toks ^ ": " ^ s end); in Scan.!! err scan end; fun !!! scan = cut "Outer syntax error" scan; fun !!!! 
scan = cut "Corrupted outer syntax in presentation" scan; (** basic parsers **) (* tokens *) fun RESET_VALUE atom = (*required for all primitive parsers*) Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (Token.assign NONE arg; x)); val not_eof = RESET_VALUE (Scan.one Token.not_eof); fun token atom = Scan.ahead not_eof --| atom; fun range scan = (Scan.ahead not_eof >> (Token.range_of o single)) -- scan >> Library.swap; fun position scan = (Scan.ahead not_eof >> Token.pos_of) -- scan >> Library.swap; fun input atom = Scan.ahead atom |-- not_eof >> Token.input_of; fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.inner_syntax_of; fun kind k = group (fn () => Token.str_of_kind k) (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of)); val command = kind Token.Command; val keyword = kind Token.Keyword; val short_ident = kind Token.Ident; val long_ident = kind Token.Long_Ident; val sym_ident = kind Token.Sym_Ident; val term_var = kind Token.Var; val type_ident = kind Token.Type_Ident; val type_var = kind Token.Type_Var; val number = kind Token.Nat; val float_number = kind Token.Float; val string = kind Token.String; val alt_string = kind Token.Alt_String; val verbatim = kind Token.Verbatim; val cartouche = kind Token.Cartouche; val control = token (kind Token.control_kind) >> (the o Token.get_control); val eof = kind Token.EOF; fun command_name x = group (fn () => Token.str_of_kind Token.Command ^ " " ^ quote x) (RESET_VALUE (Scan.one (fn tok => Token.is_command tok andalso Token.content_of tok = x))) >> Token.content_of; fun keyword_with pred = RESET_VALUE (Scan.one (Token.keyword_with pred) >> Token.content_of); fun keyword_markup markup x = group (fn () => Token.str_of_kind Token.Keyword ^ " " ^ quote x) (Scan.ahead not_eof -- keyword_with (fn y => x = y)) >> (fn (tok, x) => (Token.assign (SOME (Token.Literal markup)) tok; x)); val keyword_improper = keyword_markup (true, Markup.improper); val $$$ = keyword_markup (false, Markup.quasi_keyword); fun reserved x = group (fn () => "reserved identifier " ^ quote x) (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of)); val dots = sym_ident :-- (fn "\" => Scan.succeed () | _ => Scan.fail) >> #1; val minus = sym_ident :-- (fn "-" => Scan.succeed () | _ => Scan.fail) >> #1; val underscore = sym_ident :-- (fn "_" => Scan.succeed () | _ => Scan.fail) >> #1; fun maybe scan = underscore >> K NONE || scan >> SOME; fun maybe_position scan = position (underscore >> K NONE) || scan >> apfst SOME; val nat = number >> (#1 o Library.read_int o Symbol.explode); val int = Scan.optional (minus >> K ~1) 1 -- nat >> op *; val real = float_number >> Value.parse_real || int >> Real.fromInt; fun opt_keyword s = Scan.optional ($$$ "(" |-- !!! (($$$ s >> K true) --| $$$ ")")) false; val opt_bang = Scan.optional ($$$ "!" >> K true) false; val begin = $$$ "begin"; val opt_begin = Scan.optional (begin >> K true) false; (* enumerations *) fun enum1_positions sep scan = scan -- Scan.repeat (position ($$$ sep) -- !!! scan) >> (fn (x, ys) => (x :: map #2 ys, map (#2 o #1) ys)); fun enum_positions sep scan = enum1_positions sep scan || Scan.succeed ([], []); fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! 
scan); fun enum sep scan = enum1 sep scan || Scan.succeed []; fun enum1' sep scan = scan ::: Scan.repeat (Scan.lift ($$$ sep) |-- scan); fun enum' sep scan = enum1' sep scan || Scan.succeed []; fun and_list1 scan = enum1 "and" scan; fun and_list scan = enum "and" scan; fun and_list1' scan = enum1' "and" scan; fun and_list' scan = enum' "and" scan; fun list1 scan = enum1 "," scan; fun list scan = enum "," scan; (* names and embedded content *) val name = group (fn () => "name") (short_ident || long_ident || sym_ident || number || string); val name_range = input name >> Input.source_content_range; val name_position = input name >> Input.source_content; val string_position = input string >> Input.source_content; val binding = name_position >> Binding.make; val embedded = group (fn () => "embedded content") (cartouche || string || short_ident || long_ident || sym_ident || term_var || type_ident || type_var || number); val embedded_inner_syntax = inner_syntax embedded; val embedded_input = input embedded; val embedded_position = embedded_input >> Input.source_content; val text = group (fn () => "text") (embedded || verbatim); val path_input = group (fn () => "file name/path specification") embedded_input; val path = path_input >> Input.string_of; val path_binding = group (fn () => "path binding (strict file name)") (position embedded); val session_name = group (fn () => "session name") name_position; val theory_name = group (fn () => "theory name") name_position; val liberal_name = keyword_with Token.ident_or_symbolic || name; val parname = Scan.optional ($$$ "(" |-- name --| $$$ ")") ""; val parbinding = Scan.optional ($$$ "(" |-- binding --| $$$ ")") Binding.empty; (* type classes *) val class = group (fn () => "type class") (inner_syntax embedded); val sort = group (fn () => "sort") (inner_syntax embedded); val type_const = group (fn () => "type constructor") (inner_syntax embedded); val arity = type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; val multi_arity = and_list1 type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; (* types *) val typ = group (fn () => "type") (inner_syntax embedded); fun type_arguments arg = arg >> single || $$$ "(" |-- !!! (list1 arg --| $$$ ")") || Scan.succeed []; val type_args = type_arguments type_ident; val type_args_constrained = type_arguments (type_ident -- Scan.option ($$$ "::" |-- !!! sort)); (* mixfix annotations *) local val mfix = input (string || cartouche); val mixfix_ = mfix -- !!! (Scan.optional ($$$ "[" |-- !!! (list nat --| $$$ "]")) [] -- Scan.optional nat 1000) >> (fn (sy, (ps, p)) => fn range => Mixfix (sy, ps, p, range)); val structure_ = $$$ "structure" >> K Structure; val binder_ = $$$ "binder" |-- !!! (mfix -- ($$$ "[" |-- nat --| $$$ "]" -- nat || nat >> (fn n => (n, n)))) >> (fn (sy, (p, q)) => fn range => Binder (sy, p, q, range)); val infixl_ = $$$ "infixl" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixl (sy, p, range))); val infixr_ = $$$ "infixr" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixr (sy, p, range))); val infix_ = $$$ "infix" |-- !!! 
(mfix -- nat >> (fn (sy, p) => fn range => Infix (sy, p, range))); val mixfix_body = mixfix_ || structure_ || binder_ || infixl_ || infixr_ || infix_; fun annotation guard body = Scan.trace ($$$ "(" |-- guard (body --| $$$ ")")) >> (fn (mx, toks) => mx (Token.range_of toks)); fun opt_annotation guard body = Scan.optional (annotation guard body) NoSyn; in val mixfix = annotation !!! mixfix_body; val mixfix' = annotation I mixfix_body; val opt_mixfix = opt_annotation !!! mixfix_body; val opt_mixfix' = opt_annotation I mixfix_body; end; (* syntax mode *) val syntax_mode_spec = ($$$ "output" >> K ("", false)) || name -- Scan.optional ($$$ "output" >> K false) true; val syntax_mode = Scan.optional ($$$ "(" |-- !!! (syntax_mode_spec --| $$$ ")")) Syntax.mode_default; (* fixes *) val where_ = $$$ "where"; val const_decl = name -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val const_binding = binding -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val param_mixfix = binding -- Scan.option ($$$ "::" |-- typ) -- mixfix' >> (single o Scan.triple1); val params = (binding -- Scan.repeat binding) -- Scan.option ($$$ "::" |-- !!! (Scan.ahead typ -- embedded)) >> (fn ((x, ys), T) => (x, Option.map #1 T, NoSyn) :: map (fn y => (y, Option.map #2 T, NoSyn)) ys); val vars = and_list1 (param_mixfix || params) >> flat; val for_fixes = Scan.optional ($$$ "for" |-- !!! vars) []; (* embedded source text *) val ML_source = input (group (fn () => "ML source") text); val document_source = input (group (fn () => "document source") text); val document_marker = group (fn () => "document marker") (RESET_VALUE (Scan.one Token.is_document_marker >> Token.input_of)); (* terms *) val const = group (fn () => "constant") (inner_syntax embedded); val term = group (fn () => "term") (inner_syntax embedded); val prop = group (fn () => "proposition") (inner_syntax embedded); val literal_fact = inner_syntax (group (fn () => "literal fact") (alt_string || cartouche)); (* patterns *) val is_terms = Scan.repeat1 ($$$ "is" |-- term); val is_props = Scan.repeat1 ($$$ "is" |-- prop); val propp = prop -- Scan.optional ($$$ "(" |-- !!! (is_props --| $$$ ")")) []; val termp = term -- Scan.optional ($$$ "(" |-- !!! (is_terms --| $$$ ")")) []; (* target information *) val private = position ($$$ "private") >> #2; val qualified = position ($$$ "qualified") >> #2; val target = ($$$ "(" -- $$$ "in") |-- !!! (name_position --| $$$ ")"); val opt_target = Scan.option target; (* arguments within outer syntax *) local val argument_kinds = [Token.Ident, Token.Long_Ident, Token.Sym_Ident, Token.Var, Token.Type_Ident, Token.Type_Var, Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche, Token.Verbatim]; fun arguments is_symid = let fun argument blk = group (fn () => "argument") (Scan.one (fn tok => let val kind = Token.kind_of tok in member (op =) argument_kinds kind orelse Token.keyword_with is_symid tok orelse (blk andalso Token.keyword_with (fn s => s = ",") tok) end)); fun args blk x = Scan.optional (args1 blk) [] x and args1 blk x = (Scan.repeats1 (Scan.repeat1 (argument blk) || argsp "(" ")" || argsp "[" "]")) x and argsp l r x = (token ($$$ l) ::: !!! (args true @@@ (token ($$$ r) >> single))) x; in (args, args1) end; in val args = #1 (arguments Token.ident_or_symbolic) false; fun args1 is_symid = #2 (arguments is_symid) false; end; (* attributes *) val attrib = token liberal_name ::: !!! 
args; val attribs = $$$ "[" |-- list attrib --| $$$ "]"; val opt_attribs = Scan.optional attribs []; (* theorem references *) val thm_sel = $$$ "(" |-- list1 (nat --| minus -- nat >> Facts.FromTo || nat --| minus >> Facts.From || nat >> Facts.Single) --| $$$ ")"; val thm = $$$ "[" |-- attribs --| $$$ "]" >> pair (Facts.named "") || (literal_fact >> Facts.Fact || name_position -- Scan.option thm_sel >> Facts.Named) -- opt_attribs; val thms1 = Scan.repeat1 thm; (* options *) val option_name = group (fn () => "option name") name_position; val option_value = group (fn () => "option value") ((token real || token name) >> Token.content_of); val option = option_name :-- (fn (_, pos) => Scan.optional ($$$ "=" |-- !!! (position option_value)) ("true", pos)); val options = $$$ "[" |-- list1 option --| $$$ "]"; (* embedded ML *) val embedded_ml = embedded_input >> ML_Lex.read_source || control >> (ML_Lex.read_symbols o Antiquote.control_symbols); val embedded_ml_underscore = input underscore >> ML_Lex.read_source || embedded_ml; + +(* read embedded source, e.g. for antiquotations *) + +fun read_antiq keywords scan (syms, pos) = + let + val toks = syms + |> Token.tokenize (Keyword.no_command_keywords keywords) {strict = true} + |> filter Token.is_proper; + in + (case Scan.read Token.stopper scan toks of + SOME res => res + | NONE => error ("Malformed antiquotation" ^ Position.here pos)) + end; + +fun read_embedded ctxt keywords parse input = + let + val toks = input + |> Input.source_explode + |> Token.tokenize keywords {strict = true} + |> filter Token.is_proper; + val _ = Context_Position.reports_text ctxt (maps (Token.reports keywords) toks); + in + (case Scan.read Token.stopper parse toks of + SOME res => res + | NONE => error ("Bad input" ^ Position.here (Input.pos_of input))) + end; + +fun read_embedded_src ctxt keywords parse src = + Token.syntax (Scan.lift embedded_input) src ctxt + |> #1 |> read_embedded ctxt keywords parse; + end; diff --git a/src/Pure/Isar/token.ML b/src/Pure/Isar/token.ML --- a/src/Pure/Isar/token.ML +++ b/src/Pure/Isar/token.ML @@ -1,839 +1,810 @@ (* Title: Pure/Isar/token.ML Author: Markus Wenzel, TU Muenchen Outer token syntax for Isabelle/Isar. 
*) signature TOKEN = sig datatype kind = (*immediate source*) Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat | Float | Space | (*delimited content*) String | Alt_String | Verbatim | Cartouche | Control of Antiquote.control | Comment of Comment.kind option | (*special content*) Error of string | EOF val control_kind: kind val str_of_kind: kind -> string type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T} type T type src = T list type name_value = {name: string, kind: string, print: Proof.context -> Markup.T * xstring} datatype value = Source of src | Literal of bool * Markup.T | Name of name_value * morphism | Typ of typ | Term of term | Fact of string option * thm list | Attribute of morphism -> attribute | Declaration of declaration | Files of file Exn.result list val pos_of: T -> Position.T val adjust_offsets: (int -> int option) -> T -> T val eof: T val is_eof: T -> bool val not_eof: T -> bool val stopper: T Scan.stopper val kind_of: T -> kind val is_kind: kind -> T -> bool val get_control: T -> Antiquote.control option val is_command: T -> bool val keyword_with: (string -> bool) -> T -> bool val is_command_modifier: T -> bool val ident_with: (string -> bool) -> T -> bool val is_proper: T -> bool val is_comment: T -> bool val is_informal_comment: T -> bool val is_formal_comment: T -> bool val is_document_marker: T -> bool val is_ignored: T -> bool val is_begin_ignore: T -> bool val is_end_ignore: T -> bool val is_error: T -> bool val is_space: T -> bool val is_blank: T -> bool val is_newline: T -> bool val range_of: T list -> Position.range val core_range_of: T list -> Position.range val content_of: T -> string val source_of: T -> string val input_of: T -> Input.source val inner_syntax_of: T -> string val keyword_markup: bool * Markup.T -> string -> Markup.T val completion_report: T -> Position.report_text list val reports: Keyword.keywords -> T -> Position.report_text list val markups: Keyword.keywords -> T -> Markup.T list val unparse: T -> string val print: T -> string val text_of: T -> string * string val file_source: file -> Input.source val get_files: T -> file Exn.result list val put_files: file Exn.result list -> T -> T val get_value: T -> value option val reports_of_value: T -> Position.report list val name_value: name_value -> value val get_name: T -> name_value option val declare_maxidx: T -> Proof.context -> Proof.context val map_facts: (string option -> thm list -> thm list) -> T -> T val trim_context_src: src -> src val transform: morphism -> T -> T val init_assignable: T -> T val assign: value option -> T -> T val evaluate: ('a -> value) -> (T -> 'a) -> T -> 'a val closure: T -> T val pretty_value: Proof.context -> T -> Pretty.T val name_of_src: src -> string * Position.T val args_of_src: src -> T list val checked_src: src -> bool val check_src: Proof.context -> (Proof.context -> 'a Name_Space.table) -> src -> src * 'a val pretty_src: Proof.context -> src -> Pretty.T val ident_or_symbolic: string -> bool val read_cartouche: Symbol_Pos.T list -> T val tokenize: Keyword.keywords -> {strict: bool} -> Symbol_Pos.T list -> T list val explode: Keyword.keywords -> Position.T -> string -> T list val explode0: Keyword.keywords -> string -> T list val print_name: Keyword.keywords -> string -> string val print_properties: Keyword.keywords -> Properties.T -> string val make: (int * int) * string -> Position.T -> T * Position.T val make_string: string * Position.T -> T val make_int: int -> T list val 
make_src: string * Position.T -> T list -> src type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) - val read_antiq: Keyword.keywords -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a - val read_embedded: Proof.context -> Keyword.keywords -> 'a parser -> Input.source -> 'a val syntax_generic: 'a context_parser -> src -> Context.generic -> 'a * Context.generic val syntax: 'a context_parser -> src -> Proof.context -> 'a * Proof.context end; structure Token: TOKEN = struct (** tokens **) (* token kind *) datatype kind = (*immediate source*) Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat | Float | Space | (*delimited content*) String | Alt_String | Verbatim | Cartouche | Control of Antiquote.control | Comment of Comment.kind option | (*special content*) Error of string | EOF; val control_kind = Control Antiquote.no_control; fun equiv_kind kind kind' = (case (kind, kind') of (Control _, Control _) => true | (Error _, Error _) => true | _ => kind = kind'); val str_of_kind = fn Command => "command" | Keyword => "keyword" | Ident => "identifier" | Long_Ident => "long identifier" | Sym_Ident => "symbolic identifier" | Var => "schematic variable" | Type_Ident => "type variable" | Type_Var => "schematic type variable" | Nat => "natural number" | Float => "floating-point number" | Space => "white space" | String => "quoted string" | Alt_String => "back-quoted string" | Verbatim => "verbatim text" | Cartouche => "text cartouche" | Control _ => "control cartouche" | Comment NONE => "informal comment" | Comment (SOME _) => "formal comment" | Error _ => "bad input" | EOF => "end-of-input"; val immediate_kinds = Vector.fromList [Command, Keyword, Ident, Long_Ident, Sym_Ident, Var, Type_Ident, Type_Var, Nat, Float, Space]; val delimited_kind = (fn String => true | Alt_String => true | Verbatim => true | Cartouche => true | Control _ => true | Comment _ => true | _ => false); (* datatype token *) (*The value slot assigns an (optional) internal value to a token, usually as a side-effect of special scanner setup (see also args.ML). Note that an assignable ref designates an intermediate state of internalization -- it is NOT meant to persist.*) type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T}; type name_value = {name: string, kind: string, print: Proof.context -> Markup.T * xstring}; datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot and slot = Slot | Value of value option | Assignable of value option Unsynchronized.ref and value = Source of T list | Literal of bool * Markup.T | Name of name_value * morphism | Typ of typ | Term of term | Fact of string option * thm list | (*optional name for dynamic fact, i.e. 
fact "variable"*) Attribute of morphism -> attribute | Declaration of declaration | Files of file Exn.result list; type src = T list; (* position *) fun pos_of (Token ((_, (pos, _)), _, _)) = pos; fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos; fun adjust_offsets adjust (Token ((x, range), y, z)) = Token ((x, apply2 (Position.adjust_offsets adjust) range), y, z); (* stopper *) fun mk_eof pos = Token (("", (pos, Position.none)), (EOF, ""), Slot); val eof = mk_eof Position.none; fun is_eof (Token (_, (EOF, _), _)) = true | is_eof _ = false; val not_eof = not o is_eof; val stopper = Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; (* kind of token *) fun kind_of (Token (_, (k, _), _)) = k; fun is_kind k (Token (_, (k', _), _)) = equiv_kind k k'; fun get_control tok = (case kind_of tok of Control control => SOME control | _ => NONE); val is_command = is_kind Command; fun keyword_with pred (Token (_, (Keyword, x), _)) = pred x | keyword_with _ _ = false; val is_command_modifier = keyword_with (fn x => x = "private" orelse x = "qualified"); fun ident_with pred (Token (_, (Ident, x), _)) = pred x | ident_with _ _ = false; fun is_ignored (Token (_, (Space, _), _)) = true | is_ignored (Token (_, (Comment NONE, _), _)) = true | is_ignored _ = false; fun is_proper (Token (_, (Space, _), _)) = false | is_proper (Token (_, (Comment _, _), _)) = false | is_proper _ = true; fun is_comment (Token (_, (Comment _, _), _)) = true | is_comment _ = false; fun is_informal_comment (Token (_, (Comment NONE, _), _)) = true | is_informal_comment _ = false; fun is_formal_comment (Token (_, (Comment (SOME _), _), _)) = true | is_formal_comment _ = false; fun is_document_marker (Token (_, (Comment (SOME Comment.Marker), _), _)) = true | is_document_marker _ = false; fun is_begin_ignore (Token (_, (Comment NONE, "<"), _)) = true | is_begin_ignore _ = false; fun is_end_ignore (Token (_, (Comment NONE, ">"), _)) = true | is_end_ignore _ = false; fun is_error (Token (_, (Error _, _), _)) = true | is_error _ = false; (* blanks and newlines -- space tokens obey lines *) fun is_space (Token (_, (Space, _), _)) = true | is_space _ = false; fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x) | is_blank _ = false; fun is_newline (Token (_, (Space, x), _)) = String.isSuffix "\n" x | is_newline _ = false; (* range of tokens *) fun range_of (toks as tok :: _) = let val pos' = end_pos_of (List.last toks) in Position.range (pos_of tok, pos') end | range_of [] = Position.no_range; val core_range_of = drop_prefix is_ignored #> drop_suffix is_ignored #> range_of; (* token content *) fun content_of (Token (_, (_, x), _)) = x; fun source_of (Token ((source, _), _, _)) = source; fun input_of (Token ((source, range), (kind, _), _)) = Input.source (delimited_kind kind) source range; fun inner_syntax_of tok = let val x = content_of tok in if YXML.detect x then x else Syntax.implode_input (input_of tok) end; (* markup reports *) local val token_kind_markup = fn Var => (Markup.var, "") | Type_Ident => (Markup.tfree, "") | Type_Var => (Markup.tvar, "") | String => (Markup.string, "") | Alt_String => (Markup.alt_string, "") | Verbatim => (Markup.verbatim, "") | Cartouche => (Markup.cartouche, "") | Control _ => (Markup.cartouche, "") | Comment _ => (Markup.comment, "") | Error msg => (Markup.bad (), msg) | _ => (Markup.empty, ""); fun keyword_reports tok = map (fn markup => ((pos_of tok, markup), "")); fun command_markups keywords x = if Keyword.is_theory_end keywords x then 
[Markup.keyword2 |> Markup.keyword_properties] else (if Keyword.is_proof_asm keywords x then [Markup.keyword3] else if Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else [Markup.keyword1]) |> map Markup.command_properties; in fun keyword_markup (important, keyword) x = if important orelse Symbol.is_ascii_identifier x then keyword else Markup.delimiter; fun completion_report tok = if is_kind Keyword tok then map (fn m => ((pos_of tok, m), "")) (Completion.suppress_abbrevs (content_of tok)) else []; fun reports keywords tok = if is_command tok then keyword_reports tok (command_markups keywords (content_of tok)) else if is_kind Keyword tok then keyword_reports tok [keyword_markup (false, Markup.keyword2 |> Markup.keyword_properties) (content_of tok)] else let val pos = pos_of tok; val (m, text) = token_kind_markup (kind_of tok); val deleted = Symbol_Pos.explode_deleted (source_of tok, pos); in ((pos, m), text) :: map (fn p => ((p, Markup.delete), "")) deleted end; fun markups keywords = map (#2 o #1) o reports keywords; end; (* unparse *) fun unparse (Token (_, (kind, x), _)) = (case kind of String => Symbol_Pos.quote_string_qq x | Alt_String => Symbol_Pos.quote_string_bq x | Verbatim => enclose "{*" "*}" x | Cartouche => cartouche x | Control control => Symbol_Pos.content (Antiquote.control_symbols control) | Comment NONE => enclose "(*" "*)" x | EOF => "" | _ => x); fun print tok = Markup.markups (markups Keyword.empty_keywords tok) (unparse tok); fun text_of tok = let val k = str_of_kind (kind_of tok); val ms = markups Keyword.empty_keywords tok; val s = unparse tok; in if s = "" then (k, "") else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) then (k ^ " " ^ Markup.markups ms s, "") else (k, Markup.markups ms s) end; (** associated values **) (* inlined file content *) fun file_source (file: file) = let val text = cat_lines (#lines file); val end_pos = Position.symbol_explode text (#pos file); in Input.source true text (Position.range (#pos file, end_pos)) end; fun get_files (Token (_, _, Value (SOME (Files files)))) = files | get_files _ = []; fun put_files [] tok = tok | put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files))) | put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok)); (* access values *) fun get_value (Token (_, _, Value v)) = v | get_value _ = NONE; fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v))) | map_value _ tok = tok; (* reports of value *) fun get_assignable_value (Token (_, _, Assignable r)) = ! r | get_assignable_value (Token (_, _, Value v)) = v | get_assignable_value _ = NONE; fun reports_of_value tok = (case get_assignable_value tok of SOME (Literal markup) => let val pos = pos_of tok; val x = content_of tok; in if Position.is_reported pos then map (pair pos) (keyword_markup markup x :: Completion.suppress_abbrevs x) else [] end | _ => []); (* name value *) fun name_value a = Name (a, Morphism.identity); fun get_name tok = (case get_assignable_value tok of SOME (Name (a, _)) => SOME a | _ => NONE); (* maxidx *) fun declare_maxidx tok = (case get_value tok of SOME (Source src) => fold declare_maxidx src | SOME (Typ T) => Variable.declare_maxidx (Term.maxidx_of_typ T) | SOME (Term t) => Variable.declare_maxidx (Term.maxidx_of_term t) | SOME (Fact (_, ths)) => fold (Variable.declare_maxidx o Thm.maxidx_of) ths | SOME (Attribute _) => I (* FIXME !? 
*) | SOME (Declaration decl) => (fn ctxt => let val ctxt' = Context.proof_map (Morphism.form decl) ctxt in Variable.declare_maxidx (Variable.maxidx_of ctxt') ctxt end) | _ => I); (* fact values *) fun map_facts f = map_value (fn v => (case v of Source src => Source (map (map_facts f) src) | Fact (a, ths) => Fact (a, f a ths) | _ => v)); val trim_context_src = (map o map_facts) (K (map Thm.trim_context)); (* transform *) fun transform phi = map_value (fn v => (case v of Source src => Source (map (transform phi) src) | Literal _ => v | Name (a, psi) => Name (a, psi $> phi) | Typ T => Typ (Morphism.typ phi T) | Term t => Term (Morphism.term phi t) | Fact (a, ths) => Fact (a, Morphism.fact phi ths) | Attribute att => Attribute (Morphism.transform phi att) | Declaration decl => Declaration (Morphism.transform phi decl) | Files _ => v)); (* static binding *) (*1st stage: initialize assignable slots*) fun init_assignable tok = (case tok of Token (x, y, Slot) => Token (x, y, Assignable (Unsynchronized.ref NONE)) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := NONE; tok)); (*2nd stage: assign values as side-effect of scanning*) fun assign v tok = (case tok of Token (x, y, Slot) => Token (x, y, Value v) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := v; tok)); fun evaluate mk eval arg = let val x = eval arg in (assign (SOME (mk x)) arg; x) end; (*3rd stage: static closure of final values*) fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v) | closure tok = tok; (* pretty *) fun pretty_value ctxt tok = (case get_value tok of SOME (Literal markup) => let val x = content_of tok in Pretty.mark_str (keyword_markup markup x, x) end | SOME (Name ({print, ...}, _)) => Pretty.quote (Pretty.mark_str (print ctxt)) | SOME (Typ T) => Syntax.pretty_typ ctxt T | SOME (Term t) => Syntax.pretty_term ctxt t | SOME (Fact (_, ths)) => Pretty.enclose "(" ")" (Pretty.breaks (map (Pretty.cartouche o Thm.pretty_thm ctxt) ths)) | _ => Pretty.marks_str (markups Keyword.empty_keywords tok, unparse tok)); (* src *) fun dest_src ([]: src) = raise Fail "Empty token source" | dest_src (head :: args) = (head, args); fun name_of_src src = let val head = #1 (dest_src src); val name = (case get_name head of SOME {name, ...} => name | NONE => content_of head); in (name, pos_of head) end; val args_of_src = #2 o dest_src; fun pretty_src ctxt src = let val (head, args) = dest_src src; val prt_name = (case get_name head of SOME {print, ...} => Pretty.mark_str (print ctxt) | NONE => Pretty.str (content_of head)); in Pretty.block (Pretty.breaks (Pretty.quote prt_name :: map (pretty_value ctxt) args)) end; fun checked_src (head :: _) = is_some (get_name head) | checked_src [] = true; fun check_src ctxt get_table src = let val (head, args) = dest_src src; val table = get_table ctxt; in (case get_name head of SOME {name, ...} => (src, Name_Space.get table name) | NONE => let val pos = pos_of head; val (name, x) = Name_Space.check (Context.Proof ctxt) table (content_of head, pos); val _ = Context_Position.report ctxt pos Markup.operator; val kind = Name_Space.kind_of (Name_Space.space_of_table table); fun print ctxt' = Name_Space.markup_extern ctxt' (Name_Space.space_of_table (get_table ctxt')) name; val value = name_value {name = name, kind = kind, print = print}; val head' = closure (assign (SOME value) head); in (head' :: args, x) end) end; (** scanners **) open Basic_Symbol_Pos; val err_prefix = "Outer lexical error: "; fun !!! msg = Symbol_Pos.!!! 
(fn () => err_prefix ^ msg); (* scan symbolic idents *) val scan_symid = Scan.many1 (Symbol.is_symbolic_char o Symbol_Pos.symbol) || Scan.one (Symbol.is_symbolic o Symbol_Pos.symbol) >> single; fun is_symid str = (case try Symbol.explode str of SOME [s] => Symbol.is_symbolic s orelse Symbol.is_symbolic_char s | SOME ss => forall Symbol.is_symbolic_char ss | _ => false); fun ident_or_symbolic "begin" = false | ident_or_symbolic ":" = true | ident_or_symbolic "::" = true | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s; (* scan verbatim text *) val scan_verb = $$$ "*" --| Scan.ahead (~$$ "}") || Scan.one (fn (s, _) => s <> "*" andalso Symbol.not_eof s) >> single; val scan_verbatim = Scan.ahead ($$ "{" -- $$ "*") |-- !!! "unclosed verbatim text" ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") -- (Scan.repeats scan_verb -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos))); val recover_verbatim = $$$ "{" @@@ $$$ "*" @@@ Scan.repeats scan_verb; (* scan cartouche *) val scan_cartouche = Symbol_Pos.scan_pos -- ((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos); (* scan space *) fun space_symbol (s, _) = Symbol.is_blank s andalso s <> "\n"; val scan_space = Scan.many1 space_symbol @@@ Scan.optional ($$$ "\n") [] || Scan.many space_symbol @@@ $$$ "\n"; (* scan comment *) val scan_comment = Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos); (** token sources **) local fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2; fun token k ss = Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.content ss), Slot); fun token_range k (pos1, (ss, pos2)) = Token (Symbol_Pos.implode_range (pos1, pos2) ss, (k, Symbol_Pos.content ss), Slot); fun scan_token keywords = !!! 
"bad input" (Symbol_Pos.scan_string_qq err_prefix >> token_range String || Symbol_Pos.scan_string_bq err_prefix >> token_range Alt_String || scan_verbatim >> token_range Verbatim || scan_comment >> token_range (Comment NONE) || Comment.scan_outer >> (fn (k, ss) => token (Comment (SOME k)) ss) || scan_cartouche >> token_range Cartouche || Antiquote.scan_control err_prefix >> (fn control => token (Control control) (Antiquote.control_symbols control)) || scan_space >> token Space || (Scan.max token_leq (Scan.max token_leq (Scan.literal (Keyword.major_keywords keywords) >> pair Command) (Scan.literal (Keyword.minor_keywords keywords) >> pair Keyword)) (Lexicon.scan_longid >> pair Long_Ident || Lexicon.scan_id >> pair Ident || Lexicon.scan_var >> pair Var || Lexicon.scan_tid >> pair Type_Ident || Lexicon.scan_tvar >> pair Type_Var || Symbol_Pos.scan_float >> pair Float || Symbol_Pos.scan_nat >> pair Nat || scan_symid >> pair Sym_Ident) >> uncurry token)); fun recover msg = (Symbol_Pos.recover_string_qq || Symbol_Pos.recover_string_bq || recover_verbatim || Symbol_Pos.recover_cartouche || Symbol_Pos.recover_comment || Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single) >> (single o token (Error msg)); in fun make_source keywords {strict} = let val scan_strict = Scan.bulk (scan_token keywords); val scan = if strict then scan_strict else Scan.recover scan_strict recover; in Source.source Symbol_Pos.stopper scan end; fun read_cartouche syms = (case Scan.read Symbol_Pos.stopper (scan_cartouche >> token_range Cartouche) syms of SOME tok => tok | NONE => error ("Single cartouche expected" ^ Position.here (#1 (Symbol_Pos.range syms)))); end; (* explode *) fun tokenize keywords strict syms = Source.of_list syms |> make_source keywords strict |> Source.exhaust; fun explode keywords pos text = Symbol_Pos.explode (text, pos) |> tokenize keywords {strict = false}; fun explode0 keywords = explode keywords Position.none; (* print names in parsable form *) fun print_name keywords name = ((case explode keywords Position.none name of [tok] => not (member (op =) [Ident, Long_Ident, Sym_Ident, Nat] (kind_of tok)) | _ => true) ? Symbol_Pos.quote_string_qq) name; fun print_properties keywords = map (apply2 (print_name keywords) #> (fn (a, b) => a ^ " = " ^ b)) #> commas #> enclose "[" "]"; (* make *) fun make ((k, n), s) pos = let val pos' = Position.advance_offsets n pos; val range = Position.range (pos, pos'); val tok = if 0 <= k andalso k < Vector.length immediate_kinds then Token ((s, range), (Vector.sub (immediate_kinds, k), s), Slot) else (case explode Keyword.empty_keywords pos s of [tok] => tok | _ => Token ((s, range), (Error (err_prefix ^ "exactly one token expected"), s), Slot)) in (tok, pos') end; fun make_string (s, pos) = let val Token ((x, _), y, z) = #1 (make ((~1, 0), Symbol_Pos.quote_string_qq s) Position.none); val pos' = Position.no_range_position pos; in Token ((x, (pos', pos')), y, z) end; val make_int = explode Keyword.empty_keywords Position.none o signed_string_of_int; fun make_src a args = make_string a :: args; (** parsers **) type 'a parser = T list -> 'a * T list; type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list); -(* embedded source, e.g. 
for antiquotations *) - -fun read_antiq keywords scan (syms, pos) = - let - val toks = syms - |> tokenize (Keyword.no_command_keywords keywords) {strict = true} - |> filter is_proper; - in - (case Scan.read stopper scan toks of - SOME res => res - | NONE => error ("Malformed antiquotation" ^ Position.here pos)) - end; - -fun read_embedded ctxt keywords parse input = - let - val toks = input - |> Input.source_explode - |> tokenize keywords {strict = true} - |> filter is_proper; - val _ = Context_Position.reports_text ctxt (maps (reports keywords) toks); - in - (case Scan.read stopper parse toks of - SOME res => res - | NONE => error ("Bad input" ^ Position.here (Input.pos_of input))) - end; - - (* wrapped syntax *) fun syntax_generic scan src context = let val (name, pos) = name_of_src src; val old_reports = maps reports_of_value src; val args1 = map init_assignable (args_of_src src); fun reported_text () = if Context_Position.reports_enabled_generic context then let val new_reports = maps (reports_of_value o closure) args1 in if old_reports <> new_reports then map (fn (p, m) => Position.reported_text p m "") new_reports else [] end else []; in (case Scan.error (Scan.finite' stopper (Scan.option scan)) (context, args1) of (SOME x, (context', [])) => let val _ = Output.report (reported_text ()) in (x, context') end | (_, (context', args2)) => let val print_name = (case get_name (hd src) of NONE => quote name | SOME {kind, print, ...} => let val ctxt' = Context.proof_of context'; val (markup, xname) = print ctxt'; in plain_words kind ^ " " ^ quote (Markup.markup markup xname) end); val print_args = if null args2 then "" else ":\n " ^ space_implode " " (map print args2); in error ("Bad arguments for " ^ print_name ^ Position.here pos ^ print_args ^ Markup.markup_report (implode (reported_text ()))) end) end; fun syntax scan src = apsnd Context.the_proof o syntax_generic scan src o Context.Proof; end; type 'a parser = 'a Token.parser; type 'a context_parser = 'a Token.context_parser; diff --git a/src/Pure/ML/ml_antiquotations.ML b/src/Pure/ML/ml_antiquotations.ML --- a/src/Pure/ML/ml_antiquotations.ML +++ b/src/Pure/ML/ml_antiquotations.ML @@ -1,471 +1,467 @@ (* Title: Pure/ML/ml_antiquotations.ML Author: Makarius Miscellaneous ML antiquotations. *) signature ML_ANTIQUOTATIONS = sig val make_judgment: Proof.context -> term -> term val dest_judgment: Proof.context -> term -> term end; structure ML_Antiquotations: ML_ANTIQUOTATIONS = struct (* ML support *) val _ = Theory.setup (ML_Antiquotation.inline \<^binding>\undefined\ (Scan.succeed "(raise General.Match)") #> ML_Antiquotation.inline \<^binding>\assert\ (Scan.succeed "(fn b => if b then () else raise General.Fail \"Assertion failed\")") #> ML_Antiquotation.declaration_embedded \<^binding>\print\ (Scan.lift (Scan.optional Parse.embedded "Output.writeln")) (fn src => fn output => fn ctxt => let val struct_name = ML_Context.struct_name ctxt; val (_, pos) = Token.name_of_src src; val (a, ctxt') = ML_Context.variant "output" ctxt; val env = "val " ^ a ^ ": string -> unit =\n\ \ (" ^ output ^ ") o (fn s => s ^ Position.here (" ^ ML_Syntax.print_position pos ^ "));\n"; val body = "(fn x => (" ^ struct_name ^ "." 
^ a ^ " (" ^ ML_Pretty.make_string_fn ^ " x); x))"; in (K (env, body), ctxt') end) #> ML_Antiquotation.value \<^binding>\rat\ (Scan.lift (Scan.optional (Args.$$$ "~" >> K ~1) 1 -- Parse.nat -- Scan.optional (Args.$$$ "/" |-- Parse.nat) 1) >> (fn ((sign, a), b) => "Rat.make " ^ ML_Syntax.print_pair ML_Syntax.print_int ML_Syntax.print_int (sign * a, b))) #> ML_Antiquotation.conditional \<^binding>\if_linux\ (fn _ => ML_System.platform_is_linux) #> ML_Antiquotation.conditional \<^binding>\if_macos\ (fn _ => ML_System.platform_is_macos) #> ML_Antiquotation.conditional \<^binding>\if_windows\ (fn _ => ML_System.platform_is_windows) #> ML_Antiquotation.conditional \<^binding>\if_unix\ (fn _ => ML_System.platform_is_unix)); (* formal entities *) val _ = Theory.setup (ML_Antiquotation.value_embedded \<^binding>\system_option\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, (name, pos)) => (Completion.check_option (Options.default ()) ctxt (name, pos) |> ML_Syntax.print_string))) #> ML_Antiquotation.value_embedded \<^binding>\theory\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, (name, pos)) => (Theory.check {long = false} ctxt (name, pos); "Context.get_theory {long = false} (Proof_Context.theory_of ML_context) " ^ ML_Syntax.print_string name)) || Scan.succeed "Proof_Context.theory_of ML_context") #> ML_Antiquotation.value_embedded \<^binding>\theory_context\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, (name, pos)) => (Theory.check {long = false} ctxt (name, pos); "Proof_Context.get_global (Proof_Context.theory_of ML_context) " ^ ML_Syntax.print_string name))) #> ML_Antiquotation.inline \<^binding>\context\ (Args.context >> (fn ctxt => ML_Context.struct_name ctxt ^ ".ML_context")) #> ML_Antiquotation.inline_embedded \<^binding>\typ\ (Args.typ >> (ML_Syntax.atomic o ML_Syntax.print_typ)) #> ML_Antiquotation.inline_embedded \<^binding>\term\ (Args.term >> (ML_Syntax.atomic o ML_Syntax.print_term)) #> ML_Antiquotation.inline_embedded \<^binding>\prop\ (Args.prop >> (ML_Syntax.atomic o ML_Syntax.print_term)) #> ML_Antiquotation.value_embedded \<^binding>\ctyp\ (Args.typ >> (fn T => "Thm.ctyp_of ML_context " ^ ML_Syntax.atomic (ML_Syntax.print_typ T))) #> ML_Antiquotation.value_embedded \<^binding>\cterm\ (Args.term >> (fn t => "Thm.cterm_of ML_context " ^ ML_Syntax.atomic (ML_Syntax.print_term t))) #> ML_Antiquotation.value_embedded \<^binding>\cprop\ (Args.prop >> (fn t => "Thm.cterm_of ML_context " ^ ML_Syntax.atomic (ML_Syntax.print_term t))) #> ML_Antiquotation.inline_embedded \<^binding>\oracle_name\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, (name, pos)) => ML_Syntax.print_string (Thm.check_oracle ctxt (name, pos))))); (* schematic variables *) val schematic_input = Args.context -- Scan.lift Parse.embedded_input >> (fn (ctxt, src) => (Proof_Context.set_mode Proof_Context.mode_schematic ctxt, (Syntax.implode_input src, Input.pos_of src))); val _ = Theory.setup (ML_Antiquotation.inline_embedded \<^binding>\tvar\ (schematic_input >> (fn (ctxt, (s, pos)) => (case Syntax.read_typ ctxt s of TVar v => ML_Syntax.print_pair ML_Syntax.print_indexname ML_Syntax.print_sort v | _ => error ("Schematic type variable expected" ^ Position.here pos)))) #> ML_Antiquotation.inline_embedded \<^binding>\var\ (schematic_input >> (fn (ctxt, (s, pos)) => (case Syntax.read_term ctxt s of Var v => ML_Syntax.print_pair ML_Syntax.print_indexname ML_Syntax.print_typ v | _ => error ("Schematic variable expected" ^ Position.here pos))))); (* type 
classes *) fun class syn = Args.context -- Scan.lift Parse.embedded_inner_syntax >> (fn (ctxt, s) => Proof_Context.read_class ctxt s |> syn ? Lexicon.mark_class |> ML_Syntax.print_string); val _ = Theory.setup (ML_Antiquotation.inline_embedded \<^binding>\class\ (class false) #> ML_Antiquotation.inline_embedded \<^binding>\class_syntax\ (class true) #> ML_Antiquotation.inline_embedded \<^binding>\sort\ (Args.context -- Scan.lift Parse.embedded_inner_syntax >> (fn (ctxt, s) => ML_Syntax.atomic (ML_Syntax.print_sort (Syntax.read_sort ctxt s))))); (* type constructors *) fun type_name kind check = Args.context -- Scan.lift (Parse.token Parse.embedded) >> (fn (ctxt, tok) => let val s = Token.inner_syntax_of tok; val (_, pos) = Input.source_content (Token.input_of tok); val Type (c, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s; val decl = Type.the_decl (Proof_Context.tsig_of ctxt) (c, pos); val res = (case try check (c, decl) of SOME res => res | NONE => error ("Not a " ^ kind ^ ": " ^ quote c ^ Position.here pos)); in ML_Syntax.print_string res end); val _ = Theory.setup (ML_Antiquotation.inline_embedded \<^binding>\type_name\ (type_name "logical type" (fn (c, Type.LogicalType _) => c)) #> ML_Antiquotation.inline_embedded \<^binding>\type_abbrev\ (type_name "type abbreviation" (fn (c, Type.Abbreviation _) => c)) #> ML_Antiquotation.inline_embedded \<^binding>\nonterminal\ (type_name "nonterminal" (fn (c, Type.Nonterminal) => c)) #> ML_Antiquotation.inline_embedded \<^binding>\type_syntax\ (type_name "type" (fn (c, _) => Lexicon.mark_type c))); (* constants *) fun const_name check = Args.context -- Scan.lift (Parse.token Parse.embedded) >> (fn (ctxt, tok) => let val s = Token.inner_syntax_of tok; val (_, pos) = Input.source_content (Token.input_of tok); val Const (c, _) = Proof_Context.read_const {proper = true, strict = false} ctxt s; val res = check (Proof_Context.consts_of ctxt, c) handle TYPE (msg, _, _) => error (msg ^ Position.here pos); in ML_Syntax.print_string res end); val _ = Theory.setup (ML_Antiquotation.inline_embedded \<^binding>\const_name\ (const_name (fn (consts, c) => (Consts.the_const consts c; c))) #> ML_Antiquotation.inline_embedded \<^binding>\const_abbrev\ (const_name (fn (consts, c) => (Consts.the_abbreviation consts c; c))) #> ML_Antiquotation.inline_embedded \<^binding>\const_syntax\ (const_name (fn (_, c) => Lexicon.mark_const c)) #> ML_Antiquotation.inline_embedded \<^binding>\syntax_const\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, arg) => ML_Syntax.print_string (Proof_Context.check_syntax_const ctxt arg))) #> ML_Antiquotation.inline_embedded \<^binding>\const\ (Args.context -- Scan.lift (Parse.position Parse.embedded_inner_syntax) -- Scan.optional (Scan.lift (Args.$$$ "(") |-- Parse.enum1' "," Args.typ --| Scan.lift (Args.$$$ ")")) [] >> (fn ((ctxt, (raw_c, pos)), Ts) => let val Const (c, _) = Proof_Context.read_const {proper = true, strict = true} ctxt raw_c; val consts = Proof_Context.consts_of ctxt; val n = length (Consts.typargs consts (c, Consts.type_scheme consts c)); val _ = length Ts <> n andalso error ("Constant requires " ^ string_of_int n ^ " type argument(s): " ^ quote c ^ enclose "(" ")" (commas (replicate n "_")) ^ Position.here pos); val const = Const (c, Consts.instance consts (c, Ts)); in ML_Syntax.atomic (ML_Syntax.print_term const) end))); (* object-logic judgment *) fun make_judgment ctxt = let val const = Object_Logic.judgment_const ctxt in fn t => Const const $ t end; fun dest_judgment ctxt = 
let val is_judgment = Object_Logic.is_judgment ctxt; val drop_judgment = Object_Logic.drop_judgment ctxt; in fn t => if is_judgment t then drop_judgment t else raise TERM ("dest_judgment", [t]) end; val _ = Theory.setup (ML_Antiquotation.value \<^binding>\make_judgment\ (Scan.succeed "ML_Antiquotations.make_judgment ML_context") #> ML_Antiquotation.value \<^binding>\dest_judgment\ (Scan.succeed "ML_Antiquotations.dest_judgment ML_context")); (* type/term constructors *) -fun read_embedded ctxt keywords parse src = - Token.syntax (Scan.lift Parse.embedded_input) src ctxt - |> #1 |> Token.read_embedded ctxt keywords parse; - local val keywords = Keyword.add_minor_keywords ["for", "=>"] Keyword.empty_keywords; val parse_name = Parse.input Parse.name; val parse_args = Scan.repeat Parse.embedded_ml_underscore; val parse_for_args = Scan.optional (Parse.$$$ "for" |-- Parse.!!! parse_args) []; fun parse_body b = if b then Parse.$$$ "=>" |-- Parse.!!! Parse.embedded_input >> (ML_Lex.read_source #> single) else Scan.succeed []; fun is_dummy [Antiquote.Text tok] = ML_Lex.content_of tok = "_" | is_dummy _ = false; val ml = ML_Lex.tokenize_no_range; val ml_range = ML_Lex.tokenize_range; val ml_dummy = ml "_"; fun ml_enclose range x = ml "(" @ x @ ml_range range ")"; fun ml_parens x = ml "(" @ x @ ml ")"; fun ml_bracks x = ml "[" @ x @ ml "]"; fun ml_commas xs = flat (separate (ml ", ") xs); val ml_list = ml_bracks o ml_commas; val ml_string = ml o ML_Syntax.print_string; fun ml_pair (x, y) = ml_parens (ml_commas [x, y]); fun type_antiquotation binding {function} = ML_Context.add_antiquotation binding true (fn range => fn src => fn ctxt => let val ((s, type_args), fn_body) = src - |> read_embedded ctxt keywords (parse_name -- parse_args -- parse_body function); + |> Parse.read_embedded_src ctxt keywords (parse_name -- parse_args -- parse_body function); val pos = Input.pos_of s; val Type (c, Ts) = Proof_Context.read_type_name {proper = true, strict = true} ctxt (Syntax.implode_input s); val n = length Ts; val _ = length type_args = n orelse error ("Type constructor " ^ quote (Proof_Context.markup_type ctxt c) ^ " takes " ^ string_of_int n ^ " argument(s)" ^ Position.here pos); val (decls1, ctxt1) = ML_Context.expand_antiquotes_list type_args ctxt; val (decls2, ctxt2) = ML_Context.expand_antiquotes_list fn_body ctxt1; fun decl' ctxt' = let val (ml_args_env, ml_args_body) = split_list (decls1 ctxt'); val (ml_fn_env, ml_fn_body) = split_list (decls2 ctxt'); val ml1 = ml_enclose range (ml "Term.Type " @ ml_pair (ml_string c, ml_list ml_args_body)); val ml2 = if function then ml_enclose range (ml_range range "fn " @ ml1 @ ml "=> " @ flat ml_fn_body @ ml "| T => " @ ml_range range "raise" @ ml " Term.TYPE (" @ ml_string ("Type_fn " ^ quote c) @ ml ", [T], [])") else ml1; in (flat (ml_args_env @ ml_fn_env), ml2) end; in (decl', ctxt2) end); fun const_antiquotation binding {pattern, function} = ML_Context.add_antiquotation binding true (fn range => fn src => fn ctxt => let val (((s, type_args), term_args), fn_body) = src - |> read_embedded ctxt keywords + |> Parse.read_embedded_src ctxt keywords (parse_name -- parse_args -- parse_for_args -- parse_body function); val Const (c, T) = Proof_Context.read_const {proper = true, strict = true} ctxt (Syntax.implode_input s); val consts = Proof_Context.consts_of ctxt; val type_paths = Consts.type_arguments consts c; val type_params = map Term.dest_TVar (Consts.typargs consts (c, T)); val n = length type_params; val m = length (Term.binder_types T); fun err msg = error 
("Constant " ^ quote (Proof_Context.markup_const ctxt c) ^ msg ^ Position.here (Input.pos_of s)); val _ = length type_args <> n andalso err (" takes " ^ string_of_int n ^ " type argument(s)"); val _ = length term_args > m andalso Term.is_Type (Term.body_type T) andalso err (" cannot have more than " ^ string_of_int m ^ " argument(s)"); val (decls1, ctxt1) = ML_Context.expand_antiquotes_list type_args ctxt; val (decls2, ctxt2) = ML_Context.expand_antiquotes_list term_args ctxt1; val (decls3, ctxt3) = ML_Context.expand_antiquotes_list fn_body ctxt2; fun decl' ctxt' = let val (ml_args_env1, ml_args_body1) = split_list (decls1 ctxt'); val (ml_args_env2, ml_args_body2) = split_list (decls2 ctxt'); val (ml_fn_env, ml_fn_body) = split_list (decls3 ctxt'); val relevant = map is_dummy type_args ~~ type_paths; fun relevant_path is = not pattern orelse let val p = rev is in relevant |> exists (fn (u, q) => not u andalso is_prefix (op =) p q) end; val ml_typarg = the o AList.lookup (op =) (type_params ~~ ml_args_body1); fun ml_typ is (Type (d, Us)) = if relevant_path is then ml "Term.Type " @ ml_pair (ml_string d, ml_list (map_index (fn (i, U) => ml_typ (i :: is) U) Us)) else ml_dummy | ml_typ is (TVar arg) = if relevant_path is then ml_typarg arg else ml_dummy | ml_typ _ (TFree _) = raise Match; fun ml_app [] = ml "Term.Const " @ ml_pair (ml_string c, ml_typ [] T) | ml_app (u :: us) = ml "Term.$ " @ ml_pair (ml_app us, u); val ml_env = flat (ml_args_env1 @ ml_args_env2 @ ml_fn_env); val ml1 = ml_enclose range (ml_app (rev ml_args_body2)); val ml2 = if function then ml_enclose range (ml_range range "fn " @ ml1 @ ml "=> " @ flat ml_fn_body @ ml "| t => " @ ml_range range "raise" @ ml " Term.TERM (" @ ml_string ("Const_fn " ^ quote c) @ ml ", [t])") else ml1; in (ml_env, ml2) end; in (decl', ctxt3) end); val _ = Theory.setup (type_antiquotation \<^binding>\Type\ {function = false} #> type_antiquotation \<^binding>\Type_fn\ {function = true} #> const_antiquotation \<^binding>\Const\ {pattern = false, function = false} #> const_antiquotation \<^binding>\Const_\ {pattern = true, function = false} #> const_antiquotation \<^binding>\Const_fn\ {pattern = true, function = true}); in end; (* special forms *) val _ = Theory.setup (ML_Antiquotation.special_form \<^binding>\try\ "() |> Basics.try" #> ML_Antiquotation.special_form \<^binding>\can\ "() |> Basics.can"); (* basic combinators *) local val parameter = Parse.position Parse.nat >> (fn (n, pos) => if n > 1 then n else error ("Bad parameter: " ^ string_of_int n ^ Position.here pos)); fun indices n = map string_of_int (1 upto n); fun empty n = replicate_string n " []"; fun dummy n = replicate_string n " _"; fun vars x n = implode (map (fn a => " " ^ x ^ a) (indices n)); fun cons n = implode (map (fn a => " (x" ^ a ^ " :: xs" ^ a ^ ")") (indices n)); val tuple = enclose "(" ")" o commas; fun tuple_empty n = tuple (replicate n "[]"); fun tuple_vars x n = tuple (map (fn a => x ^ a) (indices n)); fun tuple_cons n = "(" ^ tuple_vars "x" n ^ " :: xs)" fun cons_tuple n = tuple (map (fn a => "x" ^ a ^ " :: xs" ^ a) (indices n)); in val _ = Theory.setup (ML_Antiquotation.value \<^binding>\map\ (Scan.lift parameter >> (fn n => "fn f =>\n\ \ let\n\ \ fun map _" ^ empty n ^ " = []\n\ \ | map f" ^ cons n ^ " = f" ^ vars "x" n ^ " :: map f" ^ vars "xs" n ^ "\n\ \ | map _" ^ dummy n ^ " = raise ListPair.UnequalLengths\n" ^ " in map f end")) #> ML_Antiquotation.value \<^binding>\fold\ (Scan.lift parameter >> (fn n => "fn f =>\n\ \ let\n\ \ fun fold _" ^ empty n ^ " a = a\n\ 
\ | fold f" ^ cons n ^ " a = fold f" ^ vars "xs" n ^ " (f" ^ vars "x" n ^ " a)\n\ \ | fold _" ^ dummy n ^ " _ = raise ListPair.UnequalLengths\n" ^ " in fold f end")) #> ML_Antiquotation.value \<^binding>\fold_map\ (Scan.lift parameter >> (fn n => "fn f =>\n\ \ let\n\ \ fun fold_map _" ^ empty n ^ " a = ([], a)\n\ \ | fold_map f" ^ cons n ^ " a =\n\ \ let\n\ \ val (x, a') = f" ^ vars "x" n ^ " a\n\ \ val (xs, a'') = fold_map f" ^ vars "xs" n ^ " a'\n\ \ in (x :: xs, a'') end\n\ \ | fold_map _" ^ dummy n ^ " _ = raise ListPair.UnequalLengths\n" ^ " in fold_map f end")) #> ML_Antiquotation.value \<^binding>\split_list\ (Scan.lift parameter >> (fn n => "fn list =>\n\ \ let\n\ \ fun split_list [] =" ^ tuple_empty n ^ "\n\ \ | split_list" ^ tuple_cons n ^ " =\n\ \ let val" ^ tuple_vars "xs" n ^ " = split_list xs\n\ \ in " ^ cons_tuple n ^ "end\n\ \ in split_list list end")) #> ML_Antiquotation.value \<^binding>\apply\ (Scan.lift (parameter -- Scan.option (Args.parens (Parse.position Parse.nat))) >> (fn (n, opt_index) => let val cond = (case opt_index of NONE => K true | SOME (index, index_pos) => if 1 <= index andalso index <= n then equal (string_of_int index) else error ("Bad index: " ^ string_of_int index ^ Position.here index_pos)); in "fn f => fn " ^ tuple_vars "x" n ^ " => " ^ tuple (map (fn a => (if cond a then "f x" else "x") ^ a) (indices n)) end))); end; (* outer syntax *) val _ = Theory.setup (ML_Antiquotation.value_embedded \<^binding>\keyword\ (Args.context -- Scan.lift (Parse.embedded_position || Parse.position (Parse.keyword_with (K true))) >> (fn (ctxt, (name, pos)) => if Keyword.is_keyword (Thy_Header.get_keywords' ctxt) name then (Context_Position.report ctxt pos (Token.keyword_markup (true, Markup.keyword2) name); "Parse.$$$ " ^ ML_Syntax.print_string name) else error ("Bad outer syntax keyword " ^ quote name ^ Position.here pos))) #> ML_Antiquotation.value_embedded \<^binding>\command_keyword\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, (name, pos)) => (case Keyword.command_markup (Thy_Header.get_keywords' ctxt) name of SOME markup => (Context_Position.reports ctxt [(pos, markup), (pos, Markup.keyword1)]; ML_Syntax.print_pair ML_Syntax.print_string ML_Syntax.print_position (name, pos)) | NONE => error ("Bad outer syntax command " ^ quote name ^ Position.here pos))))); end; diff --git a/src/Pure/ML/ml_context.ML b/src/Pure/ML/ml_context.ML --- a/src/Pure/ML/ml_context.ML +++ b/src/Pure/ML/ml_context.ML @@ -1,235 +1,235 @@ (* Title: Pure/ML/ml_context.ML Author: Makarius ML context and antiquotations. 
*) signature ML_CONTEXT = sig val check_antiquotation: Proof.context -> xstring * Position.T -> string val struct_name: Proof.context -> string val variant: string -> Proof.context -> string * Proof.context type decl = Proof.context -> ML_Lex.token list * ML_Lex.token list type decls = Proof.context -> (ML_Lex.token list * ML_Lex.token list) list type antiquotation = Position.range -> Token.src -> Proof.context -> decl * Proof.context val add_antiquotation: binding -> bool -> antiquotation -> theory -> theory val print_antiquotations: bool -> Proof.context -> unit val expand_antiquotes: ML_Lex.token Antiquote.antiquote list -> Proof.context -> decl * Proof.context val expand_antiquotes_list: ML_Lex.token Antiquote.antiquote list list -> Proof.context -> decls * Proof.context val eval: ML_Compiler.flags -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit val eval_file: ML_Compiler.flags -> Path.T -> unit val eval_source: ML_Compiler.flags -> Input.source -> unit val eval_in: Proof.context option -> ML_Compiler.flags -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit val eval_source_in: Proof.context option -> ML_Compiler.flags -> Input.source -> unit val exec: (unit -> unit) -> Context.generic -> Context.generic val expression: Position.T -> ML_Lex.token Antiquote.antiquote list -> Context.generic -> Context.generic end structure ML_Context: ML_CONTEXT = struct (** ML antiquotations **) (* names for generated environment *) structure Names = Proof_Data ( type T = string * Name.context; val init_names = ML_Syntax.reserved |> Name.declare "ML_context"; fun init _ = ("Isabelle0", init_names); ); fun struct_name ctxt = #1 (Names.get ctxt); val struct_begin = (Names.map o apfst) (fn _ => "Isabelle" ^ serial_string ()); fun variant a ctxt = let val names = #2 (Names.get ctxt); val (b, names') = Name.variant (Name.desymbolize (SOME false) a) names; val ctxt' = (Names.map o apsnd) (K names') ctxt; in (b, ctxt') end; (* theory data *) type decl = Proof.context -> ML_Lex.token list * ML_Lex.token list; (*final context -> ML env, ML body*) type decls = Proof.context -> (ML_Lex.token list * ML_Lex.token list) list; type antiquotation = Position.range -> Token.src -> Proof.context -> decl * Proof.context; structure Antiquotations = Theory_Data ( type T = (bool * antiquotation) Name_Space.table; val empty : T = Name_Space.empty_table Markup.ML_antiquotationN; fun merge data : T = Name_Space.merge_tables data; ); val get_antiquotations = Antiquotations.get o Proof_Context.theory_of; fun check_antiquotation ctxt = #1 o Name_Space.check (Context.Proof ctxt) (get_antiquotations ctxt); fun add_antiquotation name embedded antiquotation thy = thy |> Antiquotations.map (Name_Space.define (Context.Theory thy) true (name, (embedded, antiquotation)) #> #2); fun print_antiquotations verbose ctxt = Pretty.big_list "ML antiquotations:" (map (Pretty.mark_str o #1) (Name_Space.markup_table verbose ctxt (get_antiquotations ctxt))) |> Pretty.writeln; fun apply_antiquotation range src ctxt = let val (src', (embedded, antiquotation)) = Token.check_src ctxt get_antiquotations src; val _ = if Options.default_bool "update_control_cartouches" then Context_Position.reports_text ctxt (Antiquote.update_reports embedded (#1 range) (map Token.content_of src')) else (); in antiquotation range src' ctxt end; (* parsing *) local val antiq = Parse.!!! 
((Parse.token Parse.liberal_name ::: Parse.args) --| Scan.ahead Parse.eof); fun expand_antiquote ant ctxt = (case ant of Antiquote.Text tok => (K ([], [tok]), ctxt) | Antiquote.Control {name, range, body} => ctxt |> apply_antiquotation range (Token.make_src name (if null body then [] else [Token.read_cartouche body])) | Antiquote.Antiq {range, body, ...} => ctxt |> apply_antiquotation range - (Token.read_antiq (Thy_Header.get_keywords' ctxt) antiq (body, #1 range))); + (Parse.read_antiq (Thy_Header.get_keywords' ctxt) antiq (body, #1 range))); in fun expand_antiquotes ants ctxt = let val (decls, ctxt') = fold_map expand_antiquote ants ctxt; fun decl ctxt'' = decls |> map (fn d => d ctxt'') |> split_list |> apply2 flat; in (decl, ctxt') end; fun expand_antiquotes_list antss ctxt = let val (decls, ctxt') = fold_map expand_antiquotes antss ctxt; fun decl' ctxt'' = map (fn decl => decl ctxt'') decls; in (decl', ctxt') end end; (* evaluation *) local fun make_env name visible = (ML_Lex.tokenize ("structure " ^ name ^ " =\nstruct\n\ \val ML_context = Context_Position.set_visible " ^ Bool.toString visible ^ " (Context.the_local_context ());\n"), ML_Lex.tokenize "end;"); fun reset_env name = ML_Lex.tokenize ("structure " ^ name ^ " = struct end"); fun eval_antiquotes ants opt_context = if forall Antiquote.is_text ants orelse is_none opt_context then (([], map (Antiquote.the_text "No context -- cannot expand ML antiquotations") ants), Option.map Context.proof_of opt_context) else let val context = the opt_context; val visible = (case context of Context.Proof ctxt => Context_Position.is_visible ctxt | _ => true); val ctxt = struct_begin (Context.proof_of context); val (begin_env, end_env) = make_env (struct_name ctxt) visible; val (decl, ctxt') = expand_antiquotes ants ctxt; val (ml_env, ml_body) = decl ctxt'; in ((begin_env @ ml_env @ end_env, ml_body), SOME ctxt') end; in fun eval flags pos ants = let val non_verbose = ML_Compiler.verbose false flags; (*prepare source text*) val ((env, body), env_ctxt) = eval_antiquotes ants (Context.get_generic_context ()); val _ = (case env_ctxt of SOME ctxt => if Config.get ctxt ML_Options.source_trace andalso Context_Position.is_visible ctxt then tracing (cat_lines [ML_Lex.flatten env, ML_Lex.flatten body]) else () | NONE => ()); (*prepare environment*) val _ = Context.setmp_generic_context (Option.map (Context.Proof o Context_Position.set_visible false) env_ctxt) (fn () => (ML_Compiler.eval non_verbose Position.none env; Context.get_generic_context ())) () |> (fn NONE => () | SOME context' => Context.>> (ML_Env.inherit [context'])); (*eval body*) val _ = ML_Compiler.eval flags pos body; (*clear environment*) val _ = (case (env_ctxt, is_some (Context.get_generic_context ())) of (SOME ctxt, true) => let val name = struct_name ctxt; val _ = ML_Compiler.eval non_verbose Position.none (reset_env name); val _ = Context.>> (ML_Env.forget_structure name); in () end | _ => ()); in () end; end; (* derived versions *) fun eval_file flags path = let val pos = Path.position path in eval flags pos (ML_Lex.read_text (File.read path, pos)) end; fun eval_source flags source = let val opt_context = Context.get_generic_context (); val {read_source, ...} = ML_Env.operations opt_context (#environment flags); in eval flags (Input.pos_of source) (read_source source) end; fun eval_in ctxt flags pos ants = Context.setmp_generic_context (Option.map Context.Proof ctxt) (fn () => eval flags pos ants) (); fun eval_source_in ctxt flags source = Context.setmp_generic_context (Option.map 
Context.Proof ctxt) (fn () => eval_source flags source) (); fun exec (e: unit -> unit) context = (case Context.setmp_generic_context (SOME context) (fn () => (e (); Context.get_generic_context ())) () of SOME context' => context' | NONE => error "Missing context after execution"); fun expression pos ants = exec (fn () => eval ML_Compiler.flags pos ants); end; val ML = ML_Context.eval_source (ML_Compiler.verbose true ML_Compiler.flags); diff --git a/src/Pure/Thy/document_antiquotation.ML b/src/Pure/Thy/document_antiquotation.ML --- a/src/Pure/Thy/document_antiquotation.ML +++ b/src/Pure/Thy/document_antiquotation.ML @@ -1,274 +1,274 @@ (* Title: Pure/Thy/document_antiquotation.ML Author: Makarius Document antiquotations. *) signature DOCUMENT_ANTIQUOTATION = sig val thy_output_display: bool Config.T val thy_output_cartouche: bool Config.T val thy_output_quotes: bool Config.T val thy_output_margin: int Config.T val thy_output_indent: int Config.T val thy_output_source: bool Config.T val thy_output_source_cartouche: bool Config.T val thy_output_break: bool Config.T val thy_output_modes: string Config.T val trim_blanks: Proof.context -> string -> string val trim_lines: Proof.context -> string -> string val indent_lines: Proof.context -> string -> string val prepare_lines: Proof.context -> string -> string val delimit: Proof.context -> Pretty.T -> Pretty.T val indent: Proof.context -> Pretty.T -> Pretty.T val format: Proof.context -> Pretty.T -> string val output: Proof.context -> Pretty.T -> Output.output val print_antiquotations: bool -> Proof.context -> unit val check: Proof.context -> xstring * Position.T -> string val check_option: Proof.context -> xstring * Position.T -> string val setup: binding -> 'a context_parser -> ({context: Proof.context, source: Token.src, argument: 'a} -> Latex.text) -> theory -> theory val setup_embedded: binding -> 'a context_parser -> ({context: Proof.context, source: Token.src, argument: 'a} -> Latex.text) -> theory -> theory val setup_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory val boolean: string -> bool val integer: string -> int val evaluate: (Symbol_Pos.T list -> Latex.text) -> Proof.context -> Antiquote.text_antiquote -> Latex.text val approx_content: Proof.context -> string -> string end; structure Document_Antiquotation: DOCUMENT_ANTIQUOTATION = struct (* options *) val thy_output_display = Attrib.setup_option_bool ("thy_output_display", \<^here>); val thy_output_break = Attrib.setup_option_bool ("thy_output_break", \<^here>); val thy_output_cartouche = Attrib.setup_option_bool ("thy_output_cartouche", \<^here>); val thy_output_quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>); val thy_output_margin = Attrib.setup_option_int ("thy_output_margin", \<^here>); val thy_output_indent = Attrib.setup_option_int ("thy_output_indent", \<^here>); val thy_output_source = Attrib.setup_option_bool ("thy_output_source", \<^here>); val thy_output_source_cartouche = Attrib.setup_option_bool ("thy_output_source_cartouche", \<^here>); val thy_output_modes = Attrib.setup_option_string ("thy_output_modes", \<^here>); (* blanks *) fun trim_blanks ctxt = not (Config.get ctxt thy_output_display) ? 
Symbol.trim_blanks; fun trim_lines ctxt = if not (Config.get ctxt thy_output_display) then split_lines #> map Symbol.trim_blanks #> space_implode " " else I; fun indent_lines ctxt = if Config.get ctxt thy_output_display then prefix_lines (Symbol.spaces (Config.get ctxt thy_output_indent)) else I; fun prepare_lines ctxt = trim_lines ctxt #> indent_lines ctxt; (* output *) fun delimit ctxt = if Config.get ctxt thy_output_cartouche then Pretty.cartouche else if Config.get ctxt thy_output_quotes then Pretty.quote else I; fun indent ctxt = Config.get ctxt thy_output_display ? Pretty.indent (Config.get ctxt thy_output_indent); fun format ctxt = if Config.get ctxt thy_output_display orelse Config.get ctxt thy_output_break then Pretty.string_of_margin (Config.get ctxt thy_output_margin) else Pretty.unformatted_string_of; fun output ctxt = delimit ctxt #> indent ctxt #> format ctxt #> Output.output; (* theory data *) structure Data = Theory_Data ( type T = (bool * (Token.src -> Proof.context -> Latex.text)) Name_Space.table * (string -> Proof.context -> Proof.context) Name_Space.table; val empty : T = (Name_Space.empty_table Markup.document_antiquotationN, Name_Space.empty_table Markup.document_antiquotation_optionN); fun merge ((commands1, options1), (commands2, options2)) : T = (Name_Space.merge_tables (commands1, commands2), Name_Space.merge_tables (options1, options2)); ); val get_antiquotations = Data.get o Proof_Context.theory_of; fun print_antiquotations verbose ctxt = let val (commands, options) = get_antiquotations ctxt; val command_names = map #1 (Name_Space.markup_table verbose ctxt commands); val option_names = map #1 (Name_Space.markup_table verbose ctxt options); in [Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names), Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)] end |> Pretty.writeln_chunks; fun check ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt)); fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)); fun gen_setup name embedded scan body thy = let fun cmd src ctxt = let val (x, ctxt') = Token.syntax scan src ctxt in body {context = ctxt', source = src, argument = x} end; val entry = (name, (embedded, cmd)); in thy |> Data.map (apfst (Name_Space.define (Context.Theory thy) true entry #> #2)) end; fun setup name = gen_setup name false; fun setup_embedded name = gen_setup name true; fun setup_option name opt thy = thy |> Data.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> #2)); (* syntax *) local val property = Parse.name_position -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) ""; val properties = Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) []; in val parse_antiq = Parse.!!! 
(Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof) >> (fn ((name, opts), args) => (opts, name :: args)); end; (* evaluate *) local fun command pos (opts, src) ctxt = let val (src', (embedded, scan)) = Token.check_src ctxt (#1 o get_antiquotations) src; val _ = if null opts andalso Options.default_bool "update_control_cartouches" then Context_Position.reports_text ctxt (Antiquote.update_reports embedded pos (map Token.content_of src')) else (); in scan src' ctxt end; fun option ((xname, pos), s) ctxt = let val (_, opt) = Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos); in opt s ctxt end; fun eval ctxt pos (opts, src) = let val preview_ctxt = fold option opts (Config.put show_markup false ctxt); val _ = command pos (opts, src) preview_ctxt; val print_ctxt = Context_Position.set_visible false preview_ctxt; val print_modes = space_explode "," (Config.get print_ctxt thy_output_modes) @ [Latex.latexN, Pretty.regularN]; in Print_Mode.with_modes print_modes (fn () => command pos (opts, src) print_ctxt) () end; in fun read_antiq ctxt ({range = (pos, _), body, ...}: Antiquote.antiq) = - Token.read_antiq (Thy_Header.get_keywords' ctxt) parse_antiq (body, pos); + Parse.read_antiq (Thy_Header.get_keywords' ctxt) parse_antiq (body, pos); fun evaluate eval_text ctxt antiq = (case antiq of Antiquote.Text ss => eval_text ss | Antiquote.Control {name, body, ...} => eval ctxt Position.none ([], Token.make_src name (if null body then [] else [Token.read_cartouche body])) | Antiquote.Antiq antiq => eval ctxt (#1 (#range antiq)) (read_antiq ctxt antiq)); end; (* approximative content, e.g. for index entries *) local val strip_symbols = [Symbol.open_, Symbol.close, "'", "\"", "`"]; fun strip_symbol sym = if member (op =) strip_symbols sym then "" else (case Symbol.decode sym of Symbol.Char s => s | Symbol.UTF8 s => s | Symbol.Sym s => s | Symbol.Control s => s | _ => ""); fun strip_antiq _ (Antiquote.Text body) = map Symbol_Pos.symbol body | strip_antiq _ (Antiquote.Control {body, ...}) = map Symbol_Pos.symbol body | strip_antiq ctxt (Antiquote.Antiq antiq) = read_antiq ctxt antiq |> #2 |> tl |> maps (Symbol.explode o Token.content_of); in fun approx_content ctxt = Symbol_Pos.explode0 #> trim (Symbol.is_blank o Symbol_Pos.symbol) #> Antiquote.parse_comments Position.none #> maps (strip_antiq ctxt) #> map strip_symbol #> implode; end; (* option syntax *) fun boolean "" = true | boolean "true" = true | boolean "false" = false | boolean s = error ("Bad boolean value: " ^ Library.quote s); fun integer s = let fun int ss = (case Library.read_int ss of (i, []) => i | _ => error ("Bad integer value: " ^ Library.quote s)); in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end; val _ = Theory.setup (setup_option \<^binding>\show_types\ (Config.put show_types o boolean) #> setup_option \<^binding>\show_sorts\ (Config.put show_sorts o boolean) #> setup_option \<^binding>\show_structs\ (Config.put show_structs o boolean) #> setup_option \<^binding>\show_question_marks\ (Config.put show_question_marks o boolean) #> setup_option \<^binding>\show_abbrevs\ (Config.put show_abbrevs o boolean) #> setup_option \<^binding>\names_long\ (Config.put Name_Space.names_long o boolean) #> setup_option \<^binding>\names_short\ (Config.put Name_Space.names_short o boolean) #> setup_option \<^binding>\names_unique\ (Config.put Name_Space.names_unique o boolean) #> setup_option \<^binding>\eta_contract\ (Config.put Syntax_Trans.eta_contract o boolean) #> 
setup_option \<^binding>\display\ (Config.put thy_output_display o boolean) #> setup_option \<^binding>\break\ (Config.put thy_output_break o boolean) #> setup_option \<^binding>\cartouche\ (Config.put thy_output_cartouche o boolean) #> setup_option \<^binding>\quotes\ (Config.put thy_output_quotes o boolean) #> setup_option \<^binding>\mode\ (Config.put thy_output_modes) #> setup_option \<^binding>\margin\ (Config.put thy_output_margin o integer) #> setup_option \<^binding>\indent\ (Config.put thy_output_indent o integer) #> setup_option \<^binding>\source\ (Config.put thy_output_source o boolean) #> setup_option \<^binding>\source_cartouche\ (Config.put thy_output_source_cartouche o boolean) #> setup_option \<^binding>\goals_limit\ (Config.put Goal_Display.goals_limit o integer)); end; diff --git a/src/Pure/Thy/document_marker.ML b/src/Pure/Thy/document_marker.ML --- a/src/Pure/Thy/document_marker.ML +++ b/src/Pure/Thy/document_marker.ML @@ -1,115 +1,115 @@ (* Title: Pure/Thy/document_marker.ML Author: Makarius Document markers: declarations on the presentation context. *) signature DOCUMENT_MARKER = sig type marker = Proof.context -> Proof.context val check: Proof.context -> string * Position.T -> string * (Token.src -> marker) val setup: binding -> 'a context_parser -> ('a -> marker) -> theory -> theory val setup0: binding -> 'a parser -> ('a -> marker) -> theory -> theory val evaluate: string -> Input.source -> marker val command_name: Proof.context -> string val legacy_tag: string -> Input.source end; structure Document_Marker: DOCUMENT_MARKER = struct (* theory data *) type marker = Proof.context -> Proof.context; structure Markers = Theory_Data ( type T = (Token.src -> Proof.context -> Proof.context) Name_Space.table; val empty : T = Name_Space.empty_table "document_marker"; val merge = Name_Space.merge_tables; ); val get_markers = Markers.get o Proof_Context.theory_of; fun check ctxt = Name_Space.check (Context.Proof ctxt) (get_markers ctxt); fun setup name scan body thy = let fun marker src ctxt = let val (x, ctxt') = Token.syntax scan src ctxt in body x ctxt' end; in thy |> Markers.map (Name_Space.define (Context.Theory thy) true (name, marker) #> #2) end; fun setup0 name scan = setup name (Scan.lift scan); (* evaluate inner syntax *) val config_command_name = Config.declare_string ("command_name", \<^here>) (K ""); val command_name = Config.apply config_command_name; val parse_marker = Parse.token Parse.liberal_name ::: Parse.!!! 
Parse.args; fun evaluate cmd_name input ctxt = let val syms = Input.source_explode input |> drop_prefix (fn (s, _) => s = Symbol.marker); val pos = #1 (Symbol_Pos.range syms); val _ = Context_Position.reports ctxt (map (pair pos) (Comment.kind_markups Comment.Marker)); val keywords = Keyword.no_command_keywords (Thy_Header.get_keywords' ctxt); val body = Symbol_Pos.cartouche_content syms; val markers = Input.source true (Symbol_Pos.implode body) (Symbol_Pos.range body) - |> Token.read_embedded ctxt keywords (Parse.list parse_marker); + |> Parse.read_embedded ctxt keywords (Parse.list parse_marker); in ctxt |> Config.put config_command_name cmd_name |> fold (fn src => #2 (check ctxt (Token.name_of_src src)) src) markers |> Config.put config_command_name (command_name ctxt) end; (* tag with scope *) val parse_tag = Scan.state :|-- (fn context => let val ctxt = Context.proof_of context; val scope0 = if Keyword.is_theory_goal (Thy_Header.get_keywords' ctxt) (command_name ctxt) then Document_Source.Command else Document_Source.Proof; val tag = Scan.optional Document_Source.tag_scope scope0 -- Document_Source.tag_name >> swap; in Scan.lift (Parse.position tag) end); fun update_tags (tag as (name, _), pos) ctxt = (Context_Position.report ctxt pos (Markup.document_tag name); Document_Source.update_tags tag ctxt); (* concrete markers *) fun meta_data markup arg ctxt = (Context_Position.report_text ctxt (Position.thread_data ()) markup arg; ctxt); val _ = Theory.setup (setup (Binding.make ("tag", \<^here>)) parse_tag update_tags #> setup0 (Binding.make ("title", \<^here>)) Parse.embedded (meta_data Markup.meta_title) #> setup0 (Binding.make ("creator", \<^here>)) Parse.embedded (meta_data Markup.meta_creator) #> setup0 (Binding.make ("contributor", \<^here>)) Parse.embedded (meta_data Markup.meta_contributor) #> setup0 (Binding.make ("date", \<^here>)) Parse.embedded (meta_data Markup.meta_date) #> setup0 (Binding.make ("license", \<^here>)) Parse.embedded (meta_data Markup.meta_license) #> setup0 (Binding.make ("description", \<^here>)) Parse.embedded_input (fn source => fn ctxt => let val (arg, pos) = Input.source_content source; val _ = Context_Position.report ctxt pos Markup.words; in meta_data Markup.meta_description arg ctxt end)); fun legacy_tag name = Input.string (cartouche ("tag (proof) " ^ Symbol_Pos.quote_string_qq name)); end;
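For orientation, here is a minimal usage sketch of the relocated entry points; it is not part of the changeset and assumes only the signatures added to parse.ML above (Parse.read_embedded: Proof.context -> Keyword.keywords -> 'a parser -> Input.source -> 'a, Parse.read_antiq: Keyword.keywords -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a, and Parse.read_embedded_src for Token.src arguments, which subsumes the local helper removed from ml_antiquotations.ML). The function name demo_read_embedded, the example parser, and the input string are illustrative inventions, not code from the Isabelle sources.

(* Illustrative sketch only: the intended call pattern for Parse.read_embedded
   after this changeset.  The keyword set is taken from the enclosing theory,
   in the style of the updated call sites above; "demo_read_embedded" and its
   example parser/input are made up for illustration. *)
fun demo_read_embedded ctxt =
  let
    val keywords = Thy_Header.get_keywords' ctxt;  (* keywords of the current theory *)
    val parse = Parse.name -- Parse.nat;           (* arbitrary example parser *)
    val input = Input.string "foo 42";             (* embedded source to be parsed *)
  in Parse.read_embedded ctxt keywords parse input end;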