diff --git a/src/Pure/PIDE/command.ML b/src/Pure/PIDE/command.ML --- a/src/Pure/PIDE/command.ML +++ b/src/Pure/PIDE/command.ML @@ -1,498 +1,501 @@ (* Title: Pure/PIDE/command.ML Author: Makarius Prover command execution: read -- eval -- print. *) signature COMMAND = sig type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option} - val read_file: Path.T -> Position.T -> Path.T -> Token.file + val read_file: Path.T -> Position.T -> bool -> Path.T -> Token.file val read_thy: Toplevel.state -> theory val read: Keyword.keywords -> theory -> Path.T-> (unit -> theory) -> blob Exn.result list * int -> Token.T list -> Toplevel.transition type eval val eval_command_id: eval -> Document_ID.command val eval_exec_id: eval -> Document_ID.exec val eval_eq: eval * eval -> bool val eval_running: eval -> bool val eval_finished: eval -> bool val eval_result_command: eval -> Toplevel.transition val eval_result_state: eval -> Toplevel.state val eval: Keyword.keywords -> Path.T -> (unit -> theory) -> blob Exn.result list * int -> Document_ID.command -> Token.T list -> eval -> eval type print type print_fn = Toplevel.transition -> Toplevel.state -> unit val print0: {pri: int, print_fn: print_fn} -> eval -> print val print: bool -> (string * string list) list -> Keyword.keywords -> string -> eval -> print list -> print list option val parallel_print: print -> bool type print_function = {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option val print_function: string -> print_function -> unit val no_print_function: string -> unit type exec = eval * print list val init_exec: theory option -> exec val no_exec: exec val exec_ids: exec option -> Document_ID.exec list val exec: Document_ID.execution -> exec -> unit val exec_parallel_prints: Document_ID.execution -> Future.task list -> exec -> exec option end; structure Command: COMMAND = struct (** main phases of execution **) fun task_context group f = f |> Future.interruptible_task |> Future.task_context "Command.run_process" group; (* read *) type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option}; -fun read_file_node file_node master_dir pos src_path = +fun read_file_node file_node master_dir pos delimited src_path = let val _ = if Context_Position.pide_reports () - then Position.report pos Markup.language_path else (); + then Position.report pos (Markup.language_path delimited) else (); val _ = (case try Url.explode file_node of NONE => () | SOME (Url.File _) => () | _ => error ("Prover cannot load remote file " ^ Markup.markup (Markup.path file_node) (quote file_node))); val full_path = File.check_file (File.full_path master_dir src_path); val text = File.read full_path; val lines = split_lines text; val digest = SHA1.digest text; val file_pos = Position.copy_id pos (Path.position full_path); in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end handle ERROR msg => error (msg ^ Position.here pos); val read_file = read_file_node ""; local fun blob_file src_path lines digest file_node = let val file_pos = Position.file file_node |> (case Position.get_id (Position.thread_data ()) of NONE => I | SOME exec_id => Position.put_id exec_id); in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end fun resolve_files master_dir (blobs, blobs_index) toks = (case Outer_Syntax.parse_spans toks of - [Command_Span.Span 
(Command_Span.Command_Span (cmd, _), _)] =>
+    [Command_Span.Span (Command_Span.Command_Span _, _)] =>
      (case try (nth toks) blobs_index of
        SOME tok =>
          let
-            val pos = Token.pos_of tok;
+            val source = Token.input_of tok;
+            val pos = Input.pos_of source;
+            val delimited = Input.is_delimited source;
+
            fun make_file (Exn.Res {file_node, src_path, content = NONE}) =
                  Exn.interruptible_capture (fn () =>
-                    read_file_node file_node master_dir pos src_path) ()
+                    read_file_node file_node master_dir pos delimited src_path) ()
              | make_file (Exn.Res {file_node, src_path, content = SOME (digest, lines)}) =
-                  (Position.report pos Markup.language_path;
+                  (Position.report pos (Markup.language_path delimited);
                   Exn.Res (blob_file src_path lines digest file_node))
              | make_file (Exn.Exn e) = Exn.Exn e;
            val files = map make_file blobs;
          in
            toks |> map_index (fn (i, tok) =>
              if i = blobs_index then Token.put_files files tok else tok)
          end
      | NONE => toks)
  | _ => toks);

fun reports_of_token keywords tok =
  let
    val malformed_symbols =
      Input.source_explode (Token.input_of tok)
      |> map_filter (fn (sym, pos) =>
          if Symbol.is_malformed sym
          then SOME ((pos, Markup.bad ()), "Malformed symbolic character") else NONE);
    val is_malformed = Token.is_error tok orelse not (null malformed_symbols);
    val reports = Token.reports keywords tok @ Token.completion_report tok @ malformed_symbols;
  in (is_malformed, reports) end;

in

fun read_thy st = Toplevel.theory_of st
  handle Toplevel.UNDEF => Pure_Syn.bootstrap_thy;

fun read keywords thy master_dir init blobs_info span =
  let
    val command_reports = Outer_Syntax.command_reports thy;
    val core_range = Token.range_of (drop_suffix Token.is_ignored span);
    val pos =
      (case find_first Token.is_command span of
        SOME tok => Token.pos_of tok
      | NONE => #1 core_range);
    val token_reports = map (reports_of_token keywords) span;
    val _ = Position.reports_text (maps #2 token_reports @ maps command_reports span);
    val verbatim = span
      |> map_filter (fn tok =>
          if Token.kind_of tok = Token.Verbatim then SOME (Token.pos_of tok) else NONE);
    val _ =
      if null verbatim then ()
      else
        legacy_feature ("Old-style {* verbatim *} token -- use \<open>cartouche\<close> instead" ^
          Position.here_list verbatim);
  in
    if exists #1 token_reports then Toplevel.malformed pos "Malformed command syntax"
    else Outer_Syntax.parse_span thy init (resolve_files master_dir blobs_info span)
  end;

end;


(* eval *)

type eval_state =
  {failed: bool, command: Toplevel.transition, state: Toplevel.state};

fun init_eval_state opt_thy =
 {failed = false,
  command = Toplevel.empty,
  state =
    (case opt_thy of
      NONE => Toplevel.init_toplevel ()
    | SOME thy => Toplevel.theory_toplevel thy)};

datatype eval =
  Eval of {command_id: Document_ID.command, exec_id: Document_ID.exec, eval_process: eval_state lazy};

fun eval_command_id (Eval {command_id, ...}) = command_id;
fun eval_exec_id (Eval {exec_id, ...}) = exec_id;
val eval_eq = op = o apply2 eval_exec_id;

val eval_running = Execution.is_running_exec o eval_exec_id;
fun eval_finished (Eval {eval_process, ...}) = Lazy.is_finished eval_process;

fun eval_result (Eval {eval_process, ...}) = Exn.release (Lazy.finished_result eval_process);
val eval_result_command = #command o eval_result;
val eval_result_state = #state o eval_result;

local

fun reset_state keywords tr st0 = Toplevel.setmp_thread_position tr
  (fn () =>
    let
      val name = Toplevel.name_of tr;
      val res =
        if Keyword.is_theory_body keywords name then Toplevel.reset_theory st0
        else if Keyword.is_proof keywords name then Toplevel.reset_proof st0
        else if Keyword.is_theory_end keywords name
then (case Toplevel.reset_notepad st0 of NONE => Toplevel.reset_theory st0 | some => some) else NONE; in (case res of NONE => st0 | SOME st => (Output.error_message (Toplevel.type_error tr ^ " -- using reset state"); st)) end) (); fun run keywords int tr st = if Future.proofs_enabled 1 andalso Keyword.is_diag keywords (Toplevel.name_of tr) then let val (tr1, tr2) = Toplevel.fork_presentation tr; val _ = Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1} (fn () => Toplevel.command_exception int tr1 st); in Toplevel.command_errors int tr2 st end else Toplevel.command_errors int tr st; fun check_token_comments ctxt tok = (Thy_Output.check_comments ctxt (Input.source_explode (Token.input_of tok)); []) handle exn => if Exn.is_interrupt exn then Exn.reraise exn else Runtime.exn_messages exn; fun check_span_comments ctxt span tr = Toplevel.setmp_thread_position tr (fn () => maps (check_token_comments ctxt) span) (); fun report tr m = Toplevel.setmp_thread_position tr (fn () => Output.report [Markup.markup_only m]) (); fun status tr m = Toplevel.setmp_thread_position tr (fn () => Output.status [Markup.markup_only m]) (); fun command_indent tr st = (case try Toplevel.proof_of st of SOME prf => let val keywords = Thy_Header.get_keywords (Proof.theory_of prf) in if Keyword.command_kind keywords (Toplevel.name_of tr) = SOME Keyword.prf_script then (case try Proof.goal prf of SOME {goal, ...} => let val n = Thm.nprems_of goal in if n > 1 then report tr (Markup.command_indent (n - 1)) else () end | NONE => ()) else () end | NONE => ()); fun eval_state keywords span tr ({state, ...}: eval_state) = let val _ = Thread_Attributes.expose_interrupt (); val st = reset_state keywords tr state; val _ = command_indent tr st; val _ = status tr Markup.running; val (errs1, result) = run keywords true tr st; val errs2 = (case result of NONE => [] | SOME st' => check_span_comments (Toplevel.presentation_context st') span tr); val errs = errs1 @ errs2; val _ = List.app (Future.error_message (Toplevel.pos_of tr)) errs; in (case result of NONE => let val _ = status tr Markup.failed; val _ = status tr Markup.finished; val _ = if null errs then (status tr Markup.canceled; Exn.interrupt ()) else (); in {failed = true, command = tr, state = st} end | SOME st' => let val _ = if Keyword.is_theory_end keywords (Toplevel.name_of tr) andalso can (Toplevel.end_theory Position.none) st' then status tr Markup.finalized else (); val _ = status tr Markup.finished; in {failed = false, command = tr, state = st'} end) end; in fun eval keywords master_dir init blobs_info command_id span eval0 = let val exec_id = Document_ID.make (); fun process () = let val eval_state0 = eval_result eval0; val thy = read_thy (#state eval_state0); val tr = Position.setmp_thread_data (Position.id_only (Document_ID.print exec_id)) (fn () => read keywords thy master_dir init blobs_info span |> Toplevel.exec_id exec_id) (); in eval_state keywords span tr eval_state0 end; in Eval {command_id = command_id, exec_id = exec_id, eval_process = Lazy.lazy_name "Command.eval" process} end; end; (* print *) datatype print = Print of {name: string, args: string list, delay: Time.time option, pri: int, persistent: bool, exec_id: Document_ID.exec, print_process: unit lazy}; fun print_exec_id (Print {exec_id, ...}) = exec_id; val print_eq = op = o apply2 print_exec_id; type print_fn = Toplevel.transition -> Toplevel.state -> unit; type print_function = {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} 
-> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option; local val print_functions = Synchronized.var "Command.print_functions" ([]: (string * print_function) list); fun print_error tr opt_context e = (Toplevel.setmp_thread_position tr o Runtime.controlled_execution opt_context) e () handle exn => if Exn.is_interrupt exn then Exn.reraise exn else List.app (Future.error_message (Toplevel.pos_of tr)) (Runtime.exn_messages exn); fun print_finished (Print {print_process, ...}) = Lazy.is_finished print_process; fun print_persistent (Print {persistent, ...}) = persistent; val overlay_ord = prod_ord string_ord (list_ord string_ord); fun make_print (name, args) {delay, pri, persistent, strict, print_fn} eval = let val exec_id = Document_ID.make (); fun process () = let val {failed, command, state = st', ...} = eval_result eval; val tr = Toplevel.exec_id exec_id command; val opt_context = try Toplevel.generic_theory_of st'; in if failed andalso strict then () else print_error tr opt_context (fn () => print_fn tr st') end; in Print { name = name, args = args, delay = delay, pri = pri, persistent = persistent, exec_id = exec_id, print_process = Lazy.lazy_name "Command.print" process} end; fun bad_print name_args exn = make_print name_args {delay = NONE, pri = 0, persistent = false, strict = false, print_fn = fn _ => fn _ => Exn.reraise exn}; in fun print0 {pri, print_fn} = make_print ("", [serial_string ()]) {delay = NONE, pri = pri, persistent = true, strict = true, print_fn = print_fn}; fun print command_visible command_overlays keywords command_name eval old_prints = let val print_functions = Synchronized.value print_functions; fun new_print (name, args) get_pr = let val params = {keywords = keywords, command_name = command_name, args = args, exec_id = eval_exec_id eval}; in (case Exn.capture (Runtime.controlled_execution NONE get_pr) params of Exn.Res NONE => NONE | Exn.Res (SOME pr) => SOME (make_print (name, args) pr eval) | Exn.Exn exn => SOME (bad_print (name, args) exn eval)) end; fun get_print (name, args) = (case find_first (fn Print print => (#name print, #args print) = (name, args)) old_prints of NONE => (case AList.lookup (op =) print_functions name of NONE => SOME (bad_print (name, args) (ERROR ("Missing print function " ^ quote name)) eval) | SOME get_pr => new_print (name, args) get_pr) | some => some); val retained_prints = filter (fn print => print_finished print andalso print_persistent print) old_prints; val visible_prints = if command_visible then fold (fn (name, _) => cons (name, [])) print_functions command_overlays |> sort_distinct overlay_ord |> map_filter get_print else []; val new_prints = visible_prints @ retained_prints; in if eq_list print_eq (old_prints, new_prints) then NONE else SOME new_prints end; fun parallel_print (Print {pri, ...}) = pri <= 0 orelse (Future.enabled () andalso Options.default_bool "parallel_print"); fun print_function name f = Synchronized.change print_functions (fn funs => (if name = "" then error "Unnamed print function" else (); if not (AList.defined (op =) funs name) then () else warning ("Redefining command print function: " ^ quote name); AList.update (op =) (name, f) funs)); fun no_print_function name = Synchronized.change print_functions (filter_out (equal name o #1)); end; val _ = print_function "Execution.print" (fn {args, exec_id, ...} => if null args then SOME {delay = NONE, pri = Task_Queue.urgent_pri + 2, persistent = false, strict = false, print_fn = fn _ => fn _ => Execution.fork_prints 
exec_id} else NONE); val _ = print_function "print_state" (fn {keywords, command_name, ...} => if Options.default_bool "editor_output_state" andalso Keyword.is_printed keywords command_name then SOME {delay = NONE, pri = Task_Queue.urgent_pri + 1, persistent = false, strict = false, print_fn = fn _ => fn st => if Toplevel.is_proof st then Output.state (Toplevel.string_of_state st) else ()} else NONE); (* combined execution *) type exec = eval * print list; fun init_exec opt_thy : exec = (Eval {command_id = Document_ID.none, exec_id = Document_ID.none, eval_process = Lazy.value (init_eval_state opt_thy)}, []); val no_exec = init_exec NONE; fun exec_ids NONE = [] | exec_ids (SOME (eval, prints)) = eval_exec_id eval :: map print_exec_id prints; local fun run_process execution_id exec_id process = let val group = Future.worker_subgroup () in if Execution.running execution_id exec_id [group] then ignore (task_context group (fn () => Lazy.force_result {strict = true} process) ()) else () end; fun ignore_process process = Lazy.is_running process orelse Lazy.is_finished process; fun run_eval execution_id (Eval {exec_id, eval_process, ...}) = if Lazy.is_finished eval_process then () else run_process execution_id exec_id eval_process; fun fork_print execution_id deps (Print {name, delay, pri, exec_id, print_process, ...}) = let val group = Future.worker_subgroup (); fun fork () = ignore ((singleton o Future.forks) {name = name, group = SOME group, deps = deps, pri = pri, interrupts = true} (fn () => if ignore_process print_process then () else run_process execution_id exec_id print_process)); in (case delay of NONE => fork () | SOME d => ignore (Event_Timer.request {physical = true} (Time.now () + d) fork)) end; fun run_print execution_id (print as Print {exec_id, print_process, ...}) = if ignore_process print_process then () else if parallel_print print then fork_print execution_id [] print else run_process execution_id exec_id print_process; in fun exec execution_id (eval, prints) = (run_eval execution_id eval; List.app (run_print execution_id) prints); fun exec_parallel_prints execution_id deps (exec as (Eval {eval_process, ...}, prints)) = if Lazy.is_finished eval_process then (List.app (fork_print execution_id deps) prints; NONE) else SOME exec; end; end; diff --git a/src/Pure/PIDE/markup.ML b/src/Pure/PIDE/markup.ML --- a/src/Pure/PIDE/markup.ML +++ b/src/Pure/PIDE/markup.ML @@ -1,786 +1,786 @@ (* Title: Pure/PIDE/markup.ML Author: Makarius Quasi-abstract markup elements. 
*) signature MARKUP = sig type T = string * Properties.T val empty: T val is_empty: T -> bool val properties: Properties.T -> T -> T val nameN: string val name: string -> T -> T val xnameN: string val xname: string -> T -> T val kindN: string val serialN: string val serial_properties: int -> Properties.T val instanceN: string val meta_titleN: string val meta_title: T val meta_creatorN: string val meta_creator: T val meta_contributorN: string val meta_contributor: T val meta_dateN: string val meta_date: T val meta_licenseN: string val meta_license: T val meta_descriptionN: string val meta_description: T val languageN: string val symbolsN: string val delimitedN: string val is_delimited: Properties.T -> bool val language: {name: string, symbols: bool, antiquotes: bool, delimited: bool} -> T val language': {name: string, symbols: bool, antiquotes: bool} -> bool -> T val language_Isar: bool -> T val language_method: T val language_attribute: T val language_sort: bool -> T val language_type: bool -> T val language_term: bool -> T val language_prop: bool -> T val language_ML: bool -> T val language_SML: bool -> T val language_document: bool -> T val language_document_marker: T val language_antiquotation: T val language_text: bool -> T val language_verbatim: bool -> T val language_latex: bool -> T val language_rail: T - val language_path: T + val language_path: bool -> T val language_mixfix: T val bindingN: string val binding: T val entityN: string val entity: string -> string -> T val defN: string val refN: string val completionN: string val completion: T val no_completionN: string val no_completion: T val updateN: string val update: T val lineN: string val end_lineN: string val offsetN: string val end_offsetN: string val fileN: string val idN: string val position_properties: string list val position_property: Properties.entry -> bool val positionN: string val position: T val expressionN: string val expression: string -> T val citationN: string val citation: string -> T val pathN: string val path: string -> T val export_pathN: string val export_path: string -> T val urlN: string val url: string -> T val docN: string val doc: string -> T val toolN: string val tool: string -> T val markupN: string val consistentN: string val unbreakableN: string val block_properties: string list val indentN: string val widthN: string val blockN: string val block: bool -> int -> T val breakN: string val break: int -> int -> T val fbreakN: string val fbreak: T val itemN: string val item: T val wordsN: string val words: T val hiddenN: string val hidden: T val deleteN: string val delete: T val bash_functionN: string val scala_functionN: string val system_optionN: string val sessionN: string val theoryN: string val classN: string val type_nameN: string val constantN: string val fixedN: string val fixed: string -> T val caseN: string val case_: string -> T val dynamic_factN: string val dynamic_fact: string -> T val literal_factN: string val literal_fact: string -> T val method_modifierN: string val tfreeN: string val tfree: T val tvarN: string val tvar: T val freeN: string val free: T val skolemN: string val skolem: T val boundN: string val bound: T val varN: string val var: T val numeralN: string val numeral: T val literalN: string val literal: T val delimiterN: string val delimiter: T val inner_stringN: string val inner_string: T val inner_cartoucheN: string val inner_cartouche: T val token_rangeN: string val token_range: T val sortingN: string val sorting: T val typingN: string val typing: T val class_parameterN: 
string val class_parameter: T val ML_keyword1N: string val ML_keyword1: T val ML_keyword2N: string val ML_keyword2: T val ML_keyword3N: string val ML_keyword3: T val ML_delimiterN: string val ML_delimiter: T val ML_tvarN: string val ML_tvar: T val ML_numeralN: string val ML_numeral: T val ML_charN: string val ML_char: T val ML_stringN: string val ML_string: T val ML_commentN: string val ML_comment: T val ML_defN: string val ML_openN: string val ML_structureN: string val ML_typingN: string val ML_typing: T val ML_breakpointN: string val ML_breakpoint: int -> T val antiquotedN: string val antiquoted: T val antiquoteN: string val antiquote: T val file_typeN: string val antiquotationN: string val ML_antiquotationN: string val document_antiquotationN: string val document_antiquotation_optionN: string val raw_textN: string val raw_text: T val plain_textN: string val plain_text: T val paragraphN: string val paragraph: T val text_foldN: string val text_fold: T val document_markerN: string val document_marker: T val document_tagN: string val document_tag: string -> T val markdown_paragraphN: string val markdown_paragraph: T val markdown_itemN: string val markdown_item: T val markdown_bulletN: string val markdown_bullet: int -> T val markdown_listN: string val markdown_list: string -> T val itemizeN: string val enumerateN: string val descriptionN: string val inputN: string val input: bool -> Properties.T -> T val command_keywordN: string val command_keyword: T val commandN: string val command_properties: T -> T val keywordN: string val keyword_properties: T -> T val stringN: string val string: T val alt_stringN: string val alt_string: T val verbatimN: string val verbatim: T val cartoucheN: string val cartouche: T val commentN: string val comment: T val keyword1N: string val keyword1: T val keyword2N: string val keyword2: T val keyword3N: string val keyword3: T val quasi_keywordN: string val quasi_keyword: T val improperN: string val improper: T val operatorN: string val operator: T val comment1N: string val comment1: T val comment2N: string val comment2: T val comment3N: string val comment3: T val elapsedN: string val cpuN: string val gcN: string val timing_properties: {elapsed: Time.time, cpu: Time.time, gc: Time.time} -> Properties.T val parse_command_timing_properties: Properties.T -> ({file: string, offset: int, name: string} * Time.time) option val timingN: string val timing: {elapsed: Time.time, cpu: Time.time, gc: Time.time} -> T val command_indentN: string val command_indent: int -> T val goalN: string val goal: T val subgoalN: string val subgoal: string -> T val taskN: string val forkedN: string val forked: T val joinedN: string val joined: T val runningN: string val running: T val finishedN: string val finished: T val failedN: string val failed: T val canceledN: string val canceled: T val initializedN: string val initialized: T val finalizedN: string val finalized: T val consolidatingN: string val consolidating: T val consolidatedN: string val consolidated: T val exec_idN: string val initN: string val statusN: string val status: T val resultN: string val result: T val writelnN: string val writeln: T val stateN: string val state: T val informationN: string val information: T val tracingN: string val tracing: T val warningN: string val warning: T val legacyN: string val legacy: T val errorN: string val error: T val systemN: string val system: T val protocolN: string val reportN: string val report: T val no_reportN: string val no_report: T val badN: string val bad: unit -> T val intensifyN: 
string val intensify: T val browserN: string val graphviewN: string val theory_exportsN: string val sendbackN: string val paddingN: string val padding_line: Properties.entry val padding_command: Properties.entry val dialogN: string val dialog: serial -> string -> T val jedit_actionN: string val functionN: string val ML_statistics: {pid: int, stats_dir: string} -> Properties.T val commands_accepted: Properties.T val assign_update: Properties.T val removed_versions: Properties.T val invoke_scala: string -> string -> bool -> Properties.T val cancel_scala: string -> Properties.T val task_statistics: Properties.entry val command_timing: Properties.entry val theory_timing: Properties.entry val session_timing: Properties.entry val loading_theory: string -> Properties.T val finished_theory: string -> Properties.T val build_session_finished: Properties.T val print_operationsN: string val print_operations: Properties.T val exportN: string type export_args = {id: string option, serial: serial, theory_name: string, name: string, executable: bool, compress: bool, strict: bool} val export: export_args -> Properties.T val debugger_state: string -> Properties.T val debugger_output: string -> Properties.T val simp_trace_panelN: string val simp_trace_logN: string val simp_trace_stepN: string val simp_trace_recurseN: string val simp_trace_hintN: string val simp_trace_ignoreN: string val simp_trace_cancel: serial -> Properties.T type output = Output.output * Output.output val no_output: output val add_mode: string -> (T -> output) -> unit val output: T -> output val enclose: T -> Output.output -> Output.output val markup: T -> string -> string val markups: T list -> string -> string val markup_only: T -> string val markup_report: string -> string end; structure Markup: MARKUP = struct (** markup elements **) (* basic markup *) type T = string * Properties.T; val empty = ("", []); fun is_empty ("", _) = true | is_empty _ = false; fun properties more_props ((elem, props): T) = (elem, fold_rev Properties.put more_props props); fun markup_elem name = (name, (name, []): T); fun markup_string name prop = (name, fn s => (name, [(prop, s)]): T); fun markup_int name prop = (name, fn i => (name, [(prop, Value.print_int i)]): T); (* misc properties *) val nameN = "name"; fun name a = properties [(nameN, a)]; val xnameN = "xname"; fun xname a = properties [(xnameN, a)]; val kindN = "kind"; val serialN = "serial"; fun serial_properties i = [(serialN, Value.print_int i)]; val instanceN = "instance"; (* meta data -- see https://www.dublincore.org/specifications/dublin-core/dcmi-terms *) val (meta_titleN, meta_title) = markup_elem "meta_title"; val (meta_creatorN, meta_creator) = markup_elem "meta_creator"; val (meta_contributorN, meta_contributor) = markup_elem "meta_contributor"; val (meta_dateN, meta_date) = markup_elem "meta_date"; val (meta_licenseN, meta_license) = markup_elem "meta_license"; val (meta_descriptionN, meta_description) = markup_elem "meta_description"; (* embedded languages *) val languageN = "language"; val symbolsN = "symbols"; val antiquotesN = "antiquotes"; val delimitedN = "delimited" fun is_delimited props = Properties.get props delimitedN = SOME "true"; fun language {name, symbols, antiquotes, delimited} = (languageN, [(nameN, name), (symbolsN, Value.print_bool symbols), (antiquotesN, Value.print_bool antiquotes), (delimitedN, Value.print_bool delimited)]); fun language' {name, symbols, antiquotes} delimited = language {name = name, symbols = symbols, antiquotes = antiquotes, delimited = 
delimited}; val language_Isar = language' {name = "Isar", symbols = true, antiquotes = false}; val language_method = language {name = "method", symbols = true, antiquotes = false, delimited = false}; val language_attribute = language {name = "attribute", symbols = true, antiquotes = false, delimited = false}; val language_sort = language' {name = "sort", symbols = true, antiquotes = false}; val language_type = language' {name = "type", symbols = true, antiquotes = false}; val language_term = language' {name = "term", symbols = true, antiquotes = false}; val language_prop = language' {name = "prop", symbols = true, antiquotes = false}; val language_ML = language' {name = "ML", symbols = false, antiquotes = true}; val language_SML = language' {name = "SML", symbols = false, antiquotes = false}; val language_document = language' {name = "document", symbols = false, antiquotes = true}; val language_document_marker = language {name = "document_marker", symbols = true, antiquotes = true, delimited = true}; val language_antiquotation = language {name = "antiquotation", symbols = true, antiquotes = false, delimited = true}; val language_text = language' {name = "text", symbols = true, antiquotes = false}; val language_verbatim = language' {name = "verbatim_text", symbols = true, antiquotes = false}; val language_latex = language' {name = "latex", symbols = false, antiquotes = false}; val language_rail = language {name = "rail", symbols = true, antiquotes = true, delimited = true}; -val language_path = language {name = "path", symbols = false, antiquotes = false, delimited = true}; +val language_path = language' {name = "path", symbols = false, antiquotes = false}; val language_mixfix = language {name = "mixfix_annotation", symbols = true, antiquotes = false, delimited = true}; (* formal entities *) val (bindingN, binding) = markup_elem "binding"; val entityN = "entity"; fun entity kind name = (entityN, (if name = "" then [] else [(nameN, name)]) @ (if kind = "" then [] else [(kindN, kind)])); val defN = "def"; val refN = "ref"; (* completion *) val (completionN, completion) = markup_elem "completion"; val (no_completionN, no_completion) = markup_elem "no_completion"; val (updateN, update) = markup_elem "update"; (* position *) val lineN = "line"; val end_lineN = "end_line"; val offsetN = "offset"; val end_offsetN = "end_offset"; val fileN = "file"; val idN = "id"; val position_properties = [lineN, offsetN, end_offsetN, fileN, idN]; fun position_property (entry: Properties.entry) = member (op =) position_properties (#1 entry); val (positionN, position) = markup_elem "position"; (* expression *) val expressionN = "expression"; fun expression kind = (expressionN, if kind = "" then [] else [(kindN, kind)]); (* citation *) val (citationN, citation) = markup_string "citation" nameN; (* external resources *) val (pathN, path) = markup_string "path" nameN; val (export_pathN, export_path) = markup_string "export_path" nameN; val (urlN, url) = markup_string "url" nameN; val (docN, doc) = markup_string "doc" nameN; val (toolN, tool) = markup_string "tool" nameN; (* pretty printing *) val markupN = "markup"; val consistentN = "consistent"; val unbreakableN = "unbreakable"; val indentN = "indent"; val block_properties = [markupN, consistentN, unbreakableN, indentN]; val widthN = "width"; val blockN = "block"; fun block c i = (blockN, (if c then [(consistentN, Value.print_bool c)] else []) @ (if i <> 0 then [(indentN, Value.print_int i)] else [])); val breakN = "break"; fun break w i = (breakN, (if w <> 0 then 
[(widthN, Value.print_int w)] else []) @ (if i <> 0 then [(indentN, Value.print_int i)] else [])); val (fbreakN, fbreak) = markup_elem "fbreak"; val (itemN, item) = markup_elem "item"; (* text properties *) val (wordsN, words) = markup_elem "words"; val (hiddenN, hidden) = markup_elem "hidden"; val (deleteN, delete) = markup_elem "delete"; (* misc entities *) val bash_functionN = "bash_function"; val scala_functionN = "scala_function"; val system_optionN = "system_option"; val sessionN = "session"; val theoryN = "theory"; val classN = "class"; val type_nameN = "type_name"; val constantN = "constant"; val (fixedN, fixed) = markup_string "fixed" nameN; val (caseN, case_) = markup_string "case" nameN; val (dynamic_factN, dynamic_fact) = markup_string "dynamic_fact" nameN; val (literal_factN, literal_fact) = markup_string "literal_fact" nameN; val method_modifierN = "method_modifier"; (* inner syntax *) val (tfreeN, tfree) = markup_elem "tfree"; val (tvarN, tvar) = markup_elem "tvar"; val (freeN, free) = markup_elem "free"; val (skolemN, skolem) = markup_elem "skolem"; val (boundN, bound) = markup_elem "bound"; val (varN, var) = markup_elem "var"; val (numeralN, numeral) = markup_elem "numeral"; val (literalN, literal) = markup_elem "literal"; val (delimiterN, delimiter) = markup_elem "delimiter"; val (inner_stringN, inner_string) = markup_elem "inner_string"; val (inner_cartoucheN, inner_cartouche) = markup_elem "inner_cartouche"; val (token_rangeN, token_range) = markup_elem "token_range"; val (sortingN, sorting) = markup_elem "sorting"; val (typingN, typing) = markup_elem "typing"; val (class_parameterN, class_parameter) = markup_elem "class_parameter"; (* ML *) val (ML_keyword1N, ML_keyword1) = markup_elem "ML_keyword1"; val (ML_keyword2N, ML_keyword2) = markup_elem "ML_keyword2"; val (ML_keyword3N, ML_keyword3) = markup_elem "ML_keyword3"; val (ML_delimiterN, ML_delimiter) = markup_elem "ML_delimiter"; val (ML_tvarN, ML_tvar) = markup_elem "ML_tvar"; val (ML_numeralN, ML_numeral) = markup_elem "ML_numeral"; val (ML_charN, ML_char) = markup_elem "ML_char"; val (ML_stringN, ML_string) = markup_elem "ML_string"; val (ML_commentN, ML_comment) = markup_elem "ML_comment"; val ML_defN = "ML_def"; val ML_openN = "ML_open"; val ML_structureN = "ML_structure"; val (ML_typingN, ML_typing) = markup_elem "ML_typing"; val (ML_breakpointN, ML_breakpoint) = markup_int "ML_breakpoint" serialN; (* antiquotations *) val (antiquotedN, antiquoted) = markup_elem "antiquoted"; val (antiquoteN, antiquote) = markup_elem "antiquote"; val file_typeN = "file_type"; val antiquotationN = "antiquotation"; val ML_antiquotationN = "ML_antiquotation"; val document_antiquotationN = "document_antiquotation"; val document_antiquotation_optionN = "document_antiquotation_option"; (* document text *) val (raw_textN, raw_text) = markup_elem "raw_text"; val (plain_textN, plain_text) = markup_elem "plain_text"; val (paragraphN, paragraph) = markup_elem "paragraph"; val (text_foldN, text_fold) = markup_elem "text_fold"; val (document_markerN, document_marker) = markup_elem "document_marker"; val (document_tagN, document_tag) = markup_string "document_tag" nameN; (* Markdown document structure *) val (markdown_paragraphN, markdown_paragraph) = markup_elem "markdown_paragraph"; val (markdown_itemN, markdown_item) = markup_elem "markdown_item"; val (markdown_bulletN, markdown_bullet) = markup_int "markdown_bullet" "depth"; val (markdown_listN, markdown_list) = markup_string "markdown_list" kindN; val itemizeN = "itemize"; val 
enumerateN = "enumerate"; val descriptionN = "description"; (* formal input *) val inputN = "input"; fun input delimited props = (inputN, (delimitedN, Value.print_bool delimited) :: props); (* outer syntax *) val (command_keywordN, command_keyword) = markup_elem "command_keyword"; val commandN = "command"; val command_properties = properties [(kindN, commandN)]; val keywordN = "keyword"; val keyword_properties = properties [(kindN, keywordN)]; val (keyword1N, keyword1) = markup_elem "keyword1"; val (keyword2N, keyword2) = markup_elem "keyword2"; val (keyword3N, keyword3) = markup_elem "keyword3"; val (quasi_keywordN, quasi_keyword) = markup_elem "quasi_keyword"; val (improperN, improper) = markup_elem "improper"; val (operatorN, operator) = markup_elem "operator"; val (stringN, string) = markup_elem "string"; val (alt_stringN, alt_string) = markup_elem "alt_string"; val (verbatimN, verbatim) = markup_elem "verbatim"; val (cartoucheN, cartouche) = markup_elem "cartouche"; val (commentN, comment) = markup_elem "comment"; (* comments *) val (comment1N, comment1) = markup_elem "comment1"; val (comment2N, comment2) = markup_elem "comment2"; val (comment3N, comment3) = markup_elem "comment3"; (* timing *) val elapsedN = "elapsed"; val cpuN = "cpu"; val gcN = "gc"; fun timing_properties {elapsed, cpu, gc} = [(elapsedN, Value.print_time elapsed), (cpuN, Value.print_time cpu), (gcN, Value.print_time gc)]; val timingN = "timing"; fun timing t = (timingN, timing_properties t); (* command timing *) fun parse_command_timing_properties props = (case (Properties.get props fileN, Properties.get props offsetN, Properties.get props nameN) of (SOME file, SOME offset, SOME name) => SOME ({file = file, offset = Value.parse_int offset, name = name}, Properties.seconds props elapsedN) | _ => NONE); (* indentation *) val (command_indentN, command_indent) = markup_int "command_indent" indentN; (* goals *) val (goalN, goal) = markup_elem "goal"; val (subgoalN, subgoal) = markup_string "subgoal" nameN; (* command status *) val taskN = "task"; val (forkedN, forked) = markup_elem "forked"; val (joinedN, joined) = markup_elem "joined"; val (runningN, running) = markup_elem "running"; val (finishedN, finished) = markup_elem "finished"; val (failedN, failed) = markup_elem "failed"; val (canceledN, canceled) = markup_elem "canceled"; val (initializedN, initialized) = markup_elem "initialized"; val (finalizedN, finalized) = markup_elem "finalized"; val (consolidatingN, consolidating) = markup_elem "consolidating"; val (consolidatedN, consolidated) = markup_elem "consolidated"; (* messages *) val exec_idN = "exec_id"; val initN = "init"; val (statusN, status) = markup_elem "status"; val (resultN, result) = markup_elem "result"; val (writelnN, writeln) = markup_elem "writeln"; val (stateN, state) = markup_elem "state" val (informationN, information) = markup_elem "information"; val (tracingN, tracing) = markup_elem "tracing"; val (warningN, warning) = markup_elem "warning"; val (legacyN, legacy) = markup_elem "legacy"; val (errorN, error) = markup_elem "error"; val (systemN, system) = markup_elem "system"; val protocolN = "protocol"; val (reportN, report) = markup_elem "report"; val (no_reportN, no_report) = markup_elem "no_report"; val badN = "bad"; fun bad () = (badN, serial_properties (serial ())); val (intensifyN, intensify) = markup_elem "intensify"; (* active areas *) val browserN = "browser" val graphviewN = "graphview"; val theory_exportsN = "theory_exports"; val sendbackN = "sendback"; val paddingN = "padding"; val 
padding_line = (paddingN, "line"); val padding_command = (paddingN, "command"); val dialogN = "dialog"; fun dialog i result = (dialogN, [(serialN, Value.print_int i), (resultN, result)]); val jedit_actionN = "jedit_action"; (* protocol message functions *) val functionN = "function" fun ML_statistics {pid, stats_dir} = [(functionN, "ML_statistics"), ("pid", Value.print_int pid), ("stats_dir", stats_dir)]; val commands_accepted = [(functionN, "commands_accepted")]; val assign_update = [(functionN, "assign_update")]; val removed_versions = [(functionN, "removed_versions")]; fun invoke_scala name id thread = [(functionN, "invoke_scala"), (nameN, name), (idN, id), ("thread", Value.print_bool thread)]; fun cancel_scala id = [(functionN, "cancel_scala"), (idN, id)]; val task_statistics = (functionN, "task_statistics"); val command_timing = (functionN, "command_timing"); val theory_timing = (functionN, "theory_timing"); val session_timing = (functionN, "session_timing"); fun loading_theory name = [("function", "loading_theory"), (nameN, name)]; fun finished_theory name = [("function", "finished_theory"), (nameN, name)]; val build_session_finished = [("function", "build_session_finished")]; val print_operationsN = "print_operations"; val print_operations = [(functionN, print_operationsN)]; (* export *) val exportN = "export"; type export_args = {id: string option, serial: serial, theory_name: string, name: string, executable: bool, compress: bool, strict: bool}; fun export ({id, serial, theory_name, name, executable, compress, strict}: export_args) = [(functionN, exportN), (idN, the_default "" id), (serialN, Value.print_int serial), ("theory_name", theory_name), (nameN, name), ("executable", Value.print_bool executable), ("compress", Value.print_bool compress), ("strict", Value.print_bool strict)]; (* debugger *) fun debugger_state name = [(functionN, "debugger_state"), (nameN, name)]; fun debugger_output name = [(functionN, "debugger_output"), (nameN, name)]; (* simplifier trace *) val simp_trace_panelN = "simp_trace_panel"; val simp_trace_logN = "simp_trace_log"; val simp_trace_stepN = "simp_trace_step"; val simp_trace_recurseN = "simp_trace_recurse"; val simp_trace_hintN = "simp_trace_hint"; val simp_trace_ignoreN = "simp_trace_ignore"; fun simp_trace_cancel i = [(functionN, "simp_trace_cancel"), (serialN, Value.print_int i)]; (** print mode operations **) type output = Output.output * Output.output; val no_output = ("", ""); local val default = {output = Output_Primitives.markup_fn}; val modes = Synchronized.var "Markup.modes" (Symtab.make [("", default)]); in fun add_mode name output = Synchronized.change modes (fn tab => (if not (Symtab.defined tab name) then () else Output.warning ("Redefining markup mode " ^ quote name); Symtab.update (name, {output = output}) tab)); fun get_mode () = the_default default (Library.get_first (Symtab.lookup (Synchronized.value modes)) (print_mode_value ())); end; fun output m = if is_empty m then no_output else #output (get_mode ()) m; val enclose = output #-> Library.enclose; fun markup m = let val (bg, en) = output m in Library.enclose (Output.escape bg) (Output.escape en) end; val markups = fold_rev markup; fun markup_only m = markup m ""; fun markup_report "" = "" | markup_report txt = markup report txt; end; diff --git a/src/Pure/PIDE/resources.ML b/src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML +++ b/src/Pure/PIDE/resources.ML @@ -1,393 +1,399 @@ (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. 
*) signature RESOURCES = sig val default_qualifier: string val init_session: {session_positions: (string * Properties.T) list, session_directories: (string * string) list, session_chapters: (string * string) list, bibtex_entries: (string * string list) list, command_timings: Properties.T list, scala_functions: (string * Position.T) list, global_theories: (string * string) list, loaded_theories: string list} -> unit val init_session_yxml: string -> unit val init_session_file: Path.T -> unit val finish_session_base: unit -> unit val global_theory: string -> string option val loaded_theory: string -> bool val check_session: Proof.context -> string * Position.T -> string val session_chapter: string -> string val last_timing: Toplevel.transition -> Time.time val scala_function_pos: string -> Position.T val master_directory: theory -> Path.T val imports_of: theory -> (string * Position.T) list val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory val thy_path: Path.T -> Path.T val theory_qualifier: string -> string val theory_bibtex_entries: string -> string list val find_theory_file: string -> Path.T option val import_name: string -> Path.T -> string -> {node_name: Path.T, master_dir: Path.T, theory_name: string} val check_thy: Path.T -> string -> {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, imports: (string * Position.T) list, keywords: Thy_Header.keywords} val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser val parse_file: (theory -> Token.file) parser val provide: Path.T * SHA1.digest -> theory -> theory val provide_file: Token.file -> theory -> theory val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser val provide_parse_file: (theory -> Token.file * theory) parser val loaded_files_current: theory -> bool - val check_path: Proof.context -> Path.T option -> string * Position.T -> Path.T - val check_file: Proof.context -> Path.T option -> string * Position.T -> Path.T - val check_dir: Proof.context -> Path.T option -> string * Position.T -> Path.T + val check_path: Proof.context -> Path.T option -> Input.source -> Path.T + val check_file: Proof.context -> Path.T option -> Input.source -> Path.T + val check_dir: Proof.context -> Path.T option -> Input.source -> Path.T end; structure Resources: RESOURCES = struct (* command timings *) type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun make_timings command_timings = fold update_timings command_timings empty_timings; fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session base *) val default_qualifier = "Draft"; type entry = {pos: Position.T, serial: serial}; fun make_entry props : entry = 
{pos = Position.of_properties props, serial = serial ()}; val empty_session_base = ({session_positions = []: (string * entry) list, session_directories = Symtab.empty: Path.T list Symtab.table, session_chapters = Symtab.empty: string Symtab.table, bibtex_entries = Symtab.empty: string list Symtab.table, timings = empty_timings, scala_functions = Symtab.empty: Position.T Symtab.table}, {global_theories = Symtab.empty: string Symtab.table, loaded_theories = Symtab.empty: unit Symtab.table}); val global_session_base = Synchronized.var "Sessions.base" empty_session_base; fun init_session {session_positions, session_directories, session_chapters, bibtex_entries, command_timings, scala_functions, global_theories, loaded_theories} = Synchronized.change global_session_base (fn _ => ({session_positions = sort_by #1 (map (apsnd make_entry) session_positions), session_directories = fold_rev (fn (dir, name) => Symtab.cons_list (name, Path.explode dir)) session_directories Symtab.empty, session_chapters = Symtab.make session_chapters, bibtex_entries = Symtab.make bibtex_entries, timings = make_timings command_timings, scala_functions = Symtab.make scala_functions}, {global_theories = Symtab.make global_theories, loaded_theories = Symtab.make_set loaded_theories})); fun init_session_yxml yxml = let val (session_positions, (session_directories, (session_chapters, (bibtex_entries, (command_timings, (scala_functions, (global_theories, loaded_theories))))))) = YXML.parse_body yxml |> let open XML.Decode in (pair (list (pair string properties)) (pair (list (pair string string)) (pair (list (pair string string)) (pair (list (pair string (list string))) (pair (list properties) (pair (list (pair string properties)) (pair (list (pair string string)) (list string)))))))) end; in init_session {session_positions = session_positions, session_directories = session_directories, session_chapters = session_chapters, bibtex_entries = bibtex_entries, command_timings = command_timings, scala_functions = map (apsnd Position.of_properties) scala_functions, global_theories = global_theories, loaded_theories = loaded_theories} end; fun init_session_file path = init_session_yxml (File.read path) before File.rm path; fun finish_session_base () = Synchronized.change global_session_base (apfst (K (#1 empty_session_base))); fun get_session_base f = f (Synchronized.value global_session_base); fun get_session_base1 f = get_session_base (f o #1); fun get_session_base2 f = get_session_base (f o #2); fun global_theory a = Symtab.lookup (get_session_base2 #global_theories) a; fun loaded_theory a = Symtab.defined (get_session_base2 #loaded_theories) a; fun check_session ctxt arg = Completion.check_item "session" (fn (name, {pos, serial}) => Markup.entity Markup.sessionN name |> Markup.properties (Position.entity_properties_of false serial pos)) (get_session_base1 #session_positions) ctxt arg; fun session_chapter name = the_default "Unsorted" (Symtab.lookup (get_session_base1 #session_chapters) name); fun last_timing tr = get_timings (get_session_base1 #timings) tr; fun scala_function_pos name = Symtab.lookup (get_session_base1 #scala_functions) name |> the_default Position.none; (* manage source files *) type files = {master_dir: Path.T, (*master directory of theory source*) imports: (string * Position.T) list, (*source specification of imports*) provided: (Path.T * SHA1.digest) list}; (*source path, digest*) fun make_files (master_dir, imports, provided): files = {master_dir = master_dir, imports = imports, provided = provided}; 
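(*Illustrative sketch (hypothetical usage, not part of this changeset): with the
  Input.source-based signatures declared above, check_path/check_file/check_dir
  receive the parsed input as a whole, so its position and delimiter status stay
  together and the language_path report can carry the proper "delimited" flag.
  A hypothetical caller:

    fun print_checked_path ctxt (source: Input.source) =
      let val path = Resources.check_path ctxt NONE source
      in writeln (Path.print path) end;

  The flag itself is taken from Input.is_delimited in formal_check further below.*)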
structure Files = Theory_Data ( type T = files; val empty = make_files (Path.current, [], []); val extend = I; fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) = let val provided' = Library.merge (op =) (provided1, provided2) in make_files (master_dir, imports, provided') end ); fun map_files f = Files.map (fn {master_dir, imports, provided} => make_files (f (master_dir, imports, provided))); val master_directory = #master_dir o Files.get; val imports_of = #imports o Files.get; fun begin_theory master_dir {name, imports, keywords} parents = Theory.begin_theory name parents |> map_files (fn _ => (Path.explode (Path.implode_symbolic master_dir), imports, [])) |> Thy_Header.add_keywords keywords; (* theory files *) val thy_path = Path.ext "thy"; fun theory_qualifier theory = (case global_theory theory of SOME qualifier => qualifier | NONE => Long_Name.qualifier theory); fun theory_name qualifier theory = if Long_Name.is_qualified theory orelse is_some (global_theory theory) then theory else Long_Name.qualify qualifier theory; fun theory_bibtex_entries theory = Symtab.lookup_list (get_session_base1 #bibtex_entries) (theory_qualifier theory); fun find_theory_file thy_name = let val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name)); val session = theory_qualifier thy_name; val dirs = Symtab.lookup_list (get_session_base1 #session_directories) session; in dirs |> get_first (fn dir => let val path = dir + thy_file in if File.is_file path then SOME path else NONE end) end; fun make_theory_node node_name theory = {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory}; fun loaded_theory_node theory = {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory}; fun import_name qualifier dir s = let val theory = theory_name qualifier (Thy_Header.import_name s); fun theory_node () = make_theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))) theory; in if not (Thy_Header.is_base_name s) then theory_node () else if loaded_theory theory then loaded_theory_node theory else (case find_theory_file theory of SOME node_name => make_theory_node node_name theory | NONE => if Long_Name.is_qualified s then loaded_theory_node theory else theory_node ()) end; fun check_file dir file = File.check_file (File.full_path dir file); fun check_thy dir thy_name = let val thy_base_name = Long_Name.base_name thy_name; val master_file = (case find_theory_file thy_name of SOME path => check_file Path.current path | NONE => check_file dir (thy_path (Path.basic thy_base_name))); val text = File.read master_file; val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_base_name <> name andalso error ("Bad theory name " ^ quote name ^ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos); in {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} end; (* load files *) fun parse_files make_paths = - Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy => + Scan.ahead Parse.not_eof -- Parse.path_input >> (fn (tok, source) => fn thy => (case Token.get_files tok of [] => let val master_dir = master_directory thy; - val pos = Token.pos_of tok; + val name = Input.string_of source; + val pos = Input.pos_of source; + val delimited = Input.is_delimited source; val src_paths = make_paths (Path.explode name); - in map (Command.read_file master_dir pos) src_paths end + in map 
(Command.read_file master_dir pos delimited) src_paths end | files => map Exn.release files)); val parse_file = parse_files single >> (fn f => f #> the_single); fun provide (src_path, id) = map_files (fn (master_dir, imports, provided) => if AList.defined (op =) provided src_path then error ("Duplicate use of source file: " ^ Path.print src_path) else (master_dir, imports, (src_path, id) :: provided)); fun provide_file (file: Token.file) = provide (#src_path file, #digest file); fun provide_parse_files make_paths = parse_files make_paths >> (fn files => fn thy => let val fs = files thy; val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; in (fs, thy') end); val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single); fun load_file thy src_path = let val full_path = check_file (master_directory thy) src_path; val text = File.read full_path; val id = SHA1.digest text; in ((full_path, id), text) end; fun loaded_files_current thy = #provided (Files.get thy) |> forall (fn (src_path, id) => (case try (load_file thy) src_path of NONE => false | SOME ((_, id'), _) => id = id')); (* formal check *) -fun formal_check check_file ctxt opt_dir (name, pos) = +fun formal_check check_file ctxt opt_dir source = let + val name = Input.string_of source; + val pos = Input.pos_of source; + val delimited = Input.is_delimited source; + + val _ = Context_Position.report ctxt pos (Markup.language_path delimited); + fun err msg = error (msg ^ Position.here pos); - - val _ = Context_Position.report ctxt pos Markup.language_path; val dir = (case opt_dir of SOME dir => dir | NONE => master_directory (Proof_Context.theory_of ctxt)); val path = dir + Path.explode name handle ERROR msg => err msg; val _ = Path.expand path handle ERROR msg => err msg; val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); - val _ = check_file path handle ERROR msg => err msg; + val _ : Path.T = check_file path handle ERROR msg => err msg; in path end; val check_path = formal_check I; val check_file = formal_check File.check_file; val check_dir = formal_check File.check_dir; (* antiquotations *) local -fun document_antiq (check: Proof.context -> Path.T option -> string * Position.T -> Path.T) = - Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => +fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) = + Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => let - val _ = check ctxt NONE (name, pos); - val latex = Latex.string (Latex.output_ascii_breakable "/" name); + val _ = check ctxt NONE source; + val latex = Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source)); in Latex.enclose_block "\\isatt{" "}" [latex] end); fun ML_antiq check = - Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => - check ctxt (SOME Path.current) (name, pos) |> ML_Syntax.print_path); + Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => + check ctxt (SOME Path.current) source |> ML_Syntax.print_path); in val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\session\ (Scan.lift Parse.embedded_position) check_session #> Thy_Output.antiquotation_raw_embedded \<^binding>\path\ (document_antiq check_path) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\file\ (document_antiq check_file) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\dir\ (document_antiq check_dir) (K I) #> 
ML_Antiquotation.value_embedded \<^binding>\path\ (ML_antiq check_path) #> ML_Antiquotation.value_embedded \<^binding>\file\ (ML_antiq check_file) #> ML_Antiquotation.value_embedded \<^binding>\dir\ (ML_antiq check_dir) #> ML_Antiquotation.value_embedded \<^binding>\path_binding\ (Scan.lift (Parse.position Parse.path) >> (ML_Syntax.print_path_binding o Path.explode_binding)) #> ML_Antiquotation.value \<^binding>\master_dir\ (Args.theory >> (ML_Syntax.print_path o master_directory))); end; end; diff --git a/src/Pure/Pure.thy b/src/Pure/Pure.thy --- a/src/Pure/Pure.thy +++ b/src/Pure/Pure.thy @@ -1,1539 +1,1539 @@ (* Title: Pure/Pure.thy Author: Makarius The Pure theory, with definitions of Isar commands and some lemmas. *) theory Pure keywords "!!" "!" "+" ":" ";" "<" "<=" "==" "=>" "?" "[" "\" "\" "\" "\" "\" "\" "]" "binder" "in" "infix" "infixl" "infixr" "is" "open" "output" "overloaded" "pervasive" "premises" "structure" "unchecked" and "private" "qualified" :: before_command and "assumes" "constrains" "defines" "fixes" "for" "if" "includes" "notes" "rewrites" "obtains" "shows" "when" "where" "|" :: quasi_command and "text" "txt" :: document_body and "text_raw" :: document_raw and "default_sort" :: thy_decl and "typedecl" "nonterminal" "judgment" "consts" "syntax" "no_syntax" "translations" "no_translations" "type_notation" "no_type_notation" "notation" "no_notation" "alias" "type_alias" "declare" "hide_class" "hide_type" "hide_const" "hide_fact" :: thy_decl and "type_synonym" "definition" "abbreviation" "lemmas" :: thy_defn and "axiomatization" :: thy_stmt and "external_file" "bibtex_file" "ROOTS_file" :: thy_load and "generate_file" :: thy_decl and "export_generated_files" :: diag and "compile_generated_files" :: diag and "external_files" "export_files" "export_prefix" and "ML_file" "ML_file_debug" "ML_file_no_debug" :: thy_load % "ML" and "SML_file" "SML_file_debug" "SML_file_no_debug" :: thy_load % "ML" and "SML_import" "SML_export" "ML_export" :: thy_decl % "ML" and "ML_prf" :: prf_decl % "proof" (* FIXME % "ML" ?? 
*) and "ML_val" "ML_command" :: diag % "ML" and "simproc_setup" :: thy_decl % "ML" and "setup" "local_setup" "attribute_setup" "method_setup" "declaration" "syntax_declaration" "parse_ast_translation" "parse_translation" "print_translation" "typed_print_translation" "print_ast_translation" "oracle" :: thy_decl % "ML" and "bundle" :: thy_decl_block and "unbundle" :: thy_decl and "include" "including" :: prf_decl and "print_bundles" :: diag and "context" "locale" "experiment" :: thy_decl_block and "interpret" :: prf_goal % "proof" and "interpretation" "global_interpretation" "sublocale" :: thy_goal and "class" :: thy_decl_block and "subclass" :: thy_goal and "instantiation" :: thy_decl_block and "instance" :: thy_goal and "overloading" :: thy_decl_block and "opening" :: quasi_command and "code_datatype" :: thy_decl and "theorem" "lemma" "corollary" "proposition" :: thy_goal_stmt and "schematic_goal" :: thy_goal_stmt and "notepad" :: thy_decl_block and "have" :: prf_goal % "proof" and "hence" :: prf_goal % "proof" and "show" :: prf_asm_goal % "proof" and "thus" :: prf_asm_goal % "proof" and "then" "from" "with" :: prf_chain % "proof" and "note" :: prf_decl % "proof" and "supply" :: prf_script % "proof" and "using" "unfolding" :: prf_decl % "proof" and "fix" "assume" "presume" "define" :: prf_asm % "proof" and "consider" :: prf_goal % "proof" and "obtain" :: prf_asm_goal % "proof" and "guess" :: prf_script_asm_goal % "proof" and "let" "write" :: prf_decl % "proof" and "case" :: prf_asm % "proof" and "{" :: prf_open % "proof" and "}" :: prf_close % "proof" and "next" :: next_block % "proof" and "qed" :: qed_block % "proof" and "by" ".." "." "sorry" "\" :: "qed" % "proof" and "done" :: "qed_script" % "proof" and "oops" :: qed_global % "proof" and "defer" "prefer" "apply" :: prf_script % "proof" and "apply_end" :: prf_script % "proof" and "subgoal" :: prf_script_goal % "proof" and "proof" :: prf_block % "proof" and "also" "moreover" :: prf_decl % "proof" and "finally" "ultimately" :: prf_chain % "proof" and "back" :: prf_script % "proof" and "help" "print_commands" "print_options" "print_context" "print_theory" "print_definitions" "print_syntax" "print_abbrevs" "print_defn_rules" "print_theorems" "print_locales" "print_classes" "print_locale" "print_interps" "print_attributes" "print_simpset" "print_rules" "print_trans_rules" "print_methods" "print_antiquotations" "print_ML_antiquotations" "thy_deps" "locale_deps" "class_deps" "thm_deps" "thm_oracles" "print_term_bindings" "print_facts" "print_cases" "print_statement" "thm" "prf" "full_prf" "prop" "term" "typ" "print_codesetup" "unused_thms" :: diag and "print_state" :: diag and "welcome" :: diag and "end" :: thy_end and "realizers" :: thy_decl and "realizability" :: thy_decl and "extract_type" "extract" :: thy_decl and "find_theorems" "find_consts" :: diag and "named_theorems" :: thy_decl abbrevs "\\tag" = "\<^marker>\tag \" and "===>" = "===>" (*prevent replacement of very long arrows*) and "--->" = "\\" and "hence" "thus" "default_sort" "simproc_setup" "apply_end" "realizers" "realizability" = "" and "hence" = "then have" and "thus" = "then show" begin section \Isar commands\ subsection \Other files\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\external_file\ "formal dependency on external file" (Resources.provide_parse_file >> (fn get_file => Toplevel.theory (#2 o get_file))); val _ = Outer_Syntax.command \<^command_keyword>\bibtex_file\ "check bibtex database file in Prover IDE" (Resources.provide_parse_file >> (fn get_file => 
Toplevel.theory (fn thy => let val ({lines, pos, ...}, thy') = get_file thy; val _ = Bibtex.check_database_output pos (cat_lines lines); in thy' end))); val _ = Outer_Syntax.command \<^command_keyword>\ROOTS_file\ "session ROOTS file" (Resources.provide_parse_file >> (fn get_file => Toplevel.theory (fn thy => let val ({src_path, lines, pos = pos0, ...}, thy') = get_file thy; val ctxt = Proof_Context.init_global thy'; val dir = Path.dir (Path.expand (Resources.master_directory thy' + src_path)); val _ = (lines, pos0) |-> fold (fn line => fn pos1 => let val pos2 = pos1 |> fold Position.advance (Symbol.explode line); - val pos = Position.range_position (pos1, pos2); + val range = Position.range (pos1, pos2); val _ = if line = "" then () else if String.isPrefix "#" line then - Context_Position.report ctxt pos Markup.comment + Context_Position.report ctxt (#1 range) Markup.comment else - (ignore (Resources.check_dir ctxt (SOME dir) (line, pos)) + (ignore (Resources.check_dir ctxt (SOME dir) (Input.source true line range)) handle ERROR msg => Output.error_message msg); in pos2 |> Position.advance "\n" end); in thy' end))); val _ = Outer_Syntax.local_theory \<^command_keyword>\generate_file\ "generate source file, with antiquotations" (Parse.path_binding -- (\<^keyword>\=\ |-- Parse.embedded_input) >> Generated_Files.generate_file_cmd); val files_in_theory = (Parse.underscore >> K [] || Scan.repeat1 Parse.path_binding) -- Scan.option (\<^keyword>\(\ |-- Parse.!!! (\<^keyword>\in\ |-- Parse.theory_name --| \<^keyword>\)\)); val _ = Outer_Syntax.command \<^command_keyword>\export_generated_files\ "export generated files from given theories" (Parse.and_list1 files_in_theory >> (fn args => Toplevel.keep (fn st => Generated_Files.export_generated_files_cmd (Toplevel.context_of st) args))); val base_dir = Scan.optional (\<^keyword>\(\ |-- - Parse.!!! (\<^keyword>\in\ |-- Parse.position Parse.path --| \<^keyword>\)\)) ("", Position.none); + Parse.!!! (\<^keyword>\in\ |-- Parse.path_input --| \<^keyword>\)\)) (Input.string ""); - val external_files = Scan.repeat1 (Parse.position Parse.path) -- base_dir; + val external_files = Scan.repeat1 Parse.path_input -- base_dir; val exe = Parse.reserved "exe" >> K true || Parse.reserved "executable" >> K false; val executable = \<^keyword>\(\ |-- Parse.!!! (exe --| \<^keyword>\)\) >> SOME || Scan.succeed NONE; val export_files = Scan.repeat1 Parse.path_binding -- executable; val _ = Outer_Syntax.command \<^command_keyword>\compile_generated_files\ "compile generated files and export results" (Parse.and_list files_in_theory -- Scan.optional (\<^keyword>\external_files\ |-- Parse.!!! (Parse.and_list1 external_files)) [] -- Scan.optional (\<^keyword>\export_files\ |-- Parse.!!! (Parse.and_list1 export_files)) [] -- Scan.optional (\<^keyword>\export_prefix\ |-- Parse.path_binding) ("", Position.none) -- (Parse.where_ |-- Parse.!!! 
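(*Editorial note: compile_generated_files combines the files_in_theory,
  external_files, export_files and export_prefix arguments with an ML source
  given after "where"; external_files and its base_dir default now use
  Parse.path_input, so paths arrive as Input.source values (default
  Input.string "") rather than (string, Position.T) pairs.*)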
Parse.ML_source) >> (fn ((((args, external), export), export_prefix), source) => Toplevel.keep (fn st => Generated_Files.compile_generated_files_cmd (Toplevel.context_of st) args external export export_prefix source))); in end\ external_file "ROOT0.ML" external_file "ROOT.ML" subsection \Embedded ML text\ ML \ local val semi = Scan.option \<^keyword>\;\; val _ = Outer_Syntax.command \<^command_keyword>\ML_file\ "read and evaluate Isabelle/ML file" (Resources.parse_file --| semi >> ML_File.ML NONE); val _ = Outer_Syntax.command \<^command_keyword>\ML_file_debug\ "read and evaluate Isabelle/ML file (with debugger information)" (Resources.parse_file --| semi >> ML_File.ML (SOME true)); val _ = Outer_Syntax.command \<^command_keyword>\ML_file_no_debug\ "read and evaluate Isabelle/ML file (no debugger information)" (Resources.parse_file --| semi >> ML_File.ML (SOME false)); val _ = Outer_Syntax.command \<^command_keyword>\SML_file\ "read and evaluate Standard ML file" (Resources.parse_file --| semi >> ML_File.SML NONE); val _ = Outer_Syntax.command \<^command_keyword>\SML_file_debug\ "read and evaluate Standard ML file (with debugger information)" (Resources.parse_file --| semi >> ML_File.SML (SOME true)); val _ = Outer_Syntax.command \<^command_keyword>\SML_file_no_debug\ "read and evaluate Standard ML file (no debugger information)" (Resources.parse_file --| semi >> ML_File.SML (SOME false)); val _ = Outer_Syntax.command \<^command_keyword>\SML_export\ "evaluate SML within Isabelle/ML environment" (Parse.ML_source >> (fn source => let val flags: ML_Compiler.flags = {environment = ML_Env.SML_export, redirect = false, verbose = true, debug = NONE, writeln = writeln, warning = warning}; in Toplevel.theory (Context.theory_map (ML_Context.exec (fn () => ML_Context.eval_source flags source))) end)); val _ = Outer_Syntax.command \<^command_keyword>\SML_import\ "evaluate Isabelle/ML within SML environment" (Parse.ML_source >> (fn source => let val flags: ML_Compiler.flags = {environment = ML_Env.SML_import, redirect = false, verbose = true, debug = NONE, writeln = writeln, warning = warning}; in Toplevel.generic_theory (ML_Context.exec (fn () => ML_Context.eval_source flags source) #> Local_Theory.propagate_ml_env) end)); val _ = Outer_Syntax.command ("ML_export", \<^here>) "ML text within theory or local theory, and export to bootstrap environment" (Parse.ML_source >> (fn source => Toplevel.generic_theory (fn context => context |> Config.put_generic ML_Env.ML_environment ML_Env.Isabelle |> Config.put_generic ML_Env.ML_write_global true |> ML_Context.exec (fn () => ML_Context.eval_source (ML_Compiler.verbose true ML_Compiler.flags) source) |> Config.restore_generic ML_Env.ML_write_global context |> Config.restore_generic ML_Env.ML_environment context |> Local_Theory.propagate_ml_env))); val _ = Outer_Syntax.command \<^command_keyword>\ML_prf\ "ML text within proof" (Parse.ML_source >> (fn source => Toplevel.proof (Proof.map_context (Context.proof_map (ML_Context.exec (fn () => ML_Context.eval_source (ML_Compiler.verbose true ML_Compiler.flags) source))) #> Proof.propagate_ml_env))); val _ = Outer_Syntax.command \<^command_keyword>\ML_val\ "diagnostic ML text" (Parse.ML_source >> Isar_Cmd.ml_diag true); val _ = Outer_Syntax.command \<^command_keyword>\ML_command\ "diagnostic ML text (silent)" (Parse.ML_source >> Isar_Cmd.ml_diag false); val _ = Outer_Syntax.command \<^command_keyword>\setup\ "ML setup for global theory" (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.setup)); val _ = 
Outer_Syntax.local_theory \<^command_keyword>\local_setup\ "ML setup for local theory" (Parse.ML_source >> Isar_Cmd.local_setup); val _ = Outer_Syntax.command \<^command_keyword>\oracle\ "declare oracle" (Parse.range Parse.name -- (\<^keyword>\=\ |-- Parse.ML_source) >> (fn (x, y) => Toplevel.theory (Isar_Cmd.oracle x y))); val _ = Outer_Syntax.local_theory \<^command_keyword>\attribute_setup\ "define attribute in ML" (Parse.name_position -- Parse.!!! (\<^keyword>\=\ |-- Parse.ML_source -- Scan.optional Parse.text "") >> (fn (name, (txt, cmt)) => Attrib.attribute_setup name txt cmt)); val _ = Outer_Syntax.local_theory \<^command_keyword>\method_setup\ "define proof method in ML" (Parse.name_position -- Parse.!!! (\<^keyword>\=\ |-- Parse.ML_source -- Scan.optional Parse.text "") >> (fn (name, (txt, cmt)) => Method.method_setup name txt cmt)); val _ = Outer_Syntax.local_theory \<^command_keyword>\declaration\ "generic ML declaration" (Parse.opt_keyword "pervasive" -- Parse.ML_source >> (fn (pervasive, txt) => Isar_Cmd.declaration {syntax = false, pervasive = pervasive} txt)); val _ = Outer_Syntax.local_theory \<^command_keyword>\syntax_declaration\ "generic ML syntax declaration" (Parse.opt_keyword "pervasive" -- Parse.ML_source >> (fn (pervasive, txt) => Isar_Cmd.declaration {syntax = true, pervasive = pervasive} txt)); val _ = Outer_Syntax.local_theory \<^command_keyword>\simproc_setup\ "define simproc in ML" (Parse.name_position -- (\<^keyword>\(\ |-- Parse.enum1 "|" Parse.term --| \<^keyword>\)\ --| \<^keyword>\=\) -- Parse.ML_source >> (fn ((a, b), c) => Isar_Cmd.simproc_setup a b c)); in end\ subsection \Theory commands\ subsubsection \Sorts and types\ ML \ local val _ = Outer_Syntax.local_theory \<^command_keyword>\default_sort\ "declare default sort for explicit type variables" (Parse.sort >> (fn s => fn lthy => Local_Theory.set_defsort (Syntax.read_sort lthy s) lthy)); val _ = Outer_Syntax.local_theory \<^command_keyword>\typedecl\ "type declaration" (Parse.type_args -- Parse.binding -- Parse.opt_mixfix >> (fn ((args, a), mx) => Typedecl.typedecl {final = true} (a, map (rpair dummyS) args, mx) #> snd)); val _ = Outer_Syntax.local_theory \<^command_keyword>\type_synonym\ "declare type abbreviation" (Parse.type_args -- Parse.binding -- (\<^keyword>\=\ |-- Parse.!!! (Parse.typ -- Parse.opt_mixfix')) >> (fn ((args, a), (rhs, mx)) => snd o Typedecl.abbrev_cmd (a, args, mx) rhs)); in end\ subsubsection \Consts\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\judgment\ "declare object-logic judgment" (Parse.const_binding >> (Toplevel.theory o Object_Logic.add_judgment_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\consts\ "declare constants" (Scan.repeat1 Parse.const_binding >> (Toplevel.theory o Sign.add_consts_cmd)); in end\ subsubsection \Syntax and translations\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\nonterminal\ "declare syntactic type constructors (grammar nonterminal symbols)" (Parse.and_list1 Parse.binding >> (Toplevel.theory o Sign.add_nonterminals_global)); val _ = Outer_Syntax.command \<^command_keyword>\syntax\ "add raw syntax clauses" (Parse.syntax_mode -- Scan.repeat1 Parse.const_decl >> (Toplevel.theory o uncurry Sign.add_syntax_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\no_syntax\ "delete raw syntax clauses" (Parse.syntax_mode -- Scan.repeat1 Parse.const_decl >> (Toplevel.theory o uncurry Sign.del_syntax_cmd)); val trans_pat = Scan.optional (\<^keyword>\(\ |-- Parse.!!! 
(Parse.inner_syntax Parse.name --| \<^keyword>\)\)) "logic" -- Parse.inner_syntax Parse.string; fun trans_arrow toks = ((\<^keyword>\\\ || \<^keyword>\=>\) >> K Syntax.Parse_Rule || (\<^keyword>\\\ || \<^keyword>\<=\) >> K Syntax.Print_Rule || (\<^keyword>\\\ || \<^keyword>\==\) >> K Syntax.Parse_Print_Rule) toks; val trans_line = trans_pat -- Parse.!!! (trans_arrow -- trans_pat) >> (fn (left, (arr, right)) => arr (left, right)); val _ = Outer_Syntax.command \<^command_keyword>\translations\ "add syntax translation rules" (Scan.repeat1 trans_line >> (Toplevel.theory o Isar_Cmd.translations)); val _ = Outer_Syntax.command \<^command_keyword>\no_translations\ "delete syntax translation rules" (Scan.repeat1 trans_line >> (Toplevel.theory o Isar_Cmd.no_translations)); in end\ subsubsection \Translation functions\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\parse_ast_translation\ "install parse ast translation functions" (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.parse_ast_translation)); val _ = Outer_Syntax.command \<^command_keyword>\parse_translation\ "install parse translation functions" (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.parse_translation)); val _ = Outer_Syntax.command \<^command_keyword>\print_translation\ "install print translation functions" (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.print_translation)); val _ = Outer_Syntax.command \<^command_keyword>\typed_print_translation\ "install typed print translation functions" (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.typed_print_translation)); val _ = Outer_Syntax.command \<^command_keyword>\print_ast_translation\ "install print ast translation functions" (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.print_ast_translation)); in end\ subsubsection \Specifications\ ML \ local val _ = Outer_Syntax.local_theory' \<^command_keyword>\definition\ "constant definition" (Scan.option Parse_Spec.constdecl -- (Parse_Spec.opt_thm_name ":" -- Parse.prop) -- Parse_Spec.if_assumes -- Parse.for_fixes >> (fn (((decl, spec), prems), params) => #2 oo Specification.definition_cmd decl params prems spec)); val _ = Outer_Syntax.local_theory' \<^command_keyword>\abbreviation\ "constant abbreviation" (Parse.syntax_mode -- Scan.option Parse_Spec.constdecl -- Parse.prop -- Parse.for_fixes >> (fn (((mode, decl), spec), params) => Specification.abbreviation_cmd mode decl params spec)); val axiomatization = Parse.and_list1 (Parse_Spec.thm_name ":" -- Parse.prop) -- Parse_Spec.if_assumes -- Parse.for_fixes >> (fn ((a, b), c) => (c, b, a)); val _ = Outer_Syntax.command \<^command_keyword>\axiomatization\ "axiomatic constant specification" (Scan.optional Parse.vars [] -- Scan.optional (Parse.where_ |-- Parse.!!! axiomatization) ([], [], []) >> (fn (a, (b, c, d)) => Toplevel.theory (#2 o Specification.axiomatization_cmd a b c d))); val _ = Outer_Syntax.local_theory \<^command_keyword>\alias\ "name-space alias for constant" (Parse.binding -- (Parse.!!! \<^keyword>\=\ |-- Parse.name_position) >> Specification.alias_cmd); val _ = Outer_Syntax.local_theory \<^command_keyword>\type_alias\ "name-space alias for type constructor" (Parse.binding -- (Parse.!!! 
\<^keyword>\=\ |-- Parse.name_position) >> Specification.type_alias_cmd); in end\ subsubsection \Notation\ ML \ local val _ = Outer_Syntax.local_theory \<^command_keyword>\type_notation\ "add concrete syntax for type constructors" (Parse.syntax_mode -- Parse.and_list1 (Parse.type_const -- Parse.mixfix) >> (fn (mode, args) => Specification.type_notation_cmd true mode args)); val _ = Outer_Syntax.local_theory \<^command_keyword>\no_type_notation\ "delete concrete syntax for type constructors" (Parse.syntax_mode -- Parse.and_list1 (Parse.type_const -- Parse.mixfix) >> (fn (mode, args) => Specification.type_notation_cmd false mode args)); val _ = Outer_Syntax.local_theory \<^command_keyword>\notation\ "add concrete syntax for constants / fixed variables" (Parse.syntax_mode -- Parse.and_list1 (Parse.const -- Parse.mixfix) >> (fn (mode, args) => Specification.notation_cmd true mode args)); val _ = Outer_Syntax.local_theory \<^command_keyword>\no_notation\ "delete concrete syntax for constants / fixed variables" (Parse.syntax_mode -- Parse.and_list1 (Parse.const -- Parse.mixfix) >> (fn (mode, args) => Specification.notation_cmd false mode args)); in end\ subsubsection \Theorems\ ML \ local val long_keyword = Parse_Spec.includes >> K "" || Parse_Spec.long_statement_keyword; val long_statement = Scan.optional (Parse_Spec.opt_thm_name ":" --| Scan.ahead long_keyword) Binding.empty_atts -- Scan.optional Parse_Spec.includes [] -- Parse_Spec.long_statement >> (fn ((binding, includes), (elems, concl)) => (true, binding, includes, elems, concl)); val short_statement = Parse_Spec.statement -- Parse_Spec.if_statement -- Parse.for_fixes >> (fn ((shows, assumes), fixes) => (false, Binding.empty_atts, [], [Element.Fixes fixes, Element.Assumes assumes], Element.Shows shows)); fun theorem spec schematic descr = Outer_Syntax.local_theory_to_proof' spec ("state " ^ descr) ((long_statement || short_statement) >> (fn (long, binding, includes, elems, concl) => ((if schematic then Specification.schematic_theorem_cmd else Specification.theorem_cmd) long Thm.theoremK NONE (K I) binding includes elems concl))); val _ = theorem \<^command_keyword>\theorem\ false "theorem"; val _ = theorem \<^command_keyword>\lemma\ false "lemma"; val _ = theorem \<^command_keyword>\corollary\ false "corollary"; val _ = theorem \<^command_keyword>\proposition\ false "proposition"; val _ = theorem \<^command_keyword>\schematic_goal\ true "schematic goal"; in end\ ML \ local val _ = Outer_Syntax.local_theory' \<^command_keyword>\lemmas\ "define theorems" (Parse_Spec.name_facts -- Parse.for_fixes >> (fn (facts, fixes) => #2 oo Specification.theorems_cmd Thm.theoremK facts fixes)); val _ = Outer_Syntax.local_theory' \<^command_keyword>\declare\ "declare theorems" (Parse.and_list1 Parse.thms1 -- Parse.for_fixes >> (fn (facts, fixes) => #2 oo Specification.theorems_cmd "" [(Binding.empty_atts, flat facts)] fixes)); val _ = Outer_Syntax.local_theory \<^command_keyword>\named_theorems\ "declare named collection of theorems" (Parse.and_list1 (Parse.binding -- Scan.optional Parse.text "") >> fold (fn (b, descr) => snd o Named_Theorems.declare b descr)); in end\ subsubsection \Hide names\ ML \ local fun hide_names command_keyword what hide parse prep = Outer_Syntax.command command_keyword ("hide " ^ what ^ " from name space") ((Parse.opt_keyword "open" >> not) -- Scan.repeat1 parse >> (fn (fully, args) => (Toplevel.theory (fn thy => let val ctxt = Proof_Context.init_global thy in fold (hide fully o prep ctxt) args thy end)))); val _ = hide_names 
\<^command_keyword>\hide_class\ "classes" Sign.hide_class Parse.class Proof_Context.read_class; val _ = hide_names \<^command_keyword>\hide_type\ "types" Sign.hide_type Parse.type_const ((#1 o dest_Type) oo Proof_Context.read_type_name {proper = true, strict = false}); val _ = hide_names \<^command_keyword>\hide_const\ "consts" Sign.hide_const Parse.const ((#1 o dest_Const) oo Proof_Context.read_const {proper = true, strict = false}); val _ = hide_names \<^command_keyword>\hide_fact\ "facts" Global_Theory.hide_fact Parse.name_position (Global_Theory.check_fact o Proof_Context.theory_of); in end\ subsection \Bundled declarations\ ML \ local val _ = Outer_Syntax.maybe_begin_local_theory \<^command_keyword>\bundle\ "define bundle of declarations" ((Parse.binding --| \<^keyword>\=\) -- Parse.thms1 -- Parse.for_fixes >> (uncurry Bundle.bundle_cmd)) (Parse.binding --| Parse.begin >> Bundle.init); val _ = Outer_Syntax.local_theory \<^command_keyword>\unbundle\ "activate declarations from bundle in local theory" (Scan.repeat1 Parse.name_position >> Bundle.unbundle_cmd); val _ = Outer_Syntax.command \<^command_keyword>\include\ "activate declarations from bundle in proof body" (Scan.repeat1 Parse.name_position >> (Toplevel.proof o Bundle.include_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\including\ "activate declarations from bundle in goal refinement" (Scan.repeat1 Parse.name_position >> (Toplevel.proof o Bundle.including_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\print_bundles\ "print bundles of declarations" (Parse.opt_bang >> (fn b => Toplevel.keep (Bundle.print_bundles b o Toplevel.context_of))); in end\ subsection \Local theory specifications\ subsubsection \Specification context\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\context\ "begin local theory context" (((Parse.name_position -- Scan.optional Parse_Spec.opening []) >> (fn (name, incls) => Toplevel.begin_main_target true (Target_Context.context_begin_named_cmd incls name)) || Scan.optional Parse_Spec.includes [] -- Scan.repeat Parse_Spec.context_element >> (fn (incls, elems) => Toplevel.begin_nested_target (Target_Context.context_begin_nested_cmd incls elems))) --| Parse.begin); val _ = Outer_Syntax.command \<^command_keyword>\end\ "end context" (Scan.succeed (Toplevel.exit o Toplevel.end_main_target o Toplevel.end_nested_target o Toplevel.end_proof (K Proof.end_notepad))); in end\ subsubsection \Locales and interpretation\ ML \ local val locale_context_elements = Scan.repeat1 Parse_Spec.context_element; val locale_val = ((Parse_Spec.locale_expression -- Scan.optional Parse_Spec.opening []) || Parse_Spec.opening >> pair ([], [])) -- Scan.optional (\<^keyword>\+\ |-- Parse.!!! locale_context_elements) [] || locale_context_elements >> pair (([], []), []); val _ = Outer_Syntax.command \<^command_keyword>\locale\ "define named specification context" (Parse.binding -- Scan.optional (\<^keyword>\=\ |-- Parse.!!! locale_val) ((([], []), []), []) -- Parse.opt_begin >> (fn ((name, ((expr, includes), elems)), begin) => Toplevel.begin_main_target begin (Expression.add_locale_cmd name Binding.empty includes expr elems #> snd))); val _ = Outer_Syntax.command \<^command_keyword>\experiment\ "open private specification context" (Scan.repeat Parse_Spec.context_element --| Parse.begin >> (fn elems => Toplevel.begin_main_target true (Experiment.experiment_cmd elems #> snd))); val _ = Outer_Syntax.command \<^command_keyword>\interpret\ "prove interpretation of locale expression in proof context" (Parse.!!! 
Parse_Spec.locale_expression >> (fn expr => Toplevel.proof (Interpretation.interpret_cmd expr))); val interpretation_args_with_defs = Parse.!!! Parse_Spec.locale_expression -- (Scan.optional (\<^keyword>\defines\ |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":" -- ((Parse.binding -- Parse.opt_mixfix') --| \<^keyword>\=\ -- Parse.term))) ([])); val _ = Outer_Syntax.local_theory_to_proof \<^command_keyword>\global_interpretation\ "prove interpretation of locale expression into global theory" (interpretation_args_with_defs >> (fn (expr, defs) => Interpretation.global_interpretation_cmd expr defs)); val _ = Outer_Syntax.command \<^command_keyword>\sublocale\ "prove sublocale relation between a locale and a locale expression" ((Parse.name_position --| (\<^keyword>\\\ || \<^keyword>\<\) -- interpretation_args_with_defs >> (fn (loc, (expr, defs)) => Toplevel.theory_to_proof (Interpretation.global_sublocale_cmd loc expr defs))) || interpretation_args_with_defs >> (fn (expr, defs) => Toplevel.local_theory_to_proof NONE NONE (Interpretation.sublocale_cmd expr defs))); val _ = Outer_Syntax.command \<^command_keyword>\interpretation\ "prove interpretation of locale expression in local theory or into global theory" (Parse.!!! Parse_Spec.locale_expression >> (fn expr => Toplevel.local_theory_to_proof NONE NONE (Interpretation.isar_interpretation_cmd expr))); in end\ subsubsection \Type classes\ ML \ local val class_context_elements = Scan.repeat1 Parse_Spec.context_element; val class_val = ((Parse_Spec.class_expression -- Scan.optional Parse_Spec.opening []) || Parse_Spec.opening >> pair []) -- Scan.optional (\<^keyword>\+\ |-- Parse.!!! class_context_elements) [] || class_context_elements >> pair ([], []); val _ = Outer_Syntax.command \<^command_keyword>\class\ "define type class" (Parse.binding -- Scan.optional (\<^keyword>\=\ |-- class_val) (([], []), []) -- Parse.opt_begin >> (fn ((name, ((supclasses, includes), elems)), begin) => Toplevel.begin_main_target begin (Class_Declaration.class_cmd name includes supclasses elems #> snd))); val _ = Outer_Syntax.local_theory_to_proof \<^command_keyword>\subclass\ "prove a subclass relation" (Parse.class >> Class_Declaration.subclass_cmd); val _ = Outer_Syntax.command \<^command_keyword>\instantiation\ "instantiate and prove type arity" (Parse.multi_arity --| Parse.begin >> (fn arities => Toplevel.begin_main_target true (Class.instantiation_cmd arities))); val _ = Outer_Syntax.command \<^command_keyword>\instance\ "prove type arity or subclass relation" ((Parse.class -- ((\<^keyword>\\\ || \<^keyword>\<\) |-- Parse.!!! 
Parse.class) >> Class.classrel_cmd || Parse.multi_arity >> Class.instance_arity_cmd) >> Toplevel.theory_to_proof || Scan.succeed (Toplevel.local_theory_to_proof NONE NONE (Class.instantiation_instance I))); in end\ subsubsection \Arbitrary overloading\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\overloading\ "overloaded definitions" (Scan.repeat1 (Parse.name --| (\<^keyword>\==\ || \<^keyword>\\\) -- Parse.term -- Scan.optional (\<^keyword>\(\ |-- (\<^keyword>\unchecked\ >> K false) --| \<^keyword>\)\) true >> Scan.triple1) --| Parse.begin >> (fn operations => Toplevel.begin_main_target true (Overloading.overloading_cmd operations))); in end\ subsection \Proof commands\ ML \ local val _ = Outer_Syntax.local_theory_to_proof \<^command_keyword>\notepad\ "begin proof context" (Parse.begin >> K Proof.begin_notepad); in end\ subsubsection \Statements\ ML \ local val structured_statement = Parse_Spec.statement -- Parse_Spec.cond_statement -- Parse.for_fixes >> (fn ((shows, (strict, assumes)), fixes) => (strict, fixes, assumes, shows)); val _ = Outer_Syntax.command \<^command_keyword>\have\ "state local goal" (structured_statement >> (fn (a, b, c, d) => Toplevel.proof' (fn int => Proof.have_cmd a NONE (K I) b c d int #> #2))); val _ = Outer_Syntax.command \<^command_keyword>\show\ "state local goal, to refine pending subgoals" (structured_statement >> (fn (a, b, c, d) => Toplevel.proof' (fn int => Proof.show_cmd a NONE (K I) b c d int #> #2))); val _ = Outer_Syntax.command \<^command_keyword>\hence\ "old-style alias of \"then have\"" (structured_statement >> (fn (a, b, c, d) => Toplevel.proof' (fn int => Proof.chain #> Proof.have_cmd a NONE (K I) b c d int #> #2))); val _ = Outer_Syntax.command \<^command_keyword>\thus\ "old-style alias of \"then show\"" (structured_statement >> (fn (a, b, c, d) => Toplevel.proof' (fn int => Proof.chain #> Proof.show_cmd a NONE (K I) b c d int #> #2))); in end\ subsubsection \Local facts\ ML \ local val facts = Parse.and_list1 Parse.thms1; val _ = Outer_Syntax.command \<^command_keyword>\then\ "forward chaining" (Scan.succeed (Toplevel.proof Proof.chain)); val _ = Outer_Syntax.command \<^command_keyword>\from\ "forward chaining from given facts" (facts >> (Toplevel.proof o Proof.from_thmss_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\with\ "forward chaining from given and current facts" (facts >> (Toplevel.proof o Proof.with_thmss_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\note\ "define facts" (Parse_Spec.name_facts >> (Toplevel.proof o Proof.note_thmss_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\supply\ "define facts during goal refinement (unstructured)" (Parse_Spec.name_facts >> (Toplevel.proof o Proof.supply_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\using\ "augment goal facts" (facts >> (Toplevel.proof o Proof.using_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\unfolding\ "unfold definitions in goal and facts" (facts >> (Toplevel.proof o Proof.unfolding_cmd)); in end\ subsubsection \Proof context\ ML \ local val structured_statement = Parse_Spec.statement -- Parse_Spec.if_statement' -- Parse.for_fixes >> (fn ((shows, assumes), fixes) => (fixes, assumes, shows)); val _ = Outer_Syntax.command \<^command_keyword>\fix\ "fix local variables (Skolem constants)" (Parse.vars >> (Toplevel.proof o Proof.fix_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\assume\ "assume propositions" (structured_statement >> (fn (a, b, c) => Toplevel.proof (Proof.assume_cmd a b c))); val _ = 
Outer_Syntax.command \<^command_keyword>\presume\ "assume propositions, to be established later" (structured_statement >> (fn (a, b, c) => Toplevel.proof (Proof.presume_cmd a b c))); val _ = Outer_Syntax.command \<^command_keyword>\define\ "local definition (non-polymorphic)" ((Parse.vars --| Parse.where_) -- Parse_Spec.statement -- Parse.for_fixes >> (fn ((a, b), c) => Toplevel.proof (Proof.define_cmd a c b))); val _ = Outer_Syntax.command \<^command_keyword>\consider\ "state cases rule" (Parse_Spec.obtains >> (Toplevel.proof' o Obtain.consider_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\obtain\ "generalized elimination" (Parse.parbinding -- Scan.optional (Parse.vars --| Parse.where_) [] -- structured_statement >> (fn ((a, b), (c, d, e)) => Toplevel.proof' (Obtain.obtain_cmd a b c d e))); val _ = Outer_Syntax.command \<^command_keyword>\guess\ "wild guessing (unstructured)" (Scan.optional Parse.vars [] >> (Toplevel.proof' o Obtain.guess_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\let\ "bind text variables" (Parse.and_list1 (Parse.and_list1 Parse.term -- (\<^keyword>\=\ |-- Parse.term)) >> (Toplevel.proof o Proof.let_bind_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\write\ "add concrete syntax for constants / fixed variables" (Parse.syntax_mode -- Parse.and_list1 (Parse.const -- Parse.mixfix) >> (fn (mode, args) => Toplevel.proof (Proof.write_cmd mode args))); val _ = Outer_Syntax.command \<^command_keyword>\case\ "invoke local context" (Parse_Spec.opt_thm_name ":" -- (\<^keyword>\(\ |-- Parse.!!! (Parse.name_position -- Scan.repeat (Parse.maybe Parse.binding) --| \<^keyword>\)\) || Parse.name_position >> rpair []) >> (Toplevel.proof o Proof.case_cmd)); in end\ subsubsection \Proof structure\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\{\ "begin explicit proof block" (Scan.succeed (Toplevel.proof Proof.begin_block)); val _ = Outer_Syntax.command \<^command_keyword>\}\ "end explicit proof block" (Scan.succeed (Toplevel.proof Proof.end_block)); val _ = Outer_Syntax.command \<^command_keyword>\next\ "enter next proof block" (Scan.succeed (Toplevel.proof Proof.next_block)); in end\ subsubsection \End proof\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\qed\ "conclude proof" (Scan.option Method.parse >> (fn m => (Option.map Method.report m; Isar_Cmd.qed m))); val _ = Outer_Syntax.command \<^command_keyword>\by\ "terminal backward proof" (Method.parse -- Scan.option Method.parse >> (fn (m1, m2) => (Method.report m1; Option.map Method.report m2; Isar_Cmd.terminal_proof (m1, m2)))); val _ = Outer_Syntax.command \<^command_keyword>\..\ "default proof" (Scan.succeed Isar_Cmd.default_proof); val _ = Outer_Syntax.command \<^command_keyword>\.\ "immediate proof" (Scan.succeed Isar_Cmd.immediate_proof); val _ = Outer_Syntax.command \<^command_keyword>\done\ "done proof" (Scan.succeed Isar_Cmd.done_proof); val _ = Outer_Syntax.command \<^command_keyword>\sorry\ "skip proof (quick-and-dirty mode only!)" (Scan.succeed Isar_Cmd.skip_proof); val _ = Outer_Syntax.command \<^command_keyword>\\\ "dummy proof (quick-and-dirty mode only!)" (Scan.succeed Isar_Cmd.skip_proof); val _ = Outer_Syntax.command \<^command_keyword>\oops\ "forget proof" (Scan.succeed Toplevel.forget_proof); in end\ subsubsection \Proof steps\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\defer\ "shuffle internal proof state" (Scan.optional Parse.nat 1 >> (Toplevel.proof o Proof.defer)); val _ = Outer_Syntax.command \<^command_keyword>\prefer\ "shuffle internal 
proof state" (Parse.nat >> (Toplevel.proof o Proof.prefer)); val _ = Outer_Syntax.command \<^command_keyword>\apply\ "initial goal refinement step (unstructured)" (Method.parse >> (fn m => (Method.report m; Toplevel.proofs (Proof.apply m)))); val _ = Outer_Syntax.command \<^command_keyword>\apply_end\ "terminal goal refinement step (unstructured)" (Method.parse >> (fn m => (Method.report m; Toplevel.proofs (Proof.apply_end m)))); val _ = Outer_Syntax.command \<^command_keyword>\proof\ "backward proof step" (Scan.option Method.parse >> (fn m => (Option.map Method.report m; Toplevel.proof (fn state => let val state' = state |> Proof.proof m |> Seq.the_result ""; val _ = Output.information (Proof_Context.print_cases_proof (Proof.context_of state) (Proof.context_of state')); in state' end)))) in end\ subsubsection \Subgoal focus\ ML \ local val opt_fact_binding = Scan.optional (Parse.binding -- Parse.opt_attribs || Parse.attribs >> pair Binding.empty) Binding.empty_atts; val for_params = Scan.optional (\<^keyword>\for\ |-- Parse.!!! ((Scan.option Parse.dots >> is_some) -- (Scan.repeat1 (Parse.maybe_position Parse.name_position)))) (false, []); val _ = Outer_Syntax.command \<^command_keyword>\subgoal\ "focus on first subgoal within backward refinement" (opt_fact_binding -- (Scan.option (\<^keyword>\premises\ |-- Parse.!!! opt_fact_binding)) -- for_params >> (fn ((a, b), c) => Toplevel.proofs (Seq.make_results o Seq.single o #2 o Subgoal.subgoal_cmd a b c))); in end\ subsubsection \Calculation\ ML \ local val calculation_args = Scan.option (\<^keyword>\(\ |-- Parse.!!! ((Parse.thms1 --| \<^keyword>\)\))); val _ = Outer_Syntax.command \<^command_keyword>\also\ "combine calculation and current facts" (calculation_args >> (Toplevel.proofs' o Calculation.also_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\finally\ "combine calculation and current facts, exhibit result" (calculation_args >> (Toplevel.proofs' o Calculation.finally_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\moreover\ "augment calculation by current facts" (Scan.succeed (Toplevel.proof' Calculation.moreover)); val _ = Outer_Syntax.command \<^command_keyword>\ultimately\ "augment calculation by current facts, exhibit result" (Scan.succeed (Toplevel.proof' Calculation.ultimately)); val _ = Outer_Syntax.command \<^command_keyword>\print_trans_rules\ "print transitivity rules" (Scan.succeed (Toplevel.keep (Calculation.print_rules o Toplevel.context_of))); in end\ subsubsection \Proof navigation\ ML \ local fun report_back () = Output.report [Markup.markup (Markup.bad ()) "Explicit backtracking"]; val _ = Outer_Syntax.command \<^command_keyword>\back\ "explicit backtracking of proof command" (Scan.succeed (Toplevel.actual_proof (fn prf => (report_back (); Proof_Node.back prf)) o Toplevel.skip_proof report_back)); in end\ subsection \Diagnostic commands (for interactive mode only)\ ML \ local val opt_modes = Scan.optional (\<^keyword>\(\ |-- Parse.!!! 
(Scan.repeat1 Parse.name --| \<^keyword>\)\)) []; val _ = Outer_Syntax.command \<^command_keyword>\help\ "retrieve outer syntax commands according to name patterns" (Scan.repeat Parse.name >> (fn pats => Toplevel.keep (fn st => Outer_Syntax.help (Toplevel.theory_of st) pats))); val _ = Outer_Syntax.command \<^command_keyword>\print_commands\ "print outer syntax commands" (Scan.succeed (Toplevel.keep (Outer_Syntax.print_commands o Toplevel.theory_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_options\ "print configuration options" (Parse.opt_bang >> (fn b => Toplevel.keep (Attrib.print_options b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_context\ "print context of local theory target" (Scan.succeed (Toplevel.keep (Pretty.writeln_chunks o Toplevel.pretty_context))); val _ = Outer_Syntax.command \<^command_keyword>\print_theory\ "print logical theory contents" (Parse.opt_bang >> (fn b => Toplevel.keep (Pretty.writeln o Proof_Display.pretty_theory b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_definitions\ "print dependencies of definitional theory content" (Parse.opt_bang >> (fn b => Toplevel.keep (Pretty.writeln o Proof_Display.pretty_definitions b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_syntax\ "print inner syntax of context" (Scan.succeed (Toplevel.keep (Proof_Context.print_syntax o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_defn_rules\ "print definitional rewrite rules of context" (Scan.succeed (Toplevel.keep (Local_Defs.print_rules o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_abbrevs\ "print constant abbreviations of context" (Parse.opt_bang >> (fn b => Toplevel.keep (Proof_Context.print_abbrevs b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_theorems\ "print theorems of local theory or proof context" (Parse.opt_bang >> (fn b => Toplevel.keep (Pretty.writeln o Pretty.chunks o Isar_Cmd.pretty_theorems b))); val _ = Outer_Syntax.command \<^command_keyword>\print_locales\ "print locales of this theory" (Parse.opt_bang >> (fn verbose => Toplevel.keep (fn state => let val thy = Toplevel.theory_of state in Pretty.writeln (Locale.pretty_locales thy verbose) end))); val _ = Outer_Syntax.command \<^command_keyword>\print_classes\ "print classes of this theory" (Scan.succeed (Toplevel.keep (Class.print_classes o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_locale\ "print locale of this theory" (Parse.opt_bang -- Parse.name_position >> (fn (show_facts, raw_name) => Toplevel.keep (fn state => let val thy = Toplevel.theory_of state; val name = Locale.check thy raw_name; in Pretty.writeln (Locale.pretty_locale thy show_facts name) end))); val _ = Outer_Syntax.command \<^command_keyword>\print_interps\ "print interpretations of locale for this theory or proof context" (Parse.name_position >> (fn raw_name => Toplevel.keep (fn state => let val ctxt = Toplevel.context_of state; val thy = Toplevel.theory_of state; val name = Locale.check thy raw_name; in Pretty.writeln (Locale.pretty_registrations ctxt name) end))); val _ = Outer_Syntax.command \<^command_keyword>\print_attributes\ "print attributes of this theory" (Parse.opt_bang >> (fn b => Toplevel.keep (Attrib.print_attributes b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_simpset\ "print context of Simplifier" (Parse.opt_bang >> (fn b => 
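(*Editorial note: b is the flag yielded by Parse.opt_bang, i.e. presumably true
  when the command is given a "!" suffix, and is passed on to request the more
  verbose form of the simpset printout.*)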
Toplevel.keep (Pretty.writeln o Simplifier.pretty_simpset b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_rules\ "print intro/elim rules" (Scan.succeed (Toplevel.keep (Context_Rules.print_rules o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_methods\ "print methods of this theory" (Parse.opt_bang >> (fn b => Toplevel.keep (Method.print_methods b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_antiquotations\ "print document antiquotations" (Parse.opt_bang >> (fn b => Toplevel.keep (Document_Antiquotation.print_antiquotations b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_ML_antiquotations\ "print ML antiquotations" (Parse.opt_bang >> (fn b => Toplevel.keep (ML_Context.print_antiquotations b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\locale_deps\ "visualize locale dependencies" (Scan.succeed (Toplevel.keep (Toplevel.theory_of #> (fn thy => Locale.pretty_locale_deps thy |> map (fn {name, parents, body} => ((name, Graph_Display.content_node (Locale.extern thy name) [body]), parents)) |> Graph_Display.display_graph_old)))); val _ = Outer_Syntax.command \<^command_keyword>\print_term_bindings\ "print term bindings of proof context" (Scan.succeed (Toplevel.keep (Pretty.writeln_chunks o Proof_Context.pretty_term_bindings o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_facts\ "print facts of proof context" (Parse.opt_bang >> (fn b => Toplevel.keep (Proof_Context.print_local_facts b o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_cases\ "print cases of proof context" (Scan.succeed (Toplevel.keep (Pretty.writeln_chunks o Proof_Context.pretty_cases o Toplevel.context_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_statement\ "print theorems as long statements" (opt_modes -- Parse.thms1 >> Isar_Cmd.print_stmts); val _ = Outer_Syntax.command \<^command_keyword>\thm\ "print theorems" (opt_modes -- Parse.thms1 >> Isar_Cmd.print_thms); val _ = Outer_Syntax.command \<^command_keyword>\prf\ "print proof terms of theorems" (opt_modes -- Scan.option Parse.thms1 >> Isar_Cmd.print_prfs false); val _ = Outer_Syntax.command \<^command_keyword>\full_prf\ "print full proof terms of theorems" (opt_modes -- Scan.option Parse.thms1 >> Isar_Cmd.print_prfs true); val _ = Outer_Syntax.command \<^command_keyword>\prop\ "read and print proposition" (opt_modes -- Parse.term >> Isar_Cmd.print_prop); val _ = Outer_Syntax.command \<^command_keyword>\term\ "read and print term" (opt_modes -- Parse.term >> Isar_Cmd.print_term); val _ = Outer_Syntax.command \<^command_keyword>\typ\ "read and print type" (opt_modes -- (Parse.typ -- Scan.option (\<^keyword>\::\ |-- Parse.!!! 
Parse.sort)) >> Isar_Cmd.print_type); val _ = Outer_Syntax.command \<^command_keyword>\print_codesetup\ "print code generator setup" (Scan.succeed (Toplevel.keep (Code.print_codesetup o Toplevel.theory_of))); val _ = Outer_Syntax.command \<^command_keyword>\print_state\ "print current proof state (if present)" (opt_modes >> (fn modes => Toplevel.keep (Print_Mode.with_modes modes (Output.state o Toplevel.string_of_state)))); val _ = Outer_Syntax.command \<^command_keyword>\welcome\ "print welcome message" (Scan.succeed (Toplevel.keep (fn _ => writeln (Session.welcome ())))); in end\ subsection \Dependencies\ ML \ local val theory_bounds = Parse.theory_name >> single || (\<^keyword>\(\ |-- Parse.enum "|" Parse.theory_name --| \<^keyword>\)\); val _ = Outer_Syntax.command \<^command_keyword>\thy_deps\ "visualize theory dependencies" (Scan.option theory_bounds -- Scan.option theory_bounds >> (fn args => Toplevel.keep (fn st => Thy_Deps.thy_deps_cmd (Toplevel.context_of st) args))); val class_bounds = Parse.sort >> single || (\<^keyword>\(\ |-- Parse.enum "|" Parse.sort --| \<^keyword>\)\); val _ = Outer_Syntax.command \<^command_keyword>\class_deps\ "visualize class dependencies" (Scan.option class_bounds -- Scan.option class_bounds >> (fn args => Toplevel.keep (fn st => Class_Deps.class_deps_cmd (Toplevel.context_of st) args))); val _ = Outer_Syntax.command \<^command_keyword>\thm_deps\ "print theorem dependencies (immediate non-transitive)" (Parse.thms1 >> (fn args => Toplevel.keep (fn st => let val thy = Toplevel.theory_of st; val ctxt = Toplevel.context_of st; in Pretty.writeln (Thm_Deps.pretty_thm_deps thy (Attrib.eval_thms ctxt args)) end))); val _ = Outer_Syntax.command \<^command_keyword>\thm_oracles\ "print all oracles used in theorems (full graph of transitive dependencies)" (Parse.thms1 >> (fn args => Toplevel.keep (fn st => let val ctxt = Toplevel.context_of st; val thms = Attrib.eval_thms ctxt args; in Pretty.writeln (Thm_Deps.pretty_thm_oracles ctxt thms) end))); val thy_names = Scan.repeat1 (Scan.unless Parse.minus Parse.theory_name); val _ = Outer_Syntax.command \<^command_keyword>\unused_thms\ "find unused theorems" (Scan.option ((thy_names --| Parse.minus) -- Scan.option thy_names) >> (fn opt_range => Toplevel.keep (fn st => let val thy = Toplevel.theory_of st; val ctxt = Toplevel.context_of st; fun pretty_thm (a, th) = Proof_Context.pretty_fact ctxt (a, [th]); val check = Theory.check {long = false} ctxt; in Thm_Deps.unused_thms_cmd (case opt_range of NONE => (Theory.parents_of thy, [thy]) | SOME (xs, NONE) => (map check xs, [thy]) | SOME (xs, SOME ys) => (map check xs, map check ys)) |> map pretty_thm |> Pretty.writeln_chunks end))); in end\ subsubsection \Find consts and theorems\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\find_consts\ "find constants by name / type patterns" (Find_Consts.query_parser >> (fn spec => Toplevel.keep (fn st => Pretty.writeln (Find_Consts.pretty_consts (Toplevel.context_of st) spec)))); val options = Scan.optional (Parse.$$$ "(" |-- Parse.!!! 
(Scan.option Parse.nat -- Scan.optional (Parse.reserved "with_dups" >> K false) true --| Parse.$$$ ")")) (NONE, true); val _ = Outer_Syntax.command \<^command_keyword>\find_theorems\ "find theorems meeting specified criteria" (options -- Find_Theorems.query_parser >> (fn ((opt_lim, rem_dups), spec) => Toplevel.keep (fn st => Pretty.writeln (Find_Theorems.pretty_theorems (Find_Theorems.proof_state st) opt_lim rem_dups spec)))); in end\ subsection \Code generation\ ML \ local val _ = Outer_Syntax.command \<^command_keyword>\code_datatype\ "define set of code datatype constructors" (Scan.repeat1 Parse.term >> (Toplevel.theory o Code.declare_datatype_cmd)); in end\ subsection \Extraction of programs from proofs\ ML \ local val parse_vars = Scan.optional (Parse.$$$ "(" |-- Parse.list1 Parse.name --| Parse.$$$ ")") []; val _ = Outer_Syntax.command \<^command_keyword>\realizers\ "specify realizers for primitive axioms / theorems, together with correctness proof" (Scan.repeat1 (Parse.name -- parse_vars --| Parse.$$$ ":" -- Parse.string -- Parse.string) >> (fn xs => Toplevel.theory (fn thy => Extraction.add_realizers (map (fn (((a, vs), s1), s2) => (Global_Theory.get_thm thy a, (vs, s1, s2))) xs) thy))); val _ = Outer_Syntax.command \<^command_keyword>\realizability\ "add equations characterizing realizability" (Scan.repeat1 Parse.string >> (Toplevel.theory o Extraction.add_realizes_eqns)); val _ = Outer_Syntax.command \<^command_keyword>\extract_type\ "add equations characterizing type of extracted program" (Scan.repeat1 Parse.string >> (Toplevel.theory o Extraction.add_typeof_eqns)); val _ = Outer_Syntax.command \<^command_keyword>\extract\ "extract terms from proofs" (Scan.repeat1 (Parse.name -- parse_vars) >> (fn xs => Toplevel.theory (fn thy => Extraction.extract (map (apfst (Global_Theory.get_thm thy)) xs) thy))); in end\ section \Auxiliary lemmas\ subsection \Meta-level connectives in assumptions\ lemma meta_mp: assumes "PROP P \ PROP Q" and "PROP P" shows "PROP Q" by (rule \PROP P \ PROP Q\ [OF \PROP P\]) lemmas meta_impE = meta_mp [elim_format] lemma meta_spec: assumes "\x. PROP P x" shows "PROP P x" by (rule \\x. PROP P x\) lemmas meta_allE = meta_spec [elim_format] lemma swap_params: "(\x y. PROP P x y) \ (\y x. PROP P x y)" .. lemma equal_allI: \(\x. PROP P x) \ (\x. PROP Q x)\ if \\x. PROP P x \ PROP Q x\ by (simp only: that) subsection \Meta-level conjunction\ lemma all_conjunction: "(\x. PROP A x &&& PROP B x) \ ((\x. PROP A x) &&& (\x. PROP B x))" proof assume conj: "\x. PROP A x &&& PROP B x" show "(\x. PROP A x) &&& (\x. PROP B x)" proof - fix x from conj show "PROP A x" by (rule conjunctionD1) from conj show "PROP B x" by (rule conjunctionD2) qed next assume conj: "(\x. PROP A x) &&& (\x. 
PROP B x)" fix x show "PROP A x &&& PROP B x" proof - show "PROP A x" by (rule conj [THEN conjunctionD1, rule_format]) show "PROP B x" by (rule conj [THEN conjunctionD2, rule_format]) qed qed lemma imp_conjunction: "(PROP A \ PROP B &&& PROP C) \ ((PROP A \ PROP B) &&& (PROP A \ PROP C))" proof assume conj: "PROP A \ PROP B &&& PROP C" show "(PROP A \ PROP B) &&& (PROP A \ PROP C)" proof - assume "PROP A" from conj [OF \PROP A\] show "PROP B" by (rule conjunctionD1) from conj [OF \PROP A\] show "PROP C" by (rule conjunctionD2) qed next assume conj: "(PROP A \ PROP B) &&& (PROP A \ PROP C)" assume "PROP A" show "PROP B &&& PROP C" proof - from \PROP A\ show "PROP B" by (rule conj [THEN conjunctionD1]) from \PROP A\ show "PROP C" by (rule conj [THEN conjunctionD2]) qed qed lemma conjunction_imp: "(PROP A &&& PROP B \ PROP C) \ (PROP A \ PROP B \ PROP C)" proof assume r: "PROP A &&& PROP B \ PROP C" assume ab: "PROP A" "PROP B" show "PROP C" proof (rule r) from ab show "PROP A &&& PROP B" . qed next assume r: "PROP A \ PROP B \ PROP C" assume conj: "PROP A &&& PROP B" show "PROP C" proof (rule r) from conj show "PROP A" by (rule conjunctionD1) from conj show "PROP B" by (rule conjunctionD2) qed qed declare [[ML_write_global = false]] end diff --git a/src/Pure/Thy/document_antiquotations.ML b/src/Pure/Thy/document_antiquotations.ML --- a/src/Pure/Thy/document_antiquotations.ML +++ b/src/Pure/Thy/document_antiquotations.ML @@ -1,344 +1,350 @@ (* Title: Pure/Thy/document_antiquotations.ML Author: Makarius Miscellaneous document antiquotations. *) structure Document_Antiquotations: sig end = struct (* basic entities *) local type style = term -> term; fun pretty_term_style ctxt (style: style, t) = Thy_Output.pretty_term ctxt (style t); fun pretty_thms_style ctxt (style: style, ths) = map (fn th => Thy_Output.pretty_term ctxt (style (Thm.full_prop_of th))) ths; fun pretty_term_typ ctxt (style: style, t) = let val t' = style t in Thy_Output.pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end; fun pretty_term_typeof ctxt (style: style, t) = Syntax.pretty_typ ctxt (Term.fastype_of (style t)); fun pretty_const ctxt c = let val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) handle TYPE (msg, _, _) => error msg; val (t', _) = yield_singleton (Variable.import_terms true) t ctxt; in Thy_Output.pretty_term ctxt t' end; fun pretty_abbrev ctxt s = let val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s; fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t); val (head, args) = Term.strip_comb t; val (c, T) = Term.dest_Const head handle TERM _ => err (); val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c handle TYPE _ => err (); val t' = Term.betapplys (Envir.expand_atom T (U, u), args); val eq = Logic.mk_equals (t, t'); val ctxt' = Proof_Context.augment eq ctxt; in Proof_Context.pretty_term_abbrev ctxt' eq end; fun pretty_locale ctxt (name, pos) = let val thy = Proof_Context.theory_of ctxt in Pretty.str (Locale.extern thy (Locale.check thy (name, pos))) end; fun pretty_class ctxt s = Pretty.str (Proof_Context.extern_class ctxt (Proof_Context.read_class ctxt s)); fun pretty_type ctxt s = let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s in Pretty.str (Proof_Context.extern_type ctxt name) end; fun pretty_prf full ctxt = Proof_Syntax.pretty_standard_proof_of ctxt full; fun pretty_theory ctxt (name, pos) = (Theory.check {long = true} ctxt (name, pos); 
Pretty.str name); val basic_entity = Thy_Output.antiquotation_pretty_source_embedded; fun basic_entities name scan pretty = Document_Antiquotation.setup name scan (fn {context = ctxt, source = src, argument = xs} => Thy_Output.pretty_items_source ctxt {embedded = false} src (map (pretty ctxt) xs)); in val _ = Theory.setup (basic_entity \<^binding>\prop\ (Term_Style.parse -- Args.prop) pretty_term_style #> basic_entity \<^binding>\term\ (Term_Style.parse -- Args.term) pretty_term_style #> basic_entity \<^binding>\term_type\ (Term_Style.parse -- Args.term) pretty_term_typ #> basic_entity \<^binding>\typeof\ (Term_Style.parse -- Args.term) pretty_term_typeof #> basic_entity \<^binding>\const\ (Args.const {proper = true, strict = false}) pretty_const #> basic_entity \<^binding>\abbrev\ (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #> basic_entity \<^binding>\typ\ Args.typ_abbrev Syntax.pretty_typ #> basic_entity \<^binding>\locale\ (Scan.lift Args.embedded_position) pretty_locale #> basic_entity \<^binding>\class\ (Scan.lift Args.embedded_inner_syntax) pretty_class #> basic_entity \<^binding>\type\ (Scan.lift Args.embedded_inner_syntax) pretty_type #> basic_entity \<^binding>\theory\ (Scan.lift Args.embedded_position) pretty_theory #> basic_entities \<^binding>\prf\ Attrib.thms (pretty_prf false) #> basic_entities \<^binding>\full_prf\ Attrib.thms (pretty_prf true) #> Document_Antiquotation.setup \<^binding>\thm\ (Term_Style.parse -- Attrib.thms) (fn {context = ctxt, source = src, argument = arg} => Thy_Output.pretty_items_source ctxt {embedded = false} src (pretty_thms_style ctxt arg))); end; (* Markdown errors *) local fun markdown_error binding = Document_Antiquotation.setup binding (Scan.succeed ()) (fn {source = src, ...} => error ("Bad Markdown structure: illegal " ^ quote (Binding.name_of binding) ^ Position.here (Position.no_range_position (#1 (Token.range_of src))))) in val _ = Theory.setup (markdown_error \<^binding>\item\ #> markdown_error \<^binding>\enum\ #> markdown_error \<^binding>\descr\); end; (* control spacing *) val _ = Theory.setup (Thy_Output.antiquotation_raw \<^binding>\noindent\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\noindent") #> Thy_Output.antiquotation_raw \<^binding>\smallskip\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\smallskip") #> Thy_Output.antiquotation_raw \<^binding>\medskip\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\medskip") #> Thy_Output.antiquotation_raw \<^binding>\bigskip\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\bigskip")); (* control style *) local fun control_antiquotation name s1 s2 = Thy_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input) (fn ctxt => Latex.enclose_block s1 s2 o Thy_Output.output_document ctxt {markdown = false}); in val _ = Theory.setup (control_antiquotation \<^binding>\footnote\ "\\footnote{" "}" #> control_antiquotation \<^binding>\emph\ "\\emph{" "}" #> control_antiquotation \<^binding>\bold\ "\\textbf{" "}"); end; (* quasi-formal text (unchecked) *) local fun report_text ctxt text = let val pos = Input.pos_of text in Context_Position.reports ctxt [(pos, Markup.language_text (Input.is_delimited text)), (pos, Markup.raw_text)] end; fun prepare_text ctxt = Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt; fun text_antiquotation name = Thy_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input) (fn ctxt => fn text => let val _ = report_text ctxt text; in prepare_text ctxt text |> Thy_Output.output_source ctxt |> 
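(*Editorial note: the text antiquotation reports language_text/raw_text markup,
  normalizes the argument via prepare_text (Document_Antiquotation.prepare_lines),
  renders it with Thy_Output.output_source, and hands the result to
  Thy_Output.isabelle below.*)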
Thy_Output.isabelle ctxt end); val theory_text_antiquotation = Thy_Output.antiquotation_raw_embedded \<^binding>\theory_text\ (Scan.lift Args.text_input) (fn ctxt => fn text => let val keywords = Thy_Header.get_keywords' ctxt; val _ = report_text ctxt text; val _ = Input.source_explode text |> Token.tokenize keywords {strict = true} |> maps (Token.reports keywords) |> Context_Position.reports_text ctxt; in prepare_text ctxt text |> Token.explode0 keywords |> maps (Thy_Output.output_token ctxt) |> Thy_Output.isabelle ctxt end); in val _ = Theory.setup (text_antiquotation \<^binding>\text\ #> text_antiquotation \<^binding>\cartouche\ #> theory_text_antiquotation); end; (* goal state *) local fun goal_state name main = Thy_Output.antiquotation_pretty name (Scan.succeed ()) (fn ctxt => fn () => Goal_Display.pretty_goal (Config.put Goal_Display.show_main_goal main ctxt) (#goal (Proof.goal (Toplevel.proof_of (Toplevel.presentation_state ctxt))))); in val _ = Theory.setup (goal_state \<^binding>\goals\ true #> goal_state \<^binding>\subgoals\ false); end; (* embedded lemma *) val _ = Theory.setup (Document_Antiquotation.setup \<^binding>\lemma\ (Scan.lift (Scan.ahead Parse.not_eof) -- Args.prop -- Scan.lift (Parse.position (Parse.reserved "by") -- Method.parse -- Scan.option Method.parse)) (fn {context = ctxt, source = src, argument = ((prop_tok, prop), (((_, by_pos), m1), m2))} => let val reports = (by_pos, Markup.keyword1 |> Markup.keyword_properties) :: maps Method.reports_of (m1 :: the_list m2); val _ = Context_Position.reports ctxt reports; (* FIXME check proof!? *) val _ = ctxt |> Proof.theorem NONE (K I) [[(prop, [])]] |> Proof.global_terminal_proof (m1, m2); in Thy_Output.pretty_source ctxt {embedded = false} [hd src, prop_tok] (Thy_Output.pretty_term ctxt prop) end)); (* verbatim text *) val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\verbatim\ (Scan.lift Args.text_input) (fn ctxt => fn text => let val pos = Input.pos_of text; val _ = Context_Position.reports ctxt [(pos, Markup.language_verbatim (Input.is_delimited text)), (pos, Markup.raw_text)]; in #1 (Input.source_content text) end)); (* bash functions *) val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\bash_function\ (Scan.lift Args.embedded_position) Isabelle_System.check_bash_function); (* system options *) val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\system_option\ (Scan.lift Args.embedded_position) (fn ctxt => fn (name, pos) => let val _ = Completion.check_option (Options.default ()) ctxt (name, pos); in name end)); (* ML text *) local fun ml_text name ml = Thy_Output.antiquotation_verbatim_embedded name (Scan.lift Args.text_input) (fn ctxt => fn text => let val _ = ML_Context.eval_in (SOME ctxt) ML_Compiler.flags (Input.pos_of text) (ml text) in #1 (Input.source_content text) end); fun ml_enclose bg en source = ML_Lex.read bg @ ML_Lex.read_source source @ ML_Lex.read en; in val _ = Theory.setup (ml_text \<^binding>\ML\ (ml_enclose "fn _ => (" ");") #> ml_text \<^binding>\ML_op\ (ml_enclose "fn _ => (op " ");") #> ml_text \<^binding>\ML_type\ (ml_enclose "val _ = NONE : (" ") option;") #> ml_text \<^binding>\ML_structure\ (ml_enclose "functor XXX() = struct structure XX = " " end;") #> ml_text \<^binding>\ML_functor\ (* FIXME formal treatment of functor name (!?) 
*)
    (fn source => ML_Lex.read ("ML_Env.check_functor " ^
      ML_Syntax.print_string (#1 (Input.source_content source)))) #>
  ml_text \<^binding>\ML_text\ (K []));

end;


(* URLs *)

val escape_url =
  translate_string (fn c => if c = "%" orelse c = "#" orelse c = "^" then "\\" ^ c else c);

val _ = Theory.setup
- (Thy_Output.antiquotation_raw_embedded \<^binding>\url\ (Scan.lift Args.embedded_position)
-   (fn ctxt => fn (url, pos) =>
-     let val _ = Context_Position.reports ctxt [(pos, Markup.language_path), (pos, Markup.url url)];
+ (Thy_Output.antiquotation_raw_embedded \<^binding>\url\ (Scan.lift Args.embedded_input)
+   (fn ctxt => fn source =>
+     let
+       val url = Input.string_of source;
+       val pos = Input.pos_of source;
+       val delimited = Input.is_delimited source;
+       val _ =
+         Context_Position.reports ctxt
+           [(pos, Markup.language_path delimited), (pos, Markup.url url)];
      in Latex.enclose_block "\\url{" "}" [Latex.string (escape_url url)] end));


(* formal entities *)

local

fun entity_antiquotation name check bg en =
  Thy_Output.antiquotation_raw name (Scan.lift Args.name_position)
    (fn ctxt => fn (name, pos) =>
      let val _ = check ctxt (name, pos)
      in Latex.enclose_block bg en [Latex.string (Output.output name)] end);

in

val _ = Theory.setup
 (entity_antiquotation \<^binding>\command\ Outer_Syntax.check_command "\\isacommand{" "}" #>
  entity_antiquotation \<^binding>\method\ Method.check_name "\\isa{" "}" #>
  entity_antiquotation \<^binding>\attribute\ Attrib.check_name "\\isa{" "}");

end;

end;
diff --git a/src/Pure/Thy/sessions.ML b/src/Pure/Thy/sessions.ML
--- a/src/Pure/Thy/sessions.ML
+++ b/src/Pure/Thy/sessions.ML
@@ -1,123 +1,130 @@
(*  Title:      Pure/Thy/sessions.ML
    Author:     Makarius

Support for session ROOT syntax.
*)

signature SESSIONS =
sig
  val root_name: string
  val theory_name: string
  val command_parser: (Toplevel.transition -> Toplevel.transition) parser
end;

structure Sessions: SESSIONS =
struct

val root_name = "ROOT";
val theory_name = "Pure.Sessions";

local

-val theory_entry = Parse.theory_name --| Parse.opt_keyword "global";
+val theory_entry = Parse.input Parse.theory_name --| Parse.opt_keyword "global";

val theories =
  Parse.$$$ "theories" |-- Parse.!!! (Scan.optional Parse.options [] -- Scan.repeat1 theory_entry);

val in_path =
-  Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "in" |-- Parse.position Parse.path --| Parse.$$$ ")");
+  Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "in" |-- Parse.path_input --| Parse.$$$ ")");

val document_theories =
-  Parse.$$$ "document_theories" |-- Scan.repeat1 (Parse.position Parse.name);
+  Parse.$$$ "document_theories" |-- Scan.repeat1 (Parse.input Parse.theory_name);

val document_files =
  Parse.$$$ "document_files" |--
-    Parse.!!! (Scan.optional in_path ("document", Position.none)
-      -- Scan.repeat1 (Parse.position Parse.path));
+    Parse.!!! (Scan.optional in_path (Input.string "document") -- Scan.repeat1 Parse.path_input);

val prune = Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.nat --| Parse.$$$ "]")) 0;

val export_files =
  Parse.$$$ "export_files" |--
-    Parse.!!!
-      (Scan.optional in_path ("export", Position.none) -- prune -- Scan.repeat1 Parse.embedded);
+    Parse.!!! (Scan.optional in_path (Input.string "export") -- prune -- Scan.repeat1 Parse.embedded);
+
+fun path_source source path =
+  Input.source (Input.is_delimited source) (Path.implode path) (Input.range_of source);

in

val command_parser =
  Parse.session_name --
  Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.name --| Parse.$$$ ")")) [] --
-  Scan.optional (Parse.$$$ "in" |-- Parse.!!! (Parse.position Parse.path)) (".", Position.none) --
+  Scan.optional (Parse.$$$ "in" |-- Parse.!!! Parse.path_input) (Input.string ".") --
  (Parse.$$$ "=" |--
    Parse.!!! (Scan.option (Parse.session_name --| Parse.!!! (Parse.$$$ "+")) --
      Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.text)) Input.empty --
      Scan.optional (Parse.$$$ "options" |-- Parse.!!! Parse.options) [] --
      Scan.optional (Parse.$$$ "sessions" |-- Parse.!!! (Scan.repeat1 Parse.session_name)) [] --
-      Scan.optional
-        (Parse.$$$ "directories" |-- Parse.!!! (Scan.repeat1 (Parse.position Parse.path))) [] --
+      Scan.optional (Parse.$$$ "directories" |-- Parse.!!! (Scan.repeat1 Parse.path_input)) [] --
      Scan.repeat theories --
      Scan.optional document_theories [] --
      Scan.repeat document_files --
      Scan.repeat export_files))
  >> (fn (((((session, _), _), dir), (((((((((parent, descr), options), sessions), directories), theories), document_theories), document_files), export_files)))) =>
    Toplevel.keep (fn state =>
      let
        val ctxt = Toplevel.context_of state;
        val session_dir = Resources.check_dir ctxt NONE dir;

        val _ =
          (the_list parent @ sessions) |> List.app (fn arg =>
            ignore (Resources.check_session ctxt arg)
              handle ERROR msg => Output.error_message msg);

        val _ =
          Context_Position.report ctxt
            (Position.range_position (Symbol_Pos.range (Input.source_explode descr)))
            Markup.comment;

        val _ =
          (options @ maps #1 theories) |> List.app (fn (x, y) =>
            ignore (Completion.check_option_value ctxt x y (Options.default ()))
              handle ERROR msg => Output.error_message msg);

-        fun check_thy (path, pos) =
-          ignore (Resources.check_file ctxt (SOME Path.current) (Path.implode path, pos))
+        fun check_thy source =
+          ignore (Resources.check_file ctxt (SOME Path.current) source)
            handle ERROR msg => Output.error_message msg;

        val _ =
-          maps #2 theories |> List.app (fn (s, pos) =>
+          maps #2 theories |> List.app (fn source =>
            let
+              val s = Input.string_of source;
+              val pos = Input.pos_of source;
              val {node_name, theory_name, ...} =
                Resources.import_name session session_dir s
                  handle ERROR msg => error (msg ^ Position.here pos);
-              val theory_path = the_default node_name (Resources.find_theory_file theory_name);
-            in check_thy (theory_path, pos) end);
+              val thy_path = the_default node_name (Resources.find_theory_file theory_name);
+            in check_thy (path_source source thy_path) end);

        val _ =
          directories |> List.app (ignore o Resources.check_dir ctxt (SOME session_dir));

        val _ =
-          document_theories |> List.app (fn (thy, pos) =>
-            (case Resources.find_theory_file thy of
-              NONE => Output.error_message ("Unknown theory " ^ quote thy ^ Position.here pos)
-            | SOME path => check_thy (path, pos)));
+          document_theories |> List.app (fn source =>
+            let
+              val thy = Input.string_of source;
+              val pos = Input.pos_of source;
+            in
+              (case Resources.find_theory_file thy of
+                NONE => Output.error_message ("Unknown theory " ^ quote thy ^ Position.here pos)
+              | SOME path => check_thy (path_source source path))
+            end);

        val _ =
          document_files |> List.app (fn (doc_dir, doc_files) =>
            let
              val dir = Resources.check_dir ctxt (SOME session_dir) doc_dir;
              val _ = List.app (ignore o Resources.check_file ctxt (SOME dir)) doc_files;
            in () end);

        val _ =
          export_files |> List.app (fn ((export_dir, _), _) =>
            ignore (Resources.check_path ctxt (SOME session_dir) export_dir));
      in () end));

end;

end;
diff --git a/src/Pure/Tools/generated_files.ML b/src/Pure/Tools/generated_files.ML
--- a/src/Pure/Tools/generated_files.ML
+++ b/src/Pure/Tools/generated_files.ML
@@ -1,393 +1,395 @@
(*  Title:
Pure/Tools/generated_files.ML Author: Makarius Generated source files for other languages: with antiquotations, without Isabelle symbols. *) signature GENERATED_FILES = sig val add_files: Path.binding * string -> theory -> theory type file = {path: Path.T, pos: Position.T, content: string} val file_binding: file -> Path.binding val file_markup: file -> Markup.T val print_file: file -> string val report_file: Proof.context -> Position.T -> file -> unit val get_files: theory -> file list val get_file: theory -> Path.binding -> file val get_files_in: Path.binding list * theory -> (file * Position.T) list val check_files_in: Proof.context -> (string * Position.T) list * (string * Position.T) option -> Path.binding list * theory val write_file: Path.T -> file -> unit val export_file: theory -> file -> unit type file_type = {name: string, ext: string, make_comment: string -> string, make_string: string -> string} val file_type: binding -> {ext: string, make_comment: string -> string, make_string: string -> string} -> theory -> theory val antiquotation: binding -> 'a Token.context_parser -> ({context: Proof.context, source: Token.src, file_type: file_type, argument: 'a} -> string) -> theory -> theory val set_string: string -> Proof.context -> Proof.context val generate_file: Path.binding * Input.source -> Proof.context -> local_theory val generate_file_cmd: (string * Position.T) * Input.source -> local_theory -> local_theory val export_generated_files: Proof.context -> (Path.binding list * theory) list -> unit val export_generated_files_cmd: Proof.context -> ((string * Position.T) list * (string * Position.T) option) list -> unit val with_compile_dir: (Path.T -> unit) -> unit val compile_generated_files: Proof.context -> (Path.binding list * theory) list -> (Path.T list * Path.T) list -> (Path.binding list * bool option) list -> Path.binding -> Input.source -> unit val compile_generated_files_cmd: Proof.context -> ((string * Position.T) list * (string * Position.T) option) list -> - ((string * Position.T) list * (string * Position.T)) list -> + (Input.source list * Input.source) list -> ((string * Position.T) list * bool option) list -> string * Position.T -> Input.source -> unit val execute: Path.T -> Input.source -> string -> string end; structure Generated_Files: GENERATED_FILES = struct (** context data **) type file = Path.T * (Position.T * string); type file_type = {name: string, ext: string, make_comment: string -> string, make_string: string -> string}; type antiquotation = Token.src -> Proof.context -> file_type -> string; fun err_dup_files path pos pos' = error ("Duplicate generated file: " ^ Path.print path ^ Position.here_list [pos, pos']); structure Data = Theory_Data ( type T = file list Symtab.table * (*files for named theory*) file_type Name_Space.table * (*file types*) antiquotation Name_Space.table; (*antiquotations*) val empty = (Symtab.empty, Name_Space.empty_table Markup.file_typeN, Name_Space.empty_table Markup.antiquotationN); val extend = I; fun merge ((files1, types1, antiqs1), (files2, types2, antiqs2)) = let val files' = (files1, files2) |> Symtab.join (K (Library.merge (fn ((path1, file1), (path2, file2)) => if path1 <> path2 then false else if file1 = file2 then true else err_dup_files path1 (#1 file1) (#1 file2)))); val types' = Name_Space.merge_tables (types1, types2); val antiqs' = Name_Space.merge_tables (antiqs1, antiqs2); in (files', types', antiqs') end; ); fun lookup_files thy = Symtab.lookup_list (#1 (Data.get thy)) (Context.theory_long_name thy); fun 
update_files thy_files' thy = (Data.map o @{apply 3(1)}) (Symtab.update (Context.theory_long_name thy, thy_files')) thy; fun add_files (binding, content) thy = let val _ = Path.proper_binding binding; val (path, pos) = Path.dest_binding binding; val thy_files = lookup_files thy; val thy_files' = (case AList.lookup (op =) thy_files path of SOME (pos', _) => err_dup_files path pos pos' | NONE => (path, (pos, content)) :: thy_files); in update_files thy_files' thy end; (* get files *) type file = {path: Path.T, pos: Position.T, content: string}; fun file_binding (file: file) = Path.binding (#path file, #pos file); fun file_markup (file: file) = (Markup.entityN, Position.def_properties_of (#pos file)); fun print_file (file: file) = Markup.markup (file_markup file) (quote (Path.implode (#path file))); fun report_file ctxt pos file = Context_Position.report ctxt pos (file_markup file); fun get_files thy = lookup_files thy |> rev |> map (fn (path, (pos, content)) => {path = path, pos = pos, content = content}: file); fun get_file thy binding = let val (path, pos) = Path.dest_binding binding in (case find_first (fn file => #path file = path) (get_files thy) of SOME file => file | NONE => error ("Missing generated file " ^ Path.print path ^ " in theory " ^ quote (Context.theory_long_name thy) ^ Position.here pos)) end; fun get_files_in ([], thy) = map (rpair Position.none) (get_files thy) | get_files_in (files, thy) = map (fn binding => (get_file thy binding, Path.pos_of_binding binding)) files; (* check and output files *) fun check_files_in ctxt (files, opt_thy) = let val thy = (case opt_thy of SOME name => Theory.check {long = false} ctxt name | NONE => Proof_Context.theory_of ctxt); in (map Path.explode_binding files, thy) end; fun write_file dir (file: file) = let val path = Path.expand (dir + #path file); val _ = Isabelle_System.make_directory (Path.dir path); val _ = File.generate path (#content file); in () end; fun export_file thy (file: file) = Export.export thy (file_binding file) [XML.Text (#content file)]; (* file types *) fun get_file_type thy ext = if ext = "" then error "Bad file extension" else (#2 (Data.get thy), NONE) |-> Name_Space.fold_table (fn (_, file_type) => fn res => if #ext file_type = ext then SOME file_type else res) |> (fn SOME file_type => file_type | NONE => error ("Unknown file type for extension " ^ quote ext)); fun file_type binding {ext, make_comment, make_string} thy = let val name = Binding.name_of binding; val pos = Binding.pos_of binding; val file_type = {name = name, ext = ext, make_comment = make_comment, make_string = make_string}; val table = #2 (Data.get thy); val space = Name_Space.space_of_table table; val context = Context.Theory thy |> Name_Space.map_naming (K Name_Space.global_naming); val (_, data') = table |> Name_Space.define context true (Binding.make (name, pos), file_type); val _ = (case try (#name o get_file_type thy) ext of NONE => () | SOME name' => error ("Extension " ^ quote ext ^ " already registered for file type " ^ quote (Markup.markup (Name_Space.markup space name') name') ^ Position.here pos)); in thy |> (Data.map o @{apply 3(2)}) (K data') end; (* antiquotations *) val get_antiquotations = #3 o Data.get o Proof_Context.theory_of; fun antiquotation name scan body thy = let fun ant src ctxt file_type : string = let val (x, ctxt') = Token.syntax scan src ctxt in body {context = ctxt', source = src, file_type = file_type, argument = x} end; in thy |> (Data.map o @{apply 3(3)}) (Name_Space.define (Context.Theory thy) true (name, ant) 
#> #2) end; val scan_antiq = Antiquote.scan_control >> Antiquote.Control || Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> (Antiquote.Text o Symbol_Pos.symbol); fun read_source ctxt (file_type: file_type) source = let val _ = Context_Position.report ctxt (Input.pos_of source) (Markup.language {name = #name file_type, symbols = false, antiquotes = true, delimited = Input.is_delimited source}); val (input, _) = Input.source_explode source |> Scan.error (Scan.finite Symbol_Pos.stopper (Scan.repeat scan_antiq)); val _ = Context_Position.reports ctxt (Antiquote.antiq_reports input); fun expand antiq = (case antiq of Antiquote.Text s => s | Antiquote.Control {name, body, ...} => let val src = Token.make_src name (if null body then [] else [Token.read_cartouche body]); val (src', ant) = Token.check_src ctxt get_antiquotations src; in ant src' ctxt file_type end | Antiquote.Antiq antiq => error ("Bad antiquotation " ^ Position.here (#1 (#range antiq)))); in implode (map expand input) end; (** Isar commands **) (* generate_file *) fun generate_file (binding, source) lthy = let val thy = Proof_Context.theory_of lthy; val (path, pos) = Path.dest_binding binding; val file_type = get_file_type thy (#2 (Path.split_ext path)) handle ERROR msg => error (msg ^ Position.here pos); val header = #make_comment file_type " generated by Isabelle "; val content = header ^ "\n" ^ read_source lthy file_type source; in lthy |> (Local_Theory.background_theory o add_files) (binding, content) end; fun generate_file_cmd (file, source) lthy = generate_file (Path.explode_binding file, source) lthy; (* export_generated_files *) fun export_generated_files ctxt args = let val thy = Proof_Context.theory_of ctxt in (case map #1 (maps get_files_in args) of [] => () | files => (List.app (export_file thy) files; writeln (Export.message thy Path.current); writeln (cat_lines (map (prefix " " o print_file) files)))) end; fun export_generated_files_cmd ctxt args = export_generated_files ctxt (map (check_files_in ctxt) args); (* compile_generated_files *) val compile_dir = Config.declare_string ("compile_dir", \<^here>) (K ""); fun with_compile_dir body : unit = body (Path.explode (Config.get (Context.the_local_context ()) compile_dir)); fun compile_generated_files ctxt args external export export_prefix source = Isabelle_System.with_tmp_dir "compile" (fn dir => let val thy = Proof_Context.theory_of ctxt; val files = maps get_files_in args; val _ = List.app (fn (file, pos) => report_file ctxt pos file) files; val _ = List.app (write_file dir o #1) files; val _ = external |> List.app (fn (files, base_dir) => files |> List.app (fn file => Isabelle_System.copy_file_base (base_dir, file) dir)); val _ = ML_Context.eval_in (SOME (Config.put compile_dir (Path.implode dir) ctxt)) ML_Compiler.flags (Input.pos_of source) (ML_Lex.read "Generated_Files.with_compile_dir (" @ ML_Lex.read_source source @ ML_Lex.read ")"); val _ = export |> List.app (fn (bindings, executable) => bindings |> List.app (fn binding0 => let val binding = binding0 |> Path.map_binding (executable = SOME true ? 
Path.exe); val (path, pos) = Path.dest_binding binding; val content = (case try File.read (dir + path) of SOME context => context | NONE => error ("Missing result file " ^ Path.print path ^ Position.here pos)); val _ = Export.report_export thy export_prefix; val binding' = Path.map_binding (Path.append (Path.path_of_binding export_prefix)) binding; in (if is_some executable then Export.export_executable else Export.export) thy binding' [XML.Text content] end)); val _ = if null export then () else writeln (Export.message thy (Path.path_of_binding export_prefix)); in () end); fun compile_generated_files_cmd ctxt args external export export_prefix source = compile_generated_files ctxt (map (check_files_in ctxt) args) (external |> map (fn (raw_files, raw_base_dir) => let val base_dir = Resources.check_dir ctxt NONE raw_base_dir; - fun check (s, pos) = (Resources.check_file ctxt (SOME base_dir) (s, pos); Path.explode s); + fun check source = + (Resources.check_file ctxt (SOME base_dir) source; + Path.explode (Input.string_of source)); val files = map check raw_files; in (files, base_dir) end)) ((map o apfst o map) Path.explode_binding export) (Path.explode_binding export_prefix) source; (* execute compiler -- auxiliary *) fun execute dir title compile = Isabelle_System.bash_output_check ("cd " ^ File.bash_path dir ^ " && " ^ compile) handle ERROR msg => let val (s, pos) = Input.source_content title in error (s ^ " failed" ^ Position.here pos ^ ":\n" ^ msg) end; (** concrete file types **) val _ = Theory.setup (file_type \<^binding>\Haskell\ {ext = "hs", make_comment = enclose "{-" "-}", make_string = GHC.print_string}); (** concrete antiquotations **) (* ML expression as string literal *) structure Local_Data = Proof_Data ( type T = string option; fun init _ = NONE; ); val set_string = Local_Data.put o SOME; fun the_string ctxt = (case Local_Data.get ctxt of SOME s => s | NONE => raise Fail "No result string"); val _ = Theory.setup (antiquotation \<^binding>\cartouche\ (Scan.lift Args.cartouche_input) (fn {context = ctxt, file_type, argument, ...} => ctxt |> Context.proof_map (ML_Context.expression (Input.pos_of argument) (ML_Lex.read "Theory.local_setup (Generated_Files.set_string (" @ ML_Lex.read_source argument @ ML_Lex.read "))")) |> the_string |> #make_string file_type)); (* file-system paths *) fun path_antiquotation binding check = antiquotation binding - (Args.context -- Scan.lift (Parse.position Parse.path) >> - (fn (ctxt, (name, pos)) => (check ctxt (SOME Path.current) (name, pos) |> Path.implode))) + (Args.context -- Scan.lift Parse.path_input >> + (fn (ctxt, source) => (check ctxt (SOME Path.current) source |> Path.implode))) (fn {file_type, argument, ...} => #make_string file_type argument); val _ = Theory.setup (path_antiquotation \<^binding>\path\ Resources.check_path #> path_antiquotation \<^binding>\file\ Resources.check_file #> path_antiquotation \<^binding>\dir\ Resources.check_dir); end; diff --git a/src/Tools/Code/code_target.ML b/src/Tools/Code/code_target.ML --- a/src/Tools/Code/code_target.ML +++ b/src/Tools/Code/code_target.ML @@ -1,791 +1,797 @@ (* Title: Tools/Code/code_target.ML Author: Florian Haftmann, TU Muenchen Generic infrastructure for target language data. 
*) signature CODE_TARGET = sig val cert_tyco: Proof.context -> string -> string val read_tyco: Proof.context -> string -> string datatype pretty_modules = Singleton of string * Pretty.T | Hierarchy of (string list * Pretty.T) list; val next_export: theory -> string * theory val export_code_for: ({physical: bool} * (Path.T * Position.T)) option -> string -> string -> int option -> Token.T list -> Code_Thingol.program -> bool -> Code_Symbol.T list -> local_theory -> local_theory val produce_code_for: Proof.context -> string -> string -> int option -> Token.T list -> Code_Thingol.program -> bool -> Code_Symbol.T list -> (string list * string) list * string option list val present_code_for: Proof.context -> string -> string -> int option -> Token.T list -> Code_Thingol.program -> Code_Symbol.T list * Code_Symbol.T list -> string val check_code_for: string -> bool -> Token.T list -> Code_Thingol.program -> bool -> Code_Symbol.T list -> local_theory -> local_theory val export_code: bool -> string list -> (((string * string) * ({physical: bool} * (Path.T * Position.T)) option) * Token.T list) list -> local_theory -> local_theory val export_code_cmd: bool -> string list - -> (((string * string) * ({physical: bool} * (string * Position.T)) option) * Token.T list) list + -> (((string * string) * ({physical: bool} * Input.source) option) * Token.T list) list -> local_theory -> local_theory val produce_code: Proof.context -> bool -> string list -> string -> string -> int option -> Token.T list -> (string list * string) list * string option list val present_code: Proof.context -> string list -> Code_Symbol.T list -> string -> string -> int option -> Token.T list -> string val check_code: bool -> string list -> ((string * bool) * Token.T list) list -> local_theory -> local_theory val codeN: string val generatedN: string val code_path: Path.T -> Path.T val code_export_message: theory -> unit val export: Path.binding -> string -> theory -> theory val compilation_text: Proof.context -> string -> Code_Thingol.program -> Code_Symbol.T list -> bool -> ((string * class list) list * Code_Thingol.itype) * Code_Thingol.iterm -> (string list * string) list * string val compilation_text': Proof.context -> string -> string option -> Code_Thingol.program -> Code_Symbol.T list -> bool -> ((string * class list) list * Code_Thingol.itype) * Code_Thingol.iterm -> ((string list * string) list * string) * (Code_Symbol.T -> string option) type serializer type literals = Code_Printer.literals type language type ancestry val assert_target: theory -> string -> string val add_language: string * language -> theory -> theory val add_derived_target: string * ancestry -> theory -> theory val the_literals: Proof.context -> string -> literals val parse_args: 'a parser -> Token.T list -> 'a val default_code_width: int Config.T type ('a, 'b, 'c, 'd, 'e, 'f) symbol_attr_decl val set_identifiers: (string, string, string, string, string, string) symbol_attr_decl -> theory -> theory val set_printings: (Code_Printer.raw_const_syntax, Code_Printer.tyco_syntax, string, unit, unit, string * Code_Symbol.T list) symbol_attr_decl -> theory -> theory val add_reserved: string -> string -> theory -> theory end; structure Code_Target : CODE_TARGET = struct open Basic_Code_Symbol; open Basic_Code_Thingol; type literals = Code_Printer.literals; type ('a, 'b, 'c, 'd, 'e, 'f) symbol_attr_decl = (string * (string * 'a option) list, string * (string * 'b option) list, class * (string * 'c option) list, (class * class) * (string * 'd option) list, (class * 
string) * (string * 'e option) list, string * (string * 'f option) list) Code_Symbol.attr; type tyco_syntax = Code_Printer.tyco_syntax; type raw_const_syntax = Code_Printer.raw_const_syntax; (** checking and parsing of symbols **) fun cert_const ctxt const = let val _ = if Sign.declared_const (Proof_Context.theory_of ctxt) const then () else error ("No such constant: " ^ quote const); in const end; fun read_const ctxt = Code.read_const (Proof_Context.theory_of ctxt); fun cert_tyco ctxt tyco = let val _ = if Sign.declared_tyname (Proof_Context.theory_of ctxt) tyco then () else error ("No such type constructor: " ^ quote tyco); in tyco end; fun read_tyco ctxt = #1 o dest_Type o Proof_Context.read_type_name {proper = true, strict = true} ctxt; fun cert_class ctxt class = let val _ = Axclass.get_info (Proof_Context.theory_of ctxt) class; in class end; val parse_classrel_ident = Parse.class --| \<^keyword>\<\ -- Parse.class; fun cert_inst ctxt (class, tyco) = (cert_class ctxt class, cert_tyco ctxt tyco); fun read_inst ctxt (raw_tyco, raw_class) = (read_tyco ctxt raw_tyco, Proof_Context.read_class ctxt raw_class); val parse_inst_ident = Parse.name --| \<^keyword>\::\ -- Parse.class; fun cert_syms ctxt = Code_Symbol.map_attr (cert_const ctxt) (cert_tyco ctxt) (cert_class ctxt) (apply2 (cert_class ctxt)) (cert_inst ctxt) I; fun read_syms ctxt = Code_Symbol.map_attr (read_const ctxt) (read_tyco ctxt) (Proof_Context.read_class ctxt) (apply2 (Proof_Context.read_class ctxt)) (read_inst ctxt) I; fun cert_syms' ctxt = Code_Symbol.map_attr (apfst (cert_const ctxt)) (apfst (cert_tyco ctxt)) (apfst (cert_class ctxt)) ((apfst o apply2) (cert_class ctxt)) (apfst (cert_inst ctxt)) I; fun read_syms' ctxt = Code_Symbol.map_attr (apfst (read_const ctxt)) (apfst (read_tyco ctxt)) (apfst (Proof_Context.read_class ctxt)) ((apfst o apply2) (Proof_Context.read_class ctxt)) (apfst (read_inst ctxt)) I; fun check_name is_module s = let val _ = if s = "" then error "Bad empty code name" else (); val xs = Long_Name.explode s; val xs' = if is_module then map (Name.desymbolize NONE) xs else if length xs < 2 then error ("Bad code name without module component: " ^ quote s) else let val (ys, y) = split_last xs; val ys' = map (Name.desymbolize NONE) ys; val y' = Name.desymbolize NONE y; in ys' @ [y'] end; in if xs' = xs then if is_module then (xs, "") else split_last xs else error ("Invalid code name: " ^ quote s ^ "\n" ^ "better try " ^ quote (Long_Name.implode xs')) end; (** theory data **) datatype pretty_modules = Singleton of string * Pretty.T | Hierarchy of (string list * Pretty.T) list; type serializer = Token.T list -> Proof.context -> { reserved_syms: string list, identifiers: Code_Printer.identifiers, includes: (string * Pretty.T) list, class_syntax: string -> string option, tyco_syntax: string -> Code_Printer.tyco_syntax option, const_syntax: string -> Code_Printer.const_syntax option, module_name: string } -> Code_Thingol.program -> Code_Symbol.T list -> pretty_modules * (Code_Symbol.T -> string option); type language = {serializer: serializer, literals: literals, check: {env_var: string, make_destination: Path.T -> Path.T, make_command: string -> string}, evaluation_args: Token.T list}; type ancestry = (string * (Code_Thingol.program -> Code_Thingol.program)) list; val merge_ancestry : ancestry * ancestry -> ancestry = AList.join (op =) (K snd); type target = {serial: serial, language: language, ancestry: ancestry}; structure Targets = Theory_Data ( type T = (target * Code_Printer.data) Symtab.table * int; val 
empty = (Symtab.empty, 0); val extend = I; fun merge ((targets1, index1), (targets2, index2)) : T = let val targets' = Symtab.join (fn target_name => fn ((target1, data1), (target2, data2)) => if #serial target1 = #serial target2 then ({serial = #serial target1, language = #language target1, ancestry = merge_ancestry (#ancestry target1, #ancestry target2)}, Code_Printer.merge_data (data1, data2)) else error ("Incompatible targets: " ^ quote target_name)) (targets1, targets2) val index' = Int.max (index1, index2); in (targets', index') end; ); val exists_target = Symtab.defined o #1 o Targets.get; val lookup_target_data = Symtab.lookup o #1 o Targets.get; fun assert_target thy target_name = if exists_target thy target_name then target_name else error ("Unknown code target language: " ^ quote target_name); fun reset_index thy = if #2 (Targets.get thy) = 0 then NONE else SOME ((Targets.map o apsnd) (K 0) thy); val _ = Theory.setup (Theory.at_begin reset_index); fun next_export thy = let val thy' = (Targets.map o apsnd) (fn i => i + 1) thy; val i = #2 (Targets.get thy'); in ("export" ^ string_of_int i, thy') end; fun fold1 f xs = fold f (tl xs) (hd xs); fun join_ancestry thy target_name = let val _ = assert_target thy target_name; val the_target_data = the o lookup_target_data thy; val (target, this_data) = the_target_data target_name; val ancestry = #ancestry target; val modifies = rev (map snd ancestry); val modify = fold (curry (op o)) modifies I; val datas = rev (map (snd o the_target_data o fst) ancestry) @ [this_data]; val data = fold1 (fn data' => fn data => Code_Printer.merge_data (data, data')) datas; in (modify, (target, data)) end; fun allocate_target target_name target thy = let val _ = if exists_target thy target_name then error ("Attempt to overwrite existing target " ^ quote target_name) else (); in thy |> (Targets.map o apfst o Symtab.update) (target_name, (target, Code_Printer.empty_data)) end; fun add_language (target_name, language) = allocate_target target_name {serial = serial (), language = language, ancestry = []}; fun add_derived_target (target_name, initial_ancestry) thy = let val _ = if null initial_ancestry then error "Must derive from existing target(s)" else (); fun the_target_data target_name' = case lookup_target_data thy target_name' of NONE => error ("Unknown code target language: " ^ quote target_name') | SOME target_data' => target_data'; val targets = rev (map (fst o the_target_data o fst) initial_ancestry); val supremum = fold1 (fn target' => fn target => if #serial target = #serial target' then target else error "Incompatible targets") targets; val ancestries = map #ancestry targets @ [initial_ancestry]; val ancestry = fold1 (fn ancestry' => fn ancestry => merge_ancestry (ancestry, ancestry')) ancestries; in allocate_target target_name {serial = #serial supremum, language = #language supremum, ancestry = ancestry} thy end; fun map_data target_name f thy = let val _ = assert_target thy target_name; in thy |> (Targets.map o apfst o Symtab.map_entry target_name o apsnd o Code_Printer.map_data) f end; fun map_reserved target_name = map_data target_name o @{apply 3(1)}; fun map_identifiers target_name = map_data target_name o @{apply 3(2)}; fun map_printings target_name = map_data target_name o @{apply 3(3)}; (** serializers **) val codeN = "code"; val generatedN = "Generated_Code"; val code_path = Path.append (Path.basic codeN); fun code_export_message thy = writeln (Export.message thy (Path.basic codeN)); fun export binding content thy = let val thy' = thy |> 
Generated_Files.add_files (binding, content); val _ = Export.export thy' binding [XML.Text content]; in thy' end; local fun export_logical (file_prefix, file_pos) format pretty_modules = let fun binding path = Path.binding (path, file_pos); val prefix = code_path file_prefix; in (case pretty_modules of Singleton (ext, p) => export (binding (Path.ext ext prefix)) (format p) | Hierarchy modules => fold (fn (names, p) => export (binding (prefix + Path.make names)) (format p)) modules) #> tap code_export_message end; fun export_physical root format pretty_modules = (case pretty_modules of Singleton (_, p) => File.write root (format p) | Hierarchy code_modules => (Isabelle_System.make_directory root; List.app (fn (names, p) => File.write (Path.appends (root :: map Path.basic names)) (format p)) code_modules)); in fun export_result some_file format (pretty_code, _) thy = (case some_file of NONE => let val (file_prefix, thy') = next_export thy; in export_logical (Path.basic file_prefix, Position.none) format pretty_code thy' end | SOME ({physical = false}, file_prefix) => export_logical file_prefix format pretty_code thy | SOME ({physical = true}, (file, _)) => let val root = File.full_path (Resources.master_directory thy) file; val _ = File.check_dir (Path.dir root); val _ = export_physical root format pretty_code; in thy end); fun produce_result syms width pretty_modules = let val format = Code_Printer.format [] width in (case pretty_modules of (Singleton (_, p), deresolve) => ([([], format p)], map deresolve syms) | (Hierarchy code_modules, deresolve) => ((map o apsnd) format code_modules, map deresolve syms)) end; fun present_result selects width (pretty_modules, _) = let val format = Code_Printer.format selects width in (case pretty_modules of Singleton (_, p) => format p | Hierarchy code_modules => space_implode "\n\n" (map (format o #2) code_modules)) end; end; (** serializer usage **) (* technical aside: pretty printing width *) val default_code_width = Attrib.setup_config_int \<^binding>\default_code_width\ (K 80); fun default_width ctxt = Config.get ctxt default_code_width; val the_width = the_default o default_width; (* montage *) fun the_language ctxt = #language o fst o the o lookup_target_data (Proof_Context.theory_of ctxt); fun the_literals ctxt = #literals o the_language ctxt; fun the_evaluation_args ctxt = #evaluation_args o the_language ctxt; local fun activate_target ctxt target_name = let val thy = Proof_Context.theory_of ctxt; val (modify, (target, data)) = join_ancestry thy target_name; val serializer = (#serializer o #language) target; in { serializer = serializer, data = data, modify = modify } end; fun project_program_for_syms ctxt syms_hidden syms1 program1 = let val syms2 = subtract (op =) syms_hidden syms1; val program2 = Code_Symbol.Graph.restrict (not o member (op =) syms_hidden) program1; val unimplemented = Code_Thingol.unimplemented program2; val _ = if null unimplemented then () else error ("No code equations for " ^ commas (map (Proof_Context.markup_const ctxt) unimplemented)); val syms3 = Code_Symbol.Graph.all_succs program2 syms2; val program3 = Code_Symbol.Graph.restrict (member (op =) syms3) program2; in program3 end; fun project_includes_for_syms syms includes = let fun select_include (name, (content, cs)) = if null cs orelse exists (member (op =) syms) cs then SOME (name, content) else NONE; in map_filter select_include includes end; fun prepare_serializer ctxt target_name module_name args proto_program syms = let val { serializer, data, modify } = 
activate_target ctxt target_name; val printings = Code_Printer.the_printings data; val _ = if module_name = "" then () else (check_name true module_name; ()) val hidden_syms = Code_Symbol.symbols_of printings; val prepared_program = project_program_for_syms ctxt hidden_syms syms proto_program; val prepared_syms = subtract (op =) hidden_syms syms; val all_syms = Code_Symbol.Graph.all_succs proto_program syms; val includes = project_includes_for_syms all_syms (Code_Symbol.dest_module_data printings); val prepared_serializer = serializer args ctxt { reserved_syms = Code_Printer.the_reserved data, identifiers = Code_Printer.the_identifiers data, includes = includes, const_syntax = Code_Symbol.lookup_constant_data printings, tyco_syntax = Code_Symbol.lookup_type_constructor_data printings, class_syntax = Code_Symbol.lookup_type_class_data printings, module_name = module_name }; in (prepared_serializer o modify, (prepared_program, prepared_syms)) end; fun invoke_serializer ctxt target_name module_name args program all_public syms = let val (prepared_serializer, (prepared_program, prepared_syms)) = prepare_serializer ctxt target_name module_name args program syms; val exports = if all_public then [] else prepared_syms; in Code_Preproc.timed_exec "serializing" (fn () => prepared_serializer prepared_program exports) ctxt end; fun assert_module_name "" = error "Empty module name not allowed here" | assert_module_name module_name = module_name; in fun export_code_for some_file target_name module_name some_width args program all_public cs lthy = let val format = Code_Printer.format [] (the_width lthy some_width); val res = invoke_serializer lthy target_name module_name args program all_public cs; in Local_Theory.background_theory (export_result some_file format res) lthy end; fun produce_code_for ctxt target_name module_name some_width args = let val serializer = invoke_serializer ctxt target_name (assert_module_name module_name) args; in fn program => fn all_public => fn syms => produce_result syms (the_width ctxt some_width) (serializer program all_public syms) end; fun present_code_for ctxt target_name module_name some_width args = let val serializer = invoke_serializer ctxt target_name (assert_module_name module_name) args; in fn program => fn (syms, selects) => present_result selects (the_width ctxt some_width) (serializer program false syms) end; fun check_code_for target_name strict args program all_public syms lthy = let val { env_var, make_destination, make_command } = #check (the_language lthy target_name); val format = Code_Printer.format [] 80; fun ext_check p = let val destination = make_destination p; val lthy' = lthy |> Local_Theory.background_theory (export_result (SOME ({physical = true}, (destination, Position.none))) format (invoke_serializer lthy target_name generatedN args program all_public syms)); val cmd = make_command generatedN; val context_cmd = "cd " ^ File.bash_path p ^ " && " ^ cmd ^ " 2>&1"; in if Isabelle_System.bash context_cmd <> 0 then error ("Code check failed for " ^ target_name ^ ": " ^ cmd) else lthy' end; in if not (env_var = "") andalso getenv env_var = "" then if strict then error (env_var ^ " not set; cannot check code for " ^ target_name) else (warning (env_var ^ " not set; skipped checking code for " ^ target_name); lthy) else Isabelle_System.with_tmp_dir "Code_Test" ext_check end; fun dynamic_compilation_text prepared_serializer width prepared_program syms all_public ((vs, ty), t) = let val _ = if Code_Thingol.contains_dict_var t then error "Term to be 
evaluated contains free dictionaries" else ();
    val v' = singleton (Name.variant_list (map fst vs)) "a";
    val vs' = (v', []) :: vs;
    val ty' = ITyVar v' `-> ty;
    val program = prepared_program
      |> Code_Symbol.Graph.new_node (Code_Symbol.value,
          Code_Thingol.Fun (((vs', ty'), [(([IVar (SOME "dummy")], t), (NONE, true))]), NONE))
      |> fold (curry (perhaps o try o Code_Symbol.Graph.add_edge) Code_Symbol.value) syms;
    val (pretty_code, deresolve) =
      prepared_serializer program (if all_public then [] else [Code_Symbol.value]);
    val (compilation_code, [SOME value_name]) =
      produce_result [Code_Symbol.value] width (pretty_code, deresolve);
  in ((compilation_code, value_name), deresolve) end;

fun compilation_text' ctxt target_name some_module_name program syms =
  let
    val width = default_width ctxt;
    val evaluation_args = the_evaluation_args ctxt target_name;
    val (prepared_serializer, (prepared_program, _)) =
      prepare_serializer ctxt target_name (the_default generatedN some_module_name)
        evaluation_args program syms;
  in
    Code_Preproc.timed_exec "serializing"
      (fn () => dynamic_compilation_text prepared_serializer width prepared_program syms) ctxt
  end;

fun compilation_text ctxt target_name program syms =
  fst oo compilation_text' ctxt target_name NONE program syms

end; (* local *)


(* code generation *)

-fun prep_destination (location, (s, pos)) =
-  if location = {physical = false}
-  then (location, Path.explode_pos (s, pos))
-  else
-    let
-      val _ =
-        if s = ""
-        then error ("Bad bad empty " ^ Markup.markup Markup.keyword2 "file" ^ " argument")
-        else ();
-      val _ =
-        legacy_feature
-          (Markup.markup Markup.keyword1 "export_code" ^ " with " ^
-            Markup.markup Markup.keyword2 "file" ^ " argument" ^ Position.here pos);
-      val _ = Position.report pos Markup.language_path;
-      val path = #1 (Path.explode_pos (s, pos));
-      val _ = Position.report pos (Markup.path (Path.implode_symbolic path));
-    in (location, (path, pos)) end;
+fun prep_destination (location, source) =
+  let
+    val s = Input.string_of source
+    val pos = Input.pos_of source
+    val delimited = Input.is_delimited source
+  in
+    if location = {physical = false}
+    then (location, Path.explode_pos (s, pos))
+    else
+      let
+        val _ =
+          if s = ""
+          then error ("Bad bad empty " ^ Markup.markup Markup.keyword2 "file" ^ " argument")
+          else ();
+        val _ =
+          legacy_feature
+            (Markup.markup Markup.keyword1 "export_code" ^ " with " ^
+              Markup.markup Markup.keyword2 "file" ^ " argument" ^ Position.here pos);
+        val _ = Position.report pos (Markup.language_path delimited);
+        val path = #1 (Path.explode_pos (s, pos));
+        val _ = Position.report pos (Markup.path (Path.implode_symbolic path));
+      in (location, (path, pos)) end
+  end;

fun export_code all_public cs seris lthy =
  let
    val program = Code_Thingol.consts_program lthy cs;
  in
    (seris, lthy) |-> fold (fn (((target_name, module_name), some_file), args) =>
      export_code_for some_file target_name module_name NONE args program all_public
        (map Constant cs))
  end;

fun export_code_cmd all_public raw_cs seris lthy =
  let
    val cs = Code_Thingol.read_const_exprs lthy raw_cs;
  in
    export_code all_public cs ((map o apfst o apsnd o Option.map) prep_destination seris) lthy
  end;

fun produce_code ctxt all_public cs target_name some_width some_module_name args =
  let
    val program = Code_Thingol.consts_program ctxt cs;
  in produce_code_for ctxt target_name some_width some_module_name args program all_public (map Constant cs) end;

fun present_code ctxt cs syms target_name some_width some_module_name args =
  let
    val program = Code_Thingol.consts_program ctxt cs;
  in
present_code_for ctxt target_name some_width some_module_name args program (map Constant cs, syms) end; fun check_code all_public cs seris lthy = let val program = Code_Thingol.consts_program lthy cs; in (seris, lthy) |-> fold (fn ((target_name, strict), args) => check_code_for target_name strict args program all_public (map Constant cs)) end; fun check_code_cmd all_public raw_cs seris lthy = check_code all_public (Code_Thingol.read_const_exprs lthy raw_cs) seris lthy; (** serializer configuration **) (* reserved symbol names *) fun add_reserved target_name sym thy = let val (_, (_, data)) = join_ancestry thy target_name; val _ = if member (op =) (Code_Printer.the_reserved data) sym then error ("Reserved symbol " ^ quote sym ^ " already declared") else (); in thy |> map_reserved target_name (insert (op =) sym) end; (* checking of syntax *) fun check_const_syntax ctxt target_name c syn = if Code_Printer.requires_args syn > Code.args_number (Proof_Context.theory_of ctxt) c then error ("Too many arguments in syntax for constant " ^ quote c) else Code_Printer.prep_const_syntax (Proof_Context.theory_of ctxt) (the_literals ctxt target_name) c syn; fun check_tyco_syntax ctxt target_name tyco syn = if fst syn <> Sign.arity_number (Proof_Context.theory_of ctxt) tyco then error ("Number of arguments mismatch in syntax for type constructor " ^ quote tyco) else syn; (* custom symbol names *) fun arrange_name_decls x = let fun arrange is_module (sym, target_names) = map (fn (target, some_name) => (target, (sym, Option.map (check_name is_module) some_name))) target_names; in Code_Symbol.maps_attr' (arrange false) (arrange false) (arrange false) (arrange false) (arrange false) (arrange true) x end; fun cert_name_decls ctxt = cert_syms' ctxt #> arrange_name_decls; fun read_name_decls ctxt = read_syms' ctxt #> arrange_name_decls; fun set_identifier (target_name, sym_name) = map_identifiers target_name (Code_Symbol.set_data sym_name); fun gen_set_identifiers prep_name_decl raw_name_decls thy = fold set_identifier (prep_name_decl (Proof_Context.init_global thy) raw_name_decls) thy; val set_identifiers = gen_set_identifiers cert_name_decls; val set_identifiers_cmd = gen_set_identifiers read_name_decls; (* custom printings *) fun arrange_printings prep_syms ctxt = let fun arrange check (sym, target_syns) = map (fn (target_name, some_syn) => (target_name, (sym, Option.map (check ctxt target_name sym) some_syn))) target_syns; in Code_Symbol.maps_attr' (arrange check_const_syntax) (arrange check_tyco_syntax) (arrange ((K o K o K) I)) (arrange ((K o K o K) I)) (arrange ((K o K o K) I)) (arrange (fn ctxt => fn _ => fn _ => fn (raw_content, raw_syms) => (Pretty.blk (0, Pretty.fbreaks (map Code_Printer.str (split_lines raw_content))), map (prep_syms ctxt) raw_syms))) end; fun cert_printings ctxt = cert_syms' ctxt #> arrange_printings cert_syms ctxt; fun read_printings ctxt = read_syms' ctxt #> arrange_printings read_syms ctxt; fun set_printing (target_name, sym_syn) = map_printings target_name (Code_Symbol.set_data sym_syn); fun gen_set_printings prep_print_decl raw_print_decls thy = fold set_printing (prep_print_decl (Proof_Context.init_global thy) raw_print_decls) thy; val set_printings = gen_set_printings cert_printings; val set_printings_cmd = gen_set_printings read_printings; (* concrete syntax *) fun parse_args f args = case Scan.read Token.stopper f args of SOME x => x | NONE => error "Bad serializer arguments"; (** Isar setup **) val (constantK, type_constructorK, type_classK, class_relationK, 
class_instanceK, code_moduleK) =
  (\<^keyword>\constant\, \<^keyword>\type_constructor\, \<^keyword>\type_class\,
    \<^keyword>\class_relation\, \<^keyword>\class_instance\, \<^keyword>\code_module\);

local

val parse_constants = constantK |-- Scan.repeat1 Parse.term >> map Constant;

val parse_type_constructors =
  type_constructorK |-- Scan.repeat1 Parse.type_const >> map Type_Constructor;

val parse_classes = type_classK |-- Scan.repeat1 Parse.class >> map Type_Class;

val parse_class_relations =
  class_relationK |-- Scan.repeat1 parse_classrel_ident >> map Class_Relation;

val parse_instances = class_instanceK |-- Scan.repeat1 parse_inst_ident >> map Class_Instance;

val parse_simple_symbols =
  Scan.repeats1 (parse_constants || parse_type_constructors || parse_classes
    || parse_class_relations || parse_instances);

fun parse_single_symbol_pragma parse_keyword parse_isa parse_target =
  parse_keyword |-- Parse.!!! (parse_isa --| (\<^keyword>\\\ || \<^keyword>\=>\) --
    Parse.and_list1 (\<^keyword>\(\ |-- (Parse.name --| \<^keyword>\)\ -- Scan.option parse_target)));

fun parse_symbol_pragma parse_const parse_tyco parse_class parse_classrel parse_inst parse_module =
  parse_single_symbol_pragma constantK Parse.term parse_const >> Constant
  || parse_single_symbol_pragma type_constructorK Parse.type_const parse_tyco >> Type_Constructor
  || parse_single_symbol_pragma type_classK Parse.class parse_class >> Type_Class
  || parse_single_symbol_pragma class_relationK parse_classrel_ident parse_classrel >> Class_Relation
  || parse_single_symbol_pragma class_instanceK parse_inst_ident parse_inst >> Class_Instance
  || parse_single_symbol_pragma code_moduleK Parse.name parse_module >> Module;

fun parse_symbol_pragmas parse_const parse_tyco parse_class parse_classrel parse_inst parse_module =
  Parse.enum1 "|" (Parse.group (fn () => "code symbol pragma")
    (parse_symbol_pragma parse_const parse_tyco parse_class parse_classrel parse_inst parse_module));

val code_expr_argsP = Scan.optional (\<^keyword>\(\ |-- Parse.args --| \<^keyword>\)\) [];

fun code_expr_inP (all_public, raw_cs) =
  Scan.repeat (\<^keyword>\in\ |-- Parse.!!! (Parse.name
    -- Scan.optional (\<^keyword>\module_name\ |-- Parse.name) ""
    -- Scan.option
        ((\<^keyword>\file_prefix\ >> K {physical = false} || \<^keyword>\file\ >> K {physical = true})
-          -- Parse.!!! (Parse.position Parse.path))
+          -- Parse.!!! Parse.path_input)
    -- code_expr_argsP))
  >> (fn seri_args => export_code_cmd all_public raw_cs seri_args);

fun code_expr_checkingP (all_public, raw_cs) =
  (\<^keyword>\checking\ |-- Parse.!!!
(Scan.repeat (Parse.name -- (Scan.optional (\<^keyword>\?\ >> K false) true) -- code_expr_argsP))) >> (fn seri_args => check_code_cmd all_public raw_cs seri_args); in val _ = Outer_Syntax.command \<^command_keyword>\code_reserved\ "declare words as reserved for target language" (Parse.name -- Scan.repeat1 Parse.name >> (fn (target, reserveds) => (Toplevel.theory o fold (add_reserved target)) reserveds)); val _ = Outer_Syntax.command \<^command_keyword>\code_identifier\ "declare mandatory names for code symbols" (parse_symbol_pragmas Parse.name Parse.name Parse.name Parse.name Parse.name Parse.name >> (Toplevel.theory o fold set_identifiers_cmd)); val _ = Outer_Syntax.command \<^command_keyword>\code_printing\ "declare dedicated printing for code symbols" (parse_symbol_pragmas (Code_Printer.parse_const_syntax) (Code_Printer.parse_tyco_syntax) Parse.string (Parse.minus >> K ()) (Parse.minus >> K ()) (Parse.text -- Scan.optional (\<^keyword>\for\ |-- parse_simple_symbols) []) >> (Toplevel.theory o fold set_printings_cmd)); val _ = Outer_Syntax.local_theory \<^command_keyword>\export_code\ "generate executable code for constants" (Scan.optional (\<^keyword>\open\ >> K true) false -- Scan.repeat1 Parse.term :|-- (fn args => (code_expr_checkingP args || code_expr_inP args))); end; local val parse_const_terms = Args.theory -- Scan.repeat1 Args.term >> uncurry (fn thy => map (Code.check_const thy)); fun parse_symbols keyword parse internalize mark_symbol = Scan.lift (keyword --| Args.colon) |-- Args.theory -- Scan.repeat1 parse >> uncurry (fn thy => map (mark_symbol o internalize thy)); val parse_consts = parse_symbols constantK Args.term Code.check_const Constant; val parse_types = parse_symbols type_constructorK (Scan.lift Args.name) Sign.intern_type Type_Constructor; val parse_classes = parse_symbols type_classK (Scan.lift Args.name) Sign.intern_class Type_Class; val parse_instances = parse_symbols class_instanceK (Scan.lift (Args.name --| Args.$$$ "::" -- Args.name)) (fn thy => fn (raw_tyco, raw_class) => (Sign.intern_class thy raw_tyco, Sign.intern_type thy raw_class)) Class_Instance; in val _ = Theory.setup (Thy_Output.antiquotation_raw \<^binding>\code_stmts\ (parse_const_terms -- Scan.repeats (parse_consts || parse_types || parse_classes || parse_instances) -- Scan.lift (Args.parens (Args.name -- Scan.option Parse.int))) (fn ctxt => fn ((consts, symbols), (target_name, some_width)) => present_code ctxt consts symbols target_name "Example" some_width [] |> trim_line |> Thy_Output.verbatim (Config.put Document_Antiquotation.thy_output_display true ctxt))); end; end; (*struct*)
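Note on the recurring pattern in this change set: path and theory-name arguments that used to be parsed as (string * Position.T) via Parse.position are now parsed as Input.source via Parse.path_input (or Parse.input), so that consumers can report Markup.language_path with the delimited flag of the concrete syntax. A minimal consumer-side sketch follows, using only operations that already occur in the patch; the helper name report_path_argument is invented for illustration and is not part of any patched module.

(*hypothetical helper, for illustration only -- not part of the change set*)
fun report_path_argument ctxt source =
  let
    val pos = Input.pos_of source;
    val delimited = Input.is_delimited source;
    (*report the argument as "path" language, keeping the delimiter status*)
    val _ = Context_Position.report ctxt pos (Markup.language_path delimited);
  in Path.explode (Input.string_of source) end;

(*typical wiring inside an outer syntax parser, cf. code_expr_inP above:
    ... -- Parse.!!! Parse.path_input >> (fn source => ... report_path_argument ctxt source ...) *)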