diff --git a/src/Pure/PIDE/resources.ML b/src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML +++ b/src/Pure/PIDE/resources.ML @@ -1,446 +1,446 @@ (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. *) signature RESOURCES = sig val default_qualifier: string val init_session: {session_positions: (string * Properties.T) list, session_directories: (string * string) list, bibtex_entries: (string * string list) list, command_timings: Properties.T list, load_commands: (string * Position.T) list, scala_functions: (string * (bool * Position.T)) list, global_theories: (string * string) list, loaded_theories: string list} -> unit val init_session_yxml: string -> unit val init_session_file: Path.T -> unit val finish_session_base: unit -> unit val global_theory: string -> string option val loaded_theory: string -> bool val check_session: Proof.context -> string * Position.T -> string val last_timing: Toplevel.transition -> Time.time val check_load_command: Proof.context -> string * Position.T -> string val scala_functions: unit -> string list val check_scala_function: Proof.context -> string * Position.T -> string * bool val master_directory: theory -> Path.T val imports_of: theory -> (string * Position.T) list val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory val thy_path: Path.T -> Path.T val theory_qualifier: string -> string val theory_bibtex_entries: string -> string list val find_theory_file: string -> Path.T option val import_name: string -> Path.T -> string -> {node_name: Path.T, master_dir: Path.T, theory_name: string} val check_thy: Path.T -> string -> {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, imports: (string * Position.T) list, keywords: Thy_Header.keywords} val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser val parse_file: (theory -> Token.file) parser val provide: Path.T * SHA1.digest -> theory -> theory val provide_file: Token.file -> theory -> theory val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser val provide_parse_file: (theory -> Token.file * theory) parser val loaded_files_current: theory -> bool val check_path: Proof.context -> Path.T option -> Input.source -> Path.T val check_file: Proof.context -> Path.T option -> Input.source -> Path.T val check_dir: Proof.context -> Path.T option -> Input.source -> Path.T val check_session_dir: Proof.context -> Path.T option -> Input.source -> Path.T end; structure Resources: RESOURCES = struct (* command timings *) type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun make_timings command_timings = fold update_timings command_timings empty_timings; fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => 
NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session base *) val default_qualifier = "Draft"; type entry = {pos: Position.T, serial: serial}; fun make_entry props : entry = {pos = Position.of_properties props, serial = serial ()}; val empty_session_base = ({session_positions = []: (string * entry) list, session_directories = Symtab.empty: Path.T list Symtab.table, bibtex_entries = Symtab.empty: string list Symtab.table, timings = empty_timings, load_commands = []: (string * Position.T) list, scala_functions = Symtab.empty: (bool * Position.T) Symtab.table}, {global_theories = Symtab.empty: string Symtab.table, loaded_theories = Symtab.empty: unit Symtab.table}); val global_session_base = Synchronized.var "Sessions.base" empty_session_base; fun init_session {session_positions, session_directories, bibtex_entries, command_timings, load_commands, scala_functions, global_theories, loaded_theories} = Synchronized.change global_session_base (fn _ => ({session_positions = sort_by #1 (map (apsnd make_entry) session_positions), session_directories = Symtab.build (session_directories |> fold_rev (fn (dir, name) => Symtab.cons_list (name, Path.explode dir))), bibtex_entries = Symtab.make bibtex_entries, timings = make_timings command_timings, load_commands = load_commands, scala_functions = Symtab.make scala_functions}, {global_theories = Symtab.make global_theories, loaded_theories = Symtab.make_set loaded_theories})); fun init_session_yxml yxml = let val (session_positions, (session_directories, (bibtex_entries, (command_timings, (load_commands, (scala_functions, (global_theories, loaded_theories))))))) = YXML.parse_body yxml |> let open XML.Decode in (pair (list (pair string properties)) (pair (list (pair string string)) (pair (list (pair string (list string))) (pair (list properties) (pair (list (pair string properties)) (pair (list (pair string (pair bool properties))) (pair (list (pair string string)) (list string)))))))) end; in init_session {session_positions = session_positions, session_directories = session_directories, bibtex_entries = bibtex_entries, command_timings = command_timings, load_commands = (map o apsnd) Position.of_properties load_commands, scala_functions = (map o apsnd o apsnd) Position.of_properties scala_functions, global_theories = global_theories, loaded_theories = loaded_theories} end; fun init_session_file path = init_session_yxml (File.read path) before File.rm path; fun finish_session_base () = Synchronized.change global_session_base (apfst (K (#1 empty_session_base))); fun get_session_base f = f (Synchronized.value global_session_base); fun get_session_base1 f = get_session_base (f o #1); fun get_session_base2 f = get_session_base (f o #2); fun global_theory a = Symtab.lookup (get_session_base2 #global_theories) a; fun loaded_theory a = Symtab.defined (get_session_base2 #loaded_theories) a; fun check_session ctxt arg = Completion.check_item "session" (fn (name, {pos, serial}) => Position.make_entity_markup {def = false} serial Markup.sessionN (name, pos)) (get_session_base1 #session_positions) ctxt arg; fun last_timing tr = get_timings (get_session_base1 #timings) tr; fun check_load_command ctxt arg = Completion.check_entity Markup.load_commandN (get_session_base1 #load_commands) ctxt arg; (* Scala functions *) (*raw bootstrap environment*) fun scala_functions () = space_explode "," (getenv "ISABELLE_SCALA_FUNCTIONS"); (*regular resources*) fun scala_function a = (a, the_default (false, Position.none) (Symtab.lookup (get_session_base1 
#scala_functions) a)); fun check_scala_function ctxt arg = let val funs = scala_functions () |> sort_strings |> map scala_function; val name = Completion.check_entity Markup.scala_functionN (map (apsnd #2) funs) ctxt arg; val multi = (case AList.lookup (op =) funs name of SOME (multi, _) => multi | NONE => false); in (name, multi) end; val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\scala_function\ (Scan.lift Parse.embedded_position) (#1 oo check_scala_function) #> ML_Antiquotation.inline_embedded \<^binding>\scala_function\ (Args.context -- Scan.lift Parse.embedded_position >> (uncurry check_scala_function #> #1 #> ML_Syntax.print_string)) #> ML_Antiquotation.value_embedded \<^binding>\scala\ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, arg) => let val (name, multi) = check_scala_function ctxt arg; val func = if multi then "Scala.function" else "Scala.function1"; in ML_Syntax.atomic (func ^ " " ^ ML_Syntax.print_string name) end))); (* manage source files *) type files = {master_dir: Path.T, (*master directory of theory source*) imports: (string * Position.T) list, (*source specification of imports*) provided: (Path.T * SHA1.digest) list}; (*source path, digest*) fun make_files (master_dir, imports, provided): files = {master_dir = master_dir, imports = imports, provided = provided}; structure Files = Theory_Data ( type T = files; val empty = make_files (Path.current, [], []); fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) = let val provided' = Library.merge (op =) (provided1, provided2) in make_files (master_dir, imports, provided') end ); fun map_files f = Files.map (fn {master_dir, imports, provided} => make_files (f (master_dir, imports, provided))); val master_directory = #master_dir o Files.get; val imports_of = #imports o Files.get; fun begin_theory master_dir {name, imports, keywords} parents = let val thy = Theory.begin_theory name parents |> map_files (fn _ => (Path.explode (Path.implode_symbolic master_dir), imports, [])) |> Thy_Header.add_keywords keywords; val ctxt = Proof_Context.init_global thy; val _ = List.app (ignore o check_load_command ctxt o #load_command o #2) keywords; in thy end; (* theory files *) val thy_path = Path.ext "thy"; fun theory_qualifier theory = (case global_theory theory of SOME qualifier => qualifier | NONE => Long_Name.qualifier theory); fun theory_name qualifier theory = if Long_Name.is_qualified theory orelse is_some (global_theory theory) then theory else Long_Name.qualify qualifier theory; fun theory_bibtex_entries theory = Symtab.lookup_list (get_session_base1 #bibtex_entries) (theory_qualifier theory); fun find_theory_file thy_name = let val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name)); val session = theory_qualifier thy_name; val dirs = Symtab.lookup_list (get_session_base1 #session_directories) session; in dirs |> get_first (fn dir => let val path = dir + thy_file in if File.is_file path then SOME path else NONE end) end; fun make_theory_node node_name theory = {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory}; fun loaded_theory_node theory = {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory}; fun import_name qualifier dir s = let val theory = theory_name qualifier (Thy_Header.import_name s); fun theory_node () = make_theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))) theory; in if not (Thy_Header.is_base_name s) then theory_node () else if 
loaded_theory theory then loaded_theory_node theory else (case find_theory_file theory of SOME node_name => make_theory_node node_name theory | NONE => if Long_Name.is_qualified s then loaded_theory_node theory else theory_node ()) end; fun check_file dir file = File.check_file (File.full_path dir file); fun check_thy dir thy_name = let val thy_base_name = Long_Name.base_name thy_name; val master_file = (case find_theory_file thy_name of SOME path => check_file Path.current path | NONE => check_file dir (thy_path (Path.basic thy_base_name))); val text = File.read master_file; val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_base_name <> name andalso error ("Bad theory name " ^ quote name ^ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos); in {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} end; (* load files *) fun parse_files make_paths = Scan.ahead Parse.not_eof -- Parse.path_input >> (fn (tok, source) => fn thy => (case Token.get_files tok of [] => let val master_dir = master_directory thy; val name = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val src_paths = make_paths (Path.explode name); in map (Command.read_file master_dir pos delimited) src_paths end | files => map Exn.release files)); val parse_file = parse_files single >> (fn f => f #> the_single); fun provide (src_path, id) = map_files (fn (master_dir, imports, provided) => if AList.defined (op =) provided src_path then error ("Duplicate use of source file: " ^ Path.print src_path) else (master_dir, imports, (src_path, id) :: provided)); fun provide_file (file: Token.file) = provide (#src_path file, #digest file); fun provide_parse_files make_paths = parse_files make_paths >> (fn files => fn thy => let val fs = files thy; val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; in (fs, thy') end); val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single); fun load_file thy src_path = let val full_path = check_file (master_directory thy) src_path; val text = File.read full_path; val id = SHA1.digest text; in ((full_path, id), text) end; fun loaded_files_current thy = #provided (Files.get thy) |> forall (fn (src_path, id) => (case try (load_file thy) src_path of NONE => false | SOME ((_, id'), _) => id = id')); (* formal check *) fun formal_check check_file ctxt opt_dir source = let val name = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val _ = Context_Position.report ctxt pos (Markup.language_path delimited); fun err msg = error (msg ^ Position.here pos); val dir = (case opt_dir of SOME dir => dir | NONE => master_directory (Proof_Context.theory_of ctxt)); val path = dir + Path.explode name handle ERROR msg => err msg; val _ = Path.expand path handle ERROR msg => err msg; val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); val _ : Path.T = check_file path handle ERROR msg => err msg; in path end; val check_path = formal_check I; val check_file = formal_check File.check_file; val check_dir = formal_check File.check_dir; fun check_session_dir ctxt opt_dir s = let val dir = Path.expand (check_dir ctxt opt_dir s); val ok = File.is_file (dir + Path.explode("ROOT")) orelse File.is_file (dir + Path.explode("ROOTS")); in if ok then dir else error ("Bad session root directory (missing ROOT or ROOTS): " ^ 
Path.print dir ^ Position.here (Input.pos_of s)) end; (* antiquotations *) local fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) = Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => (check ctxt NONE source; Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source)) - |> Latex.enclose_text "\\isatt{" "}")); + |> XML.enclose "\\isatt{" "}")); fun ML_antiq check = Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => check ctxt (SOME Path.current) source |> ML_Syntax.print_path); in val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\session\ (Scan.lift Parse.embedded_position) check_session #> Document_Output.antiquotation_raw_embedded \<^binding>\path\ (document_antiq check_path) (K I) #> Document_Output.antiquotation_raw_embedded \<^binding>\file\ (document_antiq check_file) (K I) #> Document_Output.antiquotation_raw_embedded \<^binding>\dir\ (document_antiq check_dir) (K I) #> ML_Antiquotation.value_embedded \<^binding>\path\ (ML_antiq check_path) #> ML_Antiquotation.value_embedded \<^binding>\file\ (ML_antiq check_file) #> ML_Antiquotation.value_embedded \<^binding>\dir\ (ML_antiq check_dir) #> ML_Antiquotation.value_embedded \<^binding>\path_binding\ (Scan.lift (Parse.position Parse.path) >> (ML_Syntax.print_path_binding o Path.explode_binding)) #> ML_Antiquotation.value \<^binding>\master_dir\ (Args.theory >> (ML_Syntax.print_path o master_directory))); end; end; diff --git a/src/Pure/PIDE/xml.ML b/src/Pure/PIDE/xml.ML --- a/src/Pure/PIDE/xml.ML +++ b/src/Pure/PIDE/xml.ML @@ -1,423 +1,426 @@ (* Title: Pure/PIDE/xml.ML Author: David Aspinall Author: Stefan Berghofer Author: Makarius Untyped XML trees and representation of ML values. *) signature XML_DATA_OPS = sig type 'a A type 'a T type 'a V type 'a P val int_atom: int A val bool_atom: bool A val unit_atom: unit A val properties: Properties.T T val string: string T val int: int T val bool: bool T val unit: unit T val pair: 'a T -> 'b T -> ('a * 'b) T val triple: 'a T -> 'b T -> 'c T -> ('a * 'b * 'c) T val list: 'a T -> 'a list T val option: 'a T -> 'a option T val variant: 'a V list -> 'a T end; signature XML = sig type attributes = (string * string) list datatype tree = Elem of (string * attributes) * tree list | Text of string type body = tree list - val string: string -> body val blob: string list -> body val is_empty: tree -> bool val is_empty_body: body -> bool + val string: string -> body + val enclose: string -> string -> body -> body val xml_elemN: string val xml_nameN: string val xml_bodyN: string val wrap_elem: ((string * attributes) * tree list) * tree list -> tree val unwrap_elem: tree -> (((string * attributes) * tree list) * tree list) option val add_content: tree -> Buffer.T -> Buffer.T val content_of: body -> string val trim_blanks: body -> body val header: string val text: string -> string val element: string -> attributes -> string list -> string val output_markup: Markup.T -> Markup.output val string_of: tree -> string val pretty: int -> tree -> Pretty.T val parse_comments: string list -> unit * string list val parse_string : string -> string option val parse_element: string list -> tree * string list val parse_document: string list -> tree * string list val parse: string -> tree exception XML_ATOM of string exception XML_BODY of body structure Encode: sig include XML_DATA_OPS val tree: tree T end structure Decode: sig include XML_DATA_OPS val tree: tree T end end; structure XML: XML = struct (** XML 
trees **)

open Output_Primitives.XML;

-fun string "" = []
-  | string s = [Text s];
-
val blob = map Text;

fun is_empty (Text "") = true
  | is_empty _ = false;

val is_empty_body = forall is_empty;

+fun string "" = []
+  | string s = [Text s];
+
+fun enclose bg en body = string bg @ body @ string en;
+

(* wrapped elements *)

val xml_elemN = "xml_elem";
val xml_nameN = "xml_name";
val xml_bodyN = "xml_body";

fun wrap_elem (((a, atts), body1), body2) =
  Elem ((xml_elemN, (xml_nameN, a) :: atts), Elem ((xml_bodyN, []), body1) :: body2);

fun unwrap_elem (Elem ((name, (n, a) :: atts), Elem ((name', atts'), body1) :: body2)) =
      if name = xml_elemN andalso n = xml_nameN andalso name' = xml_bodyN andalso null atts'
      then SOME (((a, atts), body1), body2) else NONE
  | unwrap_elem _ = NONE;

(* text content *)

fun add_content tree =
  (case unwrap_elem tree of
    SOME (_, ts) => fold add_content ts
  | NONE =>
      (case tree of
        Elem (_, ts) => fold add_content ts
      | Text s => Buffer.add s));

val content_of = Buffer.build_content o fold add_content;

(* trim blanks *)

fun trim_blanks trees =
  trees |> maps
    (fn Elem (markup, body) => [Elem (markup, trim_blanks body)]
      | Text s => s |> raw_explode |> trim Symbol.is_blank |> implode |> string);

(** string representation **)

val header = "<?xml version=\"1.0\"?>\n";

(* escaped text *)

fun decode "&lt;" = "<"
  | decode "&gt;" = ">"
  | decode "&amp;" = "&"
  | decode "&apos;" = "'"
  | decode "&quot;" = "\""
  | decode c = c;

fun encode "<" = "&lt;"
  | encode ">" = "&gt;"
  | encode "&" = "&amp;"
  | encode "'" = "&apos;"
  | encode "\"" = "&quot;"
  | encode c = c;

val text = translate_string encode;

(* elements *)

fun elem name atts =
  space_implode " " (name :: map (fn (a, x) => a ^ "=\"" ^ text x ^ "\"") atts);

fun element name atts body =
  let val b = implode body in
-    if b = "" then enclose "<" "/>" (elem name atts)
-    else enclose "<" ">" (elem name atts) ^ b ^ enclose "</" ">" name
+    if b = "" then Library.enclose "<" "/>" (elem name atts)
+    else Library.enclose "<" ">" (elem name atts) ^ b ^ Library.enclose "</" ">" name
  end;

fun output_markup (markup as (name, atts)) =
  if Markup.is_empty markup then Markup.no_output
-  else (enclose "<" ">" (elem name atts), enclose "</" ">" name);
+  else (Library.enclose "<" ">" (elem name atts), Library.enclose "</" ">" name);

(* output content *)

fun content_depth depth =
  let
    fun traverse _ (Elem ((name, atts), [])) =
          Buffer.add "<" #> Buffer.add (elem name atts) #> Buffer.add "/>"
      | traverse d (Elem ((name, atts), ts)) =
          Buffer.add "<" #> Buffer.add (elem name atts) #> Buffer.add ">" #>
          traverse_body d ts #>
          Buffer.add "</" #> Buffer.add name #> Buffer.add ">"
      | traverse _ (Text s) = Buffer.add (text s)
    and traverse_body 0 _ = Buffer.add "..."
      | traverse_body d ts = fold (traverse (d - 1)) ts;
  in Buffer.build_content o traverse depth end;

val string_of = content_depth ~1;

fun pretty depth tree = Pretty.str (content_depth (Int.max (0, depth)) tree);

val _ = ML_system_pp (fn depth => fn _ => Pretty.to_polyml o pretty (FixedInt.toInt depth));

(** XML parsing **)

local

fun err msg (xs, _) =
  fn () => "XML parsing error: " ^ msg () ^ "\nfound: " ^ quote (Symbol.beginning 100 xs);

fun ignored _ = [];

fun name_start_char c = Symbol.is_ascii_letter c orelse c = ":" orelse c = "_";
fun name_char c = name_start_char c orelse Symbol.is_ascii_digit c orelse c = "-" orelse c = ".";
val parse_name = Scan.one name_start_char ::: Scan.many name_char;

val blanks = Scan.many Symbol.is_blank;
val special = $$ "&" ^^ (parse_name >> implode) ^^ $$ ";" >> decode;
val regular = Scan.one Symbol.not_eof;
fun regular_except x = Scan.one (fn c => Symbol.not_eof c andalso c <> x);

val parse_chars = Scan.repeat1 (special || regular_except "<") >> implode;

val parse_cdata =
  Scan.this_string "<![CDATA[" |--
  (Scan.repeat (Scan.unless (Scan.this_string "]]>") regular) >> implode) --|
  Scan.this_string "]]>";

val parse_att =
  ((parse_name >> implode) --| (blanks -- $$ "=" -- blanks)) --
  (($$ "\"" || $$ "'") :|-- (fn s => (Scan.repeat (special || regular_except s) >> implode) --| $$ s));

val parse_comment =
  Scan.this_string "<!--" -- Scan.repeat (Scan.unless (Scan.this_string "-->") regular) --
  Scan.this_string "-->" >> ignored;

val parse_processing_instruction =
  Scan.this_string "<?" -- Scan.repeat (Scan.unless (Scan.this_string "?>") regular) --
  Scan.this_string "?>" >> ignored;

val parse_doctype =
  Scan.this_string "<!DOCTYPE" -- Scan.repeat (Scan.unless ($$ ">") regular) --
  $$ ">" >> ignored;

val parse_misc =
  Scan.one Symbol.is_blank >> ignored ||
  parse_processing_instruction ||
  parse_comment;

val parse_optional_text = Scan.optional (parse_chars >> (single o Text)) [];

in

val parse_comments = blanks -- Scan.repeat (parse_comment -- blanks >> K ()) >> K ();

val parse_string = Scan.read Symbol.stopper parse_chars o raw_explode;

fun parse_content xs =
  (parse_optional_text @@@
    Scan.repeats
      ((parse_element >> single ||
        parse_cdata >> (single o Text) ||
        parse_processing_instruction ||
        parse_comment) @@@ parse_optional_text)) xs

and parse_element xs =
  ($$ "<" |-- parse_name -- Scan.repeat (blanks |-- parse_att) --| blanks :--
    (fn (name, _) =>
      !! (err (fn () => "Expected > or />"))
       ($$ "/" -- $$ ">" >> ignored ||
        $$ ">" |-- parse_content --|
          !! (err (fn () => "Expected </" ^ implode name ^ ">"))
            ($$ "<" -- $$ "/" -- Scan.this name -- blanks -- $$ ">"))) >>
    (fn ((name, atts), body) => Elem ((implode name, atts), body))) xs;

val parse_document =
  (Scan.repeat parse_misc -- Scan.option parse_doctype -- Scan.repeat parse_misc)
  |-- parse_element;

fun parse s =
  (case Scan.finite Symbol.stopper (Scan.error (!!
(err (fn () => "Malformed element")) (blanks |-- parse_document --| blanks))) (raw_explode s) of (x, []) => x | (_, ys) => error ("XML parsing error: unprocessed input\n" ^ Symbol.beginning 100 ys)); end; (** XML as data representation language **) exception XML_ATOM of string; exception XML_BODY of tree list; structure Encode = struct type 'a A = 'a -> string; type 'a T = 'a -> body; type 'a V = 'a -> string list * body; type 'a P = 'a -> string list; (* atomic values *) fun int_atom i = Value.print_int i; fun bool_atom false = "0" | bool_atom true = "1"; fun unit_atom () = ""; (* structural nodes *) fun node ts = Elem ((":", []), ts); fun vector xs = map_index (fn (i, x) => (int_atom i, x)) xs; fun tagged (tag, (xs, ts)) = Elem ((int_atom tag, vector xs), ts); (* representation of standard types *) fun tree (t: tree) = [t]; fun properties props = [Elem ((":", props), [])]; fun string "" = [] | string s = [Text s]; val int = string o int_atom; val bool = string o bool_atom; val unit = string o unit_atom; fun pair f g (x, y) = [node (f x), node (g y)]; fun triple f g h (x, y, z) = [node (f x), node (g y), node (h z)]; fun list f xs = map (node o f) xs; fun option _ NONE = [] | option f (SOME x) = [node (f x)]; fun variant fs x = [tagged (the (get_index (fn f => SOME (f x) handle General.Match => NONE) fs))]; end; structure Decode = struct type 'a A = string -> 'a; type 'a T = body -> 'a; type 'a V = string list * body -> 'a; type 'a P = string list -> 'a; (* atomic values *) fun int_atom s = Value.parse_int s handle Fail _ => raise XML_ATOM s; fun bool_atom "0" = false | bool_atom "1" = true | bool_atom s = raise XML_ATOM s; fun unit_atom "" = () | unit_atom s = raise XML_ATOM s; (* structural nodes *) fun node (Elem ((":", []), ts)) = ts | node t = raise XML_BODY [t]; fun vector atts = map_index (fn (i, (a, x)) => if int_atom a = i then x else raise XML_ATOM a) atts; fun tagged (Elem ((name, atts), ts)) = (int_atom name, (vector atts, ts)) | tagged t = raise XML_BODY [t]; (* representation of standard types *) fun tree [t] = t | tree ts = raise XML_BODY ts; fun properties [Elem ((":", props), [])] = props | properties ts = raise XML_BODY ts; fun string [] = "" | string [Text s] = s | string ts = raise XML_BODY ts; val int = int_atom o string; val bool = bool_atom o string; val unit = unit_atom o string; fun pair f g [t1, t2] = (f (node t1), g (node t2)) | pair _ _ ts = raise XML_BODY ts; fun triple f g h [t1, t2, t3] = (f (node t1), g (node t2), h (node t3)) | triple _ _ _ ts = raise XML_BODY ts; fun list f ts = map (f o node) ts; fun option _ [] = NONE | option f [t] = SOME (f (node t)) | option _ ts = raise XML_BODY ts; fun variant fs [t] = let val (tag, (xs, ts)) = tagged t; val f = nth fs tag handle General.Subscript => raise XML_BODY [t]; in f (xs, ts) end | variant _ ts = raise XML_BODY ts; end; end; diff --git a/src/Pure/PIDE/xml.scala b/src/Pure/PIDE/xml.scala --- a/src/Pure/PIDE/xml.scala +++ b/src/Pure/PIDE/xml.scala @@ -1,461 +1,464 @@ /* Title: Pure/PIDE/xml.scala Author: Makarius Untyped XML trees and basic data representation. 
*/

package isabelle


object XML {
  /** XML trees **/

  /* datatype representation */

  type Attribute = Properties.Entry
  type Attributes = Properties.T

  sealed abstract class Tree { override def toString: String = string_of_tree(this) }
  type Body = List[Tree]

  case class Elem(markup: Markup, body: Body) extends Tree {
    private lazy val hash: Int = (markup, body).hashCode()
    override def hashCode(): Int = hash

    def name: String = markup.name

    def update_attributes(more_attributes: Attributes): Elem =
      if (more_attributes.isEmpty) this
      else Elem(markup.update_properties(more_attributes), body)

    def + (att: Attribute): Elem = Elem(markup + att, body)
  }

  case class Text(content: String) extends Tree {
    private lazy val hash: Int = content.hashCode()
    override def hashCode(): Int = hash
  }

  def elem(markup: Markup): XML.Elem = XML.Elem(markup, Nil)
  def elem(name: String, body: Body): XML.Elem = XML.Elem(Markup(name, Nil), body)
  def elem(name: String): XML.Elem = XML.Elem(Markup(name, Nil), Nil)

  val no_text: Text = Text("")
  val newline: Text = Text("\n")

  def string(s: String): Body = if (s.isEmpty) Nil else List(Text(s))

+  def enclose(bg: String, en:String, body: Body): Body =
+    string(bg) ::: body ::: string(en)
+

  /* name space */

  object Namespace {
    def apply(prefix: String, target: String): Namespace = new Namespace(prefix, target)
  }

  final class Namespace private(prefix: String, target: String) {
    def apply(name: String): String = prefix + ":" + name
    val attribute: XML.Attribute = ("xmlns:" + prefix, target)

    override def toString: String = attribute.toString
  }

  /* wrapped elements */

  val XML_ELEM = "xml_elem"
  val XML_NAME = "xml_name"
  val XML_BODY = "xml_body"

  object Wrapped_Elem {
    def apply(markup: Markup, body1: Body, body2: Body): XML.Elem =
      XML.Elem(Markup(XML_ELEM, (XML_NAME, markup.name) :: markup.properties),
        XML.Elem(Markup(XML_BODY, Nil), body1) :: body2)

    def unapply(tree: Tree): Option[(Markup, Body, Body)] =
      tree match {
        case XML.Elem(Markup(XML_ELEM, (XML_NAME, name) :: props),
            XML.Elem(Markup(XML_BODY, Nil), body1) :: body2) =>
          Some(Markup(name, props), body1, body2)
        case _ => None
      }
  }

  object Root_Elem {
    def apply(body: Body): XML.Elem = XML.Elem(Markup(XML_ELEM, Nil), body)
    def unapply(tree: Tree): Option[Body] =
      tree match {
        case XML.Elem(Markup(XML_ELEM, Nil), body) => Some(body)
        case _ => None
      }
  }

  /* traverse text */

  def traverse_text[A](body: Body)(a: A)(op: (A, String) => A): A = {
    def traverse(x: A, t: Tree): A =
      t match {
        case XML.Wrapped_Elem(_, _, ts) => ts.foldLeft(x)(traverse)
        case XML.Elem(_, ts) => ts.foldLeft(x)(traverse)
        case XML.Text(s) => op(x, s)
      }
    body.foldLeft(a)(traverse)
  }

  def text_length(body: Body): Int = traverse_text(body)(0) { case (n, s) => n + s.length }

  def symbol_length(body: Body): Int = traverse_text(body)(0) { case (n, s) => n + Symbol.length(s) }

  /* text content */

  def content(body: Body): String = {
    val text = new StringBuilder(text_length(body))
    traverse_text(body)(()) { case (_, s) => text.append(s) }
    text.toString
  }

  def content(tree: Tree): String = content(List(tree))


  /** string representation **/

  val header: String = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"

  def output_char(s: StringBuilder, c: Char, permissive: Boolean = false): Unit = {
    c match {
      case '<' => s ++= "&lt;"
      case '>' => s ++= "&gt;"
      case '&' => s ++= "&amp;"
      case '"' if !permissive => s ++= "&quot;"
      case '\'' if !permissive => s ++= "&apos;"
      case _ => s += c
    }
  }

  def output_string(s: StringBuilder, str: String, permissive: Boolean = false): Unit = {
    if (str == null) s ++= str
    else str.iterator.foreach(output_char(s, _, permissive = permissive))
  }

  def
output_elem(s: StringBuilder, markup: Markup, end: Boolean = false): Unit = { s += '<' s ++= markup.name for ((a, b) <- markup.properties) { s += ' ' s ++= a s += '=' s += '"' output_string(s, b) s += '"' } if (end) s += '/' s += '>' } def output_elem_end(s: StringBuilder, name: String): Unit = { s += '<' s += '/' s ++= name s += '>' } def string_of_body(body: Body): String = { val s = new StringBuilder def tree(t: Tree): Unit = t match { case XML.Elem(markup, Nil) => output_elem(s, markup, end = true) case XML.Elem(markup, ts) => output_elem(s, markup) ts.foreach(tree) output_elem_end(s, markup.name) case XML.Text(txt) => output_string(s, txt) } body.foreach(tree) s.toString } def string_of_tree(tree: XML.Tree): String = string_of_body(List(tree)) def text(s: String): String = string_of_tree(XML.Text(s)) /** cache **/ object Cache { def make( xz: XZ.Cache = XZ.Cache.make(), max_string: Int = isabelle.Cache.default_max_string, initial_size: Int = isabelle.Cache.default_initial_size): Cache = new Cache(xz, max_string, initial_size) val none: Cache = make(XZ.Cache.none, max_string = 0) } class Cache(val xz: XZ.Cache, max_string: Int, initial_size: Int) extends isabelle.Cache(max_string, initial_size) { protected def cache_props(x: Properties.T): Properties.T = { if (x.isEmpty) x else lookup(x) match { case Some(y) => y case None => store(x.map(p => (Library.isolate_substring(p._1).intern, cache_string(p._2)))) } } protected def cache_markup(x: Markup): Markup = { lookup(x) match { case Some(y) => y case None => x match { case Markup(name, props) => store(Markup(cache_string(name), cache_props(props))) } } } protected def cache_tree(x: XML.Tree): XML.Tree = { lookup(x) match { case Some(y) => y case None => x match { case XML.Elem(markup, body) => store(XML.Elem(cache_markup(markup), cache_body(body))) case XML.Text(text) => store(XML.Text(cache_string(text))) } } } protected def cache_body(x: XML.Body): XML.Body = { if (x.isEmpty) x else lookup(x) match { case Some(y) => y case None => x.map(cache_tree) } } // support hash-consing def tree0(x: XML.Tree): XML.Tree = if (no_cache) x else synchronized { lookup(x) getOrElse store(x) } // main methods def props(x: Properties.T): Properties.T = if (no_cache) x else synchronized { cache_props(x) } def markup(x: Markup): Markup = if (no_cache) x else synchronized { cache_markup(x) } def tree(x: XML.Tree): XML.Tree = if (no_cache) x else synchronized { cache_tree(x) } def body(x: XML.Body): XML.Body = if (no_cache) x else synchronized { cache_body(x) } def elem(x: XML.Elem): XML.Elem = if (no_cache) x else synchronized { cache_tree(x).asInstanceOf[XML.Elem] } } /** XML as data representation language **/ abstract class Error(s: String) extends Exception(s) class XML_Atom(s: String) extends Error(s) class XML_Body(body: XML.Body) extends Error("") object Encode { type T[A] = A => XML.Body type V[A] = PartialFunction[A, (List[String], XML.Body)] type P[A] = PartialFunction[A, List[String]] /* atomic values */ def long_atom(i: Long): String = Library.signed_string_of_long(i) def int_atom(i: Int): String = Library.signed_string_of_int(i) def bool_atom(b: Boolean): String = if (b) "1" else "0" def unit_atom(u: Unit) = "" /* structural nodes */ private def node(ts: XML.Body): XML.Tree = XML.Elem(Markup(":", Nil), ts) private def vector(xs: List[String]): XML.Attributes = xs.zipWithIndex.map({ case (x, i) => (int_atom(i), x) }) private def tagged(tag: Int, data: (List[String], XML.Body)): XML.Tree = XML.Elem(Markup(int_atom(tag), vector(data._1)), data._2) 
/* representation of standard types */ val tree: T[XML.Tree] = (t => List(t)) val properties: T[Properties.T] = (props => List(XML.Elem(Markup(":", props), Nil))) val string: T[String] = (s => if (s.isEmpty) Nil else List(XML.Text(s))) val long: T[Long] = (x => string(long_atom(x))) val int: T[Int] = (x => string(int_atom(x))) val bool: T[Boolean] = (x => string(bool_atom(x))) val unit: T[Unit] = (x => string(unit_atom(x))) def pair[A, B](f: T[A], g: T[B]): T[(A, B)] = (x => List(node(f(x._1)), node(g(x._2)))) def triple[A, B, C](f: T[A], g: T[B], h: T[C]): T[(A, B, C)] = (x => List(node(f(x._1)), node(g(x._2)), node(h(x._3)))) def list[A](f: T[A]): T[List[A]] = (xs => xs.map((x: A) => node(f(x)))) def option[A](f: T[A]): T[Option[A]] = { case None => Nil case Some(x) => List(node(f(x))) } def variant[A](fs: List[V[A]]): T[A] = { case x => val (f, tag) = fs.iterator.zipWithIndex.find(p => p._1.isDefinedAt(x)).get List(tagged(tag, f(x))) } } object Decode { type T[A] = XML.Body => A type V[A] = (List[String], XML.Body) => A type P[A] = PartialFunction[List[String], A] /* atomic values */ def long_atom(s: String): Long = try { java.lang.Long.parseLong(s) } catch { case e: NumberFormatException => throw new XML_Atom(s) } def int_atom(s: String): Int = try { Integer.parseInt(s) } catch { case e: NumberFormatException => throw new XML_Atom(s) } def bool_atom(s: String): Boolean = if (s == "1") true else if (s == "0") false else throw new XML_Atom(s) def unit_atom(s: String): Unit = if (s == "") () else throw new XML_Atom(s) /* structural nodes */ private def node(t: XML.Tree): XML.Body = t match { case XML.Elem(Markup(":", Nil), ts) => ts case _ => throw new XML_Body(List(t)) } private def vector(atts: XML.Attributes): List[String] = atts.iterator.zipWithIndex.map( { case ((a, x), i) => if (int_atom(a) == i) x else throw new XML_Atom(a) }).toList private def tagged(t: XML.Tree): (Int, (List[String], XML.Body)) = t match { case XML.Elem(Markup(name, atts), ts) => (int_atom(name), (vector(atts), ts)) case _ => throw new XML_Body(List(t)) } /* representation of standard types */ val tree: T[XML.Tree] = { case List(t) => t case ts => throw new XML_Body(ts) } val properties: T[Properties.T] = { case List(XML.Elem(Markup(":", props), Nil)) => props case ts => throw new XML_Body(ts) } val string: T[String] = { case Nil => "" case List(XML.Text(s)) => s case ts => throw new XML_Body(ts) } val long: T[Long] = (x => long_atom(string(x))) val int: T[Int] = (x => int_atom(string(x))) val bool: T[Boolean] = (x => bool_atom(string(x))) val unit: T[Unit] = (x => unit_atom(string(x))) def pair[A, B](f: T[A], g: T[B]): T[(A, B)] = { case List(t1, t2) => (f(node(t1)), g(node(t2))) case ts => throw new XML_Body(ts) } def triple[A, B, C](f: T[A], g: T[B], h: T[C]): T[(A, B, C)] = { case List(t1, t2, t3) => (f(node(t1)), g(node(t2)), h(node(t3))) case ts => throw new XML_Body(ts) } def list[A](f: T[A]): T[List[A]] = (ts => ts.map(t => f(node(t)))) def option[A](f: T[A]): T[Option[A]] = { case Nil => None case List(t) => Some(f(node(t))) case ts => throw new XML_Body(ts) } def variant[A](fs: List[V[A]]): T[A] = { case List(t) => val (tag, (xs, ts)) = tagged(t) val f = try { fs(tag) } catch { case _: IndexOutOfBoundsException => throw new XML_Body(List(t)) } f(xs, ts) case ts => throw new XML_Body(ts) } } } diff --git a/src/Pure/System/scala_compiler.ML b/src/Pure/System/scala_compiler.ML --- a/src/Pure/System/scala_compiler.ML +++ b/src/Pure/System/scala_compiler.ML @@ -1,99 +1,99 @@ (* Title: 
Pure/System/scala_compiler.ML Author: Makarius Scala compiler operations. *) signature SCALA_COMPILER = sig val toplevel: bool -> string -> unit val static_check: string * Position.T -> unit end; structure Scala_Compiler: SCALA_COMPILER = struct (* check declaration *) fun toplevel interpret source = let val errors = (interpret, source) |> let open XML.Encode in pair bool string end |> YXML.string_of_body |> \<^scala>\scala_toplevel\ |> YXML.parse_body |> let open XML.Decode in list string end in if null errors then () else error (cat_lines errors) end; fun static_check (source, pos) = toplevel false ("package test\nclass __Dummy__ { __dummy__ => " ^ source ^ " }") handle ERROR msg => error (msg ^ Position.here pos); (* antiquotations *) local fun make_list bg en = space_implode "," #> enclose bg en; fun print_args [] = "" | print_args xs = make_list "(" ")" xs; fun print_types [] = "" | print_types Ts = make_list "[" "]" Ts; fun print_class (c, Ts) = c ^ print_types Ts; val types = Scan.optional (Parse.$$$ "[" |-- Parse.list1 Parse.name --| Parse.$$$ "]") []; val class = Scan.option (Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "in" |-- Parse.name -- types --| Parse.$$$ ")")); val arguments = (Parse.nat >> (fn n => replicate n "_") || Parse.list (Parse.underscore || Parse.name >> (fn T => "_ : " ^ T))) >> print_args; val args = Scan.optional (Parse.$$$ "(" |-- arguments --| Parse.$$$ ")") " _"; fun scala_name name = Latex.string (Latex.output_ascii_breakable "." name) - |> Latex.enclose_text "\\isatt{" "}"; + |> XML.enclose "\\isatt{" "}"; in val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\scala\ (Scan.lift Parse.embedded_position) (fn _ => fn (s, pos) => (static_check (s, pos); s)) #> Document_Output.antiquotation_raw_embedded \<^binding>\scala_type\ (Scan.lift (Parse.embedded_position -- (types >> print_types))) (fn _ => fn ((t, pos), type_args) => (static_check ("type _Test_" ^ type_args ^ " = " ^ t ^ type_args, pos); scala_name (t ^ type_args))) #> Document_Output.antiquotation_raw_embedded \<^binding>\scala_object\ (Scan.lift Parse.embedded_position) (fn _ => fn (x, pos) => (static_check ("val _test_ = " ^ x, pos); scala_name x)) #> Document_Output.antiquotation_raw_embedded \<^binding>\scala_method\ (Scan.lift (class -- Parse.embedded_position -- types -- args)) (fn _ => fn (((class_context, (method, pos)), method_types), method_args) => let val class_types = (case class_context of SOME (_, Ts) => Ts | NONE => []); val def = "def _test_" ^ print_types (merge (op =) (method_types, class_types)); val def_context = (case class_context of NONE => def ^ " = " | SOME c => def ^ "(_this_ : " ^ print_class c ^ ") = _this_."); val source = def_context ^ method ^ method_args; val _ = static_check (source, pos); val text = (case class_context of NONE => method | SOME c => print_class c ^ "." ^ method); in scala_name text end)); end; end; diff --git a/src/Pure/Thy/document_antiquotations.ML b/src/Pure/Thy/document_antiquotations.ML --- a/src/Pure/Thy/document_antiquotations.ML +++ b/src/Pure/Thy/document_antiquotations.ML @@ -1,447 +1,447 @@ (* Title: Pure/Thy/document_antiquotations.ML Author: Makarius Miscellaneous document antiquotations. 
*) structure Document_Antiquotations: sig end = struct (* basic entities *) local type style = term -> term; fun pretty_term_style ctxt (style: style, t) = Document_Output.pretty_term ctxt (style t); fun pretty_thms_style ctxt (style: style, ths) = map (fn th => Document_Output.pretty_term ctxt (style (Thm.full_prop_of th))) ths; fun pretty_term_typ ctxt (style: style, t) = let val t' = style t in Document_Output.pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end; fun pretty_term_typeof ctxt (style: style, t) = Syntax.pretty_typ ctxt (Term.fastype_of (style t)); fun pretty_const ctxt c = let val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) handle TYPE (msg, _, _) => error msg; val (t', _) = yield_singleton (Variable.import_terms true) t ctxt; in Document_Output.pretty_term ctxt t' end; fun pretty_abbrev ctxt s = let val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s; fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t); val (head, args) = Term.strip_comb t; val (c, T) = Term.dest_Const head handle TERM _ => err (); val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c handle TYPE _ => err (); val t' = Term.betapplys (Envir.expand_atom T (U, u), args); val eq = Logic.mk_equals (t, t'); val ctxt' = Proof_Context.augment eq ctxt; in Proof_Context.pretty_term_abbrev ctxt' eq end; fun pretty_locale ctxt (name, pos) = let val thy = Proof_Context.theory_of ctxt in Pretty.str (Locale.extern thy (Locale.check thy (name, pos))) end; fun pretty_bundle ctxt (name, pos) = Pretty.str (Bundle.extern ctxt (Bundle.check ctxt (name, pos))); fun pretty_class ctxt s = Pretty.str (Proof_Context.extern_class ctxt (Proof_Context.read_class ctxt s)); fun pretty_type ctxt s = let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s in Pretty.str (Proof_Context.extern_type ctxt name) end; fun pretty_prf full ctxt = Proof_Syntax.pretty_standard_proof_of ctxt full; fun pretty_theory ctxt (name, pos) = (Theory.check {long = true} ctxt (name, pos); Pretty.str name); val basic_entity = Document_Output.antiquotation_pretty_source_embedded; fun basic_entities name scan pretty = Document_Antiquotation.setup name scan (fn {context = ctxt, source = src, argument = xs} => Document_Output.pretty_items_source ctxt {embedded = false} src (map (pretty ctxt) xs)); val _ = Theory.setup (basic_entity \<^binding>\prop\ (Term_Style.parse -- Args.prop) pretty_term_style #> basic_entity \<^binding>\term\ (Term_Style.parse -- Args.term) pretty_term_style #> basic_entity \<^binding>\term_type\ (Term_Style.parse -- Args.term) pretty_term_typ #> basic_entity \<^binding>\typeof\ (Term_Style.parse -- Args.term) pretty_term_typeof #> basic_entity \<^binding>\const\ (Args.const {proper = true, strict = false}) pretty_const #> basic_entity \<^binding>\abbrev\ (Scan.lift Parse.embedded_inner_syntax) pretty_abbrev #> basic_entity \<^binding>\typ\ Args.typ_abbrev Syntax.pretty_typ #> basic_entity \<^binding>\locale\ (Scan.lift Parse.embedded_position) pretty_locale #> basic_entity \<^binding>\bundle\ (Scan.lift Parse.embedded_position) pretty_bundle #> basic_entity \<^binding>\class\ (Scan.lift Parse.embedded_inner_syntax) pretty_class #> basic_entity \<^binding>\type\ (Scan.lift Parse.embedded_inner_syntax) pretty_type #> basic_entity \<^binding>\theory\ (Scan.lift Parse.embedded_position) pretty_theory #> basic_entities \<^binding>\prf\ Attrib.thms (pretty_prf false) #> basic_entities 
\<^binding>\full_prf\ Attrib.thms (pretty_prf true) #> Document_Antiquotation.setup \<^binding>\thm\ (Term_Style.parse -- Attrib.thms) (fn {context = ctxt, source = src, argument = arg} => Document_Output.pretty_items_source ctxt {embedded = false} src (pretty_thms_style ctxt arg))); in end; (* Markdown errors *) local fun markdown_error binding = Document_Antiquotation.setup binding (Scan.succeed ()) (fn {source = src, ...} => error ("Bad Markdown structure: illegal " ^ quote (Binding.name_of binding) ^ Position.here (Position.no_range_position (#1 (Token.range_of src))))) val _ = Theory.setup (markdown_error \<^binding>\item\ #> markdown_error \<^binding>\enum\ #> markdown_error \<^binding>\descr\); in end; (* control spacing *) val _ = Theory.setup (Document_Output.antiquotation_raw \<^binding>\noindent\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\noindent") #> Document_Output.antiquotation_raw \<^binding>\smallskip\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\smallskip") #> Document_Output.antiquotation_raw \<^binding>\medskip\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\medskip") #> Document_Output.antiquotation_raw \<^binding>\bigskip\ (Scan.succeed ()) (fn _ => fn () => Latex.string "\\bigskip")); (* nested document text *) local fun nested_antiquotation name s1 s2 = Document_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input) (fn ctxt => fn txt => (Context_Position.reports ctxt (Document_Output.document_reports txt); - Latex.enclose_text s1 s2 (Document_Output.output_document ctxt {markdown = false} txt))); + XML.enclose s1 s2 (Document_Output.output_document ctxt {markdown = false} txt))); val _ = Theory.setup (nested_antiquotation \<^binding>\footnote\ "\\footnote{" "}" #> nested_antiquotation \<^binding>\emph\ "\\emph{" "}" #> nested_antiquotation \<^binding>\bold\ "\\textbf{" "}"); in end; (* index entries *) local val index_like = Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "is" |-- Args.name --| Parse.$$$ ")"); val index_args = Parse.enum1 "!" 
(Parse.embedded_input -- Scan.option index_like); fun output_text ctxt = Document_Output.output_document ctxt {markdown = false}; fun index binding def = Document_Output.antiquotation_raw binding (Scan.lift index_args) (fn ctxt => fn args => let val _ = Context_Position.reports ctxt (maps (Document_Output.document_reports o #1) args); fun make_item (txt, opt_like) = let val text = output_text ctxt txt; val like = (case opt_like of SOME s => s | NONE => Document_Antiquotation.approx_content ctxt (Input.string_of txt)); val _ = if is_none opt_like andalso Context_Position.is_visible ctxt then writeln ("(" ^ Markup.markup Markup.keyword2 "is" ^ " " ^ quote like ^ ")" ^ Position.here (Input.pos_of txt)) else (); in {text = text, like = like} end; in Latex.index_entry {items = map make_item args, def = def} end); val _ = Theory.setup (index \<^binding>\index_ref\ false #> index \<^binding>\index_def\ true); in end; (* quasi-formal text (unchecked) *) local fun report_text ctxt text = let val pos = Input.pos_of text in Context_Position.reports ctxt [(pos, Markup.language_text (Input.is_delimited text)), (pos, Markup.raw_text)] end; fun prepare_text ctxt = Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt; fun text_antiquotation name = Document_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input) (fn ctxt => fn text => let val _ = report_text ctxt text; in prepare_text ctxt text |> Document_Output.output_source ctxt |> Document_Output.isabelle ctxt end); val theory_text_antiquotation = Document_Output.antiquotation_raw_embedded \<^binding>\theory_text\ (Scan.lift Args.text_input) (fn ctxt => fn text => let val keywords = Thy_Header.get_keywords' ctxt; val _ = report_text ctxt text; val _ = Input.source_explode text |> Token.tokenize keywords {strict = true} |> maps (Token.reports keywords) |> Context_Position.reports_text ctxt; in prepare_text ctxt text |> Token.explode0 keywords |> maps (Document_Output.output_token ctxt) |> Document_Output.isabelle ctxt end); val _ = Theory.setup (text_antiquotation \<^binding>\text\ #> text_antiquotation \<^binding>\cartouche\ #> theory_text_antiquotation); in end; (* goal state *) local fun goal_state name main = Document_Output.antiquotation_pretty name (Scan.succeed ()) (fn ctxt => fn () => Goal_Display.pretty_goal (Config.put Goal_Display.show_main_goal main ctxt) (#goal (Proof.goal (Toplevel.proof_of (Toplevel.presentation_state ctxt))))); val _ = Theory.setup (goal_state \<^binding>\goals\ true #> goal_state \<^binding>\subgoals\ false); in end; (* embedded lemma *) val _ = Theory.setup (Document_Antiquotation.setup \<^binding>\lemma\ (Scan.lift (Scan.ahead Parse.not_eof) -- Args.prop -- Scan.lift Method.parse_by) (fn {context = ctxt, source = src, argument = ((prop_tok, prop), (methods, reports))} => let val _ = Context_Position.reports ctxt reports; (* FIXME check proof!? 
*) val _ = ctxt |> Proof.theorem NONE (K I) [[(prop, [])]] |> Proof.global_terminal_proof methods; in Document_Output.pretty_source ctxt {embedded = false} [hd src, prop_tok] (Document_Output.pretty_term ctxt prop) end)); (* verbatim text *) val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\verbatim\ (Scan.lift Args.text_input) (fn ctxt => fn text => let val pos = Input.pos_of text; val _ = Context_Position.reports ctxt [(pos, Markup.language_verbatim (Input.is_delimited text)), (pos, Markup.raw_text)]; in #1 (Input.source_content text) end)); (* bash functions *) val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\bash_function\ (Scan.lift Parse.embedded_position) Isabelle_System.check_bash_function); (* system options *) val _ = Theory.setup (Document_Output.antiquotation_verbatim_embedded \<^binding>\system_option\ (Scan.lift Parse.embedded_position) (fn ctxt => fn (name, pos) => let val _ = Completion.check_option (Options.default ()) ctxt (name, pos); in name end)); (* ML text *) local fun test_val (ml1, []) = ML_Lex.read "fn _ => (" @ ml1 @ ML_Lex.read ");" | test_val (ml1, ml2) = ML_Lex.read "fn _ => (" @ ml1 @ ML_Lex.read " : " @ ml2 @ ML_Lex.read ");"; fun test_op (ml1, ml2) = test_val (ML_Lex.read "op " @ ml1, ml2); fun test_type (ml1, []) = ML_Lex.read "val _ = NONE : (" @ ml1 @ ML_Lex.read ") option;" | test_type (ml1, ml2) = ML_Lex.read "val _ = [NONE : (" @ ml1 @ ML_Lex.read ") option, NONE : (" @ ml2 @ ML_Lex.read ") option];"; fun test_exn (ml1, []) = ML_Lex.read "fn _ => (" @ ml1 @ ML_Lex.read " : exn);" | test_exn (ml1, ml2) = ML_Lex.read "fn _ => (" @ ml1 @ ML_Lex.read " : " @ ml2 @ ML_Lex.read " -> exn);"; fun test_struct (ml, _) = ML_Lex.read "functor XXX() = struct structure XX = " @ ml @ ML_Lex.read " end;"; fun test_functor (Antiquote.Text tok :: _, _) = ML_Lex.read "ML_Env.check_functor " @ ML_Lex.read (ML_Syntax.print_string (ML_Lex.content_of tok)) | test_functor _ = raise Fail "Bad ML functor specification"; val parse_ml0 = Args.text_input >> (fn source => ("", (source, Input.empty))); val parse_ml = Args.text_input -- Scan.optional (Args.colon |-- Args.text_input) Input.empty >> pair ""; val parse_exn = Args.text_input -- Scan.optional (Args.$$$ "of" |-- Args.text_input) Input.empty >> pair ""; val parse_type = (Parse.type_args >> (fn [] => "" | [a] => a ^ " " | bs => enclose "(" ") " (commas bs))) -- (Args.text_input -- Scan.optional (Args.$$$ "=" |-- Args.text_input) Input.empty); fun eval ctxt pos ml = ML_Context.eval_in (SOME ctxt) ML_Compiler.flags pos ml handle ERROR msg => error (msg ^ Position.here pos); fun make_text sep sources = let val (txt1, txt2) = apply2 (#1 o Input.source_content) sources; val is_ident = (case try List.last (Symbol.explode txt1) of NONE => false | SOME s => Symbol.is_ascii_letdig s); val txt = if txt2 = "" then txt1 else if sep = ":" andalso is_ident then txt1 ^ ": " ^ txt2 else txt1 ^ " " ^ sep ^ " " ^ txt2 in (txt, txt1) end; fun antiquotation_ml parse test kind show_kind binding index = Document_Output.antiquotation_raw binding (Scan.lift parse) (fn ctxt => fn (txt0, sources) => let val (ml1, ml2) = apply2 ML_Lex.read_source sources; val ml0 = ML_Lex.read_source (Input.string txt0); val _ = test (ml0 @ ml1, ml2) |> eval ctxt (Input.pos_of (#1 sources)); val sep = if kind = "type" then "=" else if kind = "exception" then "of" else ":"; val (txt, idx) = make_text sep sources; val main_text = Document_Output.verbatim ctxt ((if kind = "" orelse not show_kind then "" 
else kind ^ " ") ^ txt0 ^ txt); val index_text = (case index of NONE => [] | SOME def => let val ctxt' = Config.put Document_Antiquotation.thy_output_display false ctxt; val kind' = if kind = "" then " (ML)" else " (ML " ^ kind ^ ")"; val txt' = Document_Output.verbatim ctxt' idx @ Latex.string kind'; val like = Document_Antiquotation.approx_content ctxt' idx; in Latex.index_entry {items = [{text = txt', like = like}], def = def} end); in index_text @ main_text end); fun antiquotation_ml0 test kind = antiquotation_ml parse_ml0 test kind false; fun antiquotation_ml1 parse test kind binding = antiquotation_ml parse test kind true binding (SOME true); in val _ = Theory.setup (Latex.index_variants (antiquotation_ml0 test_val "") \<^binding>\ML\ #> Latex.index_variants (antiquotation_ml0 test_op "infix") \<^binding>\ML_infix\ #> Latex.index_variants (antiquotation_ml0 test_type "type") \<^binding>\ML_type\ #> Latex.index_variants (antiquotation_ml0 test_struct "structure") \<^binding>\ML_structure\ #> Latex.index_variants (antiquotation_ml0 test_functor "functor") \<^binding>\ML_functor\ #> antiquotation_ml0 (K []) "text" \<^binding>\ML_text\ NONE #> antiquotation_ml1 parse_ml test_val "" \<^binding>\define_ML\ #> antiquotation_ml1 parse_ml test_op "infix" \<^binding>\define_ML_infix\ #> antiquotation_ml1 parse_type test_type "type" \<^binding>\define_ML_type\ #> antiquotation_ml1 parse_exn test_exn "exception" \<^binding>\define_ML_exception\ #> antiquotation_ml1 parse_ml0 test_struct "structure" \<^binding>\define_ML_structure\ #> antiquotation_ml1 parse_ml0 test_functor "functor" \<^binding>\define_ML_functor\); end; (* URLs *) val escape_url = translate_string (fn c => if c = "%" orelse c = "#" orelse c = "^" then "\\" ^ c else c); val _ = Theory.setup (Document_Output.antiquotation_raw_embedded \<^binding>\url\ (Scan.lift Parse.embedded_input) (fn ctxt => fn source => let val url = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val _ = Context_Position.reports ctxt [(pos, Markup.language_url delimited), (pos, Markup.url url)]; - in Latex.enclose_text "\\url{" "}" (Latex.string (escape_url url)) end)); + in XML.enclose "\\url{" "}" (Latex.string (escape_url url)) end)); (* formal entities *) local fun entity_antiquotation name check bg en = Document_Output.antiquotation_raw name (Scan.lift Args.name_position) (fn ctxt => fn (name, pos) => let val _ = check ctxt (name, pos) - in Latex.enclose_text bg en (Latex.string (Output.output name)) end); + in XML.enclose bg en (Latex.string (Output.output name)) end); val _ = Theory.setup (entity_antiquotation \<^binding>\command\ Outer_Syntax.check_command "\\isacommand{" "}" #> entity_antiquotation \<^binding>\method\ Method.check_name "\\isa{" "}" #> entity_antiquotation \<^binding>\attribute\ Attrib.check_name "\\isa{" "}"); in end; end; diff --git a/src/Pure/Thy/document_output.ML b/src/Pure/Thy/document_output.ML --- a/src/Pure/Thy/document_output.ML +++ b/src/Pure/Thy/document_output.ML @@ -1,573 +1,573 @@ (* Title: Pure/Thy/document_output.ML Author: Makarius Theory document output. 
*) signature DOCUMENT_OUTPUT = sig val document_reports: Input.source -> Position.report list val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text val check_comments: Proof.context -> Symbol_Pos.T list -> unit val output_token: Proof.context -> Token.T -> Latex.text val output_source: Proof.context -> string -> Latex.text type segment = {span: Command_Span.span, command: Toplevel.transition, prev_state: Toplevel.state, state: Toplevel.state} val present_thy: Options.T -> theory -> segment list -> Latex.text val pretty_term: Proof.context -> term -> Pretty.T val pretty_thm: Proof.context -> thm -> Pretty.T val isabelle: Proof.context -> Latex.text -> Latex.text val isabelle_typewriter: Proof.context -> Latex.text -> Latex.text val typewriter: Proof.context -> string -> Latex.text val verbatim: Proof.context -> string -> Latex.text val source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text val pretty: Proof.context -> Pretty.T -> Latex.text val pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text val pretty_items: Proof.context -> Pretty.T list -> Latex.text val pretty_items_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T list -> Latex.text val antiquotation_pretty: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_pretty_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_pretty_source: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_pretty_source_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory val antiquotation_raw: binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory val antiquotation_raw_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory val antiquotation_verbatim: binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory val antiquotation_verbatim_embedded: binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory end; structure Document_Output: DOCUMENT_OUTPUT = struct (* output document source *) fun document_reports txt = let val pos = Input.pos_of txt in [(pos, Markup.language_document (Input.is_delimited txt)), (pos, Markup.plain_text)] end; fun output_comment ctxt (kind, syms) = (case kind of Comment.Comment => Input.cartouche_content syms |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false) {markdown = false} - |> Latex.enclose_text "%\n\\isamarkupcmt{" "%\n}" + |> XML.enclose "%\n\\isamarkupcmt{" "%\n}" | Comment.Cancel => Symbol_Pos.cartouche_content syms |> Latex.symbols_output - |> Latex.enclose_text "%\n\\isamarkupcancel{" "}" + |> XML.enclose "%\n\\isamarkupcancel{" "}" | Comment.Latex => Latex.symbols (Symbol_Pos.cartouche_content syms) | Comment.Marker => []) and output_comment_document ctxt (comment, syms) = (case comment of SOME kind => output_comment ctxt (kind, syms) | NONE => Latex.symbols syms) and output_document_text ctxt syms = Comment.read_body syms |> maps (output_comment_document ctxt) and output_document ctxt {markdown} source = let val pos = Input.pos_of source; val syms = Input.source_explode source; val output_antiquotes = maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt); fun output_line line = (if Markdown.line_is_item line then Latex.string "\\item " 
else []) @ output_antiquotes (Markdown.line_content line); fun output_block (Markdown.Par lines) = separate (XML.Text "\n") (map (Latex.block o output_line) lines) | output_block (Markdown.List {kind, body, ...}) = Latex.environment_text (Markdown.print_kind kind) (output_blocks body) and output_blocks blocks = separate (XML.Text "\n\n") (map (Latex.block o output_block) blocks); in if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then [] else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms then let val ants = Antiquote.parse_comments pos syms; val reports = Antiquote.antiq_reports ants; val blocks = Markdown.read_antiquotes ants; val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks); in output_blocks blocks end else let val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms); val reports = Antiquote.antiq_reports ants; val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants); in output_antiquotes ants end end; (* output tokens with formal comments *) local val output_symbols_antiq = (fn Antiquote.Text syms => Latex.symbols_output syms | Antiquote.Control {name = (name, _), body, ...} => Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) @ Latex.symbols_output body | Antiquote.Antiq {body, ...} => - Latex.enclose_text "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (Latex.symbols_output body)); + XML.enclose "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (Latex.symbols_output body)); fun output_comment_symbols ctxt {antiq} (comment, syms) = (case (comment, antiq) of (NONE, false) => Latex.symbols_output syms | (NONE, true) => Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms |> maps output_symbols_antiq | (SOME comment, _) => output_comment ctxt (comment, syms)); fun output_body ctxt antiq bg en syms = Comment.read_body syms |> maps (output_comment_symbols ctxt {antiq = antiq}) - |> Latex.enclose_text bg en; + |> XML.enclose bg en; in fun output_token ctxt tok = let fun output antiq bg en = output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok)); in (case Token.kind_of tok of Token.Comment NONE => [] | Token.Comment (SOME Comment.Marker) => [] | Token.Command => output false "\\isacommand{" "}" | Token.Keyword => if Symbol.is_ascii_identifier (Token.content_of tok) then output false "\\isakeyword{" "}" else output false "" "" | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}" | Token.Control control => output_body ctxt false "" "" (Antiquote.control_symbols control) | _ => output false "" "") end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok)); fun output_source ctxt s = output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none)); fun check_comments ctxt = Comment.read_body #> List.app (fn (comment, syms) => let val pos = #1 (Symbol_Pos.range syms); val _ = comment |> Option.app (fn kind => Context_Position.reports ctxt (map (pair pos) (Comment.kind_markups kind))); val _ = output_comment_symbols ctxt {antiq = false} (comment, syms); in if comment = SOME Comment.Comment then check_comments ctxt syms else () end); end; (** present theory source **) (* presentation tokens *) datatype token = Ignore | Token of Token.T | Heading 
of string * Input.source | Body of string * Input.source | Raw of Input.source; fun token_with pred (Token tok) = pred tok | token_with _ _ = false; val white_token = token_with Document_Source.is_white; val white_comment_token = token_with Document_Source.is_white_comment; val blank_token = token_with Token.is_blank; val newline_token = token_with Token.is_newline; fun present_token ctxt tok = (case tok of Ignore => [] | Token tok => output_token ctxt tok | Heading (cmd, source) => - Latex.enclose_text ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n" + XML.enclose ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n" (output_document ctxt {markdown = false} source) | Body (cmd, source) => Latex.environment_text ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source) | Raw source => Latex.string "%\n" @ output_document ctxt {markdown = true} source @ Latex.string "\n"); (* command spans *) type command = string * Position.T; (*name, position*) type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*) datatype span = Span of command * (source * source * source * source) * bool; fun make_span cmd src = let fun chop_newline (tok :: toks) = if newline_token (fst tok) then ([tok], toks, true) else ([], tok :: toks, false) | chop_newline [] = ([], [], false); val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = src |> chop_prefix (white_token o fst) ||>> chop_suffix (white_token o fst) ||>> chop_prefix (white_comment_token o fst) ||> chop_newline; in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; (* present spans *) local fun err_bad_nesting here = error ("Bad nesting of commands in presentation" ^ here); fun edge which f (x: string option, y) = if x = y then I else (case which (x, y) of NONE => I | SOME txt => fold cons (Latex.string (f txt))); val begin_tag = edge #2 Latex.begin_tag; val end_tag = edge #1 Latex.end_tag; fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e; fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e; fun document_tag cmd_pos state state' tagging_stack = let val ctxt' = Toplevel.presentation_context state'; val nesting = Toplevel.level state' - Toplevel.level state; val (tagging, taggings) = tagging_stack; val (tag', tagging') = Document_Source.update_tagging ctxt' tagging; val tagging_stack' = if nesting = 0 andalso not (Toplevel.is_proof state) then tagging_stack else if nesting >= 0 then (tagging', replicate nesting tagging @ taggings) else (case drop (~ nesting - 1) taggings of tg :: tgs => (tg, tgs) | [] => err_bad_nesting (Position.here cmd_pos)); in (tag', tagging_stack') end; fun read_tag s = (case space_explode "%" s of ["", b] => (SOME b, NONE) | [a, b] => (NONE, SOME (a, b)) | _ => error ("Bad document_tags specification: " ^ quote s)); in fun make_command_tag options keywords = let val document_tags = map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>)); val document_tags_default = map_filter #1 document_tags; val document_tags_command = map_filter #2 document_tags; in fn cmd_name => fn state => fn state' => fn active_tag => let val keyword_tags = if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"] else Keyword.command_tags keywords cmd_name; val command_tags = the_list (AList.lookup (op =) document_tags_command cmd_name) @ keyword_tags @ document_tags_default; val active_tag' = (case command_tags of default_tag :: _ => SOME
default_tag | [] => if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state then active_tag else NONE); in active_tag' end end; fun present_span command_tag span state state' (tagging_stack, active_tag, newline, latex, present_cont) = let val ctxt' = Toplevel.presentation_context state'; val present = fold (fn (tok, (flag, 0)) => fold cons (present_token ctxt' tok) #> fold cons (Latex.string flag) | _ => I); val Span ((cmd_name, cmd_pos), srcs, span_newline) = span; val (tag', tagging_stack') = document_tag cmd_pos state state' tagging_stack; val active_tag' = if is_some tag' then Option.map #1 tag' else command_tag cmd_name state state' active_tag; val edge = (active_tag, active_tag'); val newline' = if is_none active_tag' then span_newline else newline; val latex' = latex |> end_tag edge |> close_delim (fst present_cont) edge |> snd present_cont |> open_delim (present (#1 srcs)) edge |> begin_tag edge |> present (#2 srcs); val present_cont' = if newline then (present (#3 srcs), present (#4 srcs)) else (I, present (#3 srcs) #> present (#4 srcs)); in (tagging_stack', active_tag', newline', latex', present_cont') end; fun present_trailer ((_, tags), active_tag, _, latex, present_cont) = if not (null tags) then err_bad_nesting " at end of theory" else latex |> end_tag (active_tag, NONE) |> close_delim (fst present_cont) (active_tag, NONE) |> snd present_cont; end; (* present_thy *) local val markup_true = "\\isamarkuptrue%\n"; val markup_false = "\\isamarkupfalse%\n"; val opt_newline = Scan.option (Scan.one Token.is_newline); val ignore = Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore >> pair (d + 1)) || Scan.depend (fn d => Scan.one Token.is_end_ignore --| (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) >> pair (d - 1)); val locale = Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |-- Parse.!!! (Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")"))); in type segment = {span: Command_Span.span, command: Toplevel.transition, prev_state: Toplevel.state, state: Toplevel.state}; fun present_thy options thy (segments: segment list) = let val keywords = Thy_Header.get_keywords thy; (* tokens *) val ignored = Scan.state --| ignore >> (fn d => (NONE, (Ignore, ("", d)))); fun markup pred mk flag = Scan.peek (fn d => Document_Source.improper |-- Parse.position (Scan.one (fn tok => Token.is_command tok andalso pred keywords (Token.content_of tok))) -- (Document_Source.annotation |-- Parse.!!!! 
((Document_Source.improper -- locale -- Document_Source.improper) |-- Parse.document_source --| Document_Source.improper_end)) >> (fn ((tok, pos'), source) => let val name = Token.content_of tok in (SOME (name, pos'), (mk (name, source), (flag, d))) end)); val command = Scan.peek (fn d => Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] -- Scan.one Token.is_command --| Document_Source.annotation >> (fn (cmd_mod, cmd) => map (fn tok => (NONE, (Token tok, ("", d)))) cmd_mod @ [(SOME (Token.content_of cmd, Token.pos_of cmd), (Token cmd, (markup_false, d)))])); val cmt = Scan.peek (fn d => Scan.one Document_Source.is_black_comment >> (fn tok => (NONE, (Token tok, ("", d))))); val other = Scan.peek (fn d => Parse.not_eof >> (fn tok => (NONE, (Token tok, ("", d))))); val tokens = (ignored || markup Keyword.is_document_heading Heading markup_true || markup Keyword.is_document_body Body markup_true || markup Keyword.is_document_raw (Raw o #2) "") >> single || command || (cmt || other) >> single; (* spans *) val is_eof = fn (_, (Token x, _)) => Token.is_eof x | _ => false; val stopper = Scan.stopper (K (NONE, (Token Token.eof, ("", 0)))) is_eof; val cmd = Scan.one (is_some o fst); val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; val white_comments = Scan.many (white_comment_token o fst o snd); val blank = Scan.one (blank_token o fst o snd); val newline = Scan.one (newline_token o fst o snd); val before_cmd = Scan.option (newline -- white_comments) -- Scan.option (newline -- white_comments) -- Scan.option (blank -- white_comments) -- cmd; val span = Scan.repeat non_cmd -- cmd -- Scan.repeat (Scan.unless before_cmd non_cmd) -- Scan.option (newline >> (single o snd)) >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); val spans = segments |> maps (Command_Span.content o #span) |> drop_suffix Token.is_space |> Source.of_list |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat)) |> Source.source stopper (Scan.error (Scan.bulk span)) |> Source.exhaust; val command_results = segments |> map_filter (fn {command, state, ...} => if Toplevel.is_ignored command then NONE else SOME (command, state)); (* present commands *) val command_tag = make_command_tag options keywords; fun present_command tr span st st' = Toplevel.setmp_thread_position tr (present_span command_tag span st st'); fun present _ [] = I | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest; in if length command_results = length spans then (([], []), NONE, true, [], (I, I)) |> present (Toplevel.init_toplevel ()) (spans ~~ command_results) |> present_trailer |> rev else error "Messed-up outer syntax for presentation" end; end; (** standard output operations **) (* pretty printing *) fun pretty_term ctxt t = Syntax.pretty_term (Proof_Context.augment t ctxt) t; fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; (* default output *) fun isabelle ctxt body = if Config.get ctxt Document_Antiquotation.thy_output_display then Latex.environment_text "isabelle" body - else Latex.enclose_text "\\isa{" "}" body; + else XML.enclose "\\isa{" "}" body; fun isabelle_typewriter ctxt body = if Config.get ctxt Document_Antiquotation.thy_output_display then Latex.environment_text "isabellett" body - else Latex.enclose_text "\\isatt{" "}" body; + else XML.enclose "\\isatt{" "}" body; fun typewriter ctxt s = isabelle_typewriter ctxt (Latex.string (Latex.output_ascii s)); fun verbatim 
ctxt = if Config.get ctxt Document_Antiquotation.thy_output_display then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt else split_lines #> map (typewriter ctxt #> Latex.block) #> separate (XML.Text "\\isanewline%\n"); fun token_source ctxt {embedded} tok = if Token.is_kind Token.Cartouche tok andalso embedded andalso not (Config.get ctxt Document_Antiquotation.thy_output_source_cartouche) then Token.content_of tok else Token.unparse tok; fun is_source ctxt = Config.get ctxt Document_Antiquotation.thy_output_source orelse Config.get ctxt Document_Antiquotation.thy_output_source_cartouche; fun source ctxt embedded = Token.args_of_src #> map (token_source ctxt embedded #> Document_Antiquotation.prepare_lines ctxt) #> space_implode " " #> output_source ctxt #> isabelle ctxt; fun pretty ctxt = Document_Antiquotation.output ctxt #> Latex.string #> isabelle ctxt; fun pretty_source ctxt embedded src prt = if is_source ctxt then source ctxt embedded src else pretty ctxt prt; fun pretty_items ctxt = map (Document_Antiquotation.output ctxt #> XML.Text) #> separate (XML.Text "\\isasep\\isanewline%\n") #> isabelle ctxt; fun pretty_items_source ctxt embedded src prts = if is_source ctxt then source ctxt embedded src else pretty_items ctxt prts; (* antiquotation variants *) local fun gen_setup name embedded = if embedded then Document_Antiquotation.setup_embedded name else Document_Antiquotation.setup name; fun gen_antiquotation_pretty name embedded scan f = gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x)); fun gen_antiquotation_pretty_source name embedded scan f = gen_setup name embedded scan (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt {embedded = embedded} src (f ctxt x)); fun gen_antiquotation_raw name embedded scan f = gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => f ctxt x); fun gen_antiquotation_verbatim name embedded scan f = gen_antiquotation_raw name embedded scan (fn ctxt => verbatim ctxt o f ctxt); in fun antiquotation_pretty name = gen_antiquotation_pretty name false; fun antiquotation_pretty_embedded name = gen_antiquotation_pretty name true; fun antiquotation_pretty_source name = gen_antiquotation_pretty_source name false; fun antiquotation_pretty_source_embedded name = gen_antiquotation_pretty_source name true; fun antiquotation_raw name = gen_antiquotation_raw name false; fun antiquotation_raw_embedded name = gen_antiquotation_raw name true; fun antiquotation_verbatim name = gen_antiquotation_verbatim name false; fun antiquotation_verbatim_embedded name = gen_antiquotation_verbatim name true; end; end; diff --git a/src/Pure/Thy/latex.scala b/src/Pure/Thy/latex.scala --- a/src/Pure/Thy/latex.scala +++ b/src/Pure/Thy/latex.scala @@ -1,294 +1,294 @@ /* Title: Pure/Thy/latex.scala Author: Makarius Support for LaTeX. 
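The document_tags option parsed by read_tag further up accepts a comma-separated list of entries, each either "%tag" (a default tag applied to all commands) or "command%tag" (a per-command override); a bare entry without "%" is rejected. A small Scala sketch of that parsing, assuming plain String.split as a stand-in for space_explode and Either for the ML pair of options:

  // Sketch of the document_tags parsing done by read_tag above (simplified types).
  object DocumentTagsSketch {
    // "%tag" -> Left(default tag), "command%tag" -> Right(command -> tag)
    def readTag(s: String): Either[String, (String, String)] =
      s.split("%", -1).toList match {
        case "" :: tag :: Nil  => Left(tag)
        case cmd :: tag :: Nil => Right(cmd -> tag)
        case _ => sys.error("Bad document_tags specification: " + s)
      }

    def main(args: Array[String]): Unit = {
      val spec = "%theory,lemma%visible"
      println(spec.split(",").toList.map(readTag))
      // List(Left(theory), Right((lemma,visible)))
    }
  }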
*/ package isabelle import java.io.{File => JFile} import scala.annotation.tailrec import scala.collection.mutable import scala.util.matching.Regex object Latex { /* index entries */ def index_escape(str: String): String = { val special1 = "!\"@|" val special2 = "\\{}#" if (str.exists(c => special1.contains(c) || special2.contains(c))) { val res = new StringBuilder for (c <- str) { if (special1.contains(c)) { res ++= "\\char" res ++= Value.Int(c) } else { if (special2.contains(c)) { res += '"'} res += c } } res.toString } else str } object Index_Item { sealed case class Value(text: Text, like: String) def unapply(tree: XML.Tree): Option[Value] = tree match { case XML.Wrapped_Elem(Markup.Latex_Index_Item(_), text, like) => Some(Value(text, XML.content(like))) case _ => None } } object Index_Entry { sealed case class Value(items: List[Index_Item.Value], kind: String) def unapply(tree: XML.Tree): Option[Value] = tree match { case XML.Elem(Markup.Latex_Index_Entry(kind), body) => val items = body.map(Index_Item.unapply) if (items.forall(_.isDefined)) Some(Value(items.map(_.get), kind)) else None case _ => None } } /* output text and positions */ type Text = XML.Body def position(a: String, b: String): String = "%:%" + a + "=" + b + "%:%\n" def init_position(file_pos: String): List[String] = if (file_pos.isEmpty) Nil else List("\\endinput\n", position(Markup.FILE, file_pos)) class Output { def latex_output(latex_text: Text): String = apply(latex_text) - def latex_macro(name: String, arg: Text): Text = - XML.string("\\" + name + "{") ::: arg ::: XML.string("}") + def latex_macro(name: String, body: Text): Text = + XML.enclose("\\" + name + "{", "}", body) def index_item(item: Index_Item.Value): String = { val like = if (item.like.isEmpty) "" else index_escape(item.like) + "@" val text = index_escape(latex_output(item.text)) like + text } def index_entry(entry: Index_Entry.Value): Text = { val items = entry.items.map(index_item).mkString("!") val kind = if (entry.kind.isEmpty) "" else "|" + index_escape(entry.kind) latex_macro("index", XML.string(items + kind)) } /* standard output of text with per-line positions */ def apply(latex_text: Text, file_pos: String = ""): String = { var line = 1 val result = new mutable.ListBuffer[String] val positions = new mutable.ListBuffer[String] ++= init_position(file_pos) def traverse(body: XML.Body): Unit = { body.foreach { case XML.Text(s) => line += s.count(_ == '\n') result += s case XML.Elem(Markup.Document_Latex(props), body) => for { l <- Position.Line.unapply(props) if positions.nonEmpty } { val s = position(Value.Int(line), Value.Int(l)) if (positions.last != s) positions += s } traverse(body) case XML.Elem(Markup.Latex_Output(_), body) => traverse(XML.string(latex_output(body))) case Index_Entry(entry) => traverse(index_entry(entry)) case _: XML.Elem => } } traverse(latex_text) result ++= positions result.mkString } } /* generated .tex file */ private val File_Pattern = """^%:%file=(.+)%:%$""".r private val Line_Pattern = """^*%:%(\d+)=(\d+)%:%$""".r def read_tex_file(tex_file: Path): Tex_File = { val positions = Line.logical_lines(File.read(tex_file)).reverse. 
takeWhile(_ != "\\endinput").reverse val source_file = positions match { case File_Pattern(file) :: _ => Some(file) case _ => None } val source_lines = if (source_file.isEmpty) Nil else positions.flatMap(line => line match { case Line_Pattern(Value.Int(line), Value.Int(source_line)) => Some(line -> source_line) case _ => None }) new Tex_File(tex_file, source_file, source_lines) } final class Tex_File private[Latex]( tex_file: Path, source_file: Option[String], source_lines: List[(Int, Int)]) { override def toString: String = tex_file.toString def source_position(l: Int): Option[Position.T] = source_file match { case None => None case Some(file) => val source_line = source_lines.iterator.takeWhile({ case (m, _) => m <= l }). foldLeft(0) { case (_, (_, n)) => n } if (source_line == 0) None else Some(Position.Line_File(source_line, file)) } def position(line: Int): Position.T = source_position(line) getOrElse Position.Line_File(line, tex_file.implode) } /* latex log */ def latex_errors(dir: Path, root_name: String): List[String] = { val root_log_path = dir + Path.explode(root_name).ext("log") if (root_log_path.is_file) { for { (msg, pos) <- filter_errors(dir, File.read(root_log_path)) } yield "Latex error" + Position.here(pos) + ":\n" + Library.indent_lines(2, msg) } else Nil } def filter_errors(dir: Path, root_log: String): List[(String, Position.T)] = { val seen_files = Synchronized(Map.empty[JFile, Tex_File]) def check_tex_file(path: Path): Option[Tex_File] = seen_files.change_result(seen => seen.get(path.file) match { case None => if (path.is_file) { val tex_file = read_tex_file(path) (Some(tex_file), seen + (path.file -> tex_file)) } else (None, seen) case some => (some, seen) }) def tex_file_position(path: Path, line: Int): Position.T = check_tex_file(path) match { case Some(tex_file) => tex_file.position(line) case None => Position.Line_File(line, path.implode) } object File_Line_Error { val Pattern: Regex = """^(.*?\.\w\w\w):(\d+): (.*)$""".r def unapply(line: String): Option[(Path, Int, String)] = line match { case Pattern(file, Value.Int(line), message) => val path = File.standard_path(file) if (Path.is_wellformed(path)) { val file = (dir + Path.explode(path)).canonical val msg = Library.perhaps_unprefix("LaTeX Error: ", message) if (file.is_file) Some((file, line, msg)) else None } else None case _ => None } } val Line_Error = """^l\.\d+ (.*)$""".r val More_Error = List( "", "
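The .tex position tracking in latex.scala works in two halves: Output.apply appends records of the form "%:%tex_line=source_line%:%" after \endinput, and read_tex_file/Tex_File.source_position map a line reported in the LaTeX log back to the last record whose tex line does not exceed it. A minimal Scala sketch of that lookup, with simplified parsing and types:

  // Sketch of mapping a generated .tex line back to a source line, using the
  // "%:%tex_line=source_line%:%" records written by Output.apply above.
  object TexPositionSketch {
    private val Record = """%:%(\d+)=(\d+)%:%""".r

    def parseRecords(lines: List[String]): List[(Int, Int)] =
      lines.collect { case Record(texLine, srcLine) => texLine.toInt -> srcLine.toInt }

    // last record whose tex line does not exceed the reported line (cf. source_position)
    def sourceLine(records: List[(Int, Int)], texLine: Int): Option[Int] =
      records.takeWhile(_._1 <= texLine).lastOption.map(_._2)

    def main(args: Array[String]): Unit = {
      val records = parseRecords(List("%:%1=10%:%", "%:%5=42%:%", "%:%9=57%:%"))
      println(sourceLine(records, 7))  // Some(42): tex line 7 maps back to source line 42
    }
  }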