diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy @@ -1,1410 +1,1410 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Annotation Language: Parsing Combinator\ theory C_Lexer_Annotation imports C_Lexer_Language begin ML \ \\<^file>\~~/src/Pure/Isar/keyword.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/keyword.ML Author: Makarius Isar keyword classification. 
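A minimal usage sketch of the keyword tables defined just below (the keyword name "INVARIANT" is only a hypothetical example; the specification format ((name, pos), ((kind, files), tags)) mirrors the add_keywords calls made further down in C_Thy_Header and C_Annotation):

  val example_keywords =
    C_Keyword.empty_keywords
    |> C_Keyword.add_keywords
         [(("INVARIANT", Position.none), ((Keyword.thy_decl, []), []))];
  (* C_Keyword.is_command example_keywords "INVARIANT" now yields true, and
     "INVARIANT" is recognized by the major-keyword lexicon of the annotation scanner *)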
*) \ structure C_Keyword = struct (** keyword classification **) (* kinds *) val command_kinds = [Keyword.diag, Keyword.document_heading, Keyword.document_body, Keyword.document_raw, Keyword.thy_begin, Keyword.thy_end, Keyword.thy_load, Keyword.thy_decl, Keyword.thy_decl_block, Keyword.thy_defn, Keyword.thy_stmt, Keyword.thy_goal, Keyword.thy_goal_defn, Keyword.thy_goal_stmt, Keyword.qed, Keyword.qed_script, Keyword.qed_block, Keyword.qed_global, Keyword.prf_goal, Keyword.prf_block, Keyword.next_block, Keyword.prf_open, Keyword.prf_close, Keyword.prf_chain, Keyword.prf_decl, Keyword.prf_asm, Keyword.prf_asm_goal, Keyword.prf_script, Keyword.prf_script_goal, Keyword.prf_script_asm_goal]; (* specifications *) type entry = {pos: Position.T, id: serial, kind: string, files: string list, (*extensions of embedded files*) tags: string list}; fun check_spec pos ((kind, files), tags) : entry = if not (member (op =) command_kinds kind) then error ("Unknown annotation syntax keyword kind " ^ quote kind) else if not (null files) andalso kind <> Keyword.thy_load then error ("Illegal specification of files for " ^ quote kind) else {pos = pos, id = serial (), kind = kind, files = files, tags = tags}; (** keyword tables **) (* type keywords *) datatype keywords = Keywords of {minor: Scan.lexicon, major: Scan.lexicon, commands: entry Symtab.table}; fun minor_keywords (Keywords {minor, ...}) = minor; fun major_keywords (Keywords {major, ...}) = major; fun make_keywords (minor, major, commands) = Keywords {minor = minor, major = major, commands = commands}; fun map_keywords f (Keywords {minor, major, commands}) = make_keywords (f (minor, major, commands)); (* build keywords *) val empty_keywords = make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty); fun empty_keywords' minor = make_keywords (minor, Scan.empty_lexicon, Symtab.empty); fun merge_keywords (Keywords {minor = minor1, major = major1, commands = commands1}, Keywords {minor = minor2, major = major2, commands = commands2}) = make_keywords (Scan.merge_lexicons (minor1, minor2), Scan.merge_lexicons (major1, major2), Symtab.merge (K true) (commands1, commands2)); val add_keywords0 = fold (fn ((name, pos), force_minor, spec as ((kind, _), _)) => map_keywords (fn (minor, major, commands) => let val extend = Scan.extend_lexicon (Symbol.explode name) fun update spec = Symtab.update (name, spec) in if force_minor then (extend minor, major, update (check_spec pos spec) commands) else if kind = "" orelse kind = Keyword.before_command orelse kind = Keyword.quasi_command then (extend minor, major, commands) else (minor, extend major, update (check_spec pos spec) commands) end)); val add_keywords = add_keywords0 o map (fn (cmd, spec) => (cmd, false, spec)) val add_keywords_minor = add_keywords0 o map (fn (cmd, spec) => (cmd, true, spec)) (* keyword status *) fun is_command (Keywords {commands, ...}) = Symtab.defined commands; fun dest_commands (Keywords {commands, ...}) = Symtab.keys commands; (* command keywords *) fun lookup_command (Keywords {commands, ...}) = Symtab.lookup commands; fun command_markup keywords name = lookup_command keywords name |> Option.map (fn {pos, id, ...} => - Position.make_entity_markup false id Markup.command_keywordN (name, pos)); + Position.make_entity_markup {def = false} id Markup.command_keywordN (name, pos)); fun command_files keywords name path = (case lookup_command keywords name of NONE => [] | SOME {kind, files, ...} => if kind <> Keyword.thy_load then [] else if null files then [path] else map (fn ext => 
Path.ext ext path) files); (* command categories *) fun command_category ks = let val tab = Symtab.make_set ks; fun pred keywords name = (case lookup_command keywords name of NONE => false | SOME {kind, ...} => Symtab.defined tab kind); in pred end; val is_theory_end = command_category [Keyword.thy_end]; val is_proof_asm = command_category [Keyword.prf_asm, Keyword.prf_asm_goal]; val is_improper = command_category [ Keyword.qed_script , Keyword.prf_script , Keyword.prf_script_goal , Keyword.prf_script_asm_goal]; end; \ text \ Notes: \<^item> The next structure contains a duplicated copy of the type \<^ML_type>\Token.T\, since it is not possible to set an arbitrary \<^emph>\slot\ value in \<^ML_structure>\Token\. \<^item> Parsing priorities in C and HOL slightly differ, see for instance \<^ML>\Token.explode\. \ ML \ \\<^file>\~~/src/Pure/Isar/token.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/token.ML Author: Markus Wenzel, TU Muenchen Outer token syntax for Isabelle/Isar. *) \ structure C_Token = struct (** tokens **) (* token kind *) val immediate_kinds' = fn Token.Command => 0 | Token.Keyword => 1 | Token.Ident => 2 | Token.Long_Ident => 3 | Token.Sym_Ident => 4 | Token.Var => 5 | Token.Type_Ident => 6 | Token.Type_Var => 7 | Token.Nat => 8 | Token.Float => 9 | Token.Space => 10 | _ => ~1 val delimited_kind = (fn Token.String => true | Token.Alt_String => true | Token.Verbatim => true | Token.Cartouche => true | Token.Comment _ => true | _ => false); (* datatype token *) (*The value slot assigns an (optional) internal value to a token, usually as a side-effect of special scanner setup (see also args.ML). Note that an assignable ref designates an intermediate state of internalization -- it is NOT meant to persist.*) datatype T = Token of (Symbol_Pos.text * Position.range) * (Token.kind * string) * slot and slot = Slot | Value of value option | Assignable of value option Unsynchronized.ref and value = Source of T list | Literal of bool * Markup.T | Name of Token.name_value * morphism | Typ of typ | Term of term | Fact of string option * thm list | (*optional name for dynamic fact, i.e. 
fact "variable"*) Attribute of morphism -> attribute | Declaration of declaration | Files of Token.file Exn.result list; type src = T list; (* position *) fun pos_of (Token ((_, (pos, _)), _, _)) = pos; fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos; fun adjust_offsets adjust (Token ((x, range), y, z)) = Token ((x, apply2 (Position.adjust_offsets adjust) range), y, z); (* stopper *) fun mk_eof pos = Token (("", (pos, Position.none)), (Token.EOF, ""), Slot); val eof = mk_eof Position.none; fun is_eof (Token (_, (Token.EOF, _), _)) = true | is_eof _ = false; val not_eof = not o is_eof; val stopper = Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; (* kind of token *) fun kind_of (Token (_, (k, _), _)) = k; fun is_kind k (Token (_, (k', _), _)) = k = k'; val is_command = is_kind Token.Command; fun keyword_with pred (Token (_, (Token.Keyword, x), _)) = pred x | keyword_with _ _ = false; val is_command_modifier = keyword_with (fn x => x = "private" orelse x = "qualified"); fun ident_with pred (Token (_, (Token.Ident, x), _)) = pred x | ident_with _ _ = false; fun is_ignored (Token (_, (Token.Space, _), _)) = true | is_ignored (Token (_, (Token.Comment NONE, _), _)) = true | is_ignored _ = false; fun is_proper (Token (_, (Token.Space, _), _)) = false | is_proper (Token (_, (Token.Comment _, _), _)) = false | is_proper _ = true; fun is_comment (Token (_, (Token.Comment _, _), _)) = true | is_comment _ = false; fun is_informal_comment (Token (_, (Token.Comment NONE, _), _)) = true | is_informal_comment _ = false; fun is_formal_comment (Token (_, (Token.Comment (SOME _), _), _)) = true | is_formal_comment _ = false; fun is_document_marker (Token (_, (Token.Comment (SOME Comment.Marker), _), _)) = true | is_document_marker _ = false; fun is_begin_ignore (Token (_, (Token.Comment NONE, "<"), _)) = true | is_begin_ignore _ = false; fun is_end_ignore (Token (_, (Token.Comment NONE, ">"), _)) = true | is_end_ignore _ = false; fun is_error (Token (_, (Token.Error _, _), _)) = true | is_error _ = false; fun is_error' (Token (_, (Token.Error msg, _), _)) = SOME msg | is_error' _ = NONE; fun content_of (Token (_, (_, x), _)) = x; fun content_of' (Token (_, (_, _), Value (SOME (Source l)))) = map (fn Token ((_, (pos, _)), (_, x), _) => (x, pos)) l | content_of' _ = []; val is_stack1 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "+") l | _ => false; val is_stack2 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "@") l | _ => false; val is_stack3 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "&") l | _ => false; (* blanks and newlines -- space tokens obey lines *) fun is_space (Token (_, (Space, _), _)) = true | is_space _ = false; fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x) | is_blank _ = false; fun is_newline (Token (_, (Space, x), _)) = String.isSuffix "\n" x | is_newline _ = false; (* range of tokens *) fun range_of (toks as tok :: _) = let val pos' = end_pos_of (List.last toks) in Position.range (pos_of tok, pos') end | range_of [] = Position.no_range; val core_range_of = drop_prefix is_ignored #> drop_suffix is_ignored #> range_of; (* token content *) fun content_of (Token (_, (_, x), _)) = x; fun source_of (Token ((source, _), _, _)) = source; fun input_of (Token ((source, range), (kind, _), _)) = Input.source (delimited_kind kind) source range; fun inner_syntax_of tok = let val x = 
content_of tok in if YXML.detect x then x else Syntax.implode_input (input_of tok) end; (* markup reports *) local val token_kind_markup = fn Token.Var => (Markup.var, "") | Token.Type_Ident => (Markup.tfree, "") | Token.Type_Var => (Markup.tvar, "") | Token.String => (Markup.string, "") | Token.Alt_String => (Markup.alt_string, "") | Token.Verbatim => (Markup.verbatim, "") | Token.Cartouche => (Markup.cartouche, "") | Token.Comment _ => (Markup.ML_comment, "") | Token.Error msg => (Markup.bad (), msg) | _ => (Markup.empty, ""); fun keyword_reports tok = map (fn markup => ((pos_of tok, markup), "")); fun command_markups keywords x = if C_Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties] else (if C_Keyword.is_proof_asm keywords x then [Markup.keyword3] else if C_Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else [Markup.keyword1]) |> map Markup.command_properties; fun keyword_markup (important, keyword) x = if important orelse Symbol.is_ascii_identifier x then keyword else Markup.delimiter; fun command_minor_markups keywords x = if C_Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties] else (if C_Keyword.is_proof_asm keywords x then [Markup.keyword3] else if C_Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else if C_Keyword.is_command keywords x then [Markup.keyword1] else [keyword_markup (false, Markup.keyword2 |> Markup.keyword_properties) x]); in fun completion_report tok = if is_kind Token.Keyword tok then map (fn m => ((pos_of tok, m), "")) (Completion.suppress_abbrevs (content_of tok)) else []; fun reports keywords tok = if is_command tok then keyword_reports tok (command_markups keywords (content_of tok)) else if is_stack1 tok orelse is_stack2 tok orelse is_stack3 tok then keyword_reports tok [Markup.keyword2 |> Markup.keyword_properties] else if is_kind Token.Keyword tok then keyword_reports tok (command_minor_markups keywords (content_of tok)) else let val pos = pos_of tok; val (m, text) = token_kind_markup (kind_of tok); val deleted = Symbol_Pos.explode_deleted (source_of tok, pos); in ((pos, m), text) :: map (fn p => ((p, Markup.delete), "")) deleted end; fun markups keywords = map (#2 o #1) o reports keywords; end; (* unparse *) fun unparse' (Token ((source0, _), (kind, x), _)) = let val source = \ \ We are computing a reverse function of \<^ML>\Symbol_Pos.implode_range\ taking into account consecutive \<^ML>\Symbol.DEL\ symbols potentially appearing at the beginning, or at the end of the string. As remark, \<^ML>\Symbol_Pos.explode\ will remove any potentially consecutive \<^ML>\Symbol.DEL\ symbols. 
This is why it is not used here.\ case Symbol.explode source0 of x :: xs => if x = Symbol.DEL then case rev xs of x' :: xs => if x' = Symbol.DEL then implode (rev xs) else source0 | _ => source0 else source0 | _ => source0 in case kind of Token.String => Symbol_Pos.quote_string_qq source | Token.Alt_String => Symbol_Pos.quote_string_bq source | Token.Verbatim => enclose "{*" "*}" source | Token.Cartouche => cartouche source | Token.Comment NONE => enclose "(*" "*)" source | Token.EOF => "" | _ => x end; fun text_of tok = let val k = Token.str_of_kind (kind_of tok); val ms = markups C_Keyword.empty_keywords tok; val s = unparse' tok; in if s = "" then (k, "") else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) then (k ^ " " ^ Markup.markups ms s, "") else (k, Markup.markups ms s) end; (** associated values **) (* inlined file content *) fun file_source (file: Token.file) = let val text = cat_lines (#lines file); val end_pos = Position.symbol_explode text (#pos file); in Input.source true text (Position.range (#pos file, end_pos)) end; fun get_files (Token (_, _, Value (SOME (Files files)))) = files | get_files _ = []; fun put_files [] tok = tok | put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files))) | put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok)); (* access values *) (* reports of value *) (* name value *) (* maxidx *) (* fact values *) (* transform *) (* static binding *) (*1st stage: initialize assignable slots*) fun init_assignable tok = (case tok of Token (x, y, Slot) => Token (x, y, Assignable (Unsynchronized.ref NONE)) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := NONE; tok)); (*2nd stage: assign values as side-effect of scanning*) fun assign v tok = (case tok of Token (x, y, Slot) => Token (x, y, Value v) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := v; tok)); fun evaluate mk eval arg = let val x = eval arg in (assign (SOME (mk x)) arg; x) end; (*3rd stage: static closure of final values*) fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v) | closure tok = tok; (* pretty *) (* src *) (** scanners **) open Basic_Symbol_Pos; val err_prefix = "Annotation lexical error: "; fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); (* scan stack *) fun scan_stack is_stack = Scan.optional (Scan.one is_stack >> content_of') [] (* scan symbolic idents *) val scan_symid = Scan.many1 (Symbol.is_symbolic_char o Symbol_Pos.symbol) || Scan.one (Symbol.is_symbolic o Symbol_Pos.symbol) >> single; fun is_symid str = (case try Symbol.explode str of SOME [s] => Symbol.is_symbolic s orelse Symbol.is_symbolic_char s | SOME ss => forall Symbol.is_symbolic_char ss | _ => false); fun ident_or_symbolic "begin" = false | ident_or_symbolic ":" = true | ident_or_symbolic "::" = true | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s; (* scan verbatim text *) val scan_verb = $$$ "*" --| Scan.ahead (~$$ "}") || Scan.one (fn (s, _) => s <> "*" andalso Symbol.not_eof s) >> single; val scan_verbatim = Scan.ahead ($$ "{" -- $$ "*") |-- !!! 
"unclosed verbatim text" ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") -- (Scan.repeats scan_verb -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos))); val recover_verbatim = $$$ "{" @@@ $$$ "*" @@@ Scan.repeats scan_verb; (* scan cartouche *) val scan_cartouche = Symbol_Pos.scan_pos -- ((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos); (* scan space *) fun space_symbol (s, _) = Symbol.is_blank s andalso s <> "\n"; val scan_space = Scan.many1 space_symbol @@@ Scan.optional ($$$ "\n") [] || Scan.many space_symbol @@@ $$$ "\n"; (* scan comment *) val scan_comment = Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos); (** token sources **) local fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2; fun token k ss = Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.content ss), Slot); fun token' (mk_value, k) ss = if mk_value then Token ( (Symbol_Pos.implode ss, Symbol_Pos.range ss) , (k, Symbol_Pos.content ss) , Value (SOME (Source (map (fn (s, pos) => Token (("", (pos, Position.none)), (k, s), Slot)) ss)))) else token k ss; fun token_t k = token' (true, k) fun token_range k (pos1, (ss, pos2)) = Token (Symbol_Pos.implode_range (pos1, pos2) ss, (k, Symbol_Pos.content ss), Slot); fun scan_token keywords = !!! "bad input" (Symbol_Pos.scan_string_qq err_prefix >> token_range Token.String || Symbol_Pos.scan_string_bq err_prefix >> token_range Token.Alt_String || scan_verbatim >> token_range Token.Verbatim || scan_cartouche >> token_range Token.Cartouche || scan_comment >> token_range (Token.Comment NONE) || Comment.scan_outer >> (fn (k, ss) => token (Token.Comment (SOME k)) ss) || scan_space >> token Token.Space || Scan.repeats1 ($$$ "+") >> token_t Token.Sym_Ident || Scan.repeats1 ($$$ "@") >> token_t Token.Sym_Ident || Scan.repeats1 ($$$ "&") >> token_t Token.Sym_Ident || (Scan.max token_leq (Scan.max token_leq (Scan.literal (C_Keyword.major_keywords keywords) >> pair Token.Command) (Scan.literal (C_Keyword.minor_keywords keywords) >> pair Token.Keyword)) (Lexicon.scan_longid >> pair Token.Long_Ident || Scan.max token_leq (C_Lex.scan_ident >> pair Token.Ident) (Lexicon.scan_id >> pair Token.Ident) || Lexicon.scan_var >> pair Token.Var || Lexicon.scan_tid >> pair Token.Type_Ident || Lexicon.scan_tvar >> pair Token.Type_Var || Symbol_Pos.scan_float >> pair Token.Float || Symbol_Pos.scan_nat >> pair Token.Nat || scan_symid >> pair Token.Sym_Ident)) >> uncurry (token' o pair false)); fun recover msg = (Symbol_Pos.recover_string_qq || Symbol_Pos.recover_string_bq || recover_verbatim || Symbol_Pos.recover_cartouche || Symbol_Pos.recover_comment || Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single) >> (single o token (Token.Error msg)); in fun make_source keywords {strict} = let val scan_strict = Scan.bulk (scan_token keywords); val scan = if strict then scan_strict else Scan.recover scan_strict recover; in Source.source Symbol_Pos.stopper scan end; end; (* explode *) fun tokenize keywords strict syms = Source.of_list syms |> make_source keywords strict |> Source.exhaust; fun explode keywords pos text = Symbol_Pos.explode (text, pos) |> tokenize keywords {strict = false}; fun explode0 keywords = explode keywords Position.none; (* print name in parsable form *) (* make *) (** parsers **) type 'a parser = T list -> 'a * T list; type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list); (* read body -- e.g. 
antiquotation source *) fun read_with_commands'0 keywords syms = Source.of_list syms |> make_source keywords {strict = false} |> Source.filter (not o is_proper) |> Source.exhaust fun read_with_commands' keywords scan syms = Source.of_list syms |> make_source keywords {strict = false} |> Source.filter is_proper |> Source.source stopper (Scan.recover (Scan.bulk scan) (fn msg => Scan.one (not o is_eof) >> (fn tok => [C_Scan.Right let val msg = case is_error' tok of SOME msg0 => msg0 ^ " (" ^ msg ^ ")" | NONE => msg in ( msg , [((pos_of tok, Markup.bad ()), msg)] , tok) end]))) |> Source.exhaust; fun read_antiq' keywords scan = read_with_commands' keywords (scan >> C_Scan.Left); (* wrapped syntax *) local fun make src pos = Token.make src pos |> #1 fun make_default text pos = make ((~1, 0), text) pos fun explode keywords pos text = case Token.explode keywords pos text of [tok] => tok | _ => make_default text pos in fun syntax' f = I #> map (fn tok0 as Token ((source, (pos1, pos2)), (kind, x), _) => if is_stack1 tok0 orelse is_stack2 tok0 orelse is_stack3 tok0 then make_default source pos1 else if is_eof tok0 then Token.eof else if delimited_kind kind then explode Keyword.empty_keywords pos1 (unparse' tok0) else let val tok1 = explode ((case kind of Token.Keyword => Keyword.add_keywords [((x, Position.none), Keyword.no_spec)] | Token.Command => Keyword.add_keywords [( (x, Position.none), (Keyword.thy_decl, []))] | _ => I) Keyword.empty_keywords) pos1 source in if Token.kind_of tok1 = kind then tok1 else make ( ( immediate_kinds' kind , case Position.distance_of (pos1, pos2) of NONE => 0 | SOME i => i) , source) pos1 end) #> f #> apsnd (map (fn tok => Token ( (Token.source_of tok, Token.range_of [tok]) , (Token.kind_of tok, Token.content_of tok) , Slot))) end end; type 'a c_parser = 'a C_Token.parser; type 'a c_context_parser = 'a C_Token.context_parser; \ ML \ \\<^file>\~~/src/Pure/Isar/parse.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/parse.ML Author: Markus Wenzel, TU Muenchen Generic parsers for Isabelle/Isar outer syntax. *) \ signature C_PARSE = sig type T type src = T list type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) (**) val C_source: Input.source parser val star: string parser (**) val group: (unit -> string) -> (T list -> 'a) -> T list -> 'a val !!! : (T list -> 'a) -> T list -> 'a val !!!! 
: (T list -> 'a) -> T list -> 'a val not_eof: T parser val token: 'a parser -> T parser val range: 'a parser -> ('a * Position.range) parser val position: 'a parser -> ('a * Position.T) parser val input: 'a parser -> Input.source parser val inner_syntax: 'a parser -> string parser val command: string parser val keyword: string parser val short_ident: string parser val long_ident: string parser val sym_ident: string parser val dots: string parser val minus: string parser val term_var: string parser val type_ident: string parser val type_var: string parser val number: string parser val float_number: string parser val string: string parser val string_position: (string * Position.T) parser val alt_string: string parser val verbatim: string parser val cartouche: string parser val eof: string parser val command_name: string -> string parser val keyword_with: (string -> bool) -> string parser val keyword_markup: bool * Markup.T -> string -> string parser val keyword_improper: string -> string parser val $$$ : string -> string parser val reserved: string -> string parser val underscore: string parser val maybe: 'a parser -> 'a option parser val maybe_position: ('a * Position.T) parser -> ('a option * Position.T) parser val opt_keyword: string -> bool parser val opt_bang: bool parser val begin: string parser val opt_begin: bool parser val nat: int parser val int: int parser val real: real parser val enum_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum1_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum: string -> 'a parser -> 'a list parser val enum1: string -> 'a parser -> 'a list parser val and_list: 'a parser -> 'a list parser val and_list1: 'a parser -> 'a list parser val enum': string -> 'a context_parser -> 'a list context_parser val enum1': string -> 'a context_parser -> 'a list context_parser val and_list': 'a context_parser -> 'a list context_parser val and_list1': 'a context_parser -> 'a list context_parser val list: 'a parser -> 'a list parser val list1: 'a parser -> 'a list parser val name: string parser val name_range: (string * Position.range) parser val name_position: (string * Position.T) parser val binding: binding parser val embedded: string parser val embedded_input: Input.source parser val embedded_position: (string * Position.T) parser val text: string parser val path: string parser val path_binding: (string * Position.T) parser val session_name: (string * Position.T) parser val theory_name: (string * Position.T) parser val liberal_name: string parser val parname: string parser val parbinding: binding parser val class: string parser val sort: string parser val type_const: string parser val arity: (string * string list * string) parser val multi_arity: (string list * string list * string) parser val type_args: string list parser val type_args_constrained: (string * string option) list parser val typ: string parser val mixfix: mixfix parser val mixfix': mixfix parser val opt_mixfix: mixfix parser val opt_mixfix': mixfix parser val syntax_mode: Syntax.mode parser val where_: string parser val const_decl: (string * string * mixfix) parser val const_binding: (binding * string * mixfix) parser val params: (binding * string option * mixfix) list parser val vars: (binding * string option * mixfix) list parser val for_fixes: (binding * string option * mixfix) list parser val ML_source: Input.source parser val document_source: Input.source parser val document_marker: Input.source parser val const: string parser val term: string 
parser val prop: string parser val literal_fact: string parser val propp: (string * string list) parser val termp: (string * string list) parser val private: Position.T parser val qualified: Position.T parser val target: (string * Position.T) parser val opt_target: (string * Position.T) option parser val args: T list parser val args1: (string -> bool) -> T list parser val attribs: src list parser val opt_attribs: src list parser val thm_sel: Facts.interval list parser val thm: (Facts.ref * src list) parser val thms1: (Facts.ref * src list) list parser val options: ((string * Position.T) * (string * Position.T)) list parser end; structure C_Parse: C_PARSE = struct type T = C_Token.T type src = T list type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) structure Token = struct open Token open C_Token end (** error handling **) (* group atomic parsers (no cuts!) *) fun group s scan = scan || Scan.fail_with (fn [] => (fn () => s () ^ " expected,\nbut end-of-input was found") | tok :: _ => (fn () => (case Token.text_of tok of (txt, "") => s () ^ " expected,\nbut " ^ txt ^ Position.here (Token.pos_of tok) ^ " was found" | (txt1, txt2) => s () ^ " expected,\nbut " ^ txt1 ^ Position.here (Token.pos_of tok) ^ " was found:\n" ^ txt2))); (* cut *) fun cut kind scan = let fun get_pos [] = " (end-of-input)" | get_pos (tok :: _) = Position.here (Token.pos_of tok); fun err (toks, NONE) = (fn () => kind ^ get_pos toks) | err (toks, SOME msg) = (fn () => let val s = msg () in if String.isPrefix kind s then s else kind ^ get_pos toks ^ ": " ^ s end); in Scan.!! err scan end; fun !!! scan = cut "Annotation syntax error" scan; fun !!!! scan = cut "Corrupted annotation syntax in presentation" scan; (** basic parsers **) (* tokens *) fun RESET_VALUE atom = (*required for all primitive parsers*) Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (Token.assign NONE arg; x)); val not_eof = RESET_VALUE (Scan.one Token.not_eof); fun token atom = Scan.ahead not_eof --| atom; fun range scan = (Scan.ahead not_eof >> (Token.range_of o single)) -- scan >> Library.swap; fun position scan = (Scan.ahead not_eof >> Token.pos_of) -- scan >> Library.swap; fun input atom = Scan.ahead atom |-- not_eof >> Token.input_of; fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.inner_syntax_of; fun kind k = group (fn () => Token.str_of_kind k) (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of)); val command = kind Token.Command; val keyword = kind Token.Keyword; val short_ident = kind Token.Ident; val long_ident = kind Token.Long_Ident; val sym_ident = kind Token.Sym_Ident; val term_var = kind Token.Var; val type_ident = kind Token.Type_Ident; val type_var = kind Token.Type_Var; val number = kind Token.Nat; val float_number = kind Token.Float; val string = kind Token.String; val alt_string = kind Token.Alt_String; val verbatim = kind Token.Verbatim; val cartouche = kind Token.Cartouche; val eof = kind Token.EOF; fun command_name x = group (fn () => Token.str_of_kind Token.Command ^ " " ^ quote x) (RESET_VALUE (Scan.one (fn tok => Token.is_command tok andalso Token.content_of tok = x))) >> Token.content_of; fun keyword_with pred = RESET_VALUE (Scan.one (Token.keyword_with pred) >> Token.content_of); fun keyword_markup markup x = group (fn () => Token.str_of_kind Token.Keyword ^ " " ^ quote x) (Scan.ahead not_eof -- keyword_with (fn y => x = y)) >> (fn (tok, x) => (Token.assign (SOME (Token.Literal markup)) tok; x)); val keyword_improper = 
keyword_markup (true, Markup.improper); val $$$ = keyword_markup (false, Markup.quasi_keyword); fun reserved x = group (fn () => "reserved identifier " ^ quote x) (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of)); val dots = sym_ident :-- (fn "\" => Scan.succeed () | _ => Scan.fail) >> #1; val minus = sym_ident :-- (fn "-" => Scan.succeed () | _ => Scan.fail) >> #1; val underscore = sym_ident :-- (fn "_" => Scan.succeed () | _ => Scan.fail) >> #1; fun maybe scan = underscore >> K NONE || scan >> SOME; fun maybe_position scan = position (underscore >> K NONE) || scan >> apfst SOME; val nat = number >> (#1 o Library.read_int o Symbol.explode); val int = Scan.optional (minus >> K ~1) 1 -- nat >> op *; val real = float_number >> Value.parse_real || int >> Real.fromInt; fun opt_keyword s = Scan.optional ($$$ "(" |-- !!! (($$$ s >> K true) --| $$$ ")")) false; val opt_bang = Scan.optional ($$$ "!" >> K true) false; val begin = $$$ "begin"; val opt_begin = Scan.optional (begin >> K true) false; (* enumerations *) fun enum1_positions sep scan = scan -- Scan.repeat (position ($$$ sep) -- !!! scan) >> (fn (x, ys) => (x :: map #2 ys, map (#2 o #1) ys)); fun enum_positions sep scan = enum1_positions sep scan || Scan.succeed ([], []); fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! scan); fun enum sep scan = enum1 sep scan || Scan.succeed []; fun enum1' sep scan = scan ::: Scan.repeat (Scan.lift ($$$ sep) |-- scan); fun enum' sep scan = enum1' sep scan || Scan.succeed []; fun and_list1 scan = enum1 "and" scan; fun and_list scan = enum "and" scan; fun and_list1' scan = enum1' "and" scan; fun and_list' scan = enum' "and" scan; fun list1 scan = enum1 "," scan; fun list scan = enum "," scan; (* names and embedded content *) val name = group (fn () => "name") (short_ident || long_ident || sym_ident || number || string); val name_range = input name >> Input.source_content_range; val name_position = input name >> Input.source_content; val string_position = input string >> Input.source_content; val binding = name_position >> Binding.make; val embedded = group (fn () => "embedded content") (cartouche || string || short_ident || long_ident || sym_ident || term_var || type_ident || type_var || number); val embedded_input = input embedded; val embedded_position = embedded_input >> Input.source_content; val text = group (fn () => "text") (embedded || verbatim); val path = group (fn () => "file name/path specification") embedded; val path_binding = group (fn () => "path binding (strict file name)") (position embedded); val session_name = group (fn () => "session name") name_position; val theory_name = group (fn () => "theory name") name_position; val liberal_name = keyword_with Token.ident_or_symbolic || name; val parname = Scan.optional ($$$ "(" |-- name --| $$$ ")") ""; val parbinding = Scan.optional ($$$ "(" |-- binding --| $$$ ")") Binding.empty; (* type classes *) val class = group (fn () => "type class") (inner_syntax embedded); val sort = group (fn () => "sort") (inner_syntax embedded); val type_const = group (fn () => "type constructor") (inner_syntax embedded); val arity = type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; val multi_arity = and_list1 type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; (* types *) val typ = group (fn () => "type") (inner_syntax embedded); fun type_arguments arg = arg >> single || $$$ "(" |-- !!! 
(list1 arg --| $$$ ")") || Scan.succeed []; val type_args = type_arguments type_ident; val type_args_constrained = type_arguments (type_ident -- Scan.option ($$$ "::" |-- !!! sort)); (* mixfix annotations *) local val mfix = input (string || cartouche); val mixfix_ = mfix -- !!! (Scan.optional ($$$ "[" |-- !!! (list nat --| $$$ "]")) [] -- Scan.optional nat 1000) >> (fn (sy, (ps, p)) => fn range => Mixfix (sy, ps, p, range)); val structure_ = $$$ "structure" >> K Structure; val binder_ = $$$ "binder" |-- !!! (mfix -- ($$$ "[" |-- nat --| $$$ "]" -- nat || nat >> (fn n => (n, n)))) >> (fn (sy, (p, q)) => fn range => Binder (sy, p, q, range)); val infixl_ = $$$ "infixl" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixl (sy, p, range))); val infixr_ = $$$ "infixr" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixr (sy, p, range))); val infix_ = $$$ "infix" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infix (sy, p, range))); val mixfix_body = mixfix_ || structure_ || binder_ || infixl_ || infixr_ || infix_; fun annotation guard body = Scan.trace ($$$ "(" |-- guard (body --| $$$ ")")) >> (fn (mx, toks) => mx (Token.range_of toks)); fun opt_annotation guard body = Scan.optional (annotation guard body) NoSyn; in val mixfix = annotation !!! mixfix_body; val mixfix' = annotation I mixfix_body; val opt_mixfix = opt_annotation !!! mixfix_body; val opt_mixfix' = opt_annotation I mixfix_body; end; (* syntax mode *) val syntax_mode_spec = ($$$ "output" >> K ("", false)) || name -- Scan.optional ($$$ "output" >> K false) true; val syntax_mode = Scan.optional ($$$ "(" |-- !!! (syntax_mode_spec --| $$$ ")")) Syntax.mode_default; (* fixes *) val where_ = $$$ "where"; val const_decl = name -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val const_binding = binding -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val param_mixfix = binding -- Scan.option ($$$ "::" |-- typ) -- mixfix' >> (single o Scan.triple1); val params = (binding -- Scan.repeat binding) -- Scan.option ($$$ "::" |-- !!! (Scan.ahead typ -- embedded)) >> (fn ((x, ys), T) => (x, Option.map #1 T, NoSyn) :: map (fn y => (y, Option.map #2 T, NoSyn)) ys); val vars = and_list1 (param_mixfix || params) >> flat; val for_fixes = Scan.optional ($$$ "for" |-- !!! vars) []; (* embedded source text *) val ML_source = input (group (fn () => "ML source") text); val document_source = input (group (fn () => "document source") text); val document_marker = group (fn () => "document marker") (RESET_VALUE (Scan.one Token.is_document_marker >> Token.input_of)); (* terms *) val const = group (fn () => "constant") (inner_syntax embedded); val term = group (fn () => "term") (inner_syntax embedded); val prop = group (fn () => "proposition") (inner_syntax embedded); val literal_fact = inner_syntax (group (fn () => "literal fact") (alt_string || cartouche)); (* patterns *) val is_terms = Scan.repeat1 ($$$ "is" |-- term); val is_props = Scan.repeat1 ($$$ "is" |-- prop); val propp = prop -- Scan.optional ($$$ "(" |-- !!! (is_props --| $$$ ")")) []; val termp = term -- Scan.optional ($$$ "(" |-- !!! (is_terms --| $$$ ")")) []; (* target information *) val private = position ($$$ "private") >> #2; val qualified = position ($$$ "qualified") >> #2; val target = ($$$ "(" -- $$$ "in") |-- !!! 
(name_position --| $$$ ")"); val opt_target = Scan.option target; (* arguments within outer syntax *) local val argument_kinds = [Token.Ident, Token.Long_Ident, Token.Sym_Ident, Token.Var, Token.Type_Ident, Token.Type_Var, Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche, Token.Verbatim]; fun arguments is_symid = let fun argument blk = group (fn () => "argument") (Scan.one (fn tok => let val kind = Token.kind_of tok in member (op =) argument_kinds kind orelse Token.keyword_with is_symid tok orelse (blk andalso Token.keyword_with (fn s => s = ",") tok) end)); fun args blk x = Scan.optional (args1 blk) [] x and args1 blk x = (Scan.repeats1 (Scan.repeat1 (argument blk) || argsp "(" ")" || argsp "[" "]")) x and argsp l r x = (token ($$$ l) ::: !!! (args true @@@ (token ($$$ r) >> single))) x; in (args, args1) end; in val args = #1 (arguments Token.ident_or_symbolic) false; fun args1 is_symid = #2 (arguments is_symid) false; end; (* attributes *) val attrib = token liberal_name ::: !!! args; val attribs = $$$ "[" |-- list attrib --| $$$ "]"; val opt_attribs = Scan.optional attribs []; (* theorem references *) val thm_sel = $$$ "(" |-- list1 (nat --| minus -- nat >> Facts.FromTo || nat --| minus >> Facts.From || nat >> Facts.Single) --| $$$ ")"; val thm = $$$ "[" |-- attribs --| $$$ "]" >> pair (Facts.named "") || (literal_fact >> Facts.Fact || name_position -- Scan.option thm_sel >> Facts.Named) -- opt_attribs; val thms1 = Scan.repeat1 thm; (* options *) val option_name = group (fn () => "option name") name_position; val option_value = group (fn () => "option value") ((token real || token name) >> Token.content_of); val option = option_name :-- (fn (_, pos) => Scan.optional ($$$ "=" |-- !!! (position option_value)) ("true", pos)); val options = $$$ "[" |-- list1 option --| $$$ "]"; (** C basic parsers **) (* embedded source text *) val C_source = input (group (fn () => "C source") text); (* AutoCorres (MODIFIES) *) val star = sym_ident :-- (fn "*" => Scan.succeed () | _ => Scan.fail) >> #1; end; structure C_Parse_Native: C_PARSE = struct open Token open Parse (** C basic parsers **) (* embedded source text *) val C_source = input (group (fn () => "C source") text); (* AutoCorres (MODIFIES) *) val star = sym_ident :-- (fn "*" => Scan.succeed () | _ => Scan.fail) >> #1; end; \ ML \ \\<^file>\~~/src/Pure/Thy/thy_header.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Thy/thy_header.ML Author: Makarius Static theory header information. *) \ structure C_Thy_Header = struct val bootstrap_keywords = C_Keyword.empty_keywords' (Keyword.minor_keywords (Thy_Header.get_keywords @{theory})) (* theory data *) structure Data = Theory_Data ( type T = C_Keyword.keywords; val empty = bootstrap_keywords; val extend = I; val merge = C_Keyword.merge_keywords; ); val add_keywords = Data.map o C_Keyword.add_keywords; val add_keywords_minor = Data.map o C_Keyword.add_keywords_minor; val get_keywords = Data.get; val get_keywords' = get_keywords o Proof_Context.theory_of; end \ end diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy @@ -1,252 +1,252 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Annotation Language: Command Parser Registration\ theory C_Parser_Annotation imports C_Lexer_Annotation C_Environment begin ML \ \\<^file>\~~/src/Pure/Isar/outer_syntax.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/outer_syntax.ML Author: Markus Wenzel, TU Muenchen Isabelle/Isar outer syntax. 
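A hypothetical registration sketch for the annotation-command interface defined just below (the name "my_annotation" and the parser my_ann_parser are illustrative placeholders; my_ann_parser is assumed to have the argument type of the Parser constructor, i.e. (Symbol_Pos.T list * (bool * Symbol_Pos.T list)) * Position.range -> C_Env.eval_time c_parser):

  val _ =
    Theory.setup
      (C_Annotation.command' ("my_annotation", \<^here>)
        "an illustrative annotation command"
        my_ann_parser);
  (* command' registers the keyword via C_Thy_Header.add_keywords and stores the
     parser in the command table, so the annotation becomes available to parse_command *)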
*) \ structure C_Annotation = struct (** outer syntax **) (* errors *) fun err_command msg name ps = error (msg ^ quote (Markup.markup Markup.keyword1 name) ^ Position.here_list ps); fun err_dup_command name ps = err_command "Duplicate annotation syntax command " name ps; (* command parsers *) datatype command_parser = Parser of (Symbol_Pos.T list * (bool * Symbol_Pos.T list)) * Position.range -> C_Env.eval_time c_parser; datatype command = Command of {comment: string, command_parser: command_parser, pos: Position.T, id: serial}; fun eq_command (Command {id = id1, ...}, Command {id = id2, ...}) = id1 = id2; fun new_command comment command_parser pos = Command {comment = comment, command_parser = command_parser, pos = pos, id = serial ()}; fun command_pos (Command {pos, ...}) = pos; fun command_markup def (name, Command {pos, id, ...}) = Position.make_entity_markup def id Markup.commandN (name, pos); (* theory data *) structure Data = Theory_Data ( type T = command Symtab.table; val empty = Symtab.empty; val extend = I; fun merge data : T = data |> Symtab.join (fn name => fn (cmd1, cmd2) => if eq_command (cmd1, cmd2) then raise Symtab.SAME else err_dup_command name [command_pos cmd1, command_pos cmd2]); ); val get_commands = Data.get; val dest_commands = get_commands #> Symtab.dest #> sort_by #1; val lookup_commands = Symtab.lookup o get_commands; (* maintain commands *) fun add_command name cmd thy = let val _ = C_Keyword.is_command (C_Thy_Header.get_keywords thy) name orelse err_command "Undeclared outer syntax command " name [command_pos cmd]; val _ = (case lookup_commands thy name of NONE => () | SOME cmd' => err_dup_command name [command_pos cmd, command_pos cmd']); val _ = Context_Position.report_generic (Context.the_generic_context ()) - (command_pos cmd) (command_markup true (name, cmd)); + (command_pos cmd) (command_markup {def = true} (name, cmd)); in Data.map (Symtab.update (name, cmd)) thy end; fun delete_command (name, pos) thy = let val _ = C_Keyword.is_command (C_Thy_Header.get_keywords thy) name orelse err_command "Undeclared outer syntax command " name [pos]; in Data.map (Symtab.delete name) thy end; (* implicit theory setup *) type command_keyword = string * Position.T; fun raw_command0 kind (name, pos) comment command_parser = C_Thy_Header.add_keywords [((name, pos), ((kind, []), [name]))] #> add_command name (new_command comment command_parser pos); fun raw_command (name, pos) comment command_parser = let val setup = add_command name (new_command comment command_parser pos) in Context.>> (Context.mapping setup (Local_Theory.background_theory setup)) end; fun command (name, pos) comment parse = raw_command (name, pos) comment (Parser parse); fun command'' kind (name, pos) comment parse = raw_command0 kind (name, pos) comment (Parser parse); val command' = command'' Keyword.thy_decl; (** toplevel parsing **) (* parse spans *) (* parse commands *) local fun scan_stack' f b = Scan.one f >> (pair b o C_Token.content_of') in val before_command = C_Token.scan_stack C_Token.is_stack1 -- Scan.optional ( scan_stack' C_Token.is_stack2 false || scan_stack' C_Token.is_stack3 true) (pair false []) end fun parse_command thy = Scan.ahead (before_command |-- C_Parse.position C_Parse.command) :|-- (fn (name, pos) => let val command_tags = before_command -- C_Parse.range C_Parse.command >> (fn (cmd, (_, range)) => (cmd, range)); in case lookup_commands thy name of SOME (cmd as Command {command_parser = Parser parse, ...}) => C_Parse.!!! 
(command_tags :|-- parse) - >> pair [((pos, command_markup false (name, cmd)), "")] + >> pair [((pos, command_markup {def = false} (name, cmd)), "")] | NONE => Scan.fail_with (fn _ => fn _ => let val msg = "undefined command "; in msg ^ quote (Markup.markup Markup.keyword1 name) end) end) (* check commands *) fun command_reports thy tok = if C_Token.is_command tok then let val name = C_Token.content_of tok in (case lookup_commands thy name of NONE => [] - | SOME cmd => [((C_Token.pos_of tok, command_markup false (name, cmd)), "")]) + | SOME cmd => [((C_Token.pos_of tok, command_markup {def = false} (name, cmd)), "")]) end else []; fun check_command ctxt (name, pos) = let val thy = Proof_Context.theory_of ctxt; val keywords = C_Thy_Header.get_keywords thy; in if C_Keyword.is_command keywords name then let val markup = C_Token.explode0 keywords name |> maps (command_reports thy) |> map (#2 o #1); val _ = Context_Position.reports ctxt (map (pair pos) markup); in name end else let val completion_report = Completion.make_report (name, pos) (fn completed => C_Keyword.dest_commands keywords |> filter completed |> sort_strings |> map (fn a => (a, (Markup.commandN, a)))); in error ("Bad command " ^ quote name ^ Position.here pos ^ completion_report) end end; end \ ML \ \\<^file>\~~/src/Pure/PIDE/resources.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. *) \ structure C_Resources = struct (* load files *) fun parse_files cmd = Scan.ahead C_Parse.not_eof -- C_Parse.path >> (fn (tok, name) => fn thy => (case C_Token.get_files tok of [] => let val keywords = C_Thy_Header.get_keywords thy; val master_dir = Resources.master_directory thy; val pos = C_Token.pos_of tok; val delimited = Input.is_delimited (C_Token.input_of tok); val src_paths = C_Keyword.command_files keywords cmd (Path.explode name); in map (Command.read_file master_dir pos delimited) src_paths end | files => map Exn.release files)); end; \ end diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Language.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Language.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Language.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Language.thy @@ -1,1364 +1,1364 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Core Language: Parsing Support (C Language without Annotations)\ theory C_Parser_Language imports C_Environment begin text \ As mentioned in \<^theory>\Isabelle_C.C_Ast\, Isabelle/C depends on certain external parsing libraries, such as \<^dir>\../../src_ext/mlton\, and more specifically \<^dir>\../../src_ext/mlton/lib/mlyacc-lib\. Actually, the sole theory making use of the files in \<^dir>\../../src_ext/mlton/lib/mlyacc-lib\ is the present \<^file>\C_Parser_Language.thy\. (Any other remaining files in \<^dir>\../../src_ext/mlton\ are not used by Isabelle/C, they come from the original repository of MLton: \<^url>\https://github.com/MLton/mlton\.) \ subsection \Parsing Library (Including Semantic Functions)\ ML \ \\<^file>\../generated/c_grammar_fun.grm.sml\\ (* * Modified by Frédéric Tuong, Université Paris-Saclay * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Language.C * https://hackage.haskell.org/package/language-c * * Copyright (c) 1999-2017 Manuel M T Chakravarty * Duncan Coutts * Benedikt Huber * Portions Copyright (c) 1989,1990 James A. Roskind * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Language.C.Comments * https://hackage.haskell.org/package/language-c-comments * * Copyright (c) 2010-2014 Geoff Hulette *) \ signature C_GRAMMAR_RULE_LIB = sig type arg = C_Env.T type 'a monad = arg -> 'a * arg (* type aliases *) type class_Pos = C_Ast.class_Pos type reports_text0' = { markup : Markup.T, markup_body : string } type reports_text0 = reports_text0' list -> reports_text0' list type ('a, 'b) reports_base = ('a C_Env.markup_store * Position.T list, Position.T list * 'a C_Env.markup_store option) C_Ast.either -> Position.T list -> string -> 'b -> 'b (**) type NodeInfo = C_Ast.nodeInfo type CStorageSpec = NodeInfo C_Ast.cStorageSpecifier type CFunSpec = NodeInfo C_Ast.cFunctionSpecifier type CConst = NodeInfo C_Ast.cConstant type 'a CInitializerList = ('a C_Ast.cPartDesignator List.list * 'a C_Ast.cInitializer) List.list type CTranslUnit = NodeInfo C_Ast.cTranslationUnit type CExtDecl = NodeInfo C_Ast.cExternalDeclaration type CFunDef = NodeInfo C_Ast.cFunctionDef type CDecl = NodeInfo C_Ast.cDeclaration type CDeclr = NodeInfo C_Ast.cDeclarator type CDerivedDeclr = NodeInfo C_Ast.cDerivedDeclarator type CArrSize = NodeInfo C_Ast.cArraySize type CStat = NodeInfo C_Ast.cStatement type CAsmStmt = NodeInfo C_Ast.cAssemblyStatement type CAsmOperand = NodeInfo C_Ast.cAssemblyOperand type CBlockItem = NodeInfo C_Ast.cCompoundBlockItem type CDeclSpec = NodeInfo C_Ast.cDeclarationSpecifier type CTypeSpec = NodeInfo C_Ast.cTypeSpecifier type CTypeQual = NodeInfo C_Ast.cTypeQualifier type CAlignSpec = NodeInfo C_Ast.cAlignmentSpecifier type CStructUnion = NodeInfo C_Ast.cStructureUnion type CEnum = NodeInfo C_Ast.cEnumeration type CInit = NodeInfo C_Ast.cInitializer type CInitList = NodeInfo CInitializerList type CDesignator = NodeInfo C_Ast.cPartDesignator type CAttr 
= NodeInfo C_Ast.cAttribute type CExpr = NodeInfo C_Ast.cExpression type CBuiltin = NodeInfo C_Ast.cBuiltinThing type CStrLit = NodeInfo C_Ast.cStringLiteral (**) type ClangCVersion = C_Ast.clangCVersion type Ident = C_Ast.ident type Position = C_Ast.positiona type PosLength = Position * int type Name = C_Ast.namea type Bool = bool type CString = C_Ast.cString type CChar = C_Ast.cChar type CInteger = C_Ast.cInteger type CFloat = C_Ast.cFloat type CStructTag = C_Ast.cStructTag type CUnaryOp = C_Ast.cUnaryOp type 'a CStringLiteral = 'a C_Ast.cStringLiteral type 'a CConstant = 'a C_Ast.cConstant type ('a, 'b) Either = ('a, 'b) C_Ast.either type CIntRepr = C_Ast.cIntRepr type CIntFlag = C_Ast.cIntFlag type CAssignOp = C_Ast.cAssignOp type Comment = C_Ast.comment (**) type 'a Reversed = 'a C_Ast.Reversed type CDeclrR = C_Ast.CDeclrR type 'a Maybe = 'a C_Ast.optiona type 'a Located = 'a C_Ast.Located (**) structure List : sig val reverse : 'a list -> 'a list end (* monadic operations *) val return : 'a -> 'a monad val bind : 'a monad -> ('a -> 'b monad) -> 'b monad val bind' : 'b monad -> ('b -> unit monad) -> 'b monad val >> : unit monad * 'a monad -> 'a monad (* position reports *) val markup_make : ('a -> reports_text0' option) -> ('a -> 'b) -> ('b option -> string) -> ((Markup.T -> reports_text0) -> bool -> ('b, 'b option * string * reports_text0) C_Ast.either -> reports_text0) -> ('a, C_Position.reports_text) reports_base val markup_tvar : (C_Env.markup_global, C_Position.reports_text) reports_base val markup_var_enum : (C_Env.markup_global, C_Position.reports_text) reports_base val markup_var : (C_Env.markup_ident, C_Position.reports_text) reports_base val markup_var_bound : (C_Env.markup_ident, C_Position.reports_text) reports_base val markup_var_improper : (C_Env.markup_ident, C_Position.reports_text) reports_base (* Language.C.Data.RList *) val empty : 'a list Reversed val singleton : 'a -> 'a list Reversed val snoc : 'a list Reversed -> 'a -> 'a list Reversed val rappend : 'a list Reversed -> 'a list -> 'a list Reversed val rappendr : 'a list Reversed -> 'a list Reversed -> 'a list Reversed val rmap : ('a -> 'b) -> 'a list Reversed -> 'b list Reversed (* Language.C.Data.Position *) val posOf : 'a -> Position val posOf' : bool -> class_Pos -> Position * int val make_comment : Symbol_Pos.T list -> Symbol_Pos.T list -> Symbol_Pos.T list -> Position.range -> Comment (* Language.C.Data.Node *) val mkNodeInfo' : Position -> PosLength -> Name -> NodeInfo val decode : NodeInfo -> (class_Pos, string) Either val decode_error' : NodeInfo -> Position.range (* Language.C.Data.Ident *) val quad : string list -> int val ident_encode : string -> int val ident_decode : int -> string val mkIdent : Position * int -> string -> Name -> Ident val internalIdent : string -> Ident (* Language.C.Syntax.AST *) val liftStrLit : 'a CStringLiteral -> 'a CConstant (* Language.C.Syntax.Constants *) val concatCStrings : CString list -> CString (* Language.C.Parser.ParserMonad *) val getNewName : Name monad val shadowTypedef0'''' : string -> Position.T list -> C_Env.markup_ident -> C_Env.env_lang -> C_Env.env_tree -> C_Env.env_lang * C_Env.env_tree val shadowTypedef0' : C_Ast.CDeclSpec list C_Env.parse_status -> bool -> C_Ast.ident * C_Ast.CDerivedDeclr list -> C_Env.env_lang -> C_Env.env_tree -> C_Env.env_lang * C_Env.env_tree val isTypeIdent : string -> arg -> bool val enterScope : unit monad val leaveScope : unit monad val getCurrentPosition : Position monad (* Language.C.Parser.Tokens *) val CTokCLit : CChar -> 
(CChar -> 'a) -> 'a val CTokILit : CInteger -> (CInteger -> 'a) -> 'a val CTokFLit : CFloat -> (CFloat -> 'a) -> 'a val CTokSLit : CString -> (CString -> 'a) -> 'a (* Language.C.Parser.Parser *) val reverseList : 'a list -> 'a list Reversed val L : 'a -> int -> 'a Located monad val unL : 'a Located -> 'a val withNodeInfo : int -> (NodeInfo -> 'a) -> 'a monad val withNodeInfo_CExtDecl : CExtDecl -> (NodeInfo -> 'a) -> 'a monad val withNodeInfo_CExpr : CExpr list Reversed -> (NodeInfo -> 'a) -> 'a monad val withLength : NodeInfo -> (NodeInfo -> 'a) -> 'a monad val reverseDeclr : CDeclrR -> CDeclr val withAttribute : int -> CAttr list -> (NodeInfo -> CDeclrR) -> CDeclrR monad val withAttributePF : int -> CAttr list -> (NodeInfo -> CDeclrR -> CDeclrR) -> (CDeclrR -> CDeclrR) monad val appendObjAttrs : CAttr list -> CDeclr -> CDeclr val withAsmNameAttrs : CStrLit Maybe * CAttr list -> CDeclrR -> CDeclrR monad val appendDeclrAttrs : CAttr list -> CDeclrR -> CDeclrR val ptrDeclr : CDeclrR -> CTypeQual list -> NodeInfo -> CDeclrR val funDeclr : CDeclrR -> (Ident list, (CDecl list * Bool)) Either -> CAttr list -> NodeInfo -> CDeclrR val arrDeclr : CDeclrR -> CTypeQual list -> Bool -> Bool -> CExpr Maybe -> NodeInfo -> CDeclrR val liftTypeQuals : CTypeQual list Reversed -> CDeclSpec list val liftCAttrs : CAttr list -> CDeclSpec list val addTrailingAttrs : CDeclSpec list Reversed -> CAttr list -> CDeclSpec list Reversed val emptyDeclr : CDeclrR val mkVarDeclr : Ident -> NodeInfo -> CDeclrR val doDeclIdent : CDeclSpec list -> CDeclrR -> unit monad val ident_of_decl : (Ident list, CDecl list * bool) C_Ast.either -> (Ident * CDerivedDeclr list * CDeclSpec list) list val doFuncParamDeclIdent : CDeclr -> unit monad end structure C_Grammar_Rule_Lib : C_GRAMMAR_RULE_LIB = struct open C_Ast type arg = C_Env.T type 'a monad = arg -> 'a * arg (**) type reports_text0' = { markup : Markup.T, markup_body : string } type reports_text0 = reports_text0' list -> reports_text0' list type 'a reports_store = Position.T list * serial * 'a type ('a, 'b) reports_base = ('a C_Env.markup_store * Position.T list, Position.T list * 'a C_Env.markup_store option) C_Ast.either -> Position.T list -> string -> 'b -> 'b fun markup_init markup = { markup = markup, markup_body = "" } val look_idents = C_Env_Ext.list_lookup o C_Env_Ext.get_idents val look_idents' = C_Env_Ext.list_lookup o C_Env_Ext.get_idents' val look_tyidents_typedef = C_Env_Ext.list_lookup o C_Env_Ext.get_tyidents_typedef val look_tyidents'_typedef = C_Env_Ext.list_lookup o C_Env_Ext.get_tyidents'_typedef val To_string0 = meta_of_logic val ident_encode = Word8Vector.foldl (fn (w, acc) => Word8.toInt w + acc * 256) 0 o Byte.stringToBytes fun ident_decode nb = radixpand (256, nb) |> map chr |> implode fun reverse l = rev l fun report _ [] _ = I | report markup ps x = let val ms = markup x in fold (fn p => fold (fn {markup, markup_body} => cons ((p, markup), markup_body)) ms) ps end fun markup_make typing get_global desc report' data = report (fn name => let val (def, ps, id, global, typing) = case data of Left ((ps, id, param), ps' as _ :: _) => ( true , ps , id , Right ( SOME (get_global param) , "Redefinition of " ^ quote name ^ Position.here_list ps \ \Any positions provided here will be explicitly reported, which might not be the desired effect. 
So we explicitly refer the reader to a separate tooltip.\ ^ " (more details in the command modifier tooltip)" , cons { markup = Markup.class_parameter , markup_body = "redefining this" ^ Position.here_list ps' }) , typing param) | Left ((ps, id, param), _) => (true, ps, id, Left (get_global param), typing param) | Right (_, SOME (ps, id, param)) => (false, ps, id, Left (get_global param), typing param) | Right (ps, _) => ( true , ps , serial () , Right (NONE, "Undeclared " ^ quote name ^ Position.here_list ps, I) , NONE) fun markup_elem name = (name, (name, []): Markup.T) val (varN, var) = markup_elem (desc (case global of Left b => SOME b | Right (SOME b, _, _) => SOME b | _ => NONE)); val entity = Markup.entity varN name val cons' = cons o markup_init in (cons' var #> report' cons' def global #> (case typing of NONE => I | SOME x => cons x)) (map (fn pos => - let val props = #2 (Position.make_entity_markup def id "" ("", pos)) + let val props = #2 (Position.make_entity_markup {def = def} id "" ("", pos)) in markup_init (Markup.properties props entity) end) ps) end) fun markup_make' typing get_global desc report = markup_make typing get_global (fn global => "C " ^ (case global of SOME true => "global " | SOME false => "local " | NONE => "") ^ desc) (fn cons' => fn def => fn Left b => report cons' def b | Right (b, msg, f) => tap (fn _ => Output.information msg) #> f #> (case b of NONE => cons' Markup.free | SOME b => report cons' def b)) fun markup_tvar0 desc = markup_make' (K NONE) I desc (fn cons' => fn def => fn true => cons' (if def then Markup.free else Markup.ML_keyword3) | false => cons' Markup.skolem) val markup_tvar = markup_tvar0 "type variable" val markup_var_enum = markup_tvar0 "enum tag" fun string_of_list f = (fn [] => NONE | [s] => SOME s | s => SOME ("[" ^ String.concatWith ", " s ^ "]")) o map f val string_of_cDeclarationSpecifier = fn C_Ast.CStorageSpec0 _ => "storage" | C_Ast.CTypeSpec0 t => (case t of CVoidType0 _ => "void" | CCharType0 _ => "char" | CShortType0 _ => "short" | CIntType0 _ => "int" | CLongType0 _ => "long" | CFloatType0 _ => "float" | CDoubleType0 _ => "double" | CSignedType0 _ => "signed" | CUnsigType0 _ => "unsig" | CBoolType0 _ => "bool" | CComplexType0 _ => "complex" | CInt128Type0 _ => "int128" | CSUType0 _ => "SU" | CEnumType0 _ => "enum" | CTypeDef0 _ => "typedef" | CTypeOfExpr0 _ => "type_of_expr" | CTypeOfType0 _ => "type_of_type" | CAtomicType0 _ => "atomic") | C_Ast.CTypeQual0 _ => "type_qual" | C_Ast.CFunSpec0 _ => "fun" | C_Ast.CAlignSpec0 _ => "align" fun typing {params, ret, ...} = SOME { markup = Markup.typing , markup_body = case ( string_of_list (fn C_Ast.CPtrDeclr0 _ => "pointer" | C_Ast.CArrDeclr0 _ => "array" | C_Ast.CFunDeclr0 (C_Ast.Left _, _, _) => "function [...] ->" | C_Ast.CFunDeclr0 (C_Ast.Right (l_decl, _), _, _) => "function " ^ (String.concatWith " -> " (map (fn CDecl0 ([decl], _, _) => string_of_cDeclarationSpecifier decl | CDecl0 (l, _, _) => "(" ^ String.concatWith " " (map string_of_cDeclarationSpecifier l) ^ ")" | CStaticAssert0 _ => "static_assert") l_decl)) ^ " ->") params , case ret of C_Env.Previous_in_stack => SOME "..." 
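
(* For exposition: a reports_base function built by markup_make receives the
   occurrence to be reported as an either value, followed by the positions to
   annotate and the entity's name.  Schematically:

     Left (store, previous_defs)   (* defining occurrence; a non-empty previous_defs
                                      triggers the "Redefinition of ..." report *)
     Right (ps, SOME store)        (* bound occurrence of a previously declared entity *)
     Right (ps, NONE)              (* unknown entity, reported as "Undeclared ..." *)

   where, as the patterns above show, a store bundles positions, a serial number,
   and the entity data. *)
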
| C_Env.Parsed ret => string_of_list string_of_cDeclarationSpecifier ret) of (NONE, NONE) => let val _ = warning "markup_var: Not yet implemented" in "" end | (SOME params, NONE) => params | (NONE, SOME ret) => ret | (SOME params, SOME ret) => params ^ " " ^ ret } val markup_var = markup_make' typing #global "variable" (fn cons' => fn def => fn true => if def then cons' Markup.free else cons' Markup.delimiter (*explicit black color, otherwise the default color of constants might be automatically chosen (especially in term cartouches)*) | false => cons' Markup.bound) val markup_var_bound = markup_make' typing #global "variable" (fn cons' => K (K (cons' Markup.bound))) val markup_var_improper = markup_make' typing #global "variable" (fn cons' => K (K (cons' Markup.improper))) (**) val return = pair fun bind f g = f #-> g fun bind' f g = bind f (fn r => bind (g r) (fn () => return r)) fun a >> b = a #> b o #2 fun sequence_ f = fn [] => return () | x :: xs => f x >> sequence_ f xs (* Language.C.Data.RList *) val empty = [] fun singleton x = [x] fun snoc xs x = x :: xs fun rappend xs ys = rev ys @ xs fun rappendr xs ys = ys @ xs val rmap = map val viewr = fn [] => error "viewr: empty RList" | x :: xs => (xs, x) (* Language.C.Data.Position *) val nopos = NoPosition fun posOf _ = NoPosition fun posOf' mk_range = (if mk_range then Position.range else I) #> (fn (pos1, pos2) => let val {offset = offset, end_offset = end_offset, ...} = Position.dest pos1 in ( Position offset (From_string (C_Env.encode_positions [pos1, pos2])) 0 0 , end_offset - offset) end) fun posOf'' node env = let val (stack, len) = #rule_input env val (mk_range, (pos1a, pos1b)) = case node of Left i => (true, nth stack (len - i - 1)) | Right range => (false, range) val (pos2a, pos2b) = nth stack 0 in ( (posOf' mk_range (pos1a, pos1b) |> #1, posOf' true (pos2a, pos2b)) , env |> C_Env_Ext.map_output_pos (K (SOME (pos1a, pos2b))) |> C_Env_Ext.map_output_vacuous (K false)) end val posOf''' = posOf'' o Left val internalPos = InternalPosition fun make_comment body_begin body body_end range = Commenta ( posOf' false range |> #1 , From_string (Symbol_Pos.implode (body_begin @ body @ body_end)) , case body_end of [] => SingleLine | _ => MultiLine) (* Language.C.Data.Node *) val undefNode = OnlyPos nopos (nopos, ~1) fun mkNodeInfoOnlyPos pos = OnlyPos pos (nopos, ~1) fun mkNodeInfo pos name = NodeInfo pos (nopos, ~1) name val mkNodeInfo' = NodeInfo val decode = (fn OnlyPos0 range => range | NodeInfo0 (pos1, (pos2, len2), _) => (pos1, (pos2, len2))) #> (fn (Position0 (_, s1, _, _), (Position0 (_, s2, _, _), _)) => (case (C_Env.decode_positions (To_string0 s1), C_Env.decode_positions (To_string0 s2)) of ([pos1, _], [_, pos2]) => Left (Position.range (pos1, pos2)) | _ => Right "Expecting 2 elements") | _ => Right "Invalid position") fun decode_error' x = case decode x of Left x => x | Right msg => error msg fun decode_error x = Right (decode_error' x) val nameOfNode = fn OnlyPos0 _ => NONE | NodeInfo0 (_, _, name) => SOME name (* Language.C.Data.Ident *) local val bits7 = Integer.pow 7 2 val bits14 = Integer.pow 14 2 val bits21 = Integer.pow 21 2 val bits28 = Integer.pow 28 2 in fun quad s = case s of [] => 0 | c1 :: [] => ord c1 | c1 :: c2 :: [] => ord c2 * bits7 + ord c1 | c1 :: c2 :: c3 :: [] => ord c3 * bits14 + ord c2 * bits7 + ord c1 | c1 :: c2 :: c3 :: c4 :: s => ((ord c4 * bits21 + ord c3 * bits14 + ord c2 * bits7 + ord c1) mod bits28) + (quad s mod bits28) end local fun internalIdent0 pos s = Ident (From_string s, ident_encode s, 
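
(* For exposition: 'a monad is a plain state monad over the parser environment
   (type arg = C_Env.T), so "val return = pair" and "fun bind f g = f #-> g"
   unfold, up to eta, to

     fun return x env = (x, env)
     fun bind m f env = let val (x, env') = m env in f x env' end

   and "a >> b" discards the unit result of a before running b; bind' and
   sequence_ are derived from these. *)
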
pos) in fun mkIdent (pos, len) s name = internalIdent0 (mkNodeInfo' pos (pos, len) name) s val internalIdent = internalIdent0 (mkNodeInfoOnlyPos internalPos) end (* Language.C.Syntax.AST *) fun liftStrLit (CStrLit0 (str, at)) = CStrConst str at (* Language.C.Syntax.Constants *) fun concatCStrings cs = CString0 (flattena (map (fn CString0 (s,_) => s) cs), exists (fn CString0 (_, b) => b) cs) (* Language.C.Parser.ParserMonad *) fun getNewName env = (Namea (C_Env_Ext.get_namesupply env), C_Env_Ext.map_namesupply (fn x => x + 1) env) fun addTypedef (Ident0 (_, i, node)) env = let val name = ident_decode i val pos1 = [decode_error' node |> #1] val data = (pos1, serial (), null (C_Env_Ext.get_scopes env)) in ((), env |> C_Env_Ext.map_idents (Symtab.delete_safe name) |> C_Env_Ext.map_tyidents_typedef (Symtab.update (name, data)) |> C_Env_Ext.map_reports_text (markup_tvar (Left (data, flat [ look_idents env name, look_tyidents_typedef env name ])) pos1 name)) end fun shadowTypedef0''' name pos data0 env_lang env_tree = let val data = (pos, serial (), data0) val update_id = Symtab.update (name, data) in ( env_lang |> C_Env_Ext.map_tyidents'_typedef (Symtab.delete_safe name) |> C_Env_Ext.map_idents' update_id , update_id , env_tree |> C_Env.map_reports_text (markup_var (Left (data, flat [ look_idents' env_lang name , look_tyidents'_typedef env_lang name ])) pos name)) end fun shadowTypedef0'''' name pos data0 env_lang env_tree = let val (env_lang, _, env_tree) = shadowTypedef0''' name pos data0 env_lang env_tree in ( env_lang, env_tree) end fun shadowTypedef0'' ret global (Ident0 (_, i, node), params) = shadowTypedef0''' (ident_decode i) [decode_error' node |> #1] {global = global, params = params, ret = ret} fun shadowTypedef0' ret global ident env_lang env_tree = let val (env_lang, _, env_tree) = shadowTypedef0'' ret global ident env_lang env_tree in (env_lang, env_tree) end fun shadowTypedef0 ret global f ident env = let val (update_id, env) = C_Env.map_env_lang_tree' (fn env_lang => fn env_tree => let val (env_lang, update_id, env_tree) = shadowTypedef0'' ret global ident env_lang env_tree in (update_id, (env_lang, env_tree)) end) env in ((), f update_id env) end fun shadowTypedef_fun ident env = shadowTypedef0 C_Env.Previous_in_stack (case C_Env_Ext.get_scopes env of _ :: [] => true | _ => false) (fn update_id => C_Env_Ext.map_scopes (fn (NONE, x) :: xs => (SOME (fst ident), C_Env.map_idents update_id x) :: xs | (SOME _, _) :: _ => error "Not yet implemented" | [] => error "Not expecting an empty scope")) ident env fun shadowTypedef (i, params, ret) env = shadowTypedef0 (C_Env.Parsed ret) (List.null (C_Env_Ext.get_scopes env)) (K I) (i, params) env fun isTypeIdent s0 = Symtab.exists (fn (s1, _) => s0 = s1) o C_Env_Ext.get_tyidents_typedef fun enterScope env = ((), C_Env_Ext.map_scopes (cons (NONE, C_Env_Ext.get_var_table env)) env) fun leaveScope env = case C_Env_Ext.get_scopes env of [] => error "leaveScope: already in global scope" | (_, var_table) :: scopes => ((), env |> C_Env_Ext.map_scopes (K scopes) |> C_Env_Ext.map_var_table (K var_table)) val getCurrentPosition = return NoPosition (* Language.C.Parser.Tokens *) fun CTokCLit x f = x |> f fun CTokILit x f = x |> f fun CTokFLit x f = x |> f fun CTokSLit x f = x |> f (* Language.C.Parser.Parser *) fun reverseList x = rev x fun L a i = posOf''' i #>> curry Located a fun unL (Located (a, _)) = a fun withNodeInfo00 (pos1, (pos2, len2)) mkAttrNode name = return (mkAttrNode (NodeInfo pos1 (pos2, len2) name)) fun withNodeInfo0 x = x |> bind 
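
(* For exposition: ident_encode / ident_decode store identifiers as base-256
   numbers built from the character codes, first character most significant;
   for instance

     ident_encode "ab" = Char.ord #"b" + 256 * Char.ord #"a"   (* = 24930 *)
     ident_decode 24930 = "ab"

   which is how addTypedef above recovers an identifier's name from the integer
   field of Ident0. *)
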
getNewName oo withNodeInfo00 fun withNodeInfo0' node mkAttrNode env = let val (range, env) = posOf'' node env in withNodeInfo0 range mkAttrNode env end fun withNodeInfo x = x |> withNodeInfo0' o Left fun withNodeInfo' x = x |> withNodeInfo0' o decode_error fun withNodeInfo_CExtDecl x = x |> withNodeInfo' o (fn CDeclExt0 (CDecl0 (_, _, node)) => node | CDeclExt0 (CStaticAssert0 (_, _, node)) => node | CFDefExt0 (CFunDef0 (_, _, _, _, node)) => node | CAsmExt0 (_, node) => node) val get_node_CExpr = fn CComma0 (_, a) => a | CAssign0 (_, _, _, a) => a | CCond0 (_, _, _, a) => a | CBinary0 (_, _, _, a) => a | CCast0 (_, _, a) => a | CUnary0 (_, _, a) => a | CSizeofExpr0 (_, a) => a | CSizeofType0 (_, a) => a | CAlignofExpr0 (_, a) => a | CAlignofType0 (_, a) => a | CComplexReal0 (_, a) => a | CComplexImag0 (_, a) => a | CIndex0 (_, _, a) => a | CCall0 (_, _, a) => a | CMember0 (_, _, _, a) => a | CVar0 (_, a) => a | CConst0 c => (case c of CIntConst0 (_, a) => a | CCharConst0 (_, a) => a | CFloatConst0 (_, a) => a | CStrConst0 (_, a) => a) | CCompoundLit0 (_, _, a) => a | CGenericSelection0 (_, _, a) => a | CStatExpr0 (_, a) => a | CLabAddrExpr0 (_, a) => a | CBuiltinExpr0 cBuiltinThing => (case cBuiltinThing of CBuiltinVaArg0 (_, _, a) => a | CBuiltinOffsetOf0 (_, _, a) => a | CBuiltinTypesCompatible0 (_, _, a) => a) fun withNodeInfo_CExpr x = x |> withNodeInfo' o get_node_CExpr o hd fun withLength node mkAttrNode = bind (posOf'' (decode_error node)) (fn range => withNodeInfo00 range mkAttrNode (case nameOfNode node of NONE => error "nameOfNode" | SOME name => name)) fun reverseDeclr (CDeclrR0 (ide, reversedDDs, asmname, cattrs, at)) = CDeclr ide (rev reversedDDs) asmname cattrs at fun appendDeclrAttrs newAttrs (CDeclrR0 (ident, l, asmname, cattrs, at)) = case l of [] => CDeclrR ident empty asmname (cattrs @ newAttrs) at | x :: xs => let val appendAttrs = fn CPtrDeclr0 (typeQuals, at) => CPtrDeclr (typeQuals @ map CAttrQual newAttrs) at | CArrDeclr0 (typeQuals, arraySize, at) => CArrDeclr (typeQuals @ map CAttrQual newAttrs) arraySize at | CFunDeclr0 (parameters, cattrs, at) => CFunDeclr parameters (cattrs @ newAttrs) at in CDeclrR ident (appendAttrs x :: xs) asmname cattrs at end fun withAttribute node cattrs mkDeclrNode = bind (posOf''' node) (fn (pos, _) => bind getNewName (fn name => let val attrs = mkNodeInfo pos name val newDeclr = appendDeclrAttrs cattrs (mkDeclrNode attrs) in return newDeclr end)) fun withAttributePF node cattrs mkDeclrCtor = bind (posOf''' node) (fn (pos, _) => bind getNewName (fn name => let val attrs = mkNodeInfo pos name val newDeclr = appendDeclrAttrs cattrs o mkDeclrCtor attrs in return newDeclr end)) fun appendObjAttrs newAttrs (CDeclr0 (ident, indirections, asmname, cAttrs, at)) = CDeclr ident indirections asmname (cAttrs @ newAttrs) at fun appendObjAttrsR newAttrs (CDeclrR0 (ident, indirections, asmname, cAttrs, at)) = CDeclrR ident indirections asmname (cAttrs @ newAttrs) at fun setAsmName mAsmName (CDeclrR0 (ident, indirections, oldName, cattrs, at)) = case (case (mAsmName, oldName) of (None, None) => Right None | (None, oldname as Some _) => Right oldname | (newname as Some _, None) => Right newname | (Some n1, Some n2) => Left (n1, n2)) of Left (n1, n2) => let fun showName (CStrLit0 (CString0 (s, _), _)) = To_string0 s in error ("Duplicate assembler name: " ^ showName n1 ^ " " ^ showName n2) end | Right newName => return (CDeclrR ident indirections newName cattrs at) fun withAsmNameAttrs (mAsmName, newAttrs) declr = setAsmName mAsmName (appendObjAttrsR 
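
(* For exposition: the withNodeInfo* combinators above share one scheme: obtain
   the source range of a grammar symbol via posOf'' (either by index into the
   rule-input stack, for withNodeInfo, or from an already decoded range, for
   withNodeInfo'), draw a fresh Name from the name supply with getNewName, and
   pass the resulting NodeInfo to the continuation, as withNodeInfo00 shows. *)
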
newAttrs declr) fun ptrDeclr (CDeclrR0 (ident, derivedDeclrs, asmname, cattrs, dat)) tyquals at = CDeclrR ident (snoc derivedDeclrs (CPtrDeclr tyquals at)) asmname cattrs dat fun funDeclr (CDeclrR0 (ident, derivedDeclrs, asmname, dcattrs, dat)) params cattrs at = CDeclrR ident (snoc derivedDeclrs (CFunDeclr params cattrs at)) asmname dcattrs dat fun arrDeclr (CDeclrR0 (ident, derivedDeclrs, asmname, cattrs, dat)) tyquals var_sized static_size size_expr_opt at = CDeclrR ident (snoc derivedDeclrs (CArrDeclr tyquals (case size_expr_opt of Some e => CArrSize static_size e | None => CNoArrSize var_sized) at)) asmname cattrs dat val liftTypeQuals = map CTypeQual o reverse val liftCAttrs = map (CTypeQual o CAttrQual) fun addTrailingAttrs declspecs new_attrs = case viewr declspecs of (specs_init, CTypeSpec0 (CSUType0 (CStruct0 (tag, name, Some def, def_attrs, su_node), node))) => snoc specs_init (CTypeSpec (CSUType (CStruct tag name (Just def) (def_attrs @ new_attrs) su_node) node)) | (specs_init, CTypeSpec0 (CEnumType0 (CEnum0 (name, Some def, def_attrs, e_node), node))) => snoc specs_init (CTypeSpec (CEnumType (CEnum name (Just def) (def_attrs @ new_attrs) e_node) node)) | _ => rappend declspecs (liftCAttrs new_attrs) val emptyDeclr = CDeclrR Nothing empty Nothing [] undefNode fun mkVarDeclr ident = CDeclrR (Some ident) empty Nothing [] fun doDeclIdent declspecs (decl as CDeclrR0 (mIdent, _, _, _, _)) = case mIdent of None => return () | Some ident => if exists (fn CStorageSpec0 (CTypedef0 _) => true | _ => false) declspecs then addTypedef ident else shadowTypedef ( ident , case reverseDeclr decl of CDeclr0 (_, params, _, _, _) => params , declspecs) val ident_of_decl = fn Left params => map (fn i => (i, [], [])) params | Right (params, _) => maps (fn CDecl0 (ret, l, _) => maps (fn ((Some (CDeclr0 (Some mIdent, params, _, _, _)),_),_) => [(mIdent, params, ret)] | _ => []) l | _ => []) params local fun sequence_' f = sequence_ f o ident_of_decl val is_fun = fn CFunDeclr0 _ => true | _ => false in fun doFuncParamDeclIdent (CDeclr0 (mIdent0, param0, _, _, node0)) = let val (param_not_fun, param0') = chop_prefix (not o is_fun) param0 val () = if null param_not_fun then () else Output.information ("Not a function" ^ Position.here (decode_error' (case mIdent0 of None => node0 | Some (Ident0 (_, _, node)) => node) |> #1)) val (param_fun, param0') = chop_prefix is_fun param0' in (case mIdent0 of None => return () | Some mIdent0 => shadowTypedef_fun (mIdent0, param0)) >> sequence_ shadowTypedef (maps (fn CFunDeclr0 (params, _, _) => ident_of_decl params | _ => []) param_fun) >> sequence_ (fn CFunDeclr0 (params, _, _) => C_Env.map_env_tree (pair Symtab.empty #> sequence_' (fn (Ident0 (_, i, node), params, ret) => fn (env_lang, env_tree) => pair () let val name = ident_decode i val pos = [decode_error' node |> #1] val data = ( pos , serial () , {global = false, params = params, ret = C_Env.Parsed ret}) in ( env_lang |> Symtab.update (name, data) , env_tree |> C_Env.map_reports_text (markup_var_improper (Left (data, C_Env_Ext.list_lookup env_lang name)) pos name)) end) params #> #2 o #2) #> pair () | _ => return ()) param0' end end (**) structure List = struct val reverse = rev end end \ subsection \Miscellaneous\ ML \ \\<^file>\~~/src/Pure/Thy/document_antiquotations.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Thy/document_antiquotations.ML Author: Makarius Miscellaneous document antiquotations. 
*) \ structure ML_Document_Antiquotations = struct (* ML text *) local fun ml_text name ml = Document_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input \ \TODO: enable reporting with \<^ML_type>\Token.file\ as in \<^ML>\Resources.parse_files\\) (fn ctxt => fn text => let val file_content = Token.file_source (Command.read_file (Resources.master_directory (Proof_Context.theory_of ctxt)) Position.none false (Path.explode (#1 (Input.source_content text)))) val _ = (*TODO: avoid multiple file scanning*) ML_Context.eval_in (SOME ctxt) ML_Compiler.flags Position.none (* \ (optionally) disabling a potential double report*) (ml file_content) in file_content |> Input.source_explode |> Source.of_list |> Source.source Symbol_Pos.stopper (Scan.bulk (Symbol_Pos.scan_comment "" >> (C_Scan.Left o pair true) || Scan.many1 (Symbol.is_ascii_blank o Symbol_Pos.symbol) >> (C_Scan.Left o pair false) || Scan.one (not o Symbol_Pos.is_eof) >> C_Scan.Right)) |> Source.exhaust |> drop_prefix (fn C_Scan.Left _ => true | _ => false) |> drop_suffix (fn C_Scan.Left (false, _) => true | _ => false) |> maps (fn C_Scan.Left (_, x) => x | C_Scan.Right x => [x]) |> Symbol_Pos.implode |> enclose "\n" "\n" |> cartouche |> Document_Output.output_source ctxt |> Document_Output.isabelle ctxt end); fun ml_enclose bg en source = ML_Lex.read bg @ ML_Lex.read_source source @ ML_Lex.read en; in val _ = Theory.setup (ml_text \<^binding>\ML_file\ (ml_enclose "" "")); end; end; \ subsection \Loading the Grammar Simulator\ text \ The parser consists of a generic module \<^file>\../../src_ext/mlton/lib/mlyacc-lib/base.sig\, which interprets an automata-like format generated from ML-Yacc. \ ML_file "../../src_ext/mlton/lib/mlyacc-lib/base.sig" \ \\<^ML_file>\../../src_ext/mlton/lib/mlyacc-lib/base.sig\\ ML_file "../../src_ext/mlton/lib/mlyacc-lib/join.sml" \ \\<^ML_file>\../../src_ext/mlton/lib/mlyacc-lib/join.sml\\ ML_file "../../src_ext/mlton/lib/mlyacc-lib/lrtable.sml" \ \\<^ML_file>\../../src_ext/mlton/lib/mlyacc-lib/lrtable.sml\\ ML_file "../../src_ext/mlton/lib/mlyacc-lib/stream.sml" \ \\<^ML_file>\../../src_ext/mlton/lib/mlyacc-lib/stream.sml\\ ML_file "../../src_ext/mlton/lib/mlyacc-lib/parser1.sml" \ \\<^ML_file>\../../src_ext/mlton/lib/mlyacc-lib/parser1.sml\\ subsection \Loading the Generated Grammar (SML signature)\ ML_file "../generated/c_grammar_fun.grm.sig" subsection \Overloading Grammar Rules (Optional Part)\ ML \ \\<^file>\../generated/c_grammar_fun.grm.sml\\ \ structure C_Grammar_Rule_Wrap_Overloading = struct open C_Grammar_Rule_Lib fun update_env_bottom_up f x arg = ((), C_Env.map_env_lang_tree (f x) arg) fun update_env_top_down f x = pair () ##> (fn arg => C_Env_Ext.map_output_env (K (SOME (f x (#env_lang arg)))) arg) (*type variable (report bound)*) val specifier3 : (CDeclSpec list) -> unit monad = update_env_bottom_up (fn l => fn env_lang => fn env_tree => ( env_lang , fold let open C_Ast in fn CTypeSpec0 (CTypeDef0 (Ident0 (_, i, node), _)) => let val name = ident_decode i val pos1 = [decode_error' node |> #1] in C_Env.map_reports_text (markup_tvar (Right (pos1, Symtab.lookup (C_Env_Ext.get_tyidents'_typedef env_lang) name)) pos1 name) end | _ => I end l env_tree)) val declaration_specifier3 : (CDeclSpec list) -> unit monad = specifier3 val type_specifier3 : (CDeclSpec list) -> unit monad = specifier3 (*basic variable (report bound)*) val primary_expression1 : (CExpr) -> unit monad = update_env_bottom_up (fn e => fn env_lang => fn env_tree => ( env_lang , let open C_Ast in fn CVar0 (Ident0 (_, i, 
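
(* For exposition: the two combinators at the top of this structure differ only
   in where the updated environment is put.  Unfolding the combinator forms:

     fun update_env_bottom_up f x arg = ((), C_Env.map_env_lang_tree (f x) arg)
     fun update_env_top_down f x arg =
       ((), C_Env_Ext.map_output_env (K (SOME (f x (#env_lang arg)))) arg)

   i.e. the bottom-up version updates the current (env_lang, env_tree) pair in
   place, whereas the top-down version leaves it untouched and instead stores the
   recomputed language environment in the parser output. *)
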
node), _) => let val name = ident_decode i val pos1 = [decode_error' node |> #1] in C_Env.map_reports_text (markup_var (Right (pos1, Symtab.lookup (C_Env_Ext.get_idents' env_lang) name)) pos1 name) end | _ => I end e env_tree)) (*basic variable, parameter functions (report bound)*) val declarator1 : (CDeclrR) -> unit monad = update_env_bottom_up (fn d => fn env_lang => fn env_tree => ( env_lang , let open C_Ast fun markup markup_var params = pair Symtab.empty #> fold (fn (Ident0 (_, i, node), params, ret) => fn (env_lang, env_tree) => let val name = ident_decode i val pos = [decode_error' node |> #1] val data = ( pos , serial () , {global = false, params = params, ret = C_Env.Parsed ret}) in ( env_lang |> Symtab.update (name, data) , env_tree |> C_Env.map_reports_text (markup_var (Left (data, C_Env_Ext.list_lookup env_lang name)) pos name)) end) (ident_of_decl params) #> #2 in fn CDeclrR0 (_, param0, _, _, _) => (case rev param0 of CFunDeclr0 (params, _, _) :: param0 => pair param0 o markup markup_var_bound params | param0 => pair param0) #-> fold (fn CFunDeclr0 (params, _, _) => markup markup_var_improper params | _ => I) end d env_tree)) (*old style syntax for functions (legacy feature)*) val external_declaration1 : (CExtDecl) -> unit monad = update_env_bottom_up (fn f => fn env_lang => fn env_tree => ( env_lang , let open C_Ast in fn CFDefExt0 (CFunDef0 (_, _, l, _, node)) => if null l then I else tap (fn _ => legacy_feature ("Scope analysing for old function syntax not implemented" ^ Position.here (decode_error' node |> #1))) | _ => I end f env_tree)) (*(type) enum, struct, union (report define & report bound)*) fun report_enum_bound i' node env_lang = let open C_Ast val name = ident_decode i' val pos1 = [decode_error' node |> #1] in C_Env.map_reports_text (markup_var_enum (Right (pos1, Symtab.lookup (C_Env_Ext.get_tyidents'_enum env_lang) name)) pos1 name) end local val look_tyidents'_enum = C_Env_Ext.list_lookup o C_Env_Ext.get_tyidents'_enum val declaration : (CDecl) -> unit monad = update_env_bottom_up (fn decl => fn env_lang => fn env_tree => let open C_Ast in fn CDecl0 (l, _, _) => fold (fn CTypeSpec0 (CEnumType0 (CEnum0 (Some (Ident0 (_, i, node)), body, _, _), _)) => (case body of None => (fn (env_lang, env_tree) => (env_lang, report_enum_bound i node env_lang env_tree)) | Some _ => fn (env_lang, env_tree) => let val name = ident_decode i val pos1 = [decode_error' node |> #1] val data = (pos1, serial (), null (C_Env.get_scopes env_lang)) in ( C_Env_Ext.map_tyidents'_enum (Symtab.update (name, data)) env_lang , C_Env.map_reports_text (markup_var_enum (Left (data, look_tyidents'_enum env_lang name)) pos1 name) env_tree) end) | _ => I) l | _ => I end decl (env_lang, env_tree)) in val declaration1 = declaration val declaration2 = declaration val declaration3 = declaration end (*(basic) enum, struct, union (report define)*) local val enumerator : ( ( Ident * CExpr Maybe ) ) -> unit monad = update_env_bottom_up (fn id => fn env_lang => let open C_Ast in fn (ident as Ident0 (_, _, node), _) => C_Grammar_Rule_Lib.shadowTypedef0' (C_Env.Parsed [CTypeSpec0 (CIntType0 node)]) (null (C_Env.get_scopes env_lang)) (ident, []) env_lang end id) in val enumerator1 = enumerator val enumerator2 = enumerator val enumerator3 = enumerator val enumerator4 = enumerator end (*(type) enum, struct, union (report bound)*) local fun declaration_specifier env_lang = let open C_Ast in fold (fn CTypeSpec0 (CEnumType0 (CEnum0 (Some (Ident0 (_, i, node)), _, _, _), _)) => report_enum_bound i node env_lang | _ => 
I) end in val declaration_specifier2 : (CDeclSpec list) -> unit monad = update_env_bottom_up (fn d => fn env_lang => fn env_tree => let open C_Ast in ( env_lang , env_tree |> (if exists (fn CStorageSpec0 (CTypedef0 _) => true | _ => false) d then I else declaration_specifier env_lang d)) end) local val f_definition : (CFunDef) -> unit monad = update_env_bottom_up (fn d => fn env_lang => fn env_tree => ( env_lang , let open C_Ast in fn CFunDef0 (l, _, _, _, _) => declaration_specifier env_lang l end d env_tree)) in val function_definition4 = f_definition val nested_function_definition2 = f_definition end local val parameter_type_list : ( ( CDecl list * Bool ) ) -> unit monad = update_env_bottom_up (fn d => fn env_lang => fn env_tree => ( env_lang , let open C_Ast in #1 #> fold (fn CDecl0 (l, _, _) => declaration_specifier env_lang l | _ => I) end d env_tree)) in val parameter_type_list2 = parameter_type_list val parameter_type_list3 = parameter_type_list end end end \ ML \ \\<^file>\../generated/c_grammar_fun.grm.sml\\ \ structure C_Grammar_Rule_Wrap = struct open C_Grammar_Rule_Wrap open C_Grammar_Rule_Wrap_Overloading end \ subsection \Loading the Generated Grammar (SML structure)\ ML_file "../generated/c_grammar_fun.grm.sml" subsection \Grammar Initialization\ subsubsection \Functor Application\ ML \ \\<^file>\../generated/c_grammar_fun.grm.sml\\ \ structure C_Grammar = C_Grammar_Fun (structure Token = LALR_Parser_Eval.Token) \ subsubsection \Mapping Strings to Structured Tokens\ ML \ \\<^file>\../generated/c_grammar_fun.grm.sml\\ \ structure C_Grammar_Tokens = struct local open C_Grammar.Tokens in fun token_of_string error ty_ClangCVersion ty_cChar ty_cFloat ty_cInteger ty_cString ty_ident ty_string a1 a2 = fn "(" => x28 (ty_string, a1, a2) | ")" => x29 (ty_string, a1, a2) | "[" => x5b (ty_string, a1, a2) | "]" => x5d (ty_string, a1, a2) | "->" => x2d_x3e (ty_string, a1, a2) | "." => x2e (ty_string, a1, a2) | "!" => x21 (ty_string, a1, a2) | "~" => x7e (ty_string, a1, a2) | "++" => x2b_x2b (ty_string, a1, a2) | "--" => x2d_x2d (ty_string, a1, a2) | "+" => x2b (ty_string, a1, a2) | "-" => x2d (ty_string, a1, a2) | "*" => x2a (ty_string, a1, a2) | "/" => x2f (ty_string, a1, a2) | "%" => x25 (ty_string, a1, a2) | "&" => x26 (ty_string, a1, a2) | "<<" => x3c_x3c (ty_string, a1, a2) | ">>" => x3e_x3e (ty_string, a1, a2) | "<" => x3c (ty_string, a1, a2) | "<=" => x3c_x3d (ty_string, a1, a2) | ">" => x3e (ty_string, a1, a2) | ">=" => x3e_x3d (ty_string, a1, a2) | "==" => x3d_x3d (ty_string, a1, a2) | "!=" => x21_x3d (ty_string, a1, a2) | "^" => x5e (ty_string, a1, a2) | "|" => x7c (ty_string, a1, a2) | "&&" => x26_x26 (ty_string, a1, a2) | "||" => x7c_x7c (ty_string, a1, a2) | "?" => x3f (ty_string, a1, a2) | ":" => x3a (ty_string, a1, a2) | "=" => x3d (ty_string, a1, a2) | "+=" => x2b_x3d (ty_string, a1, a2) | "-=" => x2d_x3d (ty_string, a1, a2) | "*=" => x2a_x3d (ty_string, a1, a2) | "/=" => x2f_x3d (ty_string, a1, a2) | "%=" => x25_x3d (ty_string, a1, a2) | "&=" => x26_x3d (ty_string, a1, a2) | "^=" => x5e_x3d (ty_string, a1, a2) | "|=" => x7c_x3d (ty_string, a1, a2) | "<<=" => x3c_x3c_x3d (ty_string, a1, a2) | ">>=" => x3e_x3e_x3d (ty_string, a1, a2) | "," => x2c (ty_string, a1, a2) | ";" => x3b (ty_string, a1, a2) | "{" => x7b (ty_string, a1, a2) | "}" => x7d (ty_string, a1, a2) | "..." 
=> x2e_x2e_x2e (ty_string, a1, a2) | x => let val alignof = alignof (ty_string, a1, a2) val alignas = alignas (ty_string, a1, a2) val atomic = x5f_Atomic (ty_string, a1, a2) val asm = asm (ty_string, a1, a2) val auto = auto (ty_string, a1, a2) val break = break (ty_string, a1, a2) val bool = x5f_Bool (ty_string, a1, a2) val case0 = case0 (ty_string, a1, a2) val char = char (ty_string, a1, a2) val const = const (ty_string, a1, a2) val continue = continue (ty_string, a1, a2) val complex = x5f_Complex (ty_string, a1, a2) val default = default (ty_string, a1, a2) val do0 = do0 (ty_string, a1, a2) val double = double (ty_string, a1, a2) val else0 = else0 (ty_string, a1, a2) val enum = enum (ty_string, a1, a2) val extern = extern (ty_string, a1, a2) val float = float (ty_string, a1, a2) val for0 = for0 (ty_string, a1, a2) val generic = x5f_Generic (ty_string, a1, a2) val goto = goto (ty_string, a1, a2) val if0 = if0 (ty_string, a1, a2) val inline = inline (ty_string, a1, a2) val int = int (ty_string, a1, a2) val int128 = x5f_x5f_int_x31_x32_x38 (ty_string, a1, a2) val long = long (ty_string, a1, a2) val label = x5f_x5f_label_x5f_x5f (ty_string, a1, a2) val noreturn = x5f_Noreturn (ty_string, a1, a2) val nullable = x5f_Nullable (ty_string, a1, a2) val nonnull = x5f_Nonnull (ty_string, a1, a2) val register = register (ty_string, a1, a2) val restrict = restrict (ty_string, a1, a2) val return0 = return0 (ty_string, a1, a2) val short = short (ty_string, a1, a2) val signed = signed (ty_string, a1, a2) val sizeof = sizeof (ty_string, a1, a2) val static = static (ty_string, a1, a2) val staticassert = x5f_Static_assert (ty_string, a1, a2) val struct0 = struct0 (ty_string, a1, a2) val switch = switch (ty_string, a1, a2) val typedef = typedef (ty_string, a1, a2) val typeof = typeof (ty_string, a1, a2) val thread = x5f_x5f_thread (ty_string, a1, a2) val union = union (ty_string, a1, a2) val unsigned = unsigned (ty_string, a1, a2) val void = void (ty_string, a1, a2) val volatile = volatile (ty_string, a1, a2) val while0 = while0 (ty_string, a1, a2) val cchar = cchar (ty_cChar, a1, a2) val cint = cint (ty_cInteger, a1, a2) val cfloat = cfloat (ty_cFloat, a1, a2) val cstr = cstr (ty_cString, a1, a2) val ident = ident (ty_ident, a1, a2) val tyident = tyident (ty_ident, a1, a2) val attribute = x5f_x5f_attribute_x5f_x5f (ty_string, a1, a2) val extension = x5f_x5f_extension_x5f_x5f (ty_string, a1, a2) val real = x5f_x5f_real_x5f_x5f (ty_string, a1, a2) val imag = x5f_x5f_imag_x5f_x5f (ty_string, a1, a2) val builtinvaarg = x5f_x5f_builtin_va_arg (ty_string, a1, a2) val builtinoffsetof = x5f_x5f_builtin_offsetof (ty_string, a1, a2) val builtintypescompatiblep = x5f_x5f_builtin_types_compatible_p (ty_string, a1, a2) val clangcversion = clangcversion (ty_ClangCVersion, a1, a2) in case x of "_Alignas" => alignas | "_Alignof" => alignof | "__alignof" => alignof | "alignof" => alignof | "__alignof__" => alignof | "__asm" => asm | "asm" => asm | "__asm__" => asm | "_Atomic" => atomic | "__attribute" => attribute | "__attribute__" => attribute | "auto" => auto | "_Bool" => bool | "break" => break | "__builtin_offsetof" => builtinoffsetof | "__builtin_types_compatible_p" => builtintypescompatiblep | "__builtin_va_arg" => builtinvaarg | "case" => case0 | "char" => char | "_Complex" => complex | "__complex__" => complex | "__const" => const | "const" => const | "__const__" => const | "continue" => continue | "default" => default | "do" => do0 | "double" => double | "else" => else0 | "enum" => enum | "__extension__" => 
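
(* For exposition: the keyword dispatch around this point collapses ISO spellings
   and their GNU double-underscore variants onto a single token; e.g. "const",
   "__const" and "__const__" all return the token bound to const above, and
   "_Thread_local" and "__thread" both return the thread token.  A string matching
   no keyword falls through to the error value passed as the first argument of
   token_of_string. *)
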
extension | "extern" => extern | "float" => float | "for" => for0 | "_Generic" => generic | "goto" => goto | "if" => if0 | "__imag" => imag | "__imag__" => imag | "__inline" => inline | "inline" => inline | "__inline__" => inline | "int" => int | "__int128" => int128 | "__label__" => label | "long" => long | "_Nonnull" => nonnull | "__nonnull" => nonnull | "_Noreturn" => noreturn | "_Nullable" => nullable | "__nullable" => nullable | "__real" => real | "__real__" => real | "register" => register | "__restrict" => restrict | "restrict" => restrict | "__restrict__" => restrict | "return" => return0 | "short" => short | "__signed" => signed | "signed" => signed | "__signed__" => signed | "sizeof" => sizeof | "static" => static | "_Static_assert" => staticassert | "struct" => struct0 | "switch" => switch | "__thread" => thread | "_Thread_local" => thread | "typedef" => typedef | "__typeof" => typeof | "typeof" => typeof | "__typeof__" => typeof | "union" => union | "unsigned" => unsigned | "void" => void | "__volatile" => volatile | "volatile" => volatile | "__volatile__" => volatile | "while" => while0 | _ => error end end end \ end