diff --git a/thys/Isabelle_C/C11-FrontEnd/examples/C1.thy b/thys/Isabelle_C/C11-FrontEnd/examples/C1.thy --- a/thys/Isabelle_C/C11-FrontEnd/examples/C1.thy +++ b/thys/Isabelle_C/C11-FrontEnd/examples/C1.thy @@ -1,649 +1,647 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************) chapter \Appendix III: Examples for the SML Interfaces to Generic and Specific C11 ASTs\ theory C1 imports "../C_Main" begin section\Access to Main C11 AST Categories via the Standard Interface \ text\For the parsing root key's, c.f. ~ \<^verbatim>\C_Command.thy\\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] C\a + b * c - a / b\ ML\val ast_expr = @{C11_CExpr}\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "statement"]] C\a = a + b;\ ML\val ast_stmt = @{C11_CStat}\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "external_declaration"]] C\int m ();\ ML\val ast_ext_decl = @{C11_CExtDecl}\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "translation_unit"]] C\int b; int a = a + b;\ ML\val ast_unit = @{C11_CTranslUnit} val env_unit = @{C\<^sub>e\<^sub>n\<^sub>v} \ text\... and completely low-level in ML:\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] ML\ val src = \a + d\; val ctxt = (Context.Theory @{theory}); val ctxt' = C_Module.C' @{C\<^sub>e\<^sub>n\<^sub>v} src ctxt; val tt = Context.the_theory ctxt'; \ subsection\Queries on C11-Asts via the iterator\ ML\ fun selectIdent0 (a:C11_Ast_Lib.node_content) b c= if #tag a = "Ident0" then a::c else c; (* and here comes the hic >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> *) val S = (C11_Ast_Lib.fold_cTranslationUnit selectIdent0 ast_unit []); (* ... end of hic *) fun print ({args = (C11_Ast_Lib.data_string S)::_::C11_Ast_Lib.data_string S'::[], sub_tag = STAG, tag = TAG} :C11_Ast_Lib.node_content) = let fun dark_matter (x:bstring) = XML.content_of (YXML.parse_body x) in writeln (":>"^dark_matter(S)^"<:>"^(S')^"<:>"^STAG^"<:>"^TAG^"<:") end; app print S; (* these strings are representations for C_Ast.abr_string, where the main constructor is C_Ast.SS_base. 
*) map (YXML.parse_body o (fn {args = (C11_Ast_Lib.data_string S)::_::C11_Ast_Lib.data_string S'::[], sub_tag = _, tag = _} =>S)) S ; \ subsection\A small compiler to Isabelle term's.\ ML\ fun drop_dark_matter x = (XML.content_of o YXML.parse_body) x fun node_content_2_free (x : C11_Ast_Lib.node_content) = let val C11_Ast_Lib.data_string a_markup = hd(#args(x)); val id = hd(tl(String.tokens (fn x => x = #"\"")(drop_dark_matter a_markup))) in Free(id,dummyT) end (* no type inference *); fun selectIdent0Binary (a as { tag, sub_tag, args }:C11_Ast_Lib.node_content) (b: C_Ast.nodeInfo ) (c : term list)= case tag of "Ident0" => (node_content_2_free a)::c |"CBinary0" => (case (drop_dark_matter sub_tag, c) of ("CAddOp0",b::a::R) => (Const("Groups.plus_class.plus",dummyT) $ a $ b :: R) | ("CMulOp0",b::a::R) => (Const("Groups.times_class.times",dummyT) $ a $ b :: R) | ("CDivOp0",b::a::R) => (Const("Rings.divide_class.divide",dummyT) $ a $ b :: R) | ("CSubOp0",b::a::R) => (Const("Groups.minus_class.minus",dummyT) $ a $ b :: R) | _ => (writeln ("sub_tag all " ^sub_tag^" :>> "^ @{make_string} c);c )) | _ => c; \ text\ And here comes the ultra-hic: direct compilation of C11 expressions into (untyped) \\\-terms in Isabelle. The term-list of the @{ML \C11_Ast_Lib.fold_cExpression\} - iterator serves as term-stack in which sub-expressions were stored in reversed polish notation. The example shows that the resulting term is structurally equivalent. 
\ ML\ val S = (C11_Ast_Lib.fold_cExpression selectIdent0Binary ast_expr []); val S' = @{term "a + b * c - a / b"}; \ section \Late-binding a Simplistic Post-Processor for ASTs and ENVs\ subsection\Definition of Core Data Structures\ text\The following setup just stores the result of the parsed values in the environment.\ ML\ structure Data_Out = Generic_Data (type T = (C_Grammar_Rule.ast_generic * C_Antiquote.antiq C_Env.stream) list val empty = [] - val extend = I val merge = K empty) fun get_CTranslUnit thy = let val context = Context.Theory thy in (Data_Out.get context |> map (apfst (C_Grammar_Rule.get_CTranslUnit #> the)), C_Module.Data_In_Env.get context) end fun get_CExpr thy = let val context = Context.Theory thy in (Data_Out.get context |> map (apfst (C_Grammar_Rule.get_CExpr #> the)), C_Module.Data_In_Env.get context) end \ text\... this gives :\ ML\ Data_Out.map: ( (C_Grammar_Rule.ast_generic * C_Antiquote.antiq C_Env.stream) list -> (C_Grammar_Rule.ast_generic * C_Antiquote.antiq C_Env.stream) list) -> Context.generic -> Context.generic \ subsection\Registering A Store-Function in \<^ML>\C_Module.Data_Accept.put\\ text\... as C-method call-back. 
\ setup \Context.theory_map (C_Module.Data_Accept.put (fn ast => fn env_lang => Data_Out.map (cons (ast, #stream_ignored env_lang |> rev))))\ subsection\Registering an ML-Antiquotation with an Access-Function \ ML\ val _ = Theory.setup( ML_Antiquotation.value_embedded \<^binding>\C11_AST_CTranslUnit\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CTranslUnit (Context.the_global_context())")) || Scan.succeed "get_CTranslUnit (Context.the_global_context())")) \ subsection\Accessing the underlying C11-AST's via the ML Interface.\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "translation_unit"]] C\ void swap(int *x,int *y) { int temp; temp = *x; *x = *y; *y = temp; } \ ML\ local open C_Ast in val _ = CTranslUnit0 val (A::R, _) = @{C11_AST_CTranslUnit}; val (CTranslUnit0 (t,u), v) = A fun rule_trans (CTranslUnit0 (t,u), v) = case C_Grammar_Rule_Lib.decode u of Left (p1,p2) => writeln (Position.here p1 ^ " " ^ Position.here p2) | Right S => warning ("Not expecting that value:"^S) val bb = rule_trans A end val (R, env_final) = @{C11_AST_CTranslUnit}; val rules = map rule_trans R; @{C\<^sub>e\<^sub>n\<^sub>v} \ section \Example: A Possible Semantics for \#include\\ subsubsection \Implementation\ text \ The CPP directive \<^C>\#include _\ is used to import signatures of modules in C. This has the effect that imported identifiers are included in the C environment and, as a consequence, appear as constant symbols and not as free variables in the output. \ text \ The following structure is an extra mechanism to define the effect of \<^C>\#include _\ wrt. to its definition in its environment. 
\ ML \ structure Directive_include = Generic_Data (type T = (Input.source * C_Env.markup_ident) list Symtab.table val empty = Symtab.empty - val extend = I val merge = K empty) \ ML \ \\<^theory>\Pure\\ \ local fun return f (env_cond, env) = ([], (env_cond, f env)) val _ = Theory.setup (Context.theory_map (C_Context0.Directives.map (C_Context.directive_update ("include", \<^here>) ( (return o K I) , fn C_Lex.Include (C_Lex.Group2 (toks_bl, _, tok :: _)) => let fun exec file = if exists (fn C_Scan.Left _ => false | C_Scan.Right _ => true) file then K (error ("Unsupported character" ^ Position.here (Position.range_position (C_Lex.pos_of tok, C_Lex.end_pos_of (List.last toks_bl))))) else fn (env_lang, env_tree) => fold (fn (src, data) => fn (env_lang, env_tree) => let val (name, pos) = Input.source_content src in C_Grammar_Rule_Lib.shadowTypedef0'''' name [pos] data env_lang env_tree end) (these (Symtab.lookup (Directive_include.get (#context env_tree)) (String.concat (maps (fn C_Scan.Left s => [s] | _ => []) file)))) (env_lang, env_tree) in case tok of C_Lex.Token (_, (C_Lex.String (_, file), _)) => exec file | C_Lex.Token (_, (C_Lex.File (_, file), _)) => exec file | _ => tap (fn _ => (* not yet implemented *) warning ("Ignored directive" ^ Position.here (Position.range_position ( C_Lex.pos_of tok , C_Lex.end_pos_of (List.last toks_bl))))) end |> K |> K | _ => K (K I))))) in end \ ML \ structure Include = struct fun init name vars = Context.theory_map (Directive_include.map (Symtab.update (name, map (rpair {global = true, params = [], ret = C_Env.Previous_in_stack}) vars))) fun append name vars = Context.theory_map (Directive_include.map (Symtab.map_default (name, []) (rev o fold (cons o rpair {global = true, params = [], ret = C_Env.Previous_in_stack}) vars o rev))) val show = Context.theory_map (Directive_include.map (tap (Symtab.dest #> app (fn (fic, vars) => writeln ("Content of \"" ^ fic ^ "\": " ^ String.concat (map (fn (i, _) => let val (name, pos) = 
Input.source_content i in name ^ Position.here pos ^ " " end) vars)))))) end \ setup \Include.append "stdio.h" [\printf\, \scanf\]\ subsubsection \Tests\ C \ //@ setup \Include.append "tmp" [\b\]\ #include "tmp" int a = b; \ C \ int b = 0; //@ setup \Include.init "tmp" [\b\]\ #include "tmp" int a = b; \ C \ int c = 0; //@ setup \Include.append "tmp" [\c\]\ //@ setup \Include.append "tmp" [\c\]\ #include "tmp" int a = b + c; //@ setup \Include.show\ \ C\ #include #include /*sdfsdf */ #define a B #define b(C) #pragma /* just exists syntaxically */ \ text\In the following, we retrieve the C11 AST parsed above. \ ML\ val ((C_Ast.CTranslUnit0 (t,u), v)::R, env) = @{C11_AST_CTranslUnit}; val u = C_Grammar_Rule_Lib.decode u; C_Ast.CTypeSpec0; \ section \Defining a C-Annotation Commands Language \ ML \ \\<^theory>\Isabelle_C.C_Command\\ \ \ \setup for a dummy ensures : the "Hello World" of Annotation Commands\ local datatype antiq_hol = Term of string (* term *) val scan_opt_colon = Scan.option (C_Parse.$$$ ":") fun msg cmd_name call_pos cmd_pos = tap (fn _ => tracing ("\Hello World\ reported by \"" ^ cmd_name ^ "\" here" ^ call_pos cmd_pos)) fun command (cmd as (cmd_name, _)) scan0 scan f = C_Annotation.command' cmd "" (fn (_, (cmd_pos, _)) => (scan0 -- (scan >> f) >> (fn _ => C_Env.Never |> msg cmd_name Position.here cmd_pos))) in val _ = Theory.setup ( C_Inner_Syntax.command_no_range (C_Inner_Toplevel.generic_theory oo C_Inner_Isar_Cmd.setup \K (K (K I))\) ("loop", \<^here>, \<^here>) #> command ("ensures", \<^here>) scan_opt_colon C_Parse.term Term #> command ("invariant", \<^here>) scan_opt_colon C_Parse.term Term #> command ("assigns", \<^here>) scan_opt_colon C_Parse.term Term #> command ("requires", \<^here>) scan_opt_colon C_Parse.term Term #> command ("variant", \<^here>) scan_opt_colon C_Parse.term Term) end \ C\ /*@ ensures "result >= x && result >= y" */ int max(int x, int y) { if (x > y) return x; else return y; } \ text\What happens on C11 AST level:\ ML\ 
val ((C_Ast.CTranslUnit0 (t,u), v)::R, env) = get_CTranslUnit @{theory}; val u = C_Grammar_Rule_Lib.decode u \ subsection \C Code: Various Annotated Examples\ text\This example suite is drawn from Frama-C and used in our GLA - TPs. \ C\ int sqrt(int a) { int i = 0; int tm = 1; int sum = 1; /*@ loop invariant "1 <= sum <= a+tm" loop invariant "(i+1)*(i+1) == sum" loop invariant "tm+(i*i) == sum" loop invariant "1<=tm<=sum" loop assigns "i, tm, sum" loop variant "a-sum" */ while (sum <= a) { i++; tm = tm + 2; sum = sum + tm; } return i; } \ C\ /*@ requires "n >= 0" requires "valid(t+(0..n-1))" ensures "exists integer i; (0<=i result == 0" ensures "(forall integer i; 0<=i t[i] == 0) <==> result == 1" assigns nothing */ int allzeros(int t[], int n) { int k = 0; /*@ loop invariant "0 <= k <= n" loop invariant "forall integer i; 0<=i t[i] == 0" loop assigns k loop variant "n-k" */ while(k < n) { if (t[k]) return 0; k = k + 1; } return 1; } \ C\ /*@ requires "n >= 0" requires "valid(t+(0..n-1))" ensures "(forall integer i; 0<=i t[i] != v) <==> result == -1" ensures "(exists integer i; 0<=i result == v" assigns nothing */ int binarysearch(int t[], int n, int v) { int l = 0; int u = n-1; /*@ loop invariant false */ while (l <= u) { int m = (l + u) / 2; if (t[m] < v) { l = m + 1; } else if (t[m] > v) { u = m - 1; } else return m; } return -1; } \ C\ /*@ requires "n >= 0" requires "valid(t+(0..n-1))" requires "(forall integer i,j; 0<=i<=j t[i] <= t[j])" ensures "exists integer i; (0<=i result == 1" ensures "(forall integer i; 0<=i t[i] != x) <==> result == 0" assigns nothing */ int linearsearch(int x, int t[], int n) { int i = 0; /*@ loop invariant "0<=i<=n" loop invariant "forall integer j; 0<=j (t[j] != x)" loop assigns i loop variant "n-i" */ while (i < n) { if (t[i] < x) { i++; } else { return (t[i] == x); } } return 0; } \ subsection \Example: An Annotated Sorting Algorithm\ C\ #include int main() { int array[100], n, c, d, position, swap; printf("Enter number of 
elements\n"); scanf("%d", &n); printf("Enter %d integers\n", n); for (c = 0; c < n; c++) scanf("%d", &array[c]); for (c = 0; c < (n - 1); c++) { position = c; for (d = c + 1; d < n; d++) { if (array[position] > array[d]) position = d; } if (position != c) { swap = array[c]; array[c] = array[position]; array[position] = swap; } } printf("Sorted list in ascending order:\n"); for (c = 0; c < n; c++) printf("%d\n", array[c]); return 0; } \ text\A better example implementation:\ C\ #include #include #define SIZE 10 void swap(int *x,int *y); void selection_sort(int* a, const int n); void display(int a[],int size); void main() { int a[SIZE] = {8,5,2,3,1,6,9,4,0,7}; int i; printf("The array before sorting:\n"); display(a,SIZE); selection_sort(a,SIZE); printf("The array after sorting:\n"); display(a,SIZE); } /* swap two integers */ void swap(int *x,int *y) { int temp; temp = *x; *x = *y; *y = temp; } /* perform selection sort */ void selection_sort(int* a,const int size) { int i, j, min; for (i = 0; i < size - 1; i++) { min = i; for (j = i + 1; j < size; j++) { if (a[j] < a[min]) { min = j; } } swap(&a[i], &a[min]); } } /* display array content */ void display(int a[],const int size) { int i; for(i=0; i section \C Code: Floats Exist\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "translation_unit"]] C\ int a; float b; int m() {return 0;} \ end \ No newline at end of file diff --git a/thys/Isabelle_C/C11-FrontEnd/examples/C2.thy b/thys/Isabelle_C/C11-FrontEnd/examples/C2.thy --- a/thys/Isabelle_C/C11-FrontEnd/examples/C2.thy +++ b/thys/Isabelle_C/C11-FrontEnd/examples/C2.thy @@ -1,898 +1,896 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) chapter \Appendix IV : Examples for Annotation Navigation and Context Serialization\ theory C2 imports "../C_Main" "HOL-ex.Cartouche_Examples" begin text \ Operationally, the \<^theory_text>\C\ command can be thought of as behaving as the \<^theory_text>\ML\ command, where it is for example possible to recursively nest C code in C. 
Generally, the present chapter assumes a familiarity with all advance concepts of ML as described in \<^file>\~~/src/HOL/Examples/ML.thy\, as well as the concept of ML antiquotations (\<^file>\~~/src/Doc/Implementation/ML.thy\). However, even if\<^theory_text>\C\ might resemble to \<^theory_text>\ML\, we will now see in detail that there are actually subtle differences between the two commands.\ section \Setup of ML Antiquotations Displaying the Environment (For Debugging) \ ML\ fun print_top make_string f _ (_, (value, _, _)) _ = tap (fn _ => writeln (make_string value)) o f fun print_top' _ f _ _ env = tap (fn _ => writeln ("ENV " ^ C_Env.string_of env)) o f fun print_stack s make_string stack _ _ thy = let val () = Output.information ("SHIFT " ^ (case s of NONE => "" | SOME s => "\"" ^ s ^ "\" ") ^ Int.toString (length stack - 1) ^ " +1 ") val () = stack |> split_list |> #2 |> map_index I |> app (fn (i, (value, pos1, pos2)) => writeln (" " ^ Int.toString (length stack - i) ^ " " ^ make_string value ^ " " ^ Position.here pos1 ^ " " ^ Position.here pos2)) in thy end fun print_stack' s _ stack _ env thy = let val () = Output.information ("SHIFT " ^ (case s of NONE => "" | SOME s => "\"" ^ s ^ "\" ") ^ Int.toString (length stack - 1) ^ " +1 ") val () = writeln ("ENV " ^ C_Env.string_of env) in thy end \ setup \ML_Antiquotation.inline @{binding print_top} (Args.context >> K ("print_top " ^ ML_Pretty.make_string_fn ^ " I"))\ setup \ML_Antiquotation.inline @{binding print_top'} (Args.context >> K ("print_top' " ^ ML_Pretty.make_string_fn ^ " I"))\ setup \ML_Antiquotation.inline @{binding print_stack} (Scan.peek (fn _ => Scan.option Args.text) >> (fn name => ("print_stack " ^ (case name of NONE => "NONE" | SOME s => "(SOME \"" ^ s ^ "\")") ^ " " ^ ML_Pretty.make_string_fn)))\ setup \ML_Antiquotation.inline @{binding print_stack'} (Scan.peek (fn _ => Scan.option Args.text) >> (fn name => ("print_stack' " ^ (case name of NONE => "NONE" | SOME s => "(SOME \"" ^ s ^ "\")") 
^ " " ^ ML_Pretty.make_string_fn)))\ declare[[C_lexer_trace]] section \Introduction to C Annotations: Navigating in the Parsing Stack\ subsection \Basics\ text \ Since the present theory \<^file>\C1.thy\ is depending on \<^theory>\Isabelle_C.C_Lexer_Language\ and \<^theory>\Isabelle_C.C_Parser_Language\, the syntax one is writing in the \<^theory_text>\C\ command is C11. Additionally, \<^file>\C1.thy\ also depends on \<^theory>\Isabelle_C.C_Parser_Annotation\, making it possible to write commands in C comments, called annotation commands, such as \<^theory_text>\\setup\. \ C \ \Nesting ML code in C comments\ \ int a = (((0))); /*@ highlight */ /*@ \setup \@{print_stack}\ */ /*@ \setup \@{print_top}\ */ \ text \ In terms of execution order, nested annotation commands are not pre-filtered out of the C code, but executed when the C code is still being parsed. Since the parser implemented is a LALR parser \<^footnote>\\<^url>\https://en.wikipedia.org/wiki/LALR\\, C tokens are uniquely read and treated from left to right. Thus, each nested command is (supposed by default to be) executed when the parser has already read all C tokens before the comment associated to the nested command, so when the parser is in a particular intermediate parsing step (not necessarily final) \<^footnote>\\<^url>\https://en.wikipedia.org/wiki/Shift-reduce_parser\\. \ text \The command \<^theory_text>\\setup\ is similar to the command \<^theory_text>\setup\ except that the former takes a function with additional arguments. These arguments are precisely depending on the current parsing state. To better examine these arguments, it is convenient to use ML antiquotations (be it for printing, or for doing any regular ML actions like PIDE reporting). Note that, in contrast with \<^theory_text>\setup\, the return type of the \<^theory_text>\\setup\ function is not \<^ML_type>\theory -> theory\ but \<^ML_type>\Context.generic -> Context.generic\. 
\ C \ \Positional navigation: referring to any previous parsed sub-tree in the stack\ \ int a = (((0 + 5))) /*@@ \setup \print_top @{make_string} I\ @ highlight */ * 4; float b = 7 / 3; \ text \The special \@\ symbol makes the command be executed whenever the first element \E\ in the stack is about to be irremediably replaced by a more structured parent element (having \E\ as one of its direct children). It is the parent element which is provided to the ML code. Instead of always referring to the first element of the stack, \N\ consecutive occurrences of \@\ will make the ML code getting as argument the direct parent of the \N\-th element.\ C \ \Positional navigation: referring to any previous parsed sub-tree in the stack\ \ int a = (((0 + 5))) /*@@ highlight */ * 4; int a = (((0 + 5))) /*@& highlight */ * 4; int a = (((0 + 5))) /*@@@@@ highlight */ * 4; int a = (((0 + 5))) /*@&&&& highlight */ * 4; \ text \\&\ behaves as \@\, but instead of always giving the designated direct parent to the ML code, it finds the first parent ancestor making non-trivial changes in the respective grammar rule (a non-trivial change can be for example the registration of the position of the current AST node being built).\ C \ \Positional navigation: moving the comment after a number of C token\ \ int b = 7 / (3) * 50; /*@+++@@ highlight */ long long f (int a) { while (0) { return 0; } } int b = 7 / (3) * 50; \ text \\N\ consecutive occurrences of \+\ will delay the interpretation of the comment, which is ignored at the place it is written. 
The comment is only really considered after the C parser has treated \N\ more tokens.\ C \ \Closing C comments \*/\ must close anything, even when editing ML code\ \ int a = (((0 //@ (* inline *) \setup \fn _ => fn _ => fn _ => fn context => let in (* */ *) context end\ /*@ \setup \(K o K o K) I\ (* * / *) */ ))); \ C \ \Inline comments with antiquotations\ \ /*@ \setup\(K o K o K) (fn x => K x @{con\ text (**)})\ */ // break of line activated everywhere (also in antiquotations) int a = 0; //\ @ \setup\(K o K o K) (fn x => K x @{term \a \ + b\ (* (**) *\ \ )})\ \ subsection \Erroneous Annotations Treated as Regular C Comments\ C \ \Permissive Types of Antiquotations\ \ int a = 0; /*@ \setup (* Errors: Explicit warning + Explicit markup reporting *) */ /** \setup (* Errors: Turned into tracing report information *) */ /** \setup \fn _ => fn _ => fn _ => I\ (* An example of correct syntax accepted as usual *) */ \ C \ \Permissive Types of Antiquotations\ \ int a = 0; /*@ \setup \fn _ => fn _ => fn _ => I\ \setup (* Parsing error of a single command does not propagate to other commands *) \setup \fn _ => fn _ => fn _ => I\ context */ /** \setup \fn _ => fn _ => fn _ => I\ \setup (* Parsing error of a single command does not propagate to other commands *) \setup \fn _ => fn _ => fn _ => I\ context */ /*@ \setup (* Errors in all commands are all rendered *) \setup (* Errors in all commands are all rendered *) \setup (* Errors in all commands are all rendered *) */ /** \setup (* Errors in all commands makes the whole comment considered as an usual comment *) \setup (* Errors in all commands makes the whole comment considered as an usual comment *) \setup (* Errors in all commands makes the whole comment considered as an usual comment *) */ \ subsection \Bottom-Up vs. 
Top-Down Evaluation\ ML\ -structure Example_Data = Generic_Data (type T = string list - val empty = [] val extend = I val merge = K empty) +structure Example_Data = Generic_Data(type T = string list val empty = [] val merge = K empty) + fun add_ex s1 s2 = Example_Data.map (cons s2) #> (fn context => let val () = Output.information (s1 ^ s2) val () = app (fn s => writeln (" Data content: " ^ s)) (Example_Data.get context) in context end) \ setup \Context.theory_map (Example_Data.put [])\ declare[[ML_source_trace]] declare[[C_parser_trace]] C \ \Arbitrary interleaving of effects: \\setup\ vs \\setup\\\ \ int b,c,d/*@@ \setup \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "3_print_top"\ */,e = 0; /*@@ \setup \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "4_print_top"\ */ int b,c,d/*@@ \setup\ \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "6_print_top"\ */,e = 0; /*@@ \setup\ \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "5_print_top"\ */ \ section \Reporting of Positions and Contextual Update of Environment\ text \ To show the content of the parsing environment, the ML antiquotations \print_top'\ and \print_stack'\ will respectively be used instead of \print_top\ and \print_stack\. This example suite allows to explore the bindings represented in the C environment and made accessible in PIDE for hovering. 
\ subsection \Reporting: \typedef\, \enum\\ (*\struct\*) declare [[ML_source_trace = false]] declare [[C_lexer_trace = false]] C \ \Reporting of Positions\ \ typedef int i, j; /*@@ \setup \@{print_top'}\ @highlight */ //@ +++++@ \setup \@{print_top'}\ +++++@highlight int j = 0; typedef int i, j; j jj1 = 0; j jj = jj1; j j = jj1 + jj; typedef i j; typedef i j; typedef i j; i jj = jj; j j = jj; \ C \ \Nesting type definitions\ \ typedef int j; j a = 0; typedef int k; int main (int c) { j b = 0; typedef int k; typedef k l; k a = c; l a = 0; } k a = a; \ C \ \Reporting \enum\\ \ enum a b; // bound case: undeclared enum a {aaa}; // define case enum a {aaa}; // define case: redefined enum a _; // bound case __thread (f ( enum a, enum a vv)); enum a /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.function_definition4\\*/ f (enum a a) { } __thread enum a /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.declaration_specifier2\\*/ f (enum a a) { enum c {ccc}; // define case __thread enum c f (enum c a) { return 0; } enum c /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.nested_function_definition2\\*/ f (enum c a) { return 0; } return 0; } enum z {zz}; // define case int main (enum z *x) /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.parameter_type_list2\\*/ { return zz; } int main (enum a *x, ...) 
/* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.parameter_type_list3\\*/ { return zz; } \ subsection \Continuation Calculus with the C Environment: Presentation in ML\ declare [[C_parser_trace = false]] ML\ val C = C_Module.C val C' = C_Module.C' \ C \ \Nesting C code without propagating the C environment\ \ int a = 0; int b = 7 / (3) * 50 /*@@@@@ \setup \fn _ => fn _ => fn _ => C \int b = a + a + a + a + a + a + a ;\ \ */; \ C \ \Nesting C code and propagating the C environment\ \ int a = 0; int b = 7 / (3) * 50 /*@@@@@ \setup \fn _ => fn _ => fn env => C' env \int b = a + a + a + a + a + a + a ;\ \ */; \ subsection \Continuation Calculus with the C Environment: Presentation with Outer Commands\ ML\ val _ = Theory.setup (C_Inner_Syntax.command0 (fn src => fn context => C' (C_Stack.Data_Lang.get' context |> #2) src context) C_Parse.C_source ("C'", \<^here>, \<^here>, \<^here>)) \ C \ \Nesting C code without propagating the C environment\ \ int f (int a) { int b = 7 / (3) * 50 /*@ C \int b = a + a + a + a + a + a + a;\ */; int c = b + a + a + a + a + a + a; } \ C \ \Nesting C code and propagating the C environment\ \ int f (int a) { int b = 7 / (3) * 50 /*@ C' \int b = a + a + a + a + a + a + a;\ */; int c = b + b + b + b + a + a + a + a + a + a; } \ C \ \Miscellaneous\ \ int f (int a) { int b = 7 / (3) * 50 /*@ C \int b = a + a + a + a + a; //@ C' \int c = b + b + b + b + a;\ \ */; int b = 7 / (3) * 50 /*@ C' \int b = a + a + a + a + a; //@ C' \int c = b + b + b + b + a;\ \ */; int c = b + b + b + b + a + a + a + a + a + a; } \ subsection \Continuation Calculus with the C Environment: Deep-First Nesting vs Breadth-First Folding: Propagation of \<^ML_type>\C_Env.env_lang\\ C \ \Propagation of report environment while manually composing at ML level (with \#>\)\ \ \In \c1 #> c2\, \c1\ and \c2\ should not interfere each other.\ \ //@ ML \fun C_env src _ _ env = C' env src\ int a; int f (int b) { int c = 0; /*@ \setup \fn _ => fn _ => fn env => C' env \int d = a + b + c + 
d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C' env \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ \ */ int e = a + b + c + d; }\ C \ \Propagation of directive environment (evaluated before parsing) to any other annotations (evaluated at parsing time)\ \ #undef int #define int(a,b) int #define int int int a; int f (int b) { int c = 0; /*@ \setup \fn _ => fn _ => fn env => C' env \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C' env \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ \ */ #undef int int e = a + b + c + d; } \ subsection \Continuation Calculus with the C Environment: Deep-First Nesting vs Breadth-First Folding: Propagation of \<^ML_type>\C_Env.env_tree\\ ML\ structure Data_Out = Generic_Data (type T = int val empty = 0 - val extend = I val merge = K empty) fun show_env0 make_string f msg context = Output.information ("(" ^ msg ^ ") " ^ make_string (f (Data_Out.get context))) val show_env = tap o show_env0 @{make_string} I \ setup \Context.theory_map (C_Module.Data_Accept.put (fn _ => fn _ => Data_Out.map (fn x => x + 1)))\ C \ \Propagation of Updates\ \ typedef int i, j; int j = 0; typedef int i, j; j jj1 = 0; j jj = jj1; /*@@ \setup \fn _ => fn _ => fn _ => show_env "POSITION 0"\ @\setup \@{print_top'}\ */ typedef int k; /*@@ \setup \fn _ => fn _ => fn env => C' env \k jj = jj; //@@ \setup \@{print_top'}\ k jj = jj + jj1; typedef k l; //@@ \setup \@{print_top'}\\ #> show_env "POSITION 1"\ */ j j = jj1 + jj; //@@ \setup \@{print_top'}\ typedef i j; /*@@ \setup \fn _ => fn _ => fn _ => show_env "POSITION 2"\ */ typedef i j; typedef i j; i jj = jj; j j = jj; \ ML\show_env "POSITION 3" 
(Context.Theory @{theory})\ setup \Context.theory_map (C_Module.Data_Accept.put (fn _ => fn _ => I))\ subsection \Reporting: Scope of Recursive Functions\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] C \ \Propagation of Updates\ \ int a = 0; int b = a * a + 0; int jjj = b; int main (void main(int *x,int *y),int *jjj) { return a + jjj + main(); } int main2 () { int main3 () { main2() + main(); } int main () { main2() + main(); } return a + jjj + main3() + main(); } \ C \ int main3 () { main2 (); } \ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = empty]] subsection \Reporting: Extensions to Function Types, Array Types\ C \int f (int z);\ C \int * f (int z);\ C \int (* f) (int z /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.declarator1\\*/);\ C \typedef int (* f) (int z);\ C \int f (int z) {}\ C \int * f (int z) {return z;}\ C \int ((* f) (int z1, int z2)) {return z1 + z2;}\ C \int (* (* f) (int z1, int z2)) {return z1 + z2;}\ C \typedef int (* f) (int z); f uuu (int b) {return b;};\ C \typedef int (* (* f) (int z, int z)) (int a); f uuu (int b) {return b;};\ C \struct z { int (* f) (int z); int (* (* ff) (int z)) (int a); };\ C \double (* (* f (int a /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.declarator1\\*/)) (int a, double d)) (char a);\ C \double (* (((* f) []) (int a)) (int b, double c)) (char d) {int a = b + c + d;}\ C \double ((*((f) (int a))) (int a /* \\ \\<^ML>\C_Grammar_Rule_Lib.doFuncParamDeclIdent\\*/, double)) (char c) {int a = 0;}\ C \ \Nesting functions\ \ double (* (* f (int a)) (int a, double)) (char c) { double (* (* f (int a)) (double a, int a)) (char) { return a; } } \ C \ \Old function syntax\ \ f (x) int x; {return x;} \ section \General Isar Commands\ locale zz begin definition "z' = ()" end C \ \Mixing arbitrary commands\ \ int a = 0; int b = a * a + 0; int jjj = b; /*@ @@@ ML \@{lemma \A \ B \ B \ A\ by (ml_tactic \blast_tac ctxt 1\)}\ definition "a' = ()" declare [[ML_source_trace]] lemma (in zz) \A \ B \ B \ A\ by (ml_tactic 
\blast_tac ctxt 1\) definition (in zz) "z = ()" corollary "zz.z' = ()" apply (unfold zz.z'_def) by blast theorem "True &&& True" by (auto, presburger?) */ \ declare [[ML_source_trace = false]] C \ \Backslash newlines must be supported by \<^ML>\C_Token.syntax'\ (in particular in keywords)\ \ //@ lem\ ma (i\ n z\ z) \ \\ AA \ B\ \\ B \ A\ \ A\ b\ y (ml_t\ actic \\ bla\ st_tac c\ txt\ 0\ 001\) \ section \Starting Parsing Rule\ subsection \Basics\ C \ \Parameterizing starting rule\ \ /*@ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "statement"]] C \while (a) {}\ C \a = 2;\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] C \2 + 3\ C \a = 2\ C \a[1]\ C \&a\ C \a\ */ \ subsection \Embedding in Inner Terms\ term \\<^C> \ \default behavior of parsing depending on the activated option\ \0\\ term \\<^C>\<^sub>u\<^sub>n\<^sub>i\<^sub>t \ \force the explicit parsing\ \f () {while (a) {}; return 0;} int a = 0;\\ term \\<^C>\<^sub>d\<^sub>e\<^sub>c\<^sub>l \ \force the explicit parsing\ \int a = 0; \\ term \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r \ \force the explicit parsing\ \a\\ term \\<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t \ \force the explicit parsing\ \while (a) {}\\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "translation_unit"]] term \\<^C> \ \default behavior of parsing depending on the current option\ \int a = 0;\\ subsection \User Defined Setup of Syntax\ setup \C_Module.C_Term.map_expression (fn _ => fn _ => fn _ => @{term "10 :: nat"})\ setup \C_Module.C_Term.map_statement (fn _ => fn _ => fn _ => @{term "20 :: nat"})\ value \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\1\ + \<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t\for (;;);\\ setup \ \redefinition\ \C_Module.C_Term.map_expression (fn _ => fn _ => fn _ => @{term "1000 :: nat"})\ value \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\1\ + \<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t\for (;;);\\ setup \C_Module.C_Term.map_default (fn _ => fn _ => fn _ => @{term "True"})\ subsection \Validity of Context for 
Annotations\ ML \fun fac x = if x = 0 then 1 else x * fac (x - 1)\ ML \ \Execution of annotations in term possible in (the outermost) \<^theory_text>\ML\\ \ \<^term>\ \<^C> \int c = 0; /*@ ML \fac 100\ */\ \ \ definition \ \Execution of annotations in term possible in \<^ML_type>\local_theory\ commands (such as \<^theory_text>\definition\)\ \ term = \<^C> \int c = 0; /*@ ML \fac 100\ */\ \ section \Scopes of Inner and Outer Terms\ ML \ local fun bind scan ((stack1, (to_delay, stack2)), _) = C_Parse.range scan >> (fn (src, range) => C_Env.Parsing ( (stack1, stack2) , ( range , C_Inner_Syntax.bottom_up (fn _ => fn context => ML_Context.exec (tap (fn _ => Syntax.read_term (Context.proof_of context) (Token.inner_syntax_of src))) context) , Symtab.empty , to_delay))) in val _ = Theory.setup ( C_Annotation.command' ("term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r", \<^here>) "" (bind (C_Token.syntax' (Parse.token Parse.cartouche))) #> C_Inner_Syntax.command0 (C_Inner_Toplevel.keep'' o C_Inner_Isar_Cmd.print_term) (C_Token.syntax' (Scan.succeed [] -- Parse.term)) ("term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r", \<^here>, \<^here>, \<^here>)) end \ C \ int z = z; /*@ C \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ */\ term(*outer*) \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \ int z = z; /*@ C \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r 
\\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ */\ term(*outer*) \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] C \ int z = z; /*@ C \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ */\ term(*outer*) \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = empty]] C \ \Propagation of report environment while manually composing at ML level\ \ int a; int f (int b) { int c = 0; /*@ \setup \fn _ => fn _ => fn env => C' env \int d = a + b + c + d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ #> C \int d = a + b + c + d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ #> C' env \int d = a + b + c + 
d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ #> C \int d = a + b + c + d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ \ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ */ int e = a + b + c + d; }\ section \Calculation in Directives\ subsection \Annotation Command Classification\ C \ \Lexing category vs. parsing category\ \ int a = 0; // \ \Category 2: only parsing\ //@ \setup \K (K (K I))\ (* evaluation at parsing *) //@@ \setup\ \K (K (K I))\ (* evaluation at parsing *) //@ highlight (* evaluation at parsing *) //@@ highlight\ (* evaluation at parsing *) // \ \Category 3: with lexing\ //@ #setup I (* evaluation at lexing (and directives resolving) *) //@ setup I (* evaluation at parsing *) //@@ setup\ I (* evaluation at parsing *) //@ #ML I (* evaluation at lexing (and directives resolving) *) //@ ML I (* evaluation at parsing *) //@@ ML\ I (* evaluation at parsing *) //@ #C \\ (* evaluation at lexing (and directives resolving) *) //@ C \\ (* evaluation at parsing *) //@@ C\ \\ (* evaluation at parsing *) \ C \ \Scheduling example\ \ //@+++++ ML \writeln "2"\ int a = 0; //@@ ML\ \writeln "3"\ //@ #ML \writeln "1"\ \ C \ \Scheduling example\ \ //* lemma True by simp //* #lemma True #by simp //* #lemma True by simp //* lemma True #by simp \ C \ \Scheduling example\ \ /*@ lemma \1 = one\ \2 = two\ \two + one = three\ 
by auto #definition [simp]: \three = 3\ #definition [simp]: \two = 2\ #definition [simp]: \one = 1\ */ \ subsection \Generalizing ML Antiquotations with C Directives\ ML \ structure Directive_setup_define = Generic_Data (type T = int val empty = 0 - val extend = I val merge = K empty) fun setup_define1 pos f = C_Directive.setup_define pos (fn toks => fn (name, (pos1, _)) => tap (fn _ => writeln ("Executing " ^ name ^ Position.here pos1 ^ " (only once)")) #> pair (f toks)) (K I) fun setup_define2 pos = C_Directive.setup_define pos (K o pair) \ C \ \General scheme of C antiquotations\ \ /*@ #setup \ \Overloading \#define\\ \ setup_define2 \<^here> (fn (name, (pos1, _)) => op ` Directive_setup_define.get #>> (case name of "f3" => curry op * 152263 | _ => curry op + 1) #> tap (fn (nb, _) => tracing ("Executing antiquotation " ^ name ^ Position.here pos1 ^ " (number = " ^ Int.toString nb ^ ")")) #> uncurry Directive_setup_define.put) \ */ #define f1 #define f2 int a = 0; #define f3 f1 f2 f1 f3 //@ #setup \ \Resetting \#define\\ \setup_define2 \<^here> (K I)\ f3 #define f3 f3 \ C \ \Dynamic token computing in \#define\\ \ //@ #setup \setup_define1 \<^here> (K [])\ #define f int a = 0; f f f f //@ #setup \setup_define1 \<^here> (fn toks => toks @ toks)\ #define f int b = a; f f //@ #setup \setup_define1 \<^here> I\ #define f int a = 0; f f \ section \Miscellaneous\ C \ \Antiquotations acting on a parsed-subtree\ \ # /**/ include // backslash rendered unescaped f(){0 + 0;} /**/ // val _ : theory => 'a => theory # /* context */ if if elif #include if then else ; # /* zzz */ elif /**/ #else\ #define FOO 00 0 "" (( FOO(FOO(a,b,c)) #endif\ C \ \Header-names in directives\ \ #define F #define G "stdio\h" // expecting an error whenever expanded #define H "stdio_h" // can be used anywhere without errors int f = /*F*/ ""; int g = /*G*/ ""; int h = H ""; #include F \ C \ \Parsing tokens as directives only when detecting space symbols before \#\\ \/* */ \ \ // # /* */ define /**/ \ 
a a a /*#include <>*/ // must not be considered as a directive \ C \ \Universal character names in identifiers and Isabelle symbols\ \ #include int main () { char * ó\<^url>ò = "ó\<^url>ò"; printf ("%s", ó\<^url>ò); } \ \\The core lexer ...\ ML\ C_Parse.ML_source \ declare[[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] ML\@{C\<^sub>e\<^sub>n\<^sub>v}\ ML\C_Stack.Data_Lang.get' : Context.generic -> (LALR_Table.state, C_Grammar_Rule.svalue0, Position.T) C_Env.stack_elem0 list * C_Env.env_lang; C_Parse.C_source: Input.source C_Parse.parser ; C_Inner_Syntax.command0 ; C'; C; \ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] ML\ val src = \a + b\; val ctxt = (Context.Proof @{context}); val ctxt' = C' @{C\<^sub>e\<^sub>n\<^sub>v} src ctxt; C_Module.Data_In_Env.get ctxt' \ ML\val _ = @{term \3::nat\}\ ML\ ML_Antiquotation.inline_embedded; \ (* and from where do I get the result ? *) end diff --git a/thys/Isabelle_C/C11-FrontEnd/examples/C3.thy b/thys/Isabelle_C/C11-FrontEnd/examples/C3.thy --- a/thys/Isabelle_C/C11-FrontEnd/examples/C3.thy +++ b/thys/Isabelle_C/C11-FrontEnd/examples/C3.thy @@ -1,865 +1,862 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 
* * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) chapter \Annex IV : Examples for Annotation Navigation and Context Serialization\ theory C3 imports "../C_Main" "HOL-ex.Cartouche_Examples" (* This dependency should be eliminated.*) begin text \ Operationally, the \<^theory_text>\C\ command can be thought of as behaving as \<^theory_text>\ML\, where it is for example possible to recursively nest C code in C. Generally, the present chapter assumes a familiarity with all advanced concepts of ML as described in \<^file>\~~/src/HOL/Examples/ML.thy\, as well as the concept of ML antiquotations (\<^file>\~~/src/Doc/Implementation/ML.thy\).
However, even if \<^theory_text>\C\ might resemble to \<^theory_text>\ML\, we will now see in detail that there are actually subtle differences between the two commands.\ section \Setup of ML Antiquotations Displaying the Environment (For Debugging) \ ML\ fun print_top make_string f _ (_, (value, _, _)) _ = tap (fn _ => writeln (make_string value)) o f fun print_top' _ f _ _ env = tap (fn _ => writeln ("ENV " ^ C_Env.string_of env)) o f fun print_stack s make_string stack _ _ thy = let val () = Output.information ("SHIFT " ^ (case s of NONE => "" | SOME s => "\"" ^ s ^ "\" ") ^ Int.toString (length stack - 1) ^ " +1 ") val () = stack |> split_list |> #2 |> map_index I |> app (fn (i, (value, pos1, pos2)) => writeln (" " ^ Int.toString (length stack - i) ^ " " ^ make_string value ^ " " ^ Position.here pos1 ^ " " ^ Position.here pos2)) in thy end fun print_stack' s _ stack _ env thy = let val () = Output.information ("SHIFT " ^ (case s of NONE => "" | SOME s => "\"" ^ s ^ "\" ") ^ Int.toString (length stack - 1) ^ " +1 ") val () = writeln ("ENV " ^ C_Env.string_of env) in thy end \ setup \ML_Antiquotation.inline @{binding print_top} (Args.context >> K ("print_top " ^ ML_Pretty.make_string_fn ^ " I"))\ setup \ML_Antiquotation.inline @{binding print_top'} (Args.context >> K ("print_top' " ^ ML_Pretty.make_string_fn ^ " I"))\ setup \ML_Antiquotation.inline @{binding print_stack} (Scan.peek (fn _ => Scan.option Args.text) >> (fn name => ("print_stack " ^ (case name of NONE => "NONE" | SOME s => "(SOME \"" ^ s ^ "\")") ^ " " ^ ML_Pretty.make_string_fn)))\ setup \ML_Antiquotation.inline @{binding print_stack'} (Scan.peek (fn _ => Scan.option Args.text) >> (fn name => ("print_stack' " ^ (case name of NONE => "NONE" | SOME s => "(SOME \"" ^ s ^ "\")") ^ " " ^ ML_Pretty.make_string_fn)))\ declare[[C_lexer_trace]] section \Introduction to C Annotations: Navigating in the Parsing Stack\ subsection \Basics\ text \ Since the present theory \<^file>\C1.thy\ is depending on 
\<^theory>\Isabelle_C.C_Lexer_Language\ and \<^theory>\Isabelle_C.C_Parser_Language\, the syntax one is writing in the \<^theory_text>\C\ command is C11. Additionally, \<^file>\C1.thy\ also depends on \<^theory>\Isabelle_C.C_Parser_Annotation\, making it possible to write commands in C comments, called annotation commands, such as \<^theory_text>\\setup\. \ C \ \Nesting ML code in C comments\ \ int a = (((0))); /*@ highlight */ /*@ \setup \@{print_stack}\ */ /*@ \setup \@{print_top}\ */ \ text \ In terms of execution order, nested annotation commands are not pre-filtered out of the C code, but executed when the C code is still being parsed. Since the parser implemented is a LALR parser \<^footnote>\\<^url>\https://en.wikipedia.org/wiki/LALR\\, C tokens are uniquely read and treated from left to right. Thus, each nested command is (supposed by default to be) executed when the parser has already read all C tokens before the comment associated to the nested command, so when the parser is in a particular intermediate parsing step (not necessarily final) \<^footnote>\\<^url>\https://en.wikipedia.org/wiki/Shift-reduce_parser\\. \ text \The command \<^theory_text>\\setup\ is similar to the command \<^theory_text>\setup\ except that the former takes a function with additional arguments. These arguments are precisely depending on the current parsing state. To better examine these arguments, it is convenient to use ML antiquotations (be it for printing, or for doing any regular ML actions like PIDE reporting). Note that, in contrast with \<^theory_text>\setup\, the return type of the \<^theory_text>\\setup\ function is not \<^ML_type>\theory -> theory\ but \<^ML_type>\Context.generic -> Context.generic\. 
\ C \ \Positional navigation: referring to any previous parsed sub-tree in the stack\ \ int a = (((0 + 5))) /*@@ \setup \print_top @{make_string} I\ @ highlight */ * 4; float b = 7 / 3; \ text \The special \@\ symbol makes the command be executed whenever the first element \E\ in the stack is about to be irremediably replaced by a more structured parent element (having \E\ as one of its direct children). It is the parent element which is provided to the ML code. Instead of always referring to the first element of the stack, \N\ consecutive occurrences of \@\ will make the ML code getting as argument the direct parent of the \N\-th element.\ C \ \Positional navigation: referring to any previous parsed sub-tree in the stack\ \ int a = (((0 + 5))) /*@@ highlight */ * 4; int a = (((0 + 5))) /*@& highlight */ * 4; int a = (((0 + 5))) /*@@@@@ highlight */ * 4; int a = (((0 + 5))) /*@&&&& highlight */ * 4; \ text \\&\ behaves as \@\, but instead of always giving the designated direct parent to the ML code, it finds the first parent ancestor making non-trivial changes in the respective grammar rule (a non-trivial change can be for example the registration of the position of the current AST node being built).\ C \ \Positional navigation: moving the comment after a number of C token\ \ int b = 7 / (3) * 50; /*@+++@@ highlight */ long long f (int a) { while (0) { return 0; } } int b = 7 / (3) * 50; \ text \\N\ consecutive occurrences of \+\ will delay the interpretation of the comment, which is ignored at the place it is written. 
The comment is only really considered after the C parser has treated \N\ more tokens.\ C \ \Closing C comments \*/\ must close anything, even when editing ML code\ \ int a = (((0 //@ (* inline *) \setup \fn _ => fn _ => fn _ => fn context => let in (* */ *) context end\ /*@ \setup \(K o K o K) I\ (* * / *) */ ))); \ C \ \Inline comments with antiquotations\ \ /*@ \setup\(K o K o K) (fn x => K x @{con\ text (**)})\ */ // break of line activated everywhere (also in antiquotations) int a = 0; //\ @ \setup\(K o K o K) (fn x => K x @{term \a \ + b\ (* (**) *\ \ )})\ \ subsection \Erroneous Annotations Treated as Regular C Comments\ C \ \Permissive Types of Antiquotations\ \ int a = 0; /*@ \setup (* Errors: Explicit warning + Explicit markup reporting *) */ /** \setup (* Errors: Turned into tracing report information *) */ /** \setup \fn _ => fn _ => fn _ => I\ (* An example of correct syntax accepted as usual *) */ \ C \ \Permissive Types of Antiquotations\ \ int a = 0; /*@ \setup \fn _ => fn _ => fn _ => I\ \setup (* Parsing error of a single command does not propagate to other commands *) \setup \fn _ => fn _ => fn _ => I\ context */ /** \setup \fn _ => fn _ => fn _ => I\ \setup (* Parsing error of a single command does not propagate to other commands *) \setup \fn _ => fn _ => fn _ => I\ context */ /*@ \setup (* Errors in all commands are all rendered *) \setup (* Errors in all commands are all rendered *) \setup (* Errors in all commands are all rendered *) */ /** \setup (* Errors in all commands makes the whole comment considered as an usual comment *) \setup (* Errors in all commands makes the whole comment considered as an usual comment *) \setup (* Errors in all commands makes the whole comment considered as an usual comment *) */ \ subsection \Bottom-Up vs. 
Top-Down Evaluation\ ML\ -structure Example_Data = Generic_Data (type T = string list - val empty = [] val extend = I val merge = K empty) +structure Example_Data = Generic_Data (type T = string list val empty = [] val merge = K empty) fun add_ex s1 s2 = Example_Data.map (cons s2) #> (fn context => let val () = Output.information (s1 ^ s2) val () = app (fn s => writeln (" Data content: " ^ s)) (Example_Data.get context) in context end) \ setup \Context.theory_map (Example_Data.put [])\ declare[[ML_source_trace]] declare[[C_parser_trace]] C \ \Arbitrary interleaving of effects: \\setup\ vs \\setup\\\ \ int b,c,d/*@@ \setup \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "3_print_top"\ */,e = 0; /*@@ \setup \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "4_print_top"\ */ int b,c,d/*@@ \setup\ \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "6_print_top"\ */,e = 0; /*@@ \setup\ \fn s => fn x => fn env => @{print_top} s x env #> add_ex "evaluation of " "5_print_top"\ */ \ section \Reporting of Positions and Contextual Update of Environment\ text \ To show the content of the parsing environment, the ML antiquotations \print_top'\ and \print_stack'\ will respectively be used instead of \print_top\ and \print_stack\. This example suite allows to explore the bindings represented in the C environment and made accessible in PIDE for hovering. 
\ subsection \Reporting: \typedef\, \enum\\ (*\struct\*) declare [[ML_source_trace = false]] declare [[C_lexer_trace = false]] C \ \Reporting of Positions\ \ typedef int i, j; /*@@ \setup \@{print_top'}\ @highlight */ //@ +++++@ \setup \@{print_top'}\ +++++@highlight int j = 0; typedef int i, j; j jj1 = 0; j jj = jj1; j j = jj1 + jj; typedef i j; typedef i j; typedef i j; i jj = jj; j j = jj; \ C \ \Nesting type definitions\ \ typedef int j; j a = 0; typedef int k; int main (int c) { j b = 0; typedef int k; typedef k l; k a = c; l a = 0; } k a = a; \ C \ \Reporting \enum\\ \ enum a b; // bound case: undeclared enum a {aaa}; // define case enum a {aaa}; // define case: redefined enum a _; // bound case __thread (f ( enum a, enum a vv)); enum a /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.function_definition4\\*/ f (enum a a) { } __thread enum a /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.declaration_specifier2\\*/ f (enum a a) { enum c {ccc}; // define case __thread enum c f (enum c a) { return 0; } enum c /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.nested_function_definition2\\*/ f (enum c a) { return 0; } return 0; } enum z {zz}; // define case int main (enum z *x) /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.parameter_type_list2\\*/ { return zz; } int main (enum a *x, ...) 
/* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.parameter_type_list3\\*/ { return zz; } \ subsection \Continuation Calculus with the C Environment: Presentation in ML\ declare [[C_parser_trace = false]] ML\ val C = tap o C_Module.C val C' = C_Module.C' \ C \ \Nesting C code without propagating the C environment\ \ int a = 0; int b = 7 / (3) * 50 /*@@@@@ \setup \fn _ => fn _ => fn _ => C \int b = a + a + a + a + a + a + a ;\ \ */; \ C \ \Nesting C code and propagating the C environment\ \ int a = 0; int b = 7 / (3) * 50 /*@@@@@ \setup \fn _ => fn _ => fn env => C' env \int b = a + a + a + a + a + a + a ;\ \ */; \ subsection \Continuation Calculus with the C Environment: Presentation with Outer Commands\ ML\ val _ = Theory.setup (C_Inner_Syntax.command0 (fn src => fn context => C' (C_Stack.Data_Lang.get' context |> #2) src context) C_Parse.C_source ("C'", \<^here>, \<^here>, \<^here>)) \ C \ \Nesting C code without propagating the C environment\ \ int f (int a) { int b = 7 / (3) * 50 /*@ C \int b = a + a + a + a + a + a + a;\ */; int c = b + a + a + a + a + a + a; } \ C \ \Nesting C code and propagating the C environment\ \ int f (int a) { int b = 7 / (3) * 50 /*@ C' \int b = a + a + a + a + a + a + a;\ */; int c = b + b + b + b + a + a + a + a + a + a; } \ C \ \Miscellaneous\ \ int f (int a) { int b = 7 / (3) * 50 /*@ C \int b = a + a + a + a + a; //@ C' \int c = b + b + b + b + a;\ \ */; int b = 7 / (3) * 50 /*@ C' \int b = a + a + a + a + a; //@ C' \int c = b + b + b + b + a;\ \ */; int c = b + b + b + b + a + a + a + a + a + a; } \ subsection \Continuation Calculus with the C Environment: Deep-First Nesting vs Breadth-First Folding: Propagation of \<^ML_type>\C_Env.env_lang\\ C \ \Propagation of report environment while manually composing at ML level (with \#>\)\ \ \In \c1 #> c2\, \c1\ and \c2\ should not interfere each other.\ \ //@ ML \fun C_env src _ _ env = C' env src\ int a; int f (int b) { int c = 0; /*@ \setup \fn _ => fn _ => fn env => C' env \int d = a + b 
+ c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C' env \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ \ */ int e = a + b + c + d; }\ C \ \Propagation of directive environment (evaluated before parsing) to any other annotations (evaluated at parsing time)\ \ #undef int #define int(a,b) int #define int int int a; int f (int b) { int c = 0; /*@ \setup \fn _ => fn _ => fn env => C' env \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C' env \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ #> C \int d = a + b + c + d; //@ \setup \C_env \int e = a + b + c + d;\\\ \ */ #undef int int e = a + b + c + d; } \ subsection \Continuation Calculus with the C Environment: Deep-First Nesting vs Breadth-First Folding: Propagation of \<^ML_type>\C_Env.env_tree\\ ML\ structure Data_Out = Generic_Data (type T = int val empty = 0 - val extend = I val merge = K empty) fun show_env0 make_string f msg context = Output.information ("(" ^ msg ^ ") " ^ make_string (f (Data_Out.get context))) val show_env = tap o show_env0 @{make_string} I \ setup \Context.theory_map (C_Module.Data_Accept.put (fn _ => fn _ => Data_Out.map (fn x => x + 1)))\ C \ \Propagation of Updates\ \ typedef int i, j; int j = 0; typedef int i, j; j jj1 = 0; j jj = jj1; /*@@ \setup \fn _ => fn _ => fn _ => show_env "POSITION 0"\ @\setup \@{print_top'}\ */ typedef int k; /*@@ \setup \fn _ => fn _ => fn env => C' env \k jj = jj; //@@ \setup \@{print_top'}\ k jj = jj + jj1; typedef k l; //@@ \setup \@{print_top'}\\ #> show_env "POSITION 1"\ */ j j = jj1 + jj; //@@ \setup \@{print_top'}\ typedef i j; /*@@ \setup \fn _ => fn _ => fn _ => show_env "POSITION 2"\ */ typedef i j; typedef i j; i jj = jj; j j = jj; \ ML\show_env "POSITION 3" 
(Context.Theory @{theory})\ setup \Context.theory_map (C_Module.Data_Accept.put (fn _ => fn _ => I))\ subsection \Reporting: Scope of Recursive Functions\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] C \ \Propagation of Updates\ \ int a = 0; int b = a * a + 0; int jjj = b; int main (void main(int *x,int *y),int *jjj) { return a + jjj + main(); } int main2 () { int main3 () { main2() + main(); } int main () { main2() + main(); } return a + jjj + main3() + main(); } \ C \ int main3 () { main2 (); } \ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = empty]] subsection \Reporting: Extensions to Function Types, Array Types\ C \int f (int z);\ C \int * f (int z);\ C \int (* f) (int z /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.declarator1\\*/);\ C \typedef int (* f) (int z);\ C \int f (int z) {}\ C \int * f (int z) {return z;}\ C \int ((* f) (int z1, int z2)) {return z1 + z2;}\ C \int (* (* f) (int z1, int z2)) {return z1 + z2;}\ C \typedef int (* f) (int z); f uuu (int b) {return b;};\ C \typedef int (* (* f) (int z, int z)) (int a); f uuu (int b) {return b;};\ C \struct z { int (* f) (int z); int (* (* ff) (int z)) (int a); };\ C \double (* (* f (int a /* \\ \\<^ML>\C_Grammar_Rule_Wrap_Overloading.declarator1\\*/)) (int a, double d)) (char a);\ C \double (* (((* f) []) (int a)) (int b, double c)) (char d) {int a = b + c + d;}\ C \double ((*((f) (int a))) (int a /* \\ \\<^ML>\C_Grammar_Rule_Lib.doFuncParamDeclIdent\\*/, double)) (char c) {int a = 0;}\ C \ \Nesting functions\ \ double (* (* f (int a)) (int a, double)) (char c) { double (* (* f (int a)) (double a, int a)) (char) { return a; } } \ C \ \Old function syntax\ \ f (x) int x; {return x;} \ section \General Isar Commands\ locale zz begin definition "z' = ()" end C \ \Mixing arbitrary commands\ \ int a = 0; int b = a * a + 0; int jjj = b; /*@ @@@ ML \@{lemma \A \ B \ B \ A\ by (ml_tactic \blast_tac ctxt 1\)}\ definition "a' = ()" declare [[ML_source_trace]] lemma (in zz) \A \ B \ B \ A\ by (ml_tactic 
\blast_tac ctxt 1\) definition (in zz) "z = ()" corollary "zz.z' = ()" apply (unfold zz.z'_def) by blast theorem "True &&& True" by (auto, presburger?) */ \ declare [[ML_source_trace = false]] C \ \Backslash newlines must be supported by \<^ML>\C_Token.syntax'\ (in particular in keywords)\ \ //@ lem\ ma (i\ n z\ z) \ \\ AA \ B\ \\ B \ A\ \ A\ b\ y (ml_t\ actic \\ bla\ st_tac c\ txt\ 0\ 001\) \ section \Starting Parsing Rule\ subsection \Basics\ C \ \Parameterizing starting rule\ \ /*@ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "statement"]] C \while (a) {}\ C \a = 2;\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] C \2 + 3\ C \a = 2\ C \a[1]\ C \&a\ C \a\ */ \ subsection \Embedding in Inner Terms\ term \\<^C> \ \default behavior of parsing depending on the activated option\ \0\\ term \\<^C>\<^sub>u\<^sub>n\<^sub>i\<^sub>t \ \force the explicit parsing\ \f () {while (a) {}; return 0;} int a = 0;\\ term \\<^C>\<^sub>d\<^sub>e\<^sub>c\<^sub>l \ \force the explicit parsing\ \int a = 0; \\ term \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r \ \force the explicit parsing\ \a\\ term \\<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t \ \force the explicit parsing\ \while (a) {}\\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "translation_unit"]] term \\<^C> \ \default behavior of parsing depending on the current option\ \int a = 0;\\ subsection \User Defined Setup of Syntax\ setup \C_Module.C_Term.map_expression (fn _ => fn _ => fn _ => @{term "10 :: nat"})\ setup \C_Module.C_Term.map_statement (fn _ => fn _ => fn _ => @{term "20 :: nat"})\ value \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\1\ + \<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t\for (;;);\\ setup \ \redefinition\ \C_Module.C_Term.map_expression (fn _ => fn _ => fn _ => @{term "1000 :: nat"})\ value \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\1\ + \<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t\for (;;);\\ setup \C_Module.C_Term.map_default (fn _ => fn _ => fn _ => @{term "True"})\ subsection \Validity of Context for 
Annotations\ ML \fun fac x = if x = 0 then 1 else x * fac (x - 1)\ ML \ \Execution of annotations in term possible in (the outermost) \<^theory_text>\ML\\ \ \<^term>\ \<^C> \int c = 0; /*@ ML \fac 100\ */\ \ \ definition \ \Execution of annotations in term possible in \<^ML_type>\local_theory\ commands (such as \<^theory_text>\definition\)\ \ term = \<^C> \int c = 0; /*@ ML \fac 100\ */\ \ section \Scopes of Inner and Outer Terms\ ML \ local fun bind scan ((stack1, (to_delay, stack2)), _) = C_Parse.range scan >> (fn (src, range) => C_Env.Parsing ( (stack1, stack2) , ( range , C_Inner_Syntax.bottom_up (fn _ => fn context => ML_Context.exec (tap (fn _ => Syntax.read_term (Context.proof_of context) (Token.inner_syntax_of src))) context) , Symtab.empty , to_delay))) in val _ = Theory.setup ( C_Annotation.command' ("term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r", \<^here>) "" (bind (C_Token.syntax' (Parse.token Parse.cartouche))) #> C_Inner_Syntax.command0 (C_Inner_Toplevel.keep'' o C_Inner_Isar_Cmd.print_term) (C_Token.syntax' (Scan.succeed [] -- Parse.term)) ("term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r", \<^here>, \<^here>, \<^here>)) end \ C \ int z = z; /*@ C \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ */\ term(*outer*) \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \ int z = z; /*@ C \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r 
\\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ */\ term(*outer*) \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] C \ int z = z; /*@ C \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ C \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ C' \//@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\\ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ */\ term(*outer*) \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\z\\ declare [[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = empty]] C \ \Propagation of report environment while manually composing at ML level\ \ int a; int f (int b) { int c = 0; /*@ \setup \fn _ => fn _ => fn env => C' env \int d = a + b + c + d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ #> C \int d = a + b + c + d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ #> C' env \int d = a + b + c + 
d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ #> C \int d = a + b + c + d; //@ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\\ \ term\<^sub>i\<^sub>n\<^sub>n\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ term\<^sub>o\<^sub>u\<^sub>t\<^sub>e\<^sub>r \\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\c\ + \<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r\d\\ */ int e = a + b + c + d; }\ section \Calculation in Directives\ subsection \Annotation Command Classification\ C \ \Lexing category vs. parsing category\ \ int a = 0; // \ \Category 2: only parsing\ //@ \setup \K (K (K I))\ (* evaluation at parsing *) //@@ \setup\ \K (K (K I))\ (* evaluation at parsing *) //@ highlight (* evaluation at parsing *) //@@ highlight\ (* evaluation at parsing *) // \ \Category 3: with lexing\ //@ #setup I (* evaluation at lexing (and directives resolving) *) //@ setup I (* evaluation at parsing *) //@@ setup\ I (* evaluation at parsing *) //@ #ML I (* evaluation at lexing (and directives resolving) *) //@ ML I (* evaluation at parsing *) //@@ ML\ I (* evaluation at parsing *) //@ #C \\ (* evaluation at lexing (and directives resolving) *) //@ C \\ (* evaluation at parsing *) //@@ C\ \\ (* evaluation at parsing *) \ C \ \Scheduling example\ \ //@+++++ ML \writeln "2"\ int a = 0; //@@ ML\ \writeln "3"\ //@ #ML \writeln "1"\ \ C \ \Scheduling example\ \ //* lemma True by simp //* #lemma True #by simp //* #lemma True by simp //* lemma True #by simp \ C \ \Scheduling example\ \ /*@ lemma \1 = one\ \2 = two\ \two + one = three\ 
by auto #definition [simp]: \three = 3\ #definition [simp]: \two = 2\ #definition [simp]: \one = 1\ */ \ subsection \Generalizing ML Antiquotations with C Directives\ ML \ structure Directive_setup_define = Generic_Data (type T = int val empty = 0 - val extend = I val merge = K empty) fun setup_define1 pos f = C_Directive.setup_define pos (fn toks => fn (name, (pos1, _)) => tap (fn _ => writeln ("Executing " ^ name ^ Position.here pos1 ^ " (only once)")) #> pair (f toks)) (K I) fun setup_define2 pos = C_Directive.setup_define pos (K o pair) \ C \ \General scheme of C antiquotations\ \ /*@ #setup \ \Overloading \#define\\ \ setup_define2 \<^here> (fn (name, (pos1, _)) => op ` Directive_setup_define.get #>> (case name of "f3" => curry op * 152263 | _ => curry op + 1) #> tap (fn (nb, _) => tracing ("Executing antiquotation " ^ name ^ Position.here pos1 ^ " (number = " ^ Int.toString nb ^ ")")) #> uncurry Directive_setup_define.put) \ */ #define f1 #define f2 int a = 0; #define f3 f1 f2 f1 f3 //@ #setup \ \Resetting \#define\\ \setup_define2 \<^here> (K I)\ f3 #define f3 f3 \ C \ \Dynamic token computing in \#define\\ \ //@ #setup \setup_define1 \<^here> (K [])\ #define f int a = 0; f f f f //@ #setup \setup_define1 \<^here> (fn toks => toks @ toks)\ #define f int b = a; f f //@ #setup \setup_define1 \<^here> I\ #define f int a = 0; f f \ section \Miscellaneous\ C \ \Antiquotations acting on a parsed-subtree\ \ # /**/ include // backslash rendered unescaped f(){0 + 0;} /**/ // val _ : theory => 'a => theory # /* context */ if if elif #include if then else ; # /* zzz */ elif /**/ #else\ #define FOO 00 0 "" (( FOO(FOO(a,b,c)) #endif\ C \ \Header-names in directives\ \ #define F #define G "stdio\h" // expecting an error whenever expanded #define H "stdio_h" // can be used anywhere without errors int f = /*F*/ ""; int g = /*G*/ ""; int h = H ""; #include F \ C \ \Parsing tokens as directives only when detecting space symbols before \#\\ \/* */ \ \ // # /* */ define /**/ \ 
a a a /*#include <>*/ // must not be considered as a directive \ C \ \Universal character names in identifiers and Isabelle symbols\ \ #include int main () { char * ó\<^url>ò = "ó\<^url>ò"; printf ("%s", ó\<^url>ò); } \ end diff --git a/thys/Isabelle_C/C11-FrontEnd/examples/C4.thy b/thys/Isabelle_C/C11-FrontEnd/examples/C4.thy --- a/thys/Isabelle_C/C11-FrontEnd/examples/C4.thy +++ b/thys/Isabelle_C/C11-FrontEnd/examples/C4.thy @@ -1,631 +1,628 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) chapter \Annex V: Examples for A Simple C Program with Directives and Annotations\ theory C4 imports "../C_Main" begin section \A Simplistic Setup: Parse and Store\ text\The following setup just stores the result of the parsed values in the environment.\ ML\ structure Data_Out = Generic_Data (type T = (C_Grammar_Rule.ast_generic * C_Antiquote.antiq C_Env.stream) list val empty = [] - val extend = I val merge = K empty) fun get_CTranslUnit thy = let val context = Context.Theory thy in (Data_Out.get context |> map (apfst (C_Grammar_Rule.get_CTranslUnit #> the)), C_Module.Data_In_Env.get context) end fun get_CExpr thy = let val context = Context.Theory thy in (Data_Out.get context |> map (apfst (C_Grammar_Rule.get_CExpr #> the)), C_Module.Data_In_Env.get context) end \ text\Und hier setzen wir per callback die Petze:\ ML\ Data_Out.map: ( (C_Grammar_Rule.ast_generic * C_Antiquote.antiq C_Env.stream) list -> (C_Grammar_Rule.ast_generic * C_Antiquote.antiq C_Env.stream) list) -> Context.generic -> Context.generic \ ML\val SPY = Unsynchronized.ref([]:C_Grammar_Rule.ast_generic list)\ setup \Context.theory_map (C_Module.Data_Accept.put (fn ast => fn env_lang => let val _ = (SPY:= ast:: !SPY) in Data_Out.map (cons (ast, #stream_ignored env_lang |> rev)) end))\ ML\ val _ = Theory.setup( ML_Antiquotation.value_embedded \<^binding>\C11_AST_CTranslUnit\ (Args.context -- Scan.lift 
Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CTranslUnit (Context.the_global_context())")) || Scan.succeed "get_CTranslUnit (Context.the_global_context())")) \ section \Example: A Possible Semantics for \#include\\ subsection \Implementation\ text \ The CPP directive \<^C>\#include _\ is used to import signatures of modules in C. This has the effect that imported identifiers are included in the C environment and, as a consequence, appear as constant symbols and not as free variables in the output. \ text \ The following structure is an extra mechanism to define the effect of \<^C>\#include _\ wrt. to its definition in its environment. \ ML \ structure Directive_include = Generic_Data (type T = (Input.source * C_Env.markup_ident) list Symtab.table val empty = Symtab.empty - val extend = I val merge = K empty) \ ML \ \\<^theory>\Pure\\ \ local fun return f (env_cond, env) = ([], (env_cond, f env)) val _ = Theory.setup (Context.theory_map (C_Context0.Directives.map (C_Context.directive_update ("include", \<^here>) ( (return o K I) , fn C_Lex.Include (C_Lex.Group2 (toks_bl, _, tok :: _)) => let fun exec file = if exists (fn C_Scan.Left _ => false | C_Scan.Right _ => true) file then K (error ("Unsupported character" ^ Position.here (Position.range_position (C_Lex.pos_of tok, C_Lex.end_pos_of (List.last toks_bl))))) else fn (env_lang, env_tree) => fold (fn (src, data) => fn (env_lang, env_tree) => let val (name, pos) = Input.source_content src in C_Grammar_Rule_Lib.shadowTypedef0'''' name [pos] data env_lang env_tree end) (these (Symtab.lookup (Directive_include.get (#context env_tree)) (String.concat (maps (fn C_Scan.Left s => [s] | _ => []) file)))) (env_lang, env_tree) in case tok of C_Lex.Token (_, (C_Lex.String (_, file), _)) => exec file | C_Lex.Token (_, (C_Lex.File (_, file), _)) => exec file | _ => tap (fn _ => (* not yet implemented *) warning ("Ignored directive" ^ Position.here (Position.range_position ( 
C_Lex.pos_of tok , C_Lex.end_pos_of (List.last toks_bl))))) end |> K |> K | _ => K (K I))))) in end \ ML \ structure Include = struct fun init name vars = Context.theory_map (Directive_include.map (Symtab.update (name, map (rpair {global = true, params = [], ret = C_Env.Previous_in_stack}) vars))) fun append name vars = Context.theory_map (Directive_include.map (Symtab.map_default (name, []) (rev o fold (cons o rpair {global = true, params = [], ret = C_Env.Previous_in_stack}) vars o rev))) val show = Context.theory_map (Directive_include.map (tap (Symtab.dest #> app (fn (fic, vars) => writeln ("Content of \"" ^ fic ^ "\": " ^ String.concat (map (fn (i, _) => let val (name, pos) = Input.source_content i in name ^ Position.here pos ^ " " end) vars)))))) end \ setup \Include.append "stdio.h" [\printf\, \scanf\]\ subsection \Tests\ C \ //@ setup \Include.append "tmp" [\b\]\ #include "tmp" int a = b; \ C \ int b = 0; //@ setup \Include.init "tmp" [\b\]\ #include "tmp" int a = b; \ C \ int c = 0; //@ setup \Include.append "tmp" [\c\]\ //@ setup \Include.append "tmp" [\c\]\ #include "tmp" int a = b + c; //@ setup \Include.show\ \ section \Working with Pragmas\ C\ #include #include /*sdfsdf */ #define a B #define b(C) #pragma /* just exists syntaxically */ \ text\In the following, we retrieve the C11 AST parsed above. 
\ ML\ val ((C_Ast.CTranslUnit0 (t,u), v)::R, env) = @{C11_AST_CTranslUnit}; val u = C_Grammar_Rule_Lib.decode u; C_Ast.CTypeSpec0; \ section \Working with Annotation Commands\ ML \ \\<^theory>\Isabelle_C.C_Command\\ \ \ \setup for a dummy ensures : the "Hello World" of Annotation Commands\ local datatype antiq_hol = Term of string (* term *) val scan_opt_colon = Scan.option (C_Parse.$$$ ":") fun msg cmd_name call_pos cmd_pos = tap (fn _ => tracing ("\Hello World\ reported by \"" ^ cmd_name ^ "\" here" ^ call_pos cmd_pos)) fun command (cmd as (cmd_name, _)) scan0 scan f = C_Annotation.command' cmd "" (fn (_, (cmd_pos, _)) => (scan0 -- (scan >> f) >> (fn _ => C_Env.Never |> msg cmd_name Position.here cmd_pos))) in val _ = Theory.setup ( C_Inner_Syntax.command_no_range (C_Inner_Toplevel.generic_theory oo C_Inner_Isar_Cmd.setup \K (K (K I))\) ("loop", \<^here>, \<^here>) #> command ("ensures", \<^here>) scan_opt_colon C_Parse.term Term #> command ("invariant", \<^here>) scan_opt_colon C_Parse.term Term #> command ("assigns", \<^here>) scan_opt_colon C_Parse.term Term #> command ("requires", \<^here>) scan_opt_colon C_Parse.term Term #> command ("variant", \<^here>) scan_opt_colon C_Parse.term Term) end \ C\ /*@ ensures "result >= x && result >= y" */ int max(int x, int y) { if (x > y) return x; else return y; } \ ML\ val ((C_Ast.CTranslUnit0 (t,u), v)::R, env) = get_CTranslUnit @{theory}; val u = C_Grammar_Rule_Lib.decode u \ section \C Code: Various Examples\ text\This example suite is drawn from Frama-C and used in our GLA - TPs. 
\ C\ int sqrt(int a) { int i = 0; int tm = 1; int sum = 1; /*@ loop invariant "1 <= sum <= a+tm" loop invariant "(i+1)*(i+1) == sum" loop invariant "tm+(i*i) == sum" loop invariant "1<=tm<=sum" loop assigns "i, tm, sum" loop variant "a-sum" */ while (sum <= a) { i++; tm = tm + 2; sum = sum + tm; } return i; } \ C\ /*@ requires "n >= 0" requires "valid(t+(0..n-1))" ensures "exists integer i; (0<=i result == 0" ensures "(forall integer i; 0<=i t[i] == 0) <==> result == 1" assigns nothing */ int allzeros(int t[], int n) { int k = 0; /*@ loop invariant "0 <= k <= n" loop invariant "forall integer i; 0<=i t[i] == 0" loop assigns k loop variant "n-k" */ while(k < n) { if (t[k]) return 0; k = k + 1; } return 1; } \ C\ /*@ requires "n >= 0" requires "valid(t+(0..n-1))" ensures "(forall integer i; 0<=i t[i] != v) <==> result == -1" ensures "(exists integer i; 0<=i result == v" assigns nothing */ int binarysearch(int t[], int n, int v) { int l = 0; int u = n-1; /*@ loop invariant false */ while (l <= u) { int m = (l + u) / 2; if (t[m] < v) { l = m + 1; } else if (t[m] > v) { u = m - 1; } else return m; } return -1; } \ C\ /*@ requires "n >= 0" requires "valid(t+(0..n-1))" requires "(forall integer i,j; 0<=i<=j t[i] <= t[j])" ensures "exists integer i; (0<=i result == 1" ensures "(forall integer i; 0<=i t[i] != x) <==> result == 0" assigns nothing */ int linearsearch(int x, int t[], int n) { int i = 0; /*@ loop invariant "0<=i<=n" loop invariant "forall integer j; 0<=j (t[j] != x)" loop assigns i loop variant "n-i" */ while (i < n) { if (t[i] < x) { i++; } else { return (t[i] == x); } } return 0; } \ section \C Code: A Sorting Algorithm\ C\ #include int main() { int array[100], n, c, d, position, swap; printf("Enter number of elements\n"); scanf("%d", &n); printf("Enter %d integers\n", n); for (c = 0; c < n; c++) scanf("%d", &array[c]); for (c = 0; c < (n - 1); c++) { position = c; for (d = c + 1; d < n; d++) { if (array[position] > array[d]) position = d; } if (position != 
c) { swap = array[c]; array[c] = array[position]; array[position] = swap; } } printf("Sorted list in ascending order:\n"); for (c = 0; c < n; c++) printf("%d\n", array[c]); return 0; } \ text\A better example implementation:\ C\ #include #include #define SIZE 10 void swap(int *x,int *y); void selection_sort(int* a, const int n); void display(int a[],int size); void main() { int a[SIZE] = {8,5,2,3,1,6,9,4,0,7}; int i; printf("The array before sorting:\n"); display(a,SIZE); selection_sort(a,SIZE); printf("The array after sorting:\n"); display(a,SIZE); } /* swap two integers */ void swap(int *x,int *y) { int temp; temp = *x; *x = *y; *y = temp; } /* perform selection sort */ void selection_sort(int* a,const int size) { int i, j, min; for (i = 0; i < size - 1; i++) { min = i; for (j = i + 1; j < size; j++) { if (a[j] < a[min]) { min = j; } } swap(&a[i], &a[min]); } } /* display array content */ void display(int a[],const int size) { int i; for(i=0; i text\Accessing the underlying C11-AST's via the ML Interface.\ ML\ local open C_Ast in val _ = CTranslUnit0 val (A::R, _) = @{C11_AST_CTranslUnit}; val (CTranslUnit0 (t,u), v) = A fun rule_trans (CTranslUnit0 (t,u), v) = case C_Grammar_Rule_Lib.decode u of Left (p1,p2) => writeln (Position.here p1 ^ " " ^ Position.here p2) | Right S => warning ("Not expecting that value:"^S) val bb = rule_trans A val CDeclExt0(x1)::_ = t; val _ = CDecl0 end \ ML\ get_CTranslUnit; val (R, env_final) = @{C11_AST_CTranslUnit}; val rules = map rule_trans R; @{C\<^sub>e\<^sub>n\<^sub>v} \ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] ML\ val src = \a + d\; val ctxt = (Context.Theory @{theory}); val ctxt' = C_Module.C' @{C\<^sub>e\<^sub>n\<^sub>v} src ctxt; val tt = Context.the_theory ctxt'; (*get_CExpr (Context.the_theory ctxt'); C_Module.Data_In_Env.get ctxt' *) \ ML\val Expr = hd(map_filter C_Grammar_Rule.get_CExpr (!SPY));\ ML\Symtab.map_entry\ ML\ Context.theory_long_name @{theory}\ ML\ fun insert_K_ast key ast = 
Symtab.map_default (key,[]) (cons ast) \ ML\ structure Root_Ast_Store = Generic_Data (type T = C_Grammar_Rule.ast_generic list Symtab.table val empty = Symtab.empty - val extend = I val merge = K empty); Root_Ast_Store.map: ( C_Grammar_Rule.ast_generic list Symtab.table -> C_Grammar_Rule.ast_generic list Symtab.table) -> Context.generic -> Context.generic; fun update_Root_Ast filter ast _ ctxt = let val theory_id = Context.theory_long_name(Context.theory_of ctxt) val insert_K_ast = Symtab.map_default (theory_id,[]) (cons ast) in case filter ast of NONE => (warning "No appropriate c11 ast found - store unchanged."; ctxt) |SOME _ => (Root_Ast_Store.map insert_K_ast) ctxt end; fun get_Root_Ast filter thy = let val ctxt = Context.Theory thy val thid = Context.theory_long_name(Context.theory_of ctxt) val ast = case Symtab.lookup (Root_Ast_Store.get ctxt) (thid) of SOME (a::_) => (case filter a of NONE => error "Last C command is not of appropriate AST-class." | SOME x => x) | _ => error"No C command in the current theory." 
in ast end val get_CExpr = get_Root_Ast C_Grammar_Rule.get_CExpr; val get_CStat = get_Root_Ast C_Grammar_Rule.get_CStat; val get_CExtDecl = get_Root_Ast C_Grammar_Rule.get_CExtDecl; val get_CTranslUnit = get_Root_Ast C_Grammar_Rule.get_CTranslUnit; \ setup \Context.theory_map (C_Module.Data_Accept.put (update_Root_Ast SOME))\ ML\ val _ = Theory.setup( ML_Antiquotation.value_embedded \<^binding>\C11_CTranslUnit\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CTranslUnit (Context.the_global_context())")) || Scan.succeed "get_CTranslUnit (Context.the_global_context())") #> ML_Antiquotation.value_embedded \<^binding>\C11_CExtDecl\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CExtDecl (Context.the_global_context())")) || Scan.succeed "get_CExtDecl (Context.the_global_context())") #> ML_Antiquotation.value_embedded \<^binding>\C11_CStat\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CStat (Context.the_global_context())")) || Scan.succeed "get_CStat (Context.the_global_context())") #> ML_Antiquotation.value_embedded \<^binding>\C11_CExpr\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CExpr (Context.the_global_context())")) || Scan.succeed "get_CExpr (Context.the_global_context())") ) \ text\For the parsing root key's, c.f. 
~ \<^verbatim>\C_Command.thy\\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "expression"]] C\a + b\ ML\val ast = @{C11_CExpr}\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "statement"]] C\a = a + b;\ ML\val ast = @{C11_CStat}\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "external_declaration"]] C\int m ();\ ML\val ast = @{C11_CExtDecl}\ declare [[C\<^sub>r\<^sub>u\<^sub>l\<^sub>e\<^sub>0 = "translation_unit"]] C\int a = a + b;\ ML\val ast = @{C11_CTranslUnit}\ end \ No newline at end of file diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Command.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Command.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Command.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Command.thy @@ -1,1184 +1,1179 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * Author: Frédéric Tuong, Burkhart Wolff, Université Paris-Saclay * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Interface: Inner and Outer Commands\ theory C_Command imports C_Eval keywords "C" :: thy_decl % "ML" and "C_file" :: thy_load % "ML" and "C_export_boot" :: thy_decl % "ML" and "C_export_file" :: thy_decl and "C_prf" :: prf_decl % "proof" (* FIXME % "ML" ?? *) and "C_val" :: diag % "ML" begin subsection \Parsing Entry-Point: Error and Acceptance Cases\ ML \ \analogous to \<^file>\~~/src/Pure/Tools/ghc.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) \ structure C_Serialize = struct (** string literals **) fun print_codepoint c = (case c of 10 => "\\n" | 9 => "\\t" | 11 => "\\v" | 8 => "\\b" | 13 => "\\r" | 12 => "\\f" | 7 => "\\a" | 27 => "\\e" | 92 => "\\\\" | 63 => "\\?" | 39 => "\\'" | 34 => "\\\"" | c => if c >= 32 andalso c < 127 then chr c else error "Not yet implemented"); fun print_symbol sym = let val ord = SML90.ord; (* copied from ML_init in Isabelle2020. 
*) in (case Symbol.decode sym of Symbol.Char s => print_codepoint (ord s) | Symbol.UTF8 s => UTF8.decode_permissive s |> map print_codepoint |> implode | Symbol.Sym s => "\\092<" ^ s ^ ">" | Symbol.Control s => "\\092<^" ^ s ^ ">" | _ => translate_string (print_codepoint o ord) sym) end; val print_string = quote o implode o map print_symbol o Symbol.explode; end \ ML \ \analogous to \<^file>\~~/src/Pure/Tools/generated_files.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) \ structure C_Generated_Files = struct val c_dir = "C"; val c_ext = "c"; val c_make_comment = enclose "/*" "*/"; (** context data **) (* file types *) fun get_file_type ext = if ext = "" then error "Bad file extension" else if c_ext = ext then () else error ("Unknown file type for extension " ^ quote ext); (** Isar commands **) (* generate_file *) fun generate_file (binding, src_content) lthy = let val (path, pos) = Path.dest_binding binding; val () = get_file_type (#2 (Path.split_ext path)) handle ERROR msg => error (msg ^ Position.here pos); val header = c_make_comment " generated by Isabelle "; val content = header ^ "\n" ^ src_content; in lthy |> (Local_Theory.background_theory o Generated_Files.add_files) (binding, content) end; (** concrete file types **) val _ = Theory.setup (Generated_Files.file_type \<^binding>\C\ {ext = c_ext, make_comment = c_make_comment, make_string = C_Serialize.print_string}); end \ ML \ \\<^theory>\Isabelle_C.C_Eval\\ \ signature C_MODULE = sig structure Data_Accept : GENERIC_DATA structure Data_In_Env : GENERIC_DATA structure Data_In_Source : GENERIC_DATA structure Data_Term : GENERIC_DATA structure C_Term: sig val key0_default: string val key0_expression: string val key0_external_declaration: string val key0_statement: string val key0_translation_unit: string val key_default: Input.source val key_expression: Input.source val key_external_declaration: Input.source val key_statement: Input.source val key_translation_unit: Input.source val map_default: 
(C_Grammar_Rule.ast_generic -> C_Env.env_lang -> local_theory -> term) -> theory -> theory val map_expression: (C_Grammar_Rule_Lib.CExpr -> C_Env.env_lang -> local_theory -> term) -> theory -> theory val map_external_declaration: (C_Grammar_Rule_Lib.CExtDecl -> C_Env.env_lang -> local_theory -> term) -> theory -> theory val map_statement: (C_Grammar_Rule_Lib.CStat -> C_Env.env_lang -> local_theory -> term) -> theory -> theory val map_translation_unit: (C_Grammar_Rule_Lib.CTranslUnit -> C_Env.env_lang -> local_theory -> term) -> theory -> theory val tok0_expression: string * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok0_external_declaration: string * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok0_statement: string * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok0_translation_unit: string * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok_expression: Input.source * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok_external_declaration: Input.source * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok_statement: Input.source * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tok_translation_unit: Input.source * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token) val tokens: (string * ('a * 'a -> (C_Grammar.Tokens.svalue, 'a) LALR_Parser_Eval.Token.token)) list end structure C_Term': sig val accept: local_theory -> (Input.source * (Position.range -> (C_Grammar.Tokens.svalue, Position.T) LALR_Parser_Eval.Token.token)) option -> (Input.source -> Context.generic -> (C_Grammar.Tokens.svalue, Position.T) LALR_Parser_Eval.Token.token) * (Data_In_Env.T -> 'a * (C_Grammar_Rule.ast_generic * 'b * 'c) -> {context: Context.generic, error_lines: 'd, reports_text: 'e} -> term * {context: Context.generic, error_lines: 'd, reports_text: 'e}) val 
err: C_Env.env_lang -> (LALR_Table.state * (C_Grammar_Parser.svalue0 * Position.T * Position.T)) list -> Position.T -> {context: Context.generic, error_lines: string list, reports_text: Position.report_text list} -> term * {context: Context.generic, error_lines: string list, reports_text: Position.report_text list} val eval_in: Input.source -> Context.generic -> (Context.generic -> C_Env.env_lang) -> (Input.source * (Position.range -> (C_Grammar.Tokens.svalue, Position.T) LALR_Parser_Eval.Token.token)) option -> C_Lex.token list * (C_Env.error_lines -> string list) -> term val parse_translation: ('a * (Input.source * (Position.range -> (C_Grammar.Tokens.svalue, Position.T) LALR_Parser_Eval.Token.token)) option) list -> ('a * (Proof.context -> term list -> term)) list end val accept: Data_In_Env.T -> 'a * (C_Grammar_Rule.ast_generic * 'b * 'c) -> {context: Context.generic, error_lines: 'd, reports_text: 'e} -> unit * {context: Context.generic, error_lines: 'd, reports_text: 'e} val accept0: (Context.generic -> C_Grammar_Rule.ast_generic -> Data_In_Env.T -> Context.generic -> 'a) -> Data_In_Env.T -> C_Grammar_Rule.ast_generic -> Context.generic -> 'a val c_enclose: string -> string -> Input.source -> C_Lex.token list * (string list -> string list) val env: Context.generic -> Data_In_Env.T val env0: Proof.context -> Data_In_Env.T val err: C_Env.env_lang -> (LALR_Table.state * (C_Grammar_Parser.svalue0 * Position.T * Position.T)) list -> Position.T -> {context: Context.generic, error_lines: string list, reports_text: Position.report_text list} -> unit * {context: Context.generic, error_lines: string list, reports_text: Position.report_text list} val err0: 'a -> 'b -> Position.T -> {context: 'c, error_lines: string list, reports_text: 'd} -> {context: 'c, error_lines: string list, reports_text: 'd} val eval_in: Input.source -> Context.generic option -> C_Lex.token list * (C_Env.error_lines -> string list) -> unit val eval_source: Input.source -> unit val exec_eval: 
Input.source -> Context.generic -> Context.generic val start: Input.source -> Context.generic -> (C_Grammar.Tokens.svalue, Position.T) LALR_Parser_Eval.Token.token (* toplevel command semantics of Isabelle_C *) val C: Input.source -> Context.generic -> Context.generic val C': C_Env.env_lang -> Input.source -> Context.generic -> Context.generic val C_export_boot: Input.source -> Context.generic -> generic_theory val C_export_file: Position.T * 'a -> Proof.context -> Proof.context val C_prf: Input.source -> Proof.state -> Proof.state end structure C_Module : C_MODULE = struct structure Data_In_Source = Generic_Data (type T = Input.source list val empty = [] - val extend = I val merge = K empty) structure Data_In_Env = Generic_Data (type T = C_Env.env_lang val empty = C_Env.empty_env_lang - val extend = I val merge = K empty) structure Data_Accept = Generic_Data (type T = C_Grammar_Rule.ast_generic -> C_Env.env_lang -> Context.generic -> Context.generic fun empty _ _ = I - val extend = I val merge = #2) structure Data_Term = Generic_Data (type T = (C_Grammar_Rule.ast_generic -> C_Env.env_lang -> local_theory -> term) Symtab.table val empty = Symtab.empty - val extend = I val merge = #2) (* keys for major syntactic categories *) structure C_Term = struct val key_translation_unit = \translation_unit\ val key_external_declaration = \external_declaration\ val key_statement = \statement\ val key_expression = \expression\ val key_default = \default\ local val source_content = Input.source_content #> #1 in val key0_translation_unit = source_content key_translation_unit val key0_external_declaration = source_content key_external_declaration val key0_statement = source_content key_statement val key0_expression = source_content key_expression val key0_default = source_content key_default end val tok0_translation_unit = (key0_translation_unit, C_Grammar.Tokens.start_translation_unit) val tok0_external_declaration = ( key0_external_declaration , 
C_Grammar.Tokens.start_external_declaration) val tok0_statement = (key0_statement, C_Grammar.Tokens.start_statement) val tok0_expression = (key0_expression, C_Grammar.Tokens.start_expression) val tok_translation_unit = (key_translation_unit, C_Grammar.Tokens.start_translation_unit) val tok_external_declaration = ( key_external_declaration , C_Grammar.Tokens.start_external_declaration) val tok_statement = (key_statement, C_Grammar.Tokens.start_statement) val tok_expression = (key_expression, C_Grammar.Tokens.start_expression) val tokens = [ tok0_translation_unit , tok0_external_declaration , tok0_statement , tok0_expression ] local fun map_upd0 key v = Context.theory_map (Data_Term.map (Symtab.update (key, v))) fun map_upd key start f = map_upd0 key (f o the o start) in val map_translation_unit = map_upd key0_translation_unit C_Grammar_Rule.get_CTranslUnit val map_external_declaration = map_upd key0_external_declaration C_Grammar_Rule.get_CExtDecl val map_statement = map_upd key0_statement C_Grammar_Rule.get_CStat val map_expression = map_upd key0_expression C_Grammar_Rule.get_CExpr val map_default = map_upd0 key0_default end end fun env0 ctxt = case Config.get ctxt C_Options.starting_env of "last" => Data_In_Env.get (Context.Proof ctxt) | "empty" => C_Env.empty_env_lang | s => error ("Unknown option: " ^ s ^ Position.here (Config.pos_of C_Options.starting_env)) val env = env0 o Context.proof_of fun start source context = Input.range_of source |> let val s = Config.get (Context.proof_of context) C_Options.starting_rule in case AList.lookup (op =) C_Term.tokens s of SOME tok => tok | NONE => error ("Unknown option: " ^ s ^ Position.here (Config.pos_of C_Options.starting_rule)) end fun err0 _ _ pos = C_Env.map_error_lines (cons ("Parser: No matching grammar rule" ^ Position.here pos)) val err = pair () oooo err0 fun accept0 f (env_lang:C_Env.env_lang) ast = Data_In_Env.put env_lang #> (fn context => f context ast env_lang (Data_Accept.get context ast env_lang 
context)) fun accept (env_lang:C_Env.env_lang) (_, (ast, _, _)) = pair () o C_Env.map_context (accept0 (K (K (K I))) env_lang ast) val eval_source = C_Context.eval_source env start err accept fun c_enclose bg en source = C_Lex.@@ ( C_Lex.@@ (C_Lex.read bg, C_Lex.read_source source) , C_Lex.read en); structure C_Term' = struct val err = pair Term.dummy oooo err0 fun accept ctxt start_rule = let val (key, start) = case start_rule of NONE => (C_Term.key_default, start) | SOME (key, start_rule) => (key, fn source => fn _ => start_rule (Input.range_of source)) val (key, pos) = Input.source_content key in ( start , fn env_lang => fn (_, (ast, _, _)) => C_Env.map_context' (accept0 (fn context => pair oo (case Symtab.lookup (Data_Term.get context) key of NONE => tap (fn _ => warning ("Representation function associated to\ \ \"" ^ key ^ "\"" ^ Position.here pos ^ " not found (returning a dummy term)")) (fn _ => fn _ => @{term "()"}) | SOME f => fn ast => fn env_lang => f ast env_lang ctxt)) env_lang ast)) end fun eval_in text context env start_rule = let val (start, accept) = accept (Context.proof_of context) start_rule in C_Context.eval_in (SOME context) env (start text) err accept end fun parse_translation l = l |> map (apsnd (fn start_rule => fn ctxt => fn args => let val msg = (case start_rule of NONE => C_Term.key_default | SOME (key, _) => key) |> Input.source_content |> #1 fun err () = raise TERM (msg, args) in case args of [(c as Const (\<^syntax_const>\_constrain\, _)) $ Free (s, _) $ p] => (case Term_Position.decode_position p of SOME (pos, _) => c $ let val src = uncurry (Input.source false) let val s0 = Symbol_Pos.explode (s, pos) val s = Symbol_Pos.cartouche_content s0 in ( Symbol_Pos.implode s , case s of [] => Position.no_range | (_, pos0) :: _ => Position.range (pos0, s0 |> List.last |> snd)) end in eval_in src (case Context.get_generic_context () of NONE => Context.Proof ctxt | SOME context => Context.mapping I (K ctxt) context) (C_Stack.Data_Lang.get #> 
(fn NONE => env0 ctxt | SOME (_, env_lang) => env_lang)) start_rule (c_enclose "" "" src) end $ p | NONE => err ()) | _ => err () end)) end fun eval_in text ctxt = C_Context.eval_in ctxt env (start text) err accept fun exec_eval source = Data_In_Source.map (cons source) #> ML_Context.exec (fn () => eval_source source) fun C_prf source = Proof.map_context (Context.proof_map (exec_eval source)) #> Proof.propagate_ml_env fun C_export_boot source context = context |> Config.put_generic ML_Env.ML_environment ML_Env.Isabelle |> Config.put_generic ML_Env.ML_write_global true |> exec_eval source |> Config.restore_generic ML_Env.ML_write_global context |> Config.restore_generic ML_Env.ML_environment context |> Local_Theory.propagate_ml_env val C: Input.source -> Context.generic -> Context.generic = fn source => exec_eval source #> Local_Theory.propagate_ml_env val C': C_Env.env_lang -> Input.source -> Context.generic -> Context.generic = fn env_lang:C_Env.env_lang => fn src:Input.source => fn context:Context.generic => context |> C_Env.empty_env_tree |> C_Context.eval_source' env_lang (fn src => start src context) err accept src |> (fn (_, {context, reports_text, error_lines}) => tap (fn _ => case error_lines of [] => () | l => warning (cat_lines (rev l))) (C_Stack.Data_Tree.map (curry C_Stack.Data_Tree_Args.merge (reports_text, [])) context)) fun C_export_file (pos, _) lthy = let val c_sources = Data_In_Source.get (Context.Proof lthy) val binding = Path.binding ( Path.appends [ Path.basic C_Generated_Files.c_dir , Path.basic (string_of_int (length c_sources)) , lthy |> Proof_Context.theory_of |> Context.theory_name |> Path.explode |> Path.ext C_Generated_Files.c_ext ] , pos) in lthy |> C_Generated_Files.generate_file (binding, rev c_sources |> map (Input.source_content #> #1) |> cat_lines) |> tap (Proof_Context.theory_of #> (fn thy => let val file = Generated_Files.get_file thy binding in Generated_Files.export_file thy file; writeln (Export.message thy Path.current); 
writeln (prefix " " (Generated_Files.print_file file)) end)) end end \ subsection \Definitions of C11 Directives as C-commands\ subsubsection \Initialization\ ML \ \analogous to \<^theory>\Pure\\ \ structure C_Directive : sig val setup_define: Position.T -> (C_Lex.token list -> string * Position.range -> Context.generic -> C_Lex.token list * Context.generic) -> (string * Position.range -> Context.generic -> Context.generic) -> theory -> theory end = struct local fun directive_update keyword data = C_Context.directive_update keyword (data, K (K (K I))) fun return f (env_cond, env) = ([], (env_cond, f env)) fun directive_update_define pos f_toks f_antiq = directive_update ("define", pos) (return o (fn C_Lex.Define (_, C_Lex.Group1 ([], [tok3]), NONE, C_Lex.Group1 ([], toks)) => let val map_ctxt = case (tok3, toks) of (C_Lex.Token ((pos, _), (C_Lex.Ident, ident)), [C_Lex.Token (_, (C_Lex.Integer (_, C_Lex.Repr_decimal, []), integer))]) => C_Env.map_context (Context.map_theory (Named_Target.theory_map (Specification.definition_cmd (SOME (Binding.make (ident, pos), NONE, NoSyn)) [] [] (Binding.empty_atts, ident ^ " \ " ^ integer) true #> tap (fn ((_, (_, t)), ctxt) => Output.information ("Generating " ^ Pretty.string_of (Syntax.pretty_term ctxt (Thm.prop_of t)) ^ Position.here (Position.range_position ( C_Lex.pos_of tok3 , C_Lex.end_pos_of (List.last toks))))) #> #2))) | _ => I in fn (env_dir, env_tree) => let val name = C_Lex.content_of tok3 val pos = [C_Lex.pos_of tok3] val data = (pos, serial (), (C_Scan.Left (f_toks toks), f_antiq)) in ( Symtab.update (name, data) env_dir , env_tree |> C_Context.markup_directive_define false (C_Ast.Left (data, C_Env_Ext.list_lookup env_dir name)) pos name |> map_ctxt) end end | C_Lex.Define (_, C_Lex.Group1 ([], [tok3]), SOME (C_Lex.Group1 (_ :: toks_bl, _)), _) => tap (fn _ => (* not yet implemented *) warning ("Ignored functional macro directive" ^ Position.here (Position.range_position (C_Lex.pos_of tok3, C_Lex.end_pos_of 
(List.last toks_bl))))) | _ => I)) in val setup_define = Context.theory_map o C_Context0.Directives.map ooo directive_update_define val _ = Theory.setup (Context.theory_map (C_Context0.Directives.map (directive_update_define \<^here> (K o pair) (K I) #> directive_update ("undef", \<^here>) (return o (fn C_Lex.Undef (C_Lex.Group2 (_, _, [tok])) => (fn (env_dir, env_tree) => let val name = C_Lex.content_of tok val pos1 = [C_Lex.pos_of tok] val data = Symtab.lookup env_dir name in ( (case data of NONE => env_dir | SOME _ => Symtab.delete name env_dir) , C_Context.markup_directive_define true (C_Ast.Right (pos1, data)) pos1 name env_tree) end) | _ => I))))) end end \ subsection \Definitions of C Annotation Commands\ subsubsection \Library\ ML \ \analogous to \<^file>\~~/src/Pure/Isar/toplevel.ML\\ \ structure C_Inner_Toplevel = struct val theory = Context.map_theory fun local_theory' target f gthy = let val (finish, lthy) = Target_Context.switch_named_cmd target gthy; val lthy' = lthy |> Local_Theory.new_group |> f false |> Local_Theory.reset_group; in finish lthy' end val generic_theory = I fun keep'' f = tap (f o Context.proof_of) end \ ML \ \analogous to \<^file>\~~/src/Pure/Isar/isar_cmd.ML\\ \ structure C_Inner_Isar_Cmd = struct (** theory declarations **) (* generic setup *) fun setup0 f_typ f_val src = fn NONE => let val setup = "setup" in C_Context.expression "C_Ast" (Input.range_of src) setup (f_typ "C_Stack.stack_data" "C_Stack.stack_data_elem -> C_Env.env_lang -> Context.generic -> Context.generic") ("fn context => \ \let val (stack, env_lang) = C_Stack.Data_Lang.get' context \ \in " ^ f_val setup "stack" ^ " (stack |> hd) env_lang end context") (ML_Lex.read_source src) end | SOME rule => let val hook = "hook" in C_Context.expression "C_Ast" (Input.range_of src) hook (f_typ "C_Stack.stack_data" (C_Grammar_Rule.type_reduce rule ^ " C_Stack.stack_elem -> C_Env.env_lang -> Context.generic -> Context.generic")) ("fn context => \ \let val (stack, env_lang) = 
C_Stack.Data_Lang.get' context \ \in " ^ f_val hook "stack" ^ " " ^ "(stack \ \|> hd \ \|> C_Stack.map_svalue0 C_Grammar_Rule.reduce" ^ Int.toString rule ^ ")\ \env_lang \ \end \ \ context") (ML_Lex.read_source src) end val setup = setup0 (fn a => fn b => a ^ " -> " ^ b) (fn a => fn b => a ^ " " ^ b) val setup' = setup0 (K I) K (* print theorems, terms, types etc. *) local fun string_of_term ctxt s = let val t = Syntax.read_term ctxt s; val T = Term.type_of t; val ctxt' = Proof_Context.augment t ctxt; in Pretty.string_of (Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t), Pretty.fbrk, Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' T)]) end; fun print_item string_of (modes, arg) ctxt = Print_Mode.with_modes modes (fn () => writeln (string_of ctxt arg)) (); in val print_term = print_item string_of_term; end; end \ ML \ \analogous to \<^file>\~~/src/Pure/Isar/outer_syntax.ML\\ \ structure C_Inner_Syntax = struct val drop1 = fn C_Scan.Left f => C_Scan.Left (K o f) | C_Scan.Right (f, dir) => C_Scan.Right (K o f, dir) val drop2 = fn C_Scan.Left f => C_Scan.Left (K o K o f) | C_Scan.Right (f, dir) => C_Scan.Right (K o K o f, dir) val bottom_up = C_Env.Bottom_up o C_Env.Exec_annotation (**) fun pref_lex name = "#" ^ name val pref_bot = I fun pref_top name = name ^ "\" (**) fun command2' cmd f (pos_bot, pos_top) = let fun cmd' dir = cmd (C_Scan.Right (f, dir)) Keyword.thy_decl in cmd' bottom_up (pref_bot, pos_bot) #> cmd' C_Env.Top_down (pref_top, pos_top) end fun command3' cmd f (pos_lex, pos_bot, pos_top) = cmd (C_Scan.Left f) (pref_lex, pos_lex) #> command2' (K o cmd) f (pos_bot, pos_top) fun command2 cmd f (name, pos_bot, pos_top) = command2' (fn f => fn kind => fn (name_pref, pos) => cmd f kind (name_pref name, pos)) f (pos_bot, pos_top) fun command3 cmd f (name, pos_lex, pos_bot, pos_top) = command3' (fn f => fn (name_pref, pos) => cmd f (name_pref name, pos)) f (pos_lex, pos_bot, pos_top) (**) fun command00 f kind scan name = 
C_Annotation.command'' kind name "" (case f of C_Scan.Left f => (fn _ => C_Parse.range scan >> (fn (src, range) => C_Env.Lexing (range, f src range))) | C_Scan.Right (f, dir) => fn ((stack1, (to_delay, stack2)), _) => C_Parse.range scan >> (fn (src, range) => C_Env.Parsing ((stack1, stack2), (range, dir (f src range), Symtab.empty, to_delay)))) fun command00_no_range f kind name = C_Annotation.command'' kind name "" (case f of C_Scan.Left f => (fn (_, range) => Scan.succeed () >> K (C_Env.Lexing (range, f range))) | C_Scan.Right (f, dir) => fn ((stack1, (to_delay, stack2)), range) => Scan.succeed () >> K (C_Env.Parsing ((stack1, stack2), (range, dir (f range), Symtab.empty, to_delay)))) (**) fun command' f = command00 (drop1 f) Keyword.thy_decl fun command f scan = command2 (fn f => fn kind => command00 f kind scan) (K o f) fun command_range f = command00_no_range f Keyword.thy_decl val command_range' = command3 (command_range o drop1) fun command_no_range' f = command00_no_range (drop1 f) Keyword.thy_decl fun command_no_range f = command2 command00_no_range (K f) fun command0 f scan = command3 (fn f => command' (drop1 f) scan) f fun local_command' (name, pos_lex, pos_bot, pos_top) scan f = command3' (fn f => fn (name_pref, pos) => command' (drop1 f) (C_Token.syntax' (Parse.opt_target -- scan name_pref)) (name_pref name, pos)) (fn (target, arg) => C_Inner_Toplevel.local_theory' target (f arg)) (pos_lex, pos_bot, pos_top) fun local_command'' spec = local_command' spec o K val command0_no_range = command_no_range' o drop1 fun command0' f kind scan = command3 (fn f => fn (name, pos) => command00 (drop2 f) kind (scan name) (name, pos)) f end \ ML \ \analogous to \<^file>\~~/src/Pure/ML/ml_file.ML\\ \ structure C_Inner_File = struct fun command_c ({lines, pos, ...}: Token.file) = C_Module.C (Input.source true (cat_lines lines) (pos, pos)); fun C files gthy = command_c (hd (files (Context.theory_of gthy))) gthy; fun command_ml environment debug files gthy = let val file: 
Token.file = hd (files (Context.theory_of gthy)); val source = Token.file_source file; val _ = Document_Output.check_comments (Context.proof_of gthy) (Input.source_explode source); val flags: ML_Compiler.flags = {environment = environment, redirect = true, verbose = true, debug = debug, writeln = writeln, warning = warning}; in gthy |> ML_Context.exec (fn () => ML_Context.eval_source flags source) |> Local_Theory.propagate_ml_env end; val ML = command_ml ""; val SML = command_ml ML_Env.SML; end; \ subsubsection \Initialization\ setup \ \analogous to \<^theory>\Pure\\ \ C_Thy_Header.add_keywords_minor (maps (fn ((name, pos_lex, pos_bot, pos_top), ty) => [ ((C_Inner_Syntax.pref_lex name, pos_lex), ty) , ((C_Inner_Syntax.pref_bot name, pos_bot), ty) , ((C_Inner_Syntax.pref_top name, pos_top), ty) ]) [ (("apply", \<^here>, \<^here>, \<^here>), ((Keyword.prf_script, []), ["proof"])) , (("by", \<^here>, \<^here>, \<^here>), ((Keyword.qed, []), ["proof"])) , (("done", \<^here>, \<^here>, \<^here>), ((Keyword.qed_script, []), ["proof"])) ]) \ ML \ \analogous to \<^theory>\Pure\\ \ local val semi = Scan.option (C_Parse.$$$ ";"); structure C_Isar_Cmd = struct fun ML source = ML_Context.exec (fn () => ML_Context.eval_source (ML_Compiler.verbose true ML_Compiler.flags) source) #> Local_Theory.propagate_ml_env fun theorem schematic ((long, binding, includes, elems, concl), (l_meth, o_meth)) int lthy = (if schematic then Specification.schematic_theorem_cmd else Specification.theorem_cmd) long Thm.theoremK NONE (K I) binding includes elems concl int lthy |> fold (fn m => tap (fn _ => Method.report m) #> Proof.apply m #> Seq.the_result "") l_meth |> (case o_meth of NONE => Proof.global_done_proof | SOME (m1, m2) => tap (fn _ => (Method.report m1; Option.map Method.report m2)) #> Proof.global_terminal_proof (m1, m2)) fun definition (((decl, spec), prems), params) = #2 oo Specification.definition_cmd decl params prems spec fun declare (facts, fixes) = #2 oo 
Specification.theorems_cmd "" [(Binding.empty_atts, flat facts)] fixes end local val long_keyword = Parse_Spec.includes >> K "" || Parse_Spec.long_statement_keyword; val long_statement = Scan.optional (Parse_Spec.opt_thm_name ":" --| Scan.ahead long_keyword) Binding.empty_atts -- Scan.optional Parse_Spec.includes [] -- Parse_Spec.long_statement >> (fn ((binding, includes), (elems, concl)) => (true, binding, includes, elems, concl)); val short_statement = Parse_Spec.statement -- Parse_Spec.if_statement -- Parse.for_fixes >> (fn ((shows, assumes), fixes) => (false, Binding.empty_atts, [], [Element.Fixes fixes, Element.Assumes assumes], Element.Shows shows)); in fun theorem spec schematic = C_Inner_Syntax.local_command' spec (fn name_pref => (long_statement || short_statement) -- let val apply = Parse.$$$ (name_pref "apply") |-- Method.parse in Scan.repeat1 apply -- (Parse.$$$ (name_pref "done") >> K NONE) || Scan.repeat apply -- (Parse.$$$ (name_pref "by") |-- Method.parse -- Scan.option Method.parse >> SOME) end) (C_Isar_Cmd.theorem schematic) end val opt_modes = Scan.optional (\<^keyword>\(\ |-- Parse.!!! 
(Scan.repeat1 Parse.name --| \<^keyword>\)\)) []; val _ = Theory.setup ( C_Inner_Syntax.command (C_Inner_Toplevel.generic_theory oo C_Inner_Isar_Cmd.setup) C_Parse.ML_source ("\setup", \<^here>, \<^here>) #> C_Inner_Syntax.command0 (C_Inner_Toplevel.theory o Isar_Cmd.setup) C_Parse.ML_source ("setup", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command0 (C_Inner_Toplevel.generic_theory o C_Isar_Cmd.ML) C_Parse.ML_source ("ML", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command0 (C_Inner_Toplevel.generic_theory o C_Module.C) C_Parse.C_source ("C", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command0' (C_Inner_Toplevel.generic_theory o C_Inner_File.ML NONE) Keyword.thy_load (fn name => C_Resources.parse_files name --| semi) ("ML_file", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command0' (C_Inner_Toplevel.generic_theory o C_Inner_File.C) Keyword.thy_load (fn name => C_Resources.parse_files name --| semi) ("C_file", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command0 (C_Inner_Toplevel.generic_theory o C_Module.C_export_boot) C_Parse.C_source ("C_export_boot", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command_range' (Context.map_theory o Named_Target.theory_map o C_Module.C_export_file) ("C_export_file", \<^here>, \<^here>, \<^here>) #> C_Inner_Syntax.command_no_range (C_Inner_Toplevel.generic_theory oo C_Inner_Isar_Cmd.setup \fn ((_, (_, pos1, pos2)) :: _) => (fn _ => fn _ => tap (fn _ => Position.reports_text [((Position.range (pos1, pos2) |> Position.range_position, Markup.intensify), "")])) | _ => fn _ => fn _ => I\) ("highlight", \<^here>, \<^here>) #> theorem ("theorem", \<^here>, \<^here>, \<^here>) false #> theorem ("lemma", \<^here>, \<^here>, \<^here>) false #> theorem ("corollary", \<^here>, \<^here>, \<^here>) false #> theorem ("proposition", \<^here>, \<^here>, \<^here>) false #> theorem ("schematic_goal", \<^here>, \<^here>, \<^here>) true #> C_Inner_Syntax.local_command'' ("definition", \<^here>, \<^here>, \<^here>) 
(Scan.option Parse_Spec.constdecl -- (Parse_Spec.opt_thm_name ":" -- Parse.prop) -- Parse_Spec.if_assumes -- Parse.for_fixes) C_Isar_Cmd.definition #> C_Inner_Syntax.local_command'' ("declare", \<^here>, \<^here>, \<^here>) (Parse.and_list1 Parse.thms1 -- Parse.for_fixes) C_Isar_Cmd.declare #> C_Inner_Syntax.command0 (C_Inner_Toplevel.keep'' o C_Inner_Isar_Cmd.print_term) (C_Token.syntax' (opt_modes -- Parse.term)) ("term", \<^here>, \<^here>, \<^here>)) in end \ subsection \Definitions of Outer Classical Commands\ subsubsection \Library\ (* Author: Frédéric Tuong, Université Paris-Saclay *) ML \ \analogously to \<^file>\~~/src/Pure/Isar/parse.ML\\ \ structure C_Outer_Parse = struct val C_source = Parse.input (Parse.group (fn () => "C source") Parse.text) end \ ML \ \analogously to \<^file>\~~/src/Pure/Isar/outer_syntax.ML\\ \ structure C_Outer_Syntax = struct val _ = Outer_Syntax.command \<^command_keyword>\C\ "" (C_Outer_Parse.C_source >> (Toplevel.generic_theory o C_Module.C)); end \ ML \ \analogously to \<^file>\~~/src/Pure/Isar/isar_cmd.ML\\ \ structure C_Outer_Isar_Cmd = struct (* diagnostic ML evaluation *) structure Diag_State = Proof_Data ( type T = Toplevel.state option; fun init _ = NONE; ); fun C_diag source state = let val opt_ctxt = try Toplevel.generic_theory_of state |> Option.map (Context.proof_of #> Diag_State.put (SOME state)); in Context.setmp_generic_context (Option.map Context.Proof opt_ctxt) (fn () => C_Module.eval_source source) () end; fun diag_state ctxt = (case Diag_State.get ctxt of SOME st => st | NONE => Toplevel.init_toplevel ()); val diag_goal = Proof.goal o Toplevel.proof_of o diag_state; val _ = Theory.setup (ML_Antiquotation.value (Binding.qualify true "Isar" \<^binding>\C_state\) (Scan.succeed "C_Outer_Isar_Cmd.diag_state ML_context") #> ML_Antiquotation.value (Binding.qualify true "Isar" \<^binding>\C_goal\) (Scan.succeed "C_Outer_Isar_Cmd.diag_goal ML_context")); end \ ML \ \analogously to \<^file>\~~/src/Pure/ML/ml_file.ML\\ \ 
structure C_Outer_File = struct fun command_c ({src_path, lines, digest, pos}: Token.file) = let val provide = Resources.provide (src_path, digest); in I #> C_Module.C (Input.source true (cat_lines lines) (pos, pos)) #> Context.mapping provide (Local_Theory.background_theory provide) end; fun C files gthy = command_c (hd (files (Context.theory_of gthy))) gthy; end; \ subsubsection \Setup for \<^verbatim>\C\ and \<^verbatim>\C_file\ Command Syntax\ ML \ local val semi = Scan.option \<^keyword>\;\; val _ = Outer_Syntax.command \<^command_keyword>\C_file\ "read and evaluate Isabelle/C file" (Resources.parse_files single --| semi >> (Toplevel.generic_theory o C_Outer_File.C)); val _ = Outer_Syntax.command \<^command_keyword>\C_export_boot\ "C text within theory or local theory, and export to bootstrap environment" (C_Outer_Parse.C_source >> (Toplevel.generic_theory o C_Module.C_export_boot)); val _ = Outer_Syntax.command \<^command_keyword>\C_prf\ "C text within proof" (C_Outer_Parse.C_source >> (Toplevel.proof o C_Module.C_prf)); val _ = Outer_Syntax.command \<^command_keyword>\C_val\ "diagnostic C text" (C_Outer_Parse.C_source >> (Toplevel.keep o C_Outer_Isar_Cmd.C_diag)); val _ = Outer_Syntax.local_theory \<^command_keyword>\C_export_file\ "diagnostic C text" (Scan.succeed () >> K (C_Module.C_export_file Position.no_range)); in end\ subsection \Term-Cartouches for C Syntax\ syntax "_C_translation_unit" :: \cartouche_position \ string\ ("\<^C>\<^sub>u\<^sub>n\<^sub>i\<^sub>t _") syntax "_C_external_declaration" :: \cartouche_position \ string\ ("\<^C>\<^sub>d\<^sub>e\<^sub>c\<^sub>l _") syntax "_C_expression" :: \cartouche_position \ string\ ("\<^C>\<^sub>e\<^sub>x\<^sub>p\<^sub>r _") syntax "_C_statement" :: \cartouche_position \ string\ ("\<^C>\<^sub>s\<^sub>t\<^sub>m\<^sub>t _") syntax "_C" :: \cartouche_position \ string\ ("\<^C> _") parse_translation \ C_Module.C_Term'.parse_translation [ (\<^syntax_const>\_C_translation_unit\, SOME 
C_Module.C_Term.tok_translation_unit) , (\<^syntax_const>\_C_external_declaration\, SOME C_Module.C_Term.tok_external_declaration) , (\<^syntax_const>\_C_expression\, SOME C_Module.C_Term.tok_expression) , (\<^syntax_const>\_C_statement\, SOME C_Module.C_Term.tok_statement) , (\<^syntax_const>\_C\, NONE) ] \ (*test*) ML\C_Module.env (Context.the_generic_context())\ ML\open Args\ subsection\C-env related ML-Antiquotations as Programming Support\ ML\ (* was in Isabelle2020: (Args.context -- Scan.lift Args.embedded_position >> (fn (ctxt, (name, pos)) => with: val embedded_token = ident || string || cartouche; val embedded_inner_syntax = embedded_token >> Token.inner_syntax_of; val embedded_input = embedded_token >> Token.input_of; val embedded = embedded_token >> Token.content_of; val embedded_position = embedded_input >> Input.source_content; defined in args. Setting it to : (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => makes this syntactically more restrictive. *) val _ = Theory.setup( ML_Antiquotation.value_embedded \<^binding>\C\<^sub>e\<^sub>n\<^sub>v\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"C_Module.env (Context.the_generic_context())")) || Scan.succeed "C_Module.env (Context.the_generic_context())")) \ text\Note that this anti-quotation is controlled by the \<^verbatim>\C_starting_env\ - flag. 
\ declare[[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = last]] ML\@{C\<^sub>e\<^sub>n\<^sub>v}\ declare[[C\<^sub>e\<^sub>n\<^sub>v\<^sub>0 = empty]] ML\@{C\<^sub>e\<^sub>n\<^sub>v}\ subsection\The Standard Store C11-AST's generated from C-commands\ text\Each call of the C command will register the parsed root AST in this theory-name indexed table.\ ML\ structure Root_Ast_Store = Generic_Data (type T = C_Grammar_Rule.ast_generic list Symtab.table val empty = Symtab.empty - val extend = I val merge = K empty); Root_Ast_Store.map: ( C_Grammar_Rule.ast_generic list Symtab.table -> C_Grammar_Rule.ast_generic list Symtab.table) -> Context.generic -> Context.generic; fun update_Root_Ast filter ast _ ctxt = let val theory_id = Context.theory_long_name(Context.theory_of ctxt) val insert_K_ast = Symtab.map_default (theory_id,[]) (cons ast) in case filter ast of NONE => (warning "No appropriate c11 ast found - store unchanged."; ctxt) |SOME _ => (Root_Ast_Store.map insert_K_ast) ctxt end; fun get_Root_Ast filter thy = let val ctxt = Context.Theory thy val thid = Context.theory_long_name(Context.theory_of ctxt) val ast = case Symtab.lookup (Root_Ast_Store.get ctxt) (thid) of SOME (a::_) => (case filter a of NONE => error "Last C command is not of appropriate AST-class." | SOME x => x) | _ => error"No C command in the current theory." in ast end val get_CExpr = get_Root_Ast C_Grammar_Rule.get_CExpr; val get_CStat = get_Root_Ast C_Grammar_Rule.get_CStat; val get_CExtDecl = get_Root_Ast C_Grammar_Rule.get_CExtDecl; val get_CTranslUnit = get_Root_Ast C_Grammar_Rule.get_CTranslUnit; \ setup \Context.theory_map (C_Module.Data_Accept.put (update_Root_Ast SOME))\ ML\ (* Was : Args.embedded_position changed to : Args.name_position. See comment above. 
*) val _ = Theory.setup( ML_Antiquotation.value_embedded \<^binding>\C11_CTranslUnit\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CTranslUnit (Context.the_global_context())")) || Scan.succeed "get_CTranslUnit (Context.the_global_context())") #> ML_Antiquotation.value_embedded \<^binding>\C11_CExtDecl\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CExtDecl (Context.the_global_context())")) || Scan.succeed "get_CExtDecl (Context.the_global_context())") #> ML_Antiquotation.value_embedded \<^binding>\C11_CStat\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CStat (Context.the_global_context())")) || Scan.succeed "get_CStat (Context.the_global_context())") #> ML_Antiquotation.value_embedded \<^binding>\C11_CExpr\ (Args.context -- Scan.lift Args.name_position >> (fn (ctxt, (name, pos)) => (warning"arg variant not implemented";"get_CExpr (Context.the_global_context())")) || Scan.succeed "get_CExpr (Context.the_global_context())") ) \ end diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Lexer_Annotation.thy @@ -1,1423 +1,1422 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. 
* * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Annotation Language: Parsing Combinator\ theory C_Lexer_Annotation imports C_Lexer_Language begin ML \ \\<^file>\~~/src/Pure/Isar/keyword.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/keyword.ML Author: Makarius Isar keyword classification. 
*) \ structure C_Keyword = struct (** keyword classification **) (* kinds *) val command_kinds = [Keyword.diag, Keyword.document_heading, Keyword.document_body, Keyword.document_raw, Keyword.thy_begin, Keyword.thy_end, Keyword.thy_load, Keyword.thy_decl, Keyword.thy_decl_block, Keyword.thy_defn, Keyword.thy_stmt, Keyword.thy_goal, Keyword.thy_goal_defn, Keyword.thy_goal_stmt, Keyword.qed, Keyword.qed_script, Keyword.qed_block, Keyword.qed_global, Keyword.prf_goal, Keyword.prf_block, Keyword.next_block, Keyword.prf_open, Keyword.prf_close, Keyword.prf_chain, Keyword.prf_decl, Keyword.prf_asm, Keyword.prf_asm_goal, Keyword.prf_script, Keyword.prf_script_goal, Keyword.prf_script_asm_goal]; (* specifications *) type entry = {pos: Position.T, id: serial, kind: string, files: string list, (*extensions of embedded files*) tags: string list}; fun check_spec pos ((kind, files), tags) : entry = if not (member (op =) command_kinds kind) then error ("Unknown annotation syntax keyword kind " ^ quote kind) else if not (null files) andalso kind <> Keyword.thy_load then error ("Illegal specification of files for " ^ quote kind) else {pos = pos, id = serial (), kind = kind, files = files, tags = tags}; (** keyword tables **) (* type keywords *) datatype keywords = Keywords of {minor: Scan.lexicon, major: Scan.lexicon, commands: entry Symtab.table}; fun minor_keywords (Keywords {minor, ...}) = minor; fun major_keywords (Keywords {major, ...}) = major; fun make_keywords (minor, major, commands) = Keywords {minor = minor, major = major, commands = commands}; fun map_keywords f (Keywords {minor, major, commands}) = make_keywords (f (minor, major, commands)); (* build keywords *) val empty_keywords = make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty); fun empty_keywords' minor = make_keywords (minor, Scan.empty_lexicon, Symtab.empty); fun merge_keywords (Keywords {minor = minor1, major = major1, commands = commands1}, Keywords {minor = minor2, major = major2, commands 
= commands2}) = make_keywords (Scan.merge_lexicons (minor1, minor2), Scan.merge_lexicons (major1, major2), Symtab.merge (K true) (commands1, commands2)); val add_keywords0 = fold (fn ((name, pos), force_minor, spec as ((kind, _), _)) => map_keywords (fn (minor, major, commands) => let val extend = Scan.extend_lexicon (Symbol.explode name) fun update spec = Symtab.update (name, spec) in if force_minor then (extend minor, major, update (check_spec pos spec) commands) else if kind = "" orelse kind = Keyword.before_command orelse kind = Keyword.quasi_command then (extend minor, major, commands) else (minor, extend major, update (check_spec pos spec) commands) end)); val add_keywords = add_keywords0 o map (fn (cmd, spec) => (cmd, false, spec)) val add_keywords_minor = add_keywords0 o map (fn (cmd, spec) => (cmd, true, spec)) (* keyword status *) fun is_command (Keywords {commands, ...}) = Symtab.defined commands; fun dest_commands (Keywords {commands, ...}) = Symtab.keys commands; (* command keywords *) fun lookup_command (Keywords {commands, ...}) = Symtab.lookup commands; fun command_markup keywords name = let (* PATCH: copied as such from Isabelle2020 *) fun entity_properties_of def serial pos = if def then (Markup.defN, Value.print_int serial) :: Position.properties_of pos else (Markup.refN, Value.print_int serial) :: Position.def_properties_of pos; in lookup_command keywords name |> Option.map (fn {pos, id, ...} => Markup.properties (entity_properties_of false id pos) (Markup.entity Markup.command_keywordN name)) end; fun command_files keywords name path = (case lookup_command keywords name of NONE => [] | SOME {kind, files, ...} => if kind <> Keyword.thy_load then [] else if null files then [path] else map (fn ext => Path.ext ext path) files); (* command categories *) fun command_category ks = let val tab = Symtab.make_set ks; fun pred keywords name = (case lookup_command keywords name of NONE => false | SOME {kind, ...} => Symtab.defined tab kind); in pred end; 
val is_theory_end = command_category [Keyword.thy_end]; val is_proof_asm = command_category [Keyword.prf_asm, Keyword.prf_asm_goal]; val is_improper = command_category [ Keyword.qed_script , Keyword.prf_script , Keyword.prf_script_goal , Keyword.prf_script_asm_goal]; end; \ text \ Notes: \<^item> The next structure contains a duplicated copy of the type \<^ML_type>\Token.T\, since it is not possible to set an arbitrary \<^emph>\slot\ value in \<^ML_structure>\Token\. \<^item> Parsing priorities in C and HOL slightly differ, see for instance \<^ML>\Token.explode\. \ ML \ \\<^file>\~~/src/Pure/Isar/token.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/token.ML Author: Markus Wenzel, TU Muenchen Outer token syntax for Isabelle/Isar. *) \ structure C_Token = struct (** tokens **) (* token kind *) val immediate_kinds' = fn Token.Command => 0 | Token.Keyword => 1 | Token.Ident => 2 | Token.Long_Ident => 3 | Token.Sym_Ident => 4 | Token.Var => 5 | Token.Type_Ident => 6 | Token.Type_Var => 7 | Token.Nat => 8 | Token.Float => 9 | Token.Space => 10 | _ => ~1 val delimited_kind = (fn Token.String => true | Token.Alt_String => true | Token.Verbatim => true | Token.Cartouche => true | Token.Comment _ => true | _ => false); (* datatype token *) (*The value slot assigns an (optional) internal value to a token, usually as a side-effect of special scanner setup (see also args.ML). Note that an assignable ref designates an intermediate state of internalization -- it is NOT meant to persist.*) datatype T = Token of (Symbol_Pos.text * Position.range) * (Token.kind * string) * slot and slot = Slot | Value of value option | Assignable of value option Unsynchronized.ref and value = Source of T list | Literal of bool * Markup.T | Name of Token.name_value * morphism | Typ of typ | Term of term | Fact of string option * thm list | (*optional name for dynamic fact, i.e. 
fact "variable"*) Attribute of morphism -> attribute | Declaration of declaration | Files of Token.file Exn.result list; type src = T list; (* position *) fun pos_of (Token ((_, (pos, _)), _, _)) = pos; fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos; fun adjust_offsets adjust (Token ((x, range), y, z)) = Token ((x, apply2 (Position.adjust_offsets adjust) range), y, z); (* stopper *) fun mk_eof pos = Token (("", (pos, Position.none)), (Token.EOF, ""), Slot); val eof = mk_eof Position.none; fun is_eof (Token (_, (Token.EOF, _), _)) = true | is_eof _ = false; val not_eof = not o is_eof; val stopper = Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; (* kind of token *) fun kind_of (Token (_, (k, _), _)) = k; fun is_kind k (Token (_, (k', _), _)) = k = k'; val is_command = is_kind Token.Command; fun keyword_with pred (Token (_, (Token.Keyword, x), _)) = pred x | keyword_with _ _ = false; val is_command_modifier = keyword_with (fn x => x = "private" orelse x = "qualified"); fun ident_with pred (Token (_, (Token.Ident, x), _)) = pred x | ident_with _ _ = false; fun is_ignored (Token (_, (Token.Space, _), _)) = true | is_ignored (Token (_, (Token.Comment NONE, _), _)) = true | is_ignored _ = false; fun is_proper (Token (_, (Token.Space, _), _)) = false | is_proper (Token (_, (Token.Comment _, _), _)) = false | is_proper _ = true; fun is_comment (Token (_, (Token.Comment _, _), _)) = true | is_comment _ = false; fun is_informal_comment (Token (_, (Token.Comment NONE, _), _)) = true | is_informal_comment _ = false; fun is_formal_comment (Token (_, (Token.Comment (SOME _), _), _)) = true | is_formal_comment _ = false; fun is_document_marker (Token (_, (Token.Comment (SOME Comment.Marker), _), _)) = true | is_document_marker _ = false; fun is_begin_ignore (Token (_, (Token.Comment NONE, "<"), _)) = true | is_begin_ignore _ = false; fun is_end_ignore (Token (_, (Token.Comment NONE, ">"), _)) = true | is_end_ignore _ = false; fun is_error 
(Token (_, (Token.Error _, _), _)) = true | is_error _ = false; fun is_error' (Token (_, (Token.Error msg, _), _)) = SOME msg | is_error' _ = NONE; fun content_of (Token (_, (_, x), _)) = x; fun content_of' (Token (_, (_, _), Value (SOME (Source l)))) = map (fn Token ((_, (pos, _)), (_, x), _) => (x, pos)) l | content_of' _ = []; val is_stack1 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "+") l | _ => false; val is_stack2 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "@") l | _ => false; val is_stack3 = fn Token (_, (Token.Sym_Ident, _), Value (SOME (Source l))) => forall (fn tok => content_of tok = "&") l | _ => false; (* blanks and newlines -- space tokens obey lines *) fun is_space (Token (_, (Space, _), _)) = true | is_space _ = false; fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x) | is_blank _ = false; fun is_newline (Token (_, (Space, x), _)) = String.isSuffix "\n" x | is_newline _ = false; (* range of tokens *) fun range_of (toks as tok :: _) = let val pos' = end_pos_of (List.last toks) in Position.range (pos_of tok, pos') end | range_of [] = Position.no_range; val core_range_of = drop_prefix is_ignored #> drop_suffix is_ignored #> range_of; (* token content *) fun content_of (Token (_, (_, x), _)) = x; fun source_of (Token ((source, _), _, _)) = source; fun input_of (Token ((source, range), (kind, _), _)) = Input.source (delimited_kind kind) source range; fun inner_syntax_of tok = let val x = content_of tok in if YXML.detect x then x else Syntax.implode_input (input_of tok) end; (* markup reports *) local val token_kind_markup = fn Token.Var => (Markup.var, "") | Token.Type_Ident => (Markup.tfree, "") | Token.Type_Var => (Markup.tvar, "") | Token.String => (Markup.string, "") | Token.Alt_String => (Markup.alt_string, "") | Token.Verbatim => (Markup.verbatim, "") | Token.Cartouche => (Markup.cartouche, "") | Token.Comment _ => 
(Markup.ML_comment, "") | Token.Error msg => (Markup.bad (), msg) | _ => (Markup.empty, ""); fun keyword_reports tok = map (fn markup => ((pos_of tok, markup), "")); fun command_markups keywords x = if C_Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties] else (if C_Keyword.is_proof_asm keywords x then [Markup.keyword3] else if C_Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else [Markup.keyword1]) |> map Markup.command_properties; fun keyword_markup (important, keyword) x = if important orelse Symbol.is_ascii_identifier x then keyword else Markup.delimiter; fun command_minor_markups keywords x = if C_Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties] else (if C_Keyword.is_proof_asm keywords x then [Markup.keyword3] else if C_Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper] else if C_Keyword.is_command keywords x then [Markup.keyword1] else [keyword_markup (false, Markup.keyword2 |> Markup.keyword_properties) x]); in fun completion_report tok = if is_kind Token.Keyword tok then map (fn m => ((pos_of tok, m), "")) (Completion.suppress_abbrevs (content_of tok)) else []; fun reports keywords tok = if is_command tok then keyword_reports tok (command_markups keywords (content_of tok)) else if is_stack1 tok orelse is_stack2 tok orelse is_stack3 tok then keyword_reports tok [Markup.keyword2 |> Markup.keyword_properties] else if is_kind Token.Keyword tok then keyword_reports tok (command_minor_markups keywords (content_of tok)) else let val pos = pos_of tok; val (m, text) = token_kind_markup (kind_of tok); val delete = (Symbol_Pos.explode_deleted (source_of tok, pos)); in ((pos, m), text) :: map (fn p => ((p, Markup.delete), "")) delete end; fun markups keywords = map (#2 o #1) o reports keywords; end; (* unparse *) fun unparse' (Token ((source0, _), (kind, x), _)) = let val source = \ \ We are computing a reverse function of \<^ML>\Symbol_Pos.implode_range\ 
taking into account consecutive \<^ML>\Symbol.DEL\ symbols potentially appearing at the beginning, or at the end of the string. As remark, \<^ML>\Symbol_Pos.explode_deleted\ will remove any potentially consecutive \<^ML>\Symbol.DEL\ symbols. This is why it is not used here.\ case Symbol.explode source0 of x :: xs => if x = Symbol.DEL then case rev xs of x' :: xs => if x' = Symbol.DEL then implode (rev xs) else source0 | _ => source0 else source0 | _ => source0 in case kind of Token.String => Symbol_Pos.quote_string_qq source | Token.Alt_String => Symbol_Pos.quote_string_bq source | Token.Verbatim => enclose "{*" "*}" source | Token.Cartouche => cartouche source | Token.Comment NONE => enclose "(*" "*)" source | Token.EOF => "" | _ => x end; fun text_of tok = let val k = Token.str_of_kind (kind_of tok); val ms = markups C_Keyword.empty_keywords tok; val s = unparse' tok; in if s = "" then (k, "") else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) then (k ^ " " ^ Markup.markups ms s, "") else (k, Markup.markups ms s) end; (** associated values **) (* inlined file content *) fun file_source (file: Token.file) = let val text = cat_lines (#lines file); val end_pos = fold Position.symbol (Symbol.explode text) (#pos file); in Input.source true text (Position.range (#pos file, end_pos)) end; fun get_files (Token (_, _, Value (SOME (Files files)))) = files | get_files _ = []; fun put_files [] tok = tok | put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files))) | put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok)); (* access values *) (* reports of value *) (* name value *) (* maxidx *) (* fact values *) (* transform *) (* static binding *) (*1st stage: initialize assignable slots*) fun init_assignable tok = (case tok of Token (x, y, Slot) => Token (x, y, Assignable (Unsynchronized.ref NONE)) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := NONE; tok)); (*2nd stage: assign 
values as side-effect of scanning*) fun assign v tok = (case tok of Token (x, y, Slot) => Token (x, y, Value v) | Token (_, _, Value _) => tok | Token (_, _, Assignable r) => (r := v; tok)); fun evaluate mk eval arg = let val x = eval arg in (assign (SOME (mk x)) arg; x) end; (*3rd stage: static closure of final values*) fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v) | closure tok = tok; (* pretty *) (* src *) (** scanners **) open Basic_Symbol_Pos; val err_prefix = "Annotation lexical error: "; fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); (* scan stack *) fun scan_stack is_stack = Scan.optional (Scan.one is_stack >> content_of') [] (* scan symbolic idents *) val scan_symid = Scan.many1 (Symbol.is_symbolic_char o Symbol_Pos.symbol) || Scan.one (Symbol.is_symbolic o Symbol_Pos.symbol) >> single; fun is_symid str = (case try Symbol.explode str of SOME [s] => Symbol.is_symbolic s orelse Symbol.is_symbolic_char s | SOME ss => forall Symbol.is_symbolic_char ss | _ => false); fun ident_or_symbolic "begin" = false | ident_or_symbolic ":" = true | ident_or_symbolic "::" = true | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s; (* scan verbatim text *) val scan_verb = $$$ "*" --| Scan.ahead (~$$ "}") || Scan.one (fn (s, _) => s <> "*" andalso Symbol.not_eof s) >> single; val scan_verbatim = Scan.ahead ($$ "{" -- $$ "*") |-- !!! 
"unclosed verbatim text" ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") -- (Scan.repeats scan_verb -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos))); val recover_verbatim = $$$ "{" @@@ $$$ "*" @@@ Scan.repeats scan_verb; (* scan cartouche *) val scan_cartouche = Symbol_Pos.scan_pos -- ((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos); (* scan space *) fun space_symbol (s, _) = Symbol.is_blank s andalso s <> "\n"; val scan_space = Scan.many1 space_symbol @@@ Scan.optional ($$$ "\n") [] || Scan.many space_symbol @@@ $$$ "\n"; (* scan comment *) val scan_comment = Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos); (** token sources **) local fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2; fun token k ss = Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.content ss), Slot); fun token' (mk_value, k) ss = if mk_value then Token ( (Symbol_Pos.implode ss, Symbol_Pos.range ss) , (k, Symbol_Pos.content ss) , Value (SOME (Source (map (fn (s, pos) => Token (("", (pos, Position.none)), (k, s), Slot)) ss)))) else token k ss; fun token_t k = token' (true, k) fun token_range k (pos1, (ss, pos2)) = Token (Symbol_Pos.implode_range (pos1, pos2) ss, (k, Symbol_Pos.content ss), Slot); fun scan_token keywords = !!! 
"bad input" (Symbol_Pos.scan_string_qq err_prefix >> token_range Token.String || Symbol_Pos.scan_string_bq err_prefix >> token_range Token.Alt_String || scan_verbatim >> token_range Token.Verbatim || scan_cartouche >> token_range Token.Cartouche || scan_comment >> token_range (Token.Comment NONE) || Comment.scan_outer >> (fn (k, ss) => token (Token.Comment (SOME k)) ss) || scan_space >> token Token.Space || Scan.repeats1 ($$$ "+") >> token_t Token.Sym_Ident || Scan.repeats1 ($$$ "@") >> token_t Token.Sym_Ident || Scan.repeats1 ($$$ "&") >> token_t Token.Sym_Ident || (Scan.max token_leq (Scan.max token_leq (Scan.literal (C_Keyword.major_keywords keywords) >> pair Token.Command) (Scan.literal (C_Keyword.minor_keywords keywords) >> pair Token.Keyword)) (Lexicon.scan_longid >> pair Token.Long_Ident || Scan.max token_leq (C_Lex.scan_ident >> pair Token.Ident) (Lexicon.scan_id >> pair Token.Ident) || Lexicon.scan_var >> pair Token.Var || Lexicon.scan_tid >> pair Token.Type_Ident || Lexicon.scan_tvar >> pair Token.Type_Var || Symbol_Pos.scan_float >> pair Token.Float || Symbol_Pos.scan_nat >> pair Token.Nat || scan_symid >> pair Token.Sym_Ident)) >> uncurry (token' o pair false)); fun recover msg = (Symbol_Pos.recover_string_qq || Symbol_Pos.recover_string_bq || recover_verbatim || Symbol_Pos.recover_cartouche || Symbol_Pos.recover_comment || Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single) >> (single o token (Token.Error msg)); in fun make_source keywords {strict} = let val scan_strict = Scan.bulk (scan_token keywords); val scan = if strict then scan_strict else Scan.recover scan_strict recover; in Source.source Symbol_Pos.stopper scan end; end; (* explode *) fun tokenize keywords strict syms = Source.of_list syms |> make_source keywords strict |> Source.exhaust; fun explode keywords pos text = Symbol_Pos.explode (text, pos) |> tokenize keywords {strict = false}; fun explode0 keywords = explode keywords Position.none; (* print name in parsable form *) (* make *) 
(** parsers **) type 'a parser = T list -> 'a * T list; type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list); (* read body -- e.g. antiquotation source *) fun read_with_commands'0 keywords syms = Source.of_list syms |> make_source keywords {strict = false} |> Source.filter (not o is_proper) |> Source.exhaust fun read_with_commands' keywords scan syms = Source.of_list syms |> make_source keywords {strict = false} |> Source.filter is_proper |> Source.source stopper (Scan.recover (Scan.bulk scan) (fn msg => Scan.one (not o is_eof) >> (fn tok => [C_Scan.Right let val msg = case is_error' tok of SOME msg0 => msg0 ^ " (" ^ msg ^ ")" | NONE => msg in ( msg , [((pos_of tok, Markup.bad ()), msg)] , tok) end]))) |> Source.exhaust; fun read_antiq' keywords scan = read_with_commands' keywords (scan >> C_Scan.Left); (* wrapped syntax *) local fun make src pos = Token.make src pos |> #1 fun make_default text pos = make ((~1, 0), text) pos fun explode keywords pos text = case Token.explode keywords pos text of [tok] => tok | _ => make_default text pos in fun syntax' f = I #> map (fn tok0 as Token ((source, (pos1, pos2)), (kind, x), _) => if is_stack1 tok0 orelse is_stack2 tok0 orelse is_stack3 tok0 then make_default source pos1 else if is_eof tok0 then Token.eof else if delimited_kind kind then explode Keyword.empty_keywords pos1 (unparse' tok0) else let val tok1 = explode ((case kind of Token.Keyword => Keyword.add_keywords [((x, Position.none), Keyword.no_spec)] | Token.Command => Keyword.add_keywords [( (x, Position.none), Keyword.command_spec(Keyword.thy_decl, []))] | _ => I) Keyword.empty_keywords) pos1 source in if Token.kind_of tok1 = kind then tok1 else make ( ( immediate_kinds' kind , case Position.distance_of (pos1, pos2) of NONE => 0 | SOME i => i) , source) pos1 end) #> f #> apsnd (map (fn tok => Token ( (Token.source_of tok, Token.range_of [tok]) , (Token.kind_of tok, Token.content_of tok) , Slot))) end end; type 'a c_parser = 'a 
C_Token.parser; type 'a c_context_parser = 'a C_Token.context_parser; \ ML \ \\<^file>\~~/src/Pure/Isar/parse.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay parsers for C syntax. A partial copy is unfortunately necessary due to signature restrictions. *) (* based on: Title: Pure/Isar/parse.ML Author: Markus Wenzel, TU Muenchen Generic parsers for Isabelle/Isar outer syntax. *) \ signature C_PARSE = sig type T type src = T list type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) (**) val C_source: Input.source parser val star: string parser (**) val group: (unit -> string) -> (T list -> 'a) -> T list -> 'a val !!! : (T list -> 'a) -> T list -> 'a val !!!! : (T list -> 'a) -> T list -> 'a val not_eof: T parser val token: 'a parser -> T parser val range: 'a parser -> ('a * Position.range) parser val position: 'a parser -> ('a * Position.T) parser val input: 'a parser -> Input.source parser val inner_syntax: 'a parser -> string parser val command: string parser val keyword: string parser val short_ident: string parser val long_ident: string parser val sym_ident: string parser val dots: string parser val minus: string parser val term_var: string parser val type_ident: string parser val type_var: string parser val number: string parser val float_number: string parser val string: string parser val string_position: (string * Position.T) parser val alt_string: string parser val verbatim: string parser val cartouche: string parser val eof: string parser val command_name: string -> string parser val keyword_with: (string -> bool) -> string parser val keyword_markup: bool * Markup.T -> string -> string parser val keyword_improper: string -> string parser val $$$ : string -> string parser val reserved: string -> string parser val underscore: string parser val maybe: 'a parser -> 'a option parser val maybe_position: ('a * Position.T) parser -> ('a option * Position.T) parser val opt_keyword: string 
-> bool parser val opt_bang: bool parser val begin: string parser val opt_begin: bool parser val nat: int parser val int: int parser val real: real parser val enum_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum1_positions: string -> 'a parser -> ('a list * Position.T list) parser val enum: string -> 'a parser -> 'a list parser val enum1: string -> 'a parser -> 'a list parser val and_list: 'a parser -> 'a list parser val and_list1: 'a parser -> 'a list parser val enum': string -> 'a context_parser -> 'a list context_parser val enum1': string -> 'a context_parser -> 'a list context_parser val and_list': 'a context_parser -> 'a list context_parser val and_list1': 'a context_parser -> 'a list context_parser val list: 'a parser -> 'a list parser val list1: 'a parser -> 'a list parser val name: string parser val name_range: (string * Position.range) parser val name_position: (string * Position.T) parser val binding: binding parser val embedded: string parser val embedded_input: Input.source parser val embedded_position: (string * Position.T) parser val text: string parser val path: string parser val path_binding: (string * Position.T) parser val session_name: (string * Position.T) parser val theory_name: (string * Position.T) parser val liberal_name: string parser val parname: string parser val parbinding: binding parser val class: string parser val sort: string parser val type_const: string parser val arity: (string * string list * string) parser val multi_arity: (string list * string list * string) parser val type_args: string list parser val type_args_constrained: (string * string option) list parser val typ: string parser val mixfix: mixfix parser val mixfix': mixfix parser val opt_mixfix: mixfix parser val opt_mixfix': mixfix parser val syntax_mode: Syntax.mode parser val where_: string parser val const_decl: (string * string * mixfix) parser val const_binding: (binding * string * mixfix) parser val params: (binding * string option * 
mixfix) list parser val vars: (binding * string option * mixfix) list parser val for_fixes: (binding * string option * mixfix) list parser val ML_source: Input.source parser val document_source: Input.source parser val document_marker: Input.source parser val const: string parser val term: string parser val prop: string parser val literal_fact: string parser val propp: (string * string list) parser val termp: (string * string list) parser val private: Position.T parser val qualified: Position.T parser val target: (string * Position.T) parser val opt_target: (string * Position.T) option parser val args: T list parser val args1: (string -> bool) -> T list parser val attribs: src list parser val opt_attribs: src list parser val thm_sel: Facts.interval list parser val thm: (Facts.ref * src list) parser val thms1: (Facts.ref * src list) list parser val options: ((string * Position.T) * (string * Position.T)) list parser end; structure C_Parse: C_PARSE = struct type T = C_Token.T type src = T list type 'a parser = T list -> 'a * T list type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list) structure Token = struct open Token open C_Token end (** error handling **) (* group atomic parsers (no cuts!) *) fun group s scan = scan || Scan.fail_with (fn [] => (fn () => s () ^ " expected,\nbut end-of-input was found") | tok :: _ => (fn () => (case Token.text_of tok of (txt, "") => s () ^ " expected,\nbut " ^ txt ^ Position.here (Token.pos_of tok) ^ " was found" | (txt1, txt2) => s () ^ " expected,\nbut " ^ txt1 ^ Position.here (Token.pos_of tok) ^ " was found:\n" ^ txt2))); (* cut *) fun cut kind scan = let fun get_pos [] = " (end-of-input)" | get_pos (tok :: _) = Position.here (Token.pos_of tok); fun err (toks, NONE) = (fn () => kind ^ get_pos toks) | err (toks, SOME msg) = (fn () => let val s = msg () in if String.isPrefix kind s then s else kind ^ get_pos toks ^ ": " ^ s end); in Scan.!! err scan end; fun !!! 
scan = cut "Annotation syntax error" scan; fun !!!! scan = cut "Corrupted annotation syntax in presentation" scan; (** basic parsers **) (* tokens *) fun RESET_VALUE atom = (*required for all primitive parsers*) Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (Token.assign NONE arg; x)); val not_eof = RESET_VALUE (Scan.one Token.not_eof); fun token atom = Scan.ahead not_eof --| atom; fun range scan = (Scan.ahead not_eof >> (Token.range_of o single)) -- scan >> Library.swap; fun position scan = (Scan.ahead not_eof >> Token.pos_of) -- scan >> Library.swap; fun input atom = Scan.ahead atom |-- not_eof >> Token.input_of; fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.inner_syntax_of; fun kind k = group (fn () => Token.str_of_kind k) (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of)); val command = kind Token.Command; val keyword = kind Token.Keyword; val short_ident = kind Token.Ident; val long_ident = kind Token.Long_Ident; val sym_ident = kind Token.Sym_Ident; val term_var = kind Token.Var; val type_ident = kind Token.Type_Ident; val type_var = kind Token.Type_Var; val number = kind Token.Nat; val float_number = kind Token.Float; val string = kind Token.String; val alt_string = kind Token.Alt_String; val verbatim = kind Token.Verbatim; val cartouche = kind Token.Cartouche; val eof = kind Token.EOF; fun command_name x = group (fn () => Token.str_of_kind Token.Command ^ " " ^ quote x) (RESET_VALUE (Scan.one (fn tok => Token.is_command tok andalso Token.content_of tok = x))) >> Token.content_of; fun keyword_with pred = RESET_VALUE (Scan.one (Token.keyword_with pred) >> Token.content_of); fun keyword_markup markup x = group (fn () => Token.str_of_kind Token.Keyword ^ " " ^ quote x) (Scan.ahead not_eof -- keyword_with (fn y => x = y)) >> (fn (tok, x) => (Token.assign (SOME (Token.Literal markup)) tok; x)); val keyword_improper = keyword_markup (true, Markup.improper); val $$$ = keyword_markup (false, Markup.quasi_keyword); fun reserved 
x = group (fn () => "reserved identifier " ^ quote x) (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of)); val dots = sym_ident :-- (fn "\" => Scan.succeed () | _ => Scan.fail) >> #1; val minus = sym_ident :-- (fn "-" => Scan.succeed () | _ => Scan.fail) >> #1; val underscore = sym_ident :-- (fn "_" => Scan.succeed () | _ => Scan.fail) >> #1; fun maybe scan = underscore >> K NONE || scan >> SOME; fun maybe_position scan = position (underscore >> K NONE) || scan >> apfst SOME; val nat = number >> (#1 o Library.read_int o Symbol.explode); val int = Scan.optional (minus >> K ~1) 1 -- nat >> op *; val real = float_number >> Value.parse_real || int >> Real.fromInt; fun opt_keyword s = Scan.optional ($$$ "(" |-- !!! (($$$ s >> K true) --| $$$ ")")) false; val opt_bang = Scan.optional ($$$ "!" >> K true) false; val begin = $$$ "begin"; val opt_begin = Scan.optional (begin >> K true) false; (* enumerations *) fun enum1_positions sep scan = scan -- Scan.repeat (position ($$$ sep) -- !!! scan) >> (fn (x, ys) => (x :: map #2 ys, map (#2 o #1) ys)); fun enum_positions sep scan = enum1_positions sep scan || Scan.succeed ([], []); fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! 
scan); fun enum sep scan = enum1 sep scan || Scan.succeed []; fun enum1' sep scan = scan ::: Scan.repeat (Scan.lift ($$$ sep) |-- scan); fun enum' sep scan = enum1' sep scan || Scan.succeed []; fun and_list1 scan = enum1 "and" scan; fun and_list scan = enum "and" scan; fun and_list1' scan = enum1' "and" scan; fun and_list' scan = enum' "and" scan; fun list1 scan = enum1 "," scan; fun list scan = enum "," scan; (* names and embedded content *) val name = group (fn () => "name") (short_ident || long_ident || sym_ident || number || string); val name_range = input name >> Input.source_content_range; val name_position = input name >> Input.source_content; val string_position = input string >> Input.source_content; val binding = name_position >> Binding.make; val embedded = group (fn () => "embedded content") (cartouche || string || short_ident || long_ident || sym_ident || term_var || type_ident || type_var || number); val embedded_input = input embedded; val embedded_position = embedded_input >> Input.source_content; val text = group (fn () => "text") (embedded || verbatim); val path = group (fn () => "file name/path specification") embedded; val path_binding = group (fn () => "path binding (strict file name)") (position embedded); val session_name = group (fn () => "session name") name_position; val theory_name = group (fn () => "theory name") name_position; val liberal_name = keyword_with Token.ident_or_symbolic || name; val parname = Scan.optional ($$$ "(" |-- name --| $$$ ")") ""; val parbinding = Scan.optional ($$$ "(" |-- binding --| $$$ ")") Binding.empty; (* type classes *) val class = group (fn () => "type class") (inner_syntax embedded); val sort = group (fn () => "sort") (inner_syntax embedded); val type_const = group (fn () => "type constructor") (inner_syntax embedded); val arity = type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! 
(list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; val multi_arity = and_list1 type_const -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> Scan.triple2; (* types *) val typ = group (fn () => "type") (inner_syntax embedded); fun type_arguments arg = arg >> single || $$$ "(" |-- !!! (list1 arg --| $$$ ")") || Scan.succeed []; val type_args = type_arguments type_ident; val type_args_constrained = type_arguments (type_ident -- Scan.option ($$$ "::" |-- !!! sort)); (* mixfix annotations *) local val mfix = input (string || cartouche); val mixfix_ = mfix -- !!! (Scan.optional ($$$ "[" |-- !!! (list nat --| $$$ "]")) [] -- Scan.optional nat 1000) >> (fn (sy, (ps, p)) => fn range => Mixfix (sy, ps, p, range)); val structure_ = $$$ "structure" >> K Structure; val binder_ = $$$ "binder" |-- !!! (mfix -- ($$$ "[" |-- nat --| $$$ "]" -- nat || nat >> (fn n => (n, n)))) >> (fn (sy, (p, q)) => fn range => Binder (sy, p, q, range)); val infixl_ = $$$ "infixl" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixl (sy, p, range))); val infixr_ = $$$ "infixr" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infixr (sy, p, range))); val infix_ = $$$ "infix" |-- !!! (mfix -- nat >> (fn (sy, p) => fn range => Infix (sy, p, range))); val mixfix_body = mixfix_ || structure_ || binder_ || infixl_ || infixr_ || infix_; fun annotation guard body = Scan.trace ($$$ "(" |-- guard (body --| $$$ ")")) >> (fn (mx, toks) => mx (Token.range_of toks)); fun opt_annotation guard body = Scan.optional (annotation guard body) NoSyn; in val mixfix = annotation !!! mixfix_body; val mixfix' = annotation I mixfix_body; val opt_mixfix = opt_annotation !!! mixfix_body; val opt_mixfix' = opt_annotation I mixfix_body; end; (* syntax mode *) val syntax_mode_spec = ($$$ "output" >> K ("", false)) || name -- Scan.optional ($$$ "output" >> K false) true; val syntax_mode = Scan.optional ($$$ "(" |-- !!! 
(syntax_mode_spec --| $$$ ")")) Syntax.mode_default; (* fixes *) val where_ = $$$ "where"; val const_decl = name -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val const_binding = binding -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> Scan.triple1; val param_mixfix = binding -- Scan.option ($$$ "::" |-- typ) -- mixfix' >> (single o Scan.triple1); val params = (binding -- Scan.repeat binding) -- Scan.option ($$$ "::" |-- !!! (Scan.ahead typ -- embedded)) >> (fn ((x, ys), T) => (x, Option.map #1 T, NoSyn) :: map (fn y => (y, Option.map #2 T, NoSyn)) ys); val vars = and_list1 (param_mixfix || params) >> flat; val for_fixes = Scan.optional ($$$ "for" |-- !!! vars) []; (* embedded source text *) val ML_source = input (group (fn () => "ML source") text); val document_source = input (group (fn () => "document source") text); val document_marker = group (fn () => "document marker") (RESET_VALUE (Scan.one Token.is_document_marker >> Token.input_of)); (* terms *) val const = group (fn () => "constant") (inner_syntax embedded); val term = group (fn () => "term") (inner_syntax embedded); val prop = group (fn () => "proposition") (inner_syntax embedded); val literal_fact = inner_syntax (group (fn () => "literal fact") (alt_string || cartouche)); (* patterns *) val is_terms = Scan.repeat1 ($$$ "is" |-- term); val is_props = Scan.repeat1 ($$$ "is" |-- prop); val propp = prop -- Scan.optional ($$$ "(" |-- !!! (is_props --| $$$ ")")) []; val termp = term -- Scan.optional ($$$ "(" |-- !!! (is_terms --| $$$ ")")) []; (* target information *) val private = position ($$$ "private") >> #2; val qualified = position ($$$ "qualified") >> #2; val target = ($$$ "(" -- $$$ "in") |-- !!! 
(name_position --| $$$ ")"); val opt_target = Scan.option target; (* arguments within outer syntax *) local val argument_kinds = [Token.Ident, Token.Long_Ident, Token.Sym_Ident, Token.Var, Token.Type_Ident, Token.Type_Var, Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche, Token.Verbatim]; fun arguments is_symid = let fun argument blk = group (fn () => "argument") (Scan.one (fn tok => let val kind = Token.kind_of tok in member (op =) argument_kinds kind orelse Token.keyword_with is_symid tok orelse (blk andalso Token.keyword_with (fn s => s = ",") tok) end)); fun args blk x = Scan.optional (args1 blk) [] x and args1 blk x = (Scan.repeats1 (Scan.repeat1 (argument blk) || argsp "(" ")" || argsp "[" "]")) x and argsp l r x = (token ($$$ l) ::: !!! (args true @@@ (token ($$$ r) >> single))) x; in (args, args1) end; in val args = #1 (arguments Token.ident_or_symbolic) false; fun args1 is_symid = #2 (arguments is_symid) false; end; (* attributes *) val attrib = token liberal_name ::: !!! args; val attribs = $$$ "[" |-- list attrib --| $$$ "]"; val opt_attribs = Scan.optional attribs []; (* theorem references *) val thm_sel = $$$ "(" |-- list1 (nat --| minus -- nat >> Facts.FromTo || nat --| minus >> Facts.From || nat >> Facts.Single) --| $$$ ")"; val thm = $$$ "[" |-- attribs --| $$$ "]" >> pair (Facts.named "") || (literal_fact >> Facts.Fact || name_position -- Scan.option thm_sel >> Facts.Named) -- opt_attribs; val thms1 = Scan.repeat1 thm; (* options *) val option_name = group (fn () => "option name") name_position; val option_value = group (fn () => "option value") ((token real || token name) >> Token.content_of); val option = option_name :-- (fn (_, pos) => Scan.optional ($$$ "=" |-- !!! 
(position option_value)) ("true", pos)); val options = $$$ "[" |-- list1 option --| $$$ "]"; (** C basic parsers **) (* embedded source text *) val C_source = input (group (fn () => "C source") text); (* AutoCorres (MODIFIES) *) val star = sym_ident :-- (fn "*" => Scan.succeed () | _ => Scan.fail) >> #1; end; structure C_Parse_Native: C_PARSE = struct open Token open Parse (** C basic parsers **) (* embedded source text *) val C_source = input (group (fn () => "C source") text); (* AutoCorres (MODIFIES) *) val star = sym_ident :-- (fn "*" => Scan.succeed () | _ => Scan.fail) >> #1; end; \ ML \ \\<^file>\~~/src/Pure/Thy/thy_header.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Thy/thy_header.ML Author: Makarius Static theory header information. *) \ structure C_Thy_Header = struct val bootstrap_keywords = C_Keyword.empty_keywords' (Keyword.minor_keywords (Thy_Header.get_keywords @{theory})) (* theory data *) structure Data = Theory_Data ( type T = C_Keyword.keywords; val empty = bootstrap_keywords; - val extend = I; val merge = C_Keyword.merge_keywords; ); val add_keywords = Data.map o C_Keyword.add_keywords; val add_keywords_minor = Data.map o C_Keyword.add_keywords_minor; val get_keywords = Data.get; val get_keywords' = get_keywords o Proof_Context.theory_of; end \ end diff --git a/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy b/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy --- a/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy +++ b/thys/Isabelle_C/C11-FrontEnd/src/C_Parser_Annotation.thy @@ -1,260 +1,259 @@ (****************************************************************************** * Isabelle/C * * Copyright (c) 2018-2019 Université Paris-Saclay, Univ. Paris-Sud, France * * All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * * Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ******************************************************************************) section \Annotation Language: Command Parser Registration\ theory C_Parser_Annotation imports C_Lexer_Annotation C_Environment begin ML \ \\<^file>\~~/src/Pure/Isar/outer_syntax.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/Isar/outer_syntax.ML Author: Markus Wenzel, TU Muenchen Isabelle/Isar outer syntax. 
*) \ structure C_Annotation = struct (** outer syntax **) (* errors *) fun err_command msg name ps = error (msg ^ quote (Markup.markup Markup.keyword1 name) ^ Position.here_list ps); fun err_dup_command name ps = err_command "Duplicate annotation syntax command " name ps; (* command parsers *) datatype command_parser = Parser of (Symbol_Pos.T list * (bool * Symbol_Pos.T list)) * Position.range -> C_Env.eval_time c_parser; datatype command = Command of {comment: string, command_parser: command_parser, pos: Position.T, id: serial}; fun eq_command (Command {id = id1, ...}, Command {id = id2, ...}) = id1 = id2; fun new_command comment command_parser pos = Command {comment = comment, command_parser = command_parser, pos = pos, id = serial ()}; fun command_pos (Command {pos, ...}) = pos; fun command_markup def (name, Command {pos, id, ...}) = let (* PATCH: copied as such from Isabelle2020 *) fun entity_properties_of def serial pos = if def then (Markup.defN, Value.print_int serial) :: Position.properties_of pos else (Markup.refN, Value.print_int serial) :: Position.def_properties_of pos; in Markup.properties (entity_properties_of def id pos) (Markup.entity Markup.commandN name) end; (* theory data *) structure Data = Theory_Data ( type T = command Symtab.table; val empty = Symtab.empty; - val extend = I; fun merge data : T = data |> Symtab.join (fn name => fn (cmd1, cmd2) => if eq_command (cmd1, cmd2) then raise Symtab.SAME else err_dup_command name [command_pos cmd1, command_pos cmd2]); ); val get_commands = Data.get; val dest_commands = get_commands #> Symtab.dest #> sort_by #1; val lookup_commands = Symtab.lookup o get_commands; (* maintain commands *) fun add_command name cmd thy = let val _ = C_Keyword.is_command (C_Thy_Header.get_keywords thy) name orelse err_command "Undeclared outer syntax command " name [command_pos cmd]; val _ = (case lookup_commands thy name of NONE => () | SOME cmd' => err_dup_command name [command_pos cmd, command_pos cmd']); val _ = 
Context_Position.report_generic (Context.the_generic_context ()) (command_pos cmd) (command_markup true (name, cmd)); in Data.map (Symtab.update (name, cmd)) thy end; fun delete_command (name, pos) thy = let val _ = C_Keyword.is_command (C_Thy_Header.get_keywords thy) name orelse err_command "Undeclared outer syntax command " name [pos]; in Data.map (Symtab.delete name) thy end; (* implicit theory setup *) type command_keyword = string * Position.T; fun raw_command0 kind (name, pos) comment command_parser = C_Thy_Header.add_keywords [((name, pos), ((kind, []), [name]))] #> add_command name (new_command comment command_parser pos); fun raw_command (name, pos) comment command_parser = let val setup = add_command name (new_command comment command_parser pos) in Context.>> (Context.mapping setup (Local_Theory.background_theory setup)) end; fun command (name, pos) comment parse = raw_command (name, pos) comment (Parser parse); fun command'' kind (name, pos) comment parse = raw_command0 kind (name, pos) comment (Parser parse); val command' = command'' Keyword.thy_decl; (** toplevel parsing **) (* parse spans *) (* parse commands *) local fun scan_stack' f b = Scan.one f >> (pair b o C_Token.content_of') in val before_command = C_Token.scan_stack C_Token.is_stack1 -- Scan.optional ( scan_stack' C_Token.is_stack2 false || scan_stack' C_Token.is_stack3 true) (pair false []) end fun parse_command thy = Scan.ahead (before_command |-- C_Parse.position C_Parse.command) :|-- (fn (name, pos) => let val command_tags = before_command -- C_Parse.range C_Parse.command >> (fn (cmd, (_, range)) => (cmd, range)); in case lookup_commands thy name of SOME (cmd as Command {command_parser = Parser parse, ...}) => C_Parse.!!! 
(command_tags :|-- parse) >> pair [((pos, command_markup false (name, cmd)), "")] | NONE => Scan.fail_with (fn _ => fn _ => let val msg = "undefined command "; in msg ^ quote (Markup.markup Markup.keyword1 name) end) end) (* check commands *) fun command_reports thy tok = if C_Token.is_command tok then let val name = C_Token.content_of tok in (case lookup_commands thy name of NONE => [] | SOME cmd => [((C_Token.pos_of tok, command_markup false (name, cmd)), "")]) end else []; fun check_command ctxt (name, pos) = let val thy = Proof_Context.theory_of ctxt; val keywords = C_Thy_Header.get_keywords thy; in if C_Keyword.is_command keywords name then let val markup = C_Token.explode0 keywords name |> maps (command_reports thy) |> map (#2 o #1); val _ = Context_Position.reports ctxt (map (pair pos) markup); in name end else let val completion_report = Completion.make_report (name, pos) (fn completed => C_Keyword.dest_commands keywords |> filter completed |> sort_strings |> map (fn a => (a, (Markup.commandN, a)))); in error ("Bad command " ^ quote name ^ Position.here pos ^ completion_report) end end; end \ ML \ \\<^file>\~~/src/Pure/PIDE/resources.ML\\ (* Author: Frédéric Tuong, Université Paris-Saclay *) (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. *) \ structure C_Resources = struct (* load files *) fun parse_files cmd = Scan.ahead C_Parse.not_eof -- C_Parse.path >> (fn (tok, name) => fn thy => (case C_Token.get_files tok of [] => let val keywords = C_Thy_Header.get_keywords thy; val master_dir = Resources.master_directory thy; val pos = C_Token.pos_of tok; val delimited = Input.is_delimited (C_Token.input_of tok); val src_paths = C_Keyword.command_files keywords cmd (Path.explode name); in map (Command.read_file master_dir pos delimited) src_paths end | files => map Exn.release files)); end; \ end