
diff --git a/thys/Conditional_Transfer_Rule/CTR/CTR.ML b/thys/Conditional_Transfer_Rule/CTR/CTR.ML
--- a/thys/Conditional_Transfer_Rule/CTR/CTR.ML
+++ b/thys/Conditional_Transfer_Rule/CTR/CTR.ML
@@ -1,410 +1,409 @@
(* Title: CTR/CTR.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
The implementation of the command ctr.
*)
signature CTR =
sig
datatype alg_input =
ALG_PP of ((binding option * (Facts.ref * Token.src list)) * mixfix) list
| ALG_RP of
(
(
(
(string * (Facts.ref * Token.src list) option) option *
(string, string, Facts.ref) Element.ctxt list
) *
(string * string) list
) * ((binding option * (Facts.ref * Token.src list)) * mixfix) list
)
val ctr_parser : alg_input parser
val process_parametricity :
(binding option * thm) * mixfix ->
Proof.context ->
ctr_pp_out
val process_relativization :
(string * thm list option) option ->
Element.context list ->
(string * string) list ->
(binding option * thm) * mixfix ->
Proof.context ->
ctr_pp_out
val process_ctrs : alg_input -> Proof.context -> Proof.context
end;
structure CTR : CTR =
struct
(**** Data ****)
datatype alg_input =
ALG_PP of ((binding option * (Facts.ref * Token.src list)) * mixfix) list
| ALG_RP of
(
(
(
(string * (Facts.ref * Token.src list) option) option *
(string, string, Facts.ref) Element.ctxt list
) * (string * string) list
) * ((binding option * (Facts.ref * Token.src list)) * mixfix) list
);
(**** Parser ****)
local
val algorithm_parser = \<^keyword>\<open>relativization\<close> || \<^keyword>\<open>parametricity\<close>;
val synthesis_parser =
Scan.option (\<^keyword>\<open>synthesis\<close> -- Scan.option Parse.thm);
val type_specs_parser =
Scan.optional
(
\<^keyword>\<open>trp\<close> |--
Parse.and_list (kw_bo |-- Parse.type_var -- Parse.term --| kw_bc)
)
[];
val thm_specs_parser =
\<^keyword>\<open>in\<close> |--
Parse.and_list
(
(Scan.option (Parse.binding --| \<^keyword>\<open>:\<close>)) --
Parse.thm --
Parse.opt_mixfix'
);
fun rel_opt_thm_name s =
Scan.optional
(
(
Parse.binding -- Parse.opt_attribs || Parse.attribs >>
pair Binding.empty
) --| Parse.$$$ s
)
Binding.empty_atts;
(*
The function was ported with amendments from the file Parse_Spec.ML in the main
distribution of Isabelle 2021.
*)
val rel_loc_element =
\<^keyword>\<open>fixes\<close> |-- Parse.!!! Parse_Spec.locale_fixes >> Element.Fixes ||
\<^keyword>\<open>assumes\<close> |--
Parse.!!! (Parse.and_list1 (rel_opt_thm_name ":" -- Scan.repeat1 Parse.propp))
>> Element.Assumes;
(*
The function was ported with amendments from the file Parse_Spec.ML in the main
distribution of Isabelle 2021.
*)
val rel_context_element =
Parse.group (fn () => "context element") rel_loc_element;
fun parametricity_parser tokens = thm_specs_parser tokens |>> ALG_PP;
fun relativization_parser tokens = tokens
|>
(
synthesis_parser --
Scan.repeat rel_context_element --
type_specs_parser --
thm_specs_parser
)
|>> ALG_RP;
in
val ctr_parser =
algorithm_parser :|--
(
fn c => case c of
"relativization" => relativization_parser
| "parametricity" => parametricity_parser
| _ => error "relativization or parametricity expected"
);
end;
-
(**** User input analysis ****)
fun mk_msg_ctr msg = "ctr: " ^ msg;
fun mk_msg_binrel c = c ^ ": trp must consist of (stv, binary relation) pairs" ;
fun mk_msg_binrel_ftv c = c ^
": the user-specified binary relations must be defined over type variables";
fun mk_msg_no_dup_stvs c = c ^ ": duplicate stvs";
fun mk_msg_no_dup_binrel_ftvs c = c ^
": duplicate ftvs in the specification of the binary relations";
fun mk_msg_def ctxt t = Syntax.string_of_term ctxt t ^ " is not a definition";
fun check_def ctxt thm =
let
val t = Thm.full_prop_of thm
val _ = (ctxt, t) |> #2 |> Logic.is_equals
orelse error (mk_msg_def ctxt t |> mk_msg_ctr)
in () end;
fun mk_msg_no_relator c = "no relator found for the type constructor " ^ c
fun mk_msg_invalid_relator c =
"the relator found for the type constructor " ^ c ^
" is not suitable (is there is a mismatch of type variables?)"
(**** Preprocessing ****)
(*** Preprocessing of the type-relation pairs ***)
fun preprocess_type_specs ctxt type_specs =
let
fun preprocess_entry ctxt ctxt' (T, relt) =
let
val v = T |> Syntax.parse_typ ctxt' |> dest_TVar |> #1
val relt = Syntax.read_term ctxt relt
val T = type_of relt
val _ = T |> HOLogic.is_binrelT
orelse error ("trp" |> mk_msg_binrel |> mk_msg_ctr)
val (U, V) = HOLogic.dest_binrelT T
val _ = is_TFree U andalso is_TFree V
orelse error ("trp" |> mk_msg_binrel_ftv |> mk_msg_ctr)
val S = V |> dest_TFree |> #2
in ((v, S), relt) end
val type_specs =
let val ctxt' = Proof_Context.init_global (Proof_Context.theory_of ctxt)
in map (preprocess_entry ctxt ctxt') type_specs end
val _ = type_specs |> map #1 |> has_duplicates op= |> not
orelse error ("trp" |> mk_msg_no_dup_stvs |> mk_msg_ctr)
val _ = type_specs
|> map #2
|> map (fn T => Term.add_tfrees T [])
|> flat
|> has_duplicates op=
|> not
orelse error ("trp" |> mk_msg_no_dup_binrel_ftvs |> mk_msg_ctr)
val ctxt' = type_specs
|> map (#2 #> (fn t => Term.add_frees t []))
|> flat
|> map #1
|> Variable.fix_new_vars ctxt
|> fold (Variable.declare_term) (map #2 type_specs)
in (type_specs, ctxt') end;
(*** Preprocessing of the context elements ***)
fun preprocess_declaration elems lthy =
let
val lthy' = Expression.read_statement elems (Element.Shows []) lthy |> #2
val assms = Assumption.local_prems_of lthy' lthy
val (assms', lthy'') = Thm.unvarify_local_fact lthy' assms
in (assms', lthy'') end;
fun preprocess_definition ctxt thm_spec =
let
val thm_spec' = apfst (apsnd (Local_Defs.meta_rewrite_rule ctxt)) thm_spec
val thm = thm_spec' |> #1 |> #2
val _ = check_def ctxt thm
val b = case thm_spec' |> #1 |> #1 of
SOME b => b
| NONE =>
let
val c = thm
|> Thm.cprop_of
|> Thm.dest_equals_lhs
|> Thm.term_of
|> head_of
|> dest_Const
|> fst
in
Binding.qualified_name_mandatory
(CTR_Utilities.qualified_name_of_const_name c ^ ".transferred")
end
in ((b, thm_spec' |> #1 |> #2), thm_spec' |> #2) end;
(*** Preprocessing of the theorem specification ***)
fun preprocess_thm_spec ctxt type_specs thm_spec =
let
(*auxiliary*)
fun gen_indexed_rel_names n c = ((replicate n c), (1 upto n))
||> map Int.toString
|> op~~
|> map (op^)
(*theorems*)
val thm_spec = preprocess_definition ctxt thm_spec
(*invent a new brel for each ftv not previously associated with a brel*)
val ftvs = thm_spec
|> (#1 #> #2 #> Thm.full_prop_of #> (fn t => Term.add_tvars t []))
|> distinct op=
|> subtract op= (map #1 type_specs)
val (cs, ctxt') =
Variable.variant_fixes (gen_indexed_rel_names (length ftvs) "A") ctxt
val Ss = map #2 ftvs
val (lhsTs, ctxt'') = Variable.invent_types Ss ctxt' |>> map TFree
val (rhsTs, ctxt''') = Variable.invent_types Ss ctxt'' |>> map TFree
val ts = map Free (cs ~~ map HOLogic.mk_binrelT (lhsTs ~~ rhsTs))
val ctxt'''' = fold Variable.declare_term ts ctxt'''
val type_specs = type_specs @ (ftvs ~~ ts)
in ((thm_spec, type_specs), ctxt'''') end;
+
(*** Preprocessing of the keyword synthesis ***)
fun preprocess_synthesis (synthesis : (string * thm list option) option) =
case synthesis of
SOME (_, thm_opt) =>
(
case thm_opt of
SOME simp_spec => (true, SOME simp_spec)
| NONE => (true, NONE)
)
| NONE => (false, NONE);
(**** Evaluation ****)
fun apply_algorithm ctxt' alg synthesis assms type_specs thm_spec =
let
fun mk_msg_relator f T = T
|> dest_Type
|> #1
|> f
|> mk_msg_ctr
fun mk_error_no_relator T = mk_msg_relator mk_msg_no_relator T
fun mk_error_invalid_relator T = mk_msg_relator mk_msg_invalid_relator T
val alg_out = case alg of
relativization =>
(
CTR_Relativization.apply ctxt' synthesis assms type_specs thm_spec
handle
TYPE ("pr_of_typ: no relator", [T], []) =>
error (mk_error_no_relator T)
| TYPE ("pr_of_typ: invalid relator", [T], []) =>
error (mk_error_invalid_relator T)
)
| parametricity => CTR_Parametricity.apply
ctxt' synthesis assms type_specs thm_spec
in alg_out end;
fun postprocess_alg_out b mf alg_out ctxt = case alg_out of
ALGRelativization thm =>
CTR_Postprocessing.postprocess_relativization b mf thm ctxt
| ALGParametricity thm =>
CTR_Postprocessing.postprocess_parametricity b thm ctxt
| ALGFailure => error "ctr: a transfer rule could not be established.";
fun process_definition ctxt' alg synthesis assms type_specs thm_spec ctxt =
let
val alg_out = thm_spec
|> #1
|> #2
|> apply_algorithm ctxt' alg synthesis assms type_specs
|> apply_alg_out (singleton (Proof_Context.export ctxt' ctxt))
in postprocess_alg_out (thm_spec |> #1 |> #1) (#2 thm_spec) alg_out ctxt end;
(*** Parametricity ***)
fun process_parametricity thm_spec lthy =
let val thm_spec' = preprocess_definition lthy thm_spec
in
process_definition lthy parametricity (false, NONE) [] [] thm_spec' lthy
end;
(*** Relativization ***)
fun preprocess_relativization lthy synthesis elems type_specs thm_spec =
let
val synthesis' = preprocess_synthesis synthesis
val (assms, lthy') = preprocess_declaration elems lthy
val (type_specs', lthy'') = preprocess_type_specs lthy' type_specs
val ((thm_spec', type_specs''), lthy''') =
preprocess_thm_spec lthy'' type_specs' thm_spec
in ((synthesis', assms, type_specs'', thm_spec'), lthy''') end;
fun process_relativization synthesis elems type_specs thm_spec lthy =
let
val ((synthesis, assms, type_specs, thm_spec), lthy') =
preprocess_relativization lthy synthesis elems type_specs thm_spec
val lthy'' = process_definition
lthy' relativization synthesis assms type_specs thm_spec lthy
in lthy'' end;
-
(**** Interface ****)
fun process_parametricities thm_specs lthy = fold
(
fn thm_spec => fn ctxt =>
process_parametricity thm_spec ctxt |> lthy_of_pp_out
)
thm_specs
lthy;
fun process_relativizations synthesis elems type_specs thm_specs lthy =
let
fun process_relativization' thm_spec ctxt =
process_relativization synthesis elems type_specs thm_spec ctxt
|> lthy_of_pp_out
in fold process_relativization' thm_specs lthy end;
fun process_ctrs args lthy =
let
fun preprocess_thm_specs lthy =
map (apfst (apsnd (singleton (Attrib.eval_thms lthy))))
fun process_ctrs_impl (ALG_PP thm_specs) lthy =
process_parametricities (preprocess_thm_specs lthy thm_specs) lthy
| process_ctrs_impl
(ALG_RP (((synthesis, elems), type_specs), thm_specs)) lthy =
let
val thm_specs' = preprocess_thm_specs lthy thm_specs
val synthesis' = Option.map
(apsnd (Option.map ((single #> Attrib.eval_thms lthy))))
synthesis
in
process_relativizations synthesis' elems type_specs thm_specs' lthy
end
in process_ctrs_impl args lthy end;
val _ =
Outer_Syntax.local_theory
\<^command_keyword>\<open>ctr\<close>
"automated synthesis of transfer rules and relativization of definitions"
(ctr_parser >> process_ctrs);
end;
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/CTR/CTR_Reference.thy b/thys/Conditional_Transfer_Rule/CTR/CTR_Reference.thy
--- a/thys/Conditional_Transfer_Rule/CTR/CTR_Reference.thy
+++ b/thys/Conditional_Transfer_Rule/CTR/CTR_Reference.thy
@@ -1,399 +1,401 @@
(* Title: CTR/CTR_Reference.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
Reference manual for the CTR.
*)
section\<open>CTR\<close>
theory CTR_Reference
imports
CTR
"../UD/UD_Reference"
begin
subsection\<open>Introduction\<close>
subsubsection\<open>Background\<close>
text\<open>
This section presents a reference manual for the sub-framework CTR
that can be used for the automated synthesis of
conditional transfer rules.
The CTR evolved from the frameworks \textit{Conditional Parametricity} (CP)
\cite{gilcher_conditional_2017} and Types-To-Sets,
both of which are available as part of the main distribution of Isabelle.
However, it requires neither the axiom LT nor the axiom UO
associated with Types-To-Sets for its operation.
The CTR introduces the following Isabelle/Isar commands:
\[
\begin{array}{rcl}
@{command ctr} & : & \<open>local_theory \<rightarrow> local_theory\<close>\\
@{command ctr_relator} & : & \<open>local_theory \<rightarrow> local_theory\<close>\\
\end{array}
\]
\<close>
subsubsection\<open>Purpose and scope\<close>
text\<open>
The primary functionality of the CTR is available via the
command @{command ctr}. The command @{command ctr}, given a definition
of a constant, attempts to provide a conditional transfer rule for this
constant, possibly under arbitrary user-defined side conditions.
The process of the synthesis of a transfer rule for
a constant may or may not involve the declaration and synthesis of a
definition of a new (\textit{relativized}) constant.
The command provides an interface for the application of two
algorithms for the synthesis of the transfer rules; the scopes of
applicability of these algorithms are restricted and overlapping.
The first algorithm (\textit{CTR I}) was developed and implemented in
\cite{gilcher_conditional_2017}.
An outline of the second algorithm (\textit{CTR II})
was proposed in \cite{lammich_automatic_2013} and \cite{immler_smooth_2019}:
CTR II relies on the functionality of the
@{method transfer_prover}
(see subsection 4.6 in \cite{kuncar_types_2015}).
More details about CTR II are given in the next subsection.
The command @{command ctr_relator} can be used for the
registration of the so-called \textit{ctr-relators}, which need to be provided
for every non-nullary type constructor that occurs in the type of the
constant-instance whose definition is passed as an argument to CTR II.
However, as a fallback solution, the command @{command ctr} may
use the \textit{relators} that are associated with the standard
\textit{BNF} infrastructure
of Isabelle/HOL (e.g., see \cite{traytel_foundational_2012}).
The only condition for the registration of a ctr-relator
is that it satisfies the type constraint
associated with the action of a BNF on relations (e.g., see
\cite{traytel_foundational_2012} or \cite{blanchette_bindings_2019}).
\<close>
subsubsection\<open>Related and previous work\<close>
text\<open>
As already mentioned, the CTR evolved from the framework
CP that is available as part of the main
distribution of Isabelle. Furthermore, CTR provides an interface to the main
functionality of the framework CP
and builds upon many of its ideas.
The primary reason for developing the command @{command ctr}, instead
of extending the implementation of the existing command
@{command parametric_constant} provided as part of CP,
was the philosophy of non-intervention with the
development version of Isabelle that was adopted at the onset of the design of
the CTR. In the future, the functionality of the two
commands could perhaps be integrated.
It should also be mentioned that the Isabelle/ML code from the main
distribution of Isabelle was frequently reused during the
-development of CTR.
+development of CTR. Lastly, it should be mentioned that the
+framework SpecCheck \cite{kappelmann_speccheck_2021} was used for unit
+testing the framework CTR.
\<close>
subsection\<open>Theory\<close>
text\<open>
Assume the existence of an underlying well-formed definitional theory $D$ and
a context $\Gamma$; assume the existence of a map
$\mathsf{ctr}_{\mathsf{Rel}}$ from a finite set of non-nullary type constructors
to relators, mapping each non-nullary type constructor in its domain to a
valid relator for this type constructor. The inputs to CTR II are a
constant-instance definition
$\vdash c_{?\bar{\gamma}\ K} = \phi\left[?\bar{\gamma}\right] $ of the
constant-instance $c_{?\bar{\gamma}\ K}$ and the map $\mathsf{trp}$ from the
set of all schematic type variables in $?\bar{\gamma}$ to the set
of (fixed) binary relations
$x_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}$ in $\Gamma$ with
non-overlapping type variables ($?\bar{\gamma}$ corresponds to a sequence
of all distinct type variables that occur in the type
$?\bar{\gamma}\ K$, while $K$, applied using a postfix notation, contains all
information about the type constructors of the type $?\bar{\gamma}\ K$,
such that the non-nullary type constructors associated with $K$ form a subset
of the domain of $\mathsf{ctr}_{\mathsf{Rel}}$). CTR II consists of three parts:
\textit{synthesis of a parametricity relation},
\textit{synthesis of a transfer rule} and \textit{post-processing}.
\textbf{Synthesis of a parametricity relation}.
An outline of an algorithm for the conversion of a type to a
\textit{parametricity relation}
is given in subsection 4.1.1 in \cite{kuncar_types_2015}.
Thus, every nullary type constructor in $?\bar{\gamma}\ K$ is replaced by the
identity relation $=$, every non-nullary type constructor $\kappa$
in $?\bar{\gamma}\ K$ is replaced by its corresponding
relator $\mathsf{ctr}_{\mathsf{Rel}}\left(\kappa\right)$ and every type
variable $?\gamma$ in $?\bar{\gamma}\ K$ is replaced by
$\mathsf{trp}\left(?\gamma\right)$, obtaining the parametricity
relation $R_{\bar{\alpha}\ K \rightarrow \bar{\beta}\ K \rightarrow\mathbb{B}}$.
\textbf{Synthesis of a transfer rule}. First, the goal
$R\ \phi\left[\bar{\alpha}\right]\ \phi\left[\bar{\beta}\right]$ is created
in $\Gamma$ and an attempt to prove it is made using the algorithm associated
with the method
@{method transfer_prover}.
If the proof is successful, nothing else needs to be done in this part.
Otherwise, an attempt to find a solution for $?a$
in $R\ \left(?a_{\bar{\alpha}\ K}\right)\ \phi\left[\bar{\beta}\right]$ is made,
once again, using the
@{method transfer_prover}.
The result of the successful completion of the synthesis is a transfer
rule $\Gamma \vdash R\ \psi\left[\bar{\alpha},\bar{x}\right]\ \phi\left[\bar{\beta}\right]$,
where $\bar{x}$ is used to denote a sequence of typed variables, each of
which occurs free in the context $\Gamma$ (the success of this part
is not guaranteed).
\textbf{Post-processing}.
If $\psi\left[\bar{\alpha},\bar{x}\right] = \phi\left[\bar{\alpha}\right]$
after the successful completion of part 2 of the algorithm, then the
definitions of the constant-instances $c_{\bar{\alpha}\ K}$
and $c_{\bar{\beta}\ K}$ are folded, resulting in the
deduction \mbox{$\Gamma \vdash R\ c_{\bar{\alpha}\ K}\ c_{\bar{\beta}\ K}$}.
Otherwise,
if \mbox{$\psi\left[\bar{\alpha},\bar{x}\right]\ \neq \phi\left[\bar{\alpha}\right]$},
then a new constant $d$ is declared such
that \mbox{$\vdash d_{\sigma\left[?\bar{\alpha}\right]} = \left(\lambda \bar{x}.\ \psi\left[?\bar{\alpha},\bar{x}\right]\right)$},
where $\sigma$ is determined uniquely by $\bar{x}$ and $?\bar{\alpha}\ K$.
In this case, \mbox{$\Gamma \vdash R\ \psi\left[\bar{\alpha},\bar{x}\right]\ \phi\left[\bar{\beta}\right]$}
can be restated as \mbox{$\Gamma \vdash R\ \left(d_{\sigma\left[\bar{\alpha}\right]}\ \bar{x}\right)\ c_{\bar{\beta}\ K}$}.
This result can be exported to the global theory context and forms a conditional
transfer rule for $c_{?\bar{\gamma}\ K}$.
CTR II can perform the synthesis of the transfer rules for constants under
arbitrary user-defined side conditions automatically. However, the algorithm
guarantees neither that it can identify whether a transfer rule exists for
a given constant under a given set of side conditions,
nor that such a rule will be found if it does exist.
\<close>
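text\<open>
For instance, consider part 1 of CTR II applied to a constant-instance whose
type is $?\alpha\ \mathsf{list} \Rightarrow ?\beta \Rightarrow \mathbb{B}$, with
$\mathsf{trp}\left(?\alpha\right) = A$ and $\mathsf{trp}\left(?\beta\right) = B$,
and assuming, purely for the sake of illustration, that the type constructors
$\mathsf{list}$ and $\Rightarrow$ are associated with the standard relators
\<open>rel_list\<close> and \<open>rel_fun\<close>, respectively. The nullary type constructor
$\mathbb{B}$ is replaced by $=$, the non-nullary type constructors are replaced
by their relators and the type variables are replaced by $A$ and $B$,
yielding the parametricity relation \<open>rel_fun (rel_list A) (rel_fun B (=))\<close>.
\<close>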
subsection\<open>Syntax\<close>
subsubsection\<open>Background\<close>
text\<open>
This subsection presents the syntactic categories that are associated with the
command @{command ctr} and closely related auxiliary commands and attributes.
It is important to note that the presentation is only approximate.
\<close>
subsubsection\<open>\<open>ctr_relator\<close>\<close>
text\<open>
\begin{matharray}{rcl}
@{command_def ctr_relator} & : & \<open>local_theory \<rightarrow> local_theory\<close>
\end{matharray}
\<^medskip>
\<^rail>\<open>@@{command ctr_relator} term\<close>
\<^descr> \<^theory_text>\<open>ctr_relator\<close> \<open>c\<close> saves the ctr-relator \<open>c\<close> to a database of
ctr-relators for future reference. A ctr-relator is defined as any constant
that has the type of the form
\begin{center}
\<open>(\<alpha>\<^sub>1\<Rightarrow>\<beta>\<^sub>1\<Rightarrow>\<bool>)\<Rightarrow>\<dots>\<Rightarrow>(\<alpha>\<^sub>n\<Rightarrow>\<beta>\<^sub>n\<Rightarrow>\<bool>)\<Rightarrow>(\<alpha>\<^sub>1\<dots>\<alpha>\<^sub>n)\<kappa>\<Rightarrow>(\<beta>\<^sub>1\<dots>\<beta>\<^sub>n)\<kappa>\<Rightarrow>\<bool>\<close>,
\end{center}
where \<open>\<alpha>\<^sub>1\<dots>\<alpha>\<^sub>n\<close> and \<open>\<beta>\<^sub>1\<dots>\<beta>\<^sub>n\<close> are distinct type variables,
\<open>n\<close> is a positive integer and \<open>\<kappa>\<close> is a type constructor.
\<close>
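text\<open>
For example, the relator \<open>rel_option\<close>, which has the type
\<open>('a\<Rightarrow>'b\<Rightarrow>\<bool>)\<Rightarrow>'a option\<Rightarrow>'b option\<Rightarrow>\<bool>\<close>, is of the required form
with \<open>n = 1\<close> and \<open>\<kappa> = option\<close> and, therefore, could be registered using
\<^theory_text>\<open>ctr_relator rel_option\<close> (the invocation \<^theory_text>\<open>ctr_relator rel_L\<close> in the test
suite of the CTR provides a further example).
\<close>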
subsubsection\<open>\<open>ctr\<close>\<close>
(*
Certain elements of the content presented below were copied from the theory
Doc/Isar_Rel/Spec.thy in the main library of Isabelle.
*)
text\<open>
\begin{matharray}{rcl}
@{command_def ctr} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
\end{matharray}
\<^medskip>
\<^rail>\<open>
@@{command ctr} (@'parametricity' | @'relativization' rlt) in_def
;
rlt:
(synthesis*)
(cce*)
trp
;
synthesis: @'synthesis' (thm*)
;
cce:
@'fixes' vars |
@'assumes' (props + @'and')
;
trp: (@'trp' ('(' type_var term ')' + @'and'))?
;
in_def: ((binding @':')? thm mixfix?) + @'and'
\<close>
\<^descr> \<^theory_text>\<open>ctr\<close> provides access to two algorithms for the automated synthesis of
the transfer rules and the relativization of constants based on their
definitions.
\<^descr> \<^theory_text>\<open>parametricity\<close> indicates that CTR I needs
to be invoked by the command.
\<^descr> \<^theory_text>\<open>relativization\<close> indicates that CTR II needs to be
invoked by the command.
\<^descr> \<^theory_text>\<open>synthesis\<close> \<open>thm\<close> indicates that the relativization of the
inputs needs to be performed and post-processed using the simplifier with
the collection of rewrite rules stated as the fact \<open>thm\<close>. If the optional
argument \<open>thm\<close> is not provided, then the default \<open>simpset\<close> is used for
post-processing. If the keyword \<^theory_text>\<open>synthesis\<close> is omitted,
then no attempt to perform the relativization is made.
The keyword \<^theory_text>\<open>synthesis\<close> is relevant only for CTR II.
\<^descr> \<^theory_text>\<open>trp\<close> \<open>(?a\<^sub>1 A\<^sub>1)\<close> \<^theory_text>\<open>and\<close> \<open>\<dots>\<close> \<^theory_text>\<open>and\<close> \<open>(?a\<^sub>n A\<^sub>n)\<close> indicates
that the type variable that has the indexname \<open>?a\<^sub>i\<close> (\<open>1\<le>i\<le>n\<close>,
\<open>n\<close> is a non-negative integer) is meant to
correspond to the binary relation \<open>A\<^sub>i\<close> in the parametricity relation constructed by
the command prior to the statement of the transfer rule (for further
information see subsection 4.1 in \cite{kuncar_types_2015}). This is relevant only
for CTR II.
\<^descr> \<^theory_text>\<open>in\<close> \<open>(b:) def_thm (mixfix)\<close> is used for
the specification of the input to the algorithms that are associated with
the command @{command ctr}. \<open>def_thm\<close> is meant to be a fact that
consists of exactly one theorem of the form
\<open>A ?a\<^sub>1\<dots>?a\<^sub>n \<simeq> f ?a\<^sub>1\<dots>?a\<^sub>n\<close>,
where \<open>A\<close> is a constant, \<open>\<simeq>\<close> is either meta- or object-equality,
\<open>n\<close> is a non-negative integer,
\<open>?a\<^sub>1\<dots>?a\<^sub>n\<close> are schematic variables and \<open>f\<close> is a suitable formula in \<open>n\<close>
arguments (however, there are further implicit restrictions).
If a new constant is introduced by the command, then the optional argument
\<open>mixfix\<close> is used for the specification
of the concrete inner syntax for the constant in the usual manner
(e.g. see \cite{wenzel_isabelle/isar_2019-1}). The optional binding \<open>b\<close> is
used for the specification of the names of the entities that
are provided after the successful execution of the command. It is hoped
that the algorithm chosen for the specification of the names
is sufficiently intuitive and does not require further explanation.
If either \<open>b\<close> or \<open>mixfix\<close> is not specified by the user, then the command
introduces sensible defaults.
Multiple theorems may be provided after the
keyword \<^theory_text>\<open>in\<close>, employing the keyword \<^theory_text>\<open>and\<close> as a separator.
In this case, the parameterized algorithm associated with the command is
applied repeatedly to each input theorem in the order of their specification
from left to right and the local context is augmented incrementally.
\<close>
subsection\<open>Examples\label{sec:ctr_ex}\<close>
text\<open>
In this subsection, some of the capabilities of the CTR are
demonstrated by example. The examples that are presented in this subsection are
expected to be sufficient to begin an independent exploration of the framework,
but do not cover the entire spectrum of the functionality and the problems that
one may encounter while using it.
The examples that are presented in this subsection continue the example
of the application of the command @{command ud}
to the definition of the constant @{const mono} that was presented in
subsection \ref{sec:ud_ex}.
\<close>
subsubsection\<open>CTR I\<close>
text\<open>
As already explained, the command @{command ctr} can be used to invoke
the algorithm associated with the command @{command parametric_constant}
for the synthesis of a transfer rule for a given constant. For
example, the invocation of
\<close>
ctr parametricity
in mono: mono.with_def
text\<open>
generates the transfer rule @{thm [source] mono_transfer}:
\begin{center}
@{thm [display, mode=IfThenNoBox] mono_transfer[no_vars]}
\end{center}
A detailed explanation of the underlying algorithm can be found in
\cite{gilcher_conditional_2017}.
\<close>
subsubsection\<open>CTR II\<close>
text\<open>
The first example in this subsection demonstrates how CTR II can be
used to produce a parametricity property identical
to the one produced by CTR I above:
\<close>
ctr relativization
fixes A1 :: "'a \<Rightarrow> 'b \<Rightarrow> bool" and A2 :: "'c \<Rightarrow> 'd \<Rightarrow> bool"
assumes [transfer_rule]: "bi_total A1"
trp (?'a A1) and (?'b A2)
in mono': mono.with_def
text\<open>
This produces the theorem @{thm [source] mono'.transfer}:
@{thm [display, mode=IfThenNoBox] mono'.transfer[no_vars]}
which is identical to the theorem @{thm [source] mono_transfer} generated
by CTR I.
\<close>
text\<open>
Of course, there is very little value in trying to establish a parametricity
property using CTR II due to the availability of CTR I. However,
it is often the case that the constant is not parametric under a given
set of side conditions. In this case, it is often still possible to
define a new \textit{relativized constant} that is related to the
original constant under the associated parametricity relation.
It is expected that the most common application of CTR II will be the
synthesis of the relativized constants.
For example, suppose one desires to find a conditional transfer rule for
the constant @{const mono.with} such that (using the conventions from the
previous example) the relation \<open>A1\<close> is right total, but not necessarily
left total. While, under such a restriction on the involved relations, the
constant @{const mono.with} is no longer conditionally parametric, its
relativization exists and can be found using the transfer prover. To automate
the relativization process, the user merely needs to add the keyword
\<^theory_text>\<open>synthesis\<close> immediately after the keyword \<^theory_text>\<open>relativization\<close>:
\<close>
ctr relativization
synthesis ctr_simps
fixes A1 :: "'a \<Rightarrow> 'b \<Rightarrow> bool" and A2 :: "'c \<Rightarrow> 'd \<Rightarrow> bool"
assumes [transfer_domain_rule]: "Domainp A1 = (\<lambda>x. x \<in> U1)"
and [transfer_rule]: "right_total A1"
trp (?'a A1) and (?'b A2)
in mono_ow: mono.with_def
text\<open>
This produces the definition @{thm [source] mono_ow_def}:
@{thm [display, mode=IfThenNoBox] mono_ow_def[no_vars]}
and the theorem @{thm [source] mono_ow.transfer}:
@{thm [display, mode=IfThenNoBox] mono_ow.transfer[no_vars]}
It should be noted that, because the keyword \<^theory_text>\<open>synthesis\<close> was
followed by the name of the named collection of theorems
@{thm [source] ctr_simps}, this collection was used for post-processing
the result of the relativization. The user can omit simplification
entirely by specifying the collection @{thm [source] ctr_blank} instead
of @{thm [source] ctr_simps}.
\<close>
text\<open>\newpage\<close>
end
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_CTR_RELATOR.ML b/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_CTR_RELATOR.ML
--- a/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_CTR_RELATOR.ML
+++ b/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_CTR_RELATOR.ML
@@ -1,199 +1,185 @@
(* Title: CTR/Tests/CTR_TEST_PROCESS_CTR_RELATOR.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
signature CTR_TEST_PROCESS_CTR_RELATOR =
sig
type process_ctr_relator_in_type
-val execute_test_suite_process_ctr_relator :
- Proof.context -> (process_ctr_relator_in_type, local_theory)
- UT_Test_Suite.test_results_suite
+val test_suite : Proof.context -> unit -> unit
end;
structure ctr_test_process_ctr_relator : CTR_TEST_PROCESS_CTR_RELATOR =
struct
+
+(**** Background ****)
+
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
+
+
+
(**** Auxiliary ****)
fun mk_msg_ctr_relator msg = "ctr_relator: " ^ msg;
(*** Data ***)
type process_ctr_relator_in_type = string * Proof.context;
+(**** Visualization ****)
+
+fun show_ctr_relator_in (process_ctr_relator_in : process_ctr_relator_in_type) =
+ let
+ val (c, _) = process_ctr_relator_in
+ val name_c = "constant name: " ^ c
+ val ctxt_c = "lthy: unknown context"
+ val out_c = [name_c, ctxt_c] |> String.concatWith "\n"
+ in Pretty.str out_c end;
+
+
+
+
(**** Tests ****)
+(*** Wrapper ***)
+
+fun process_ctr_relator ((c, ctxt) : process_ctr_relator_in_type) =
+ CTR_Relators.process_ctr_relator c ctxt;
+
+
+
(*** Exceptions ***)
-fun test_exc_not_const ctxt =
- let
- val c = "a + b"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator "the input must be a constant term"
- in UT_Test_Suite.assert_exception "not a constant" args (ERROR err_msg) end;
-
-fun test_exc_not_body_bool ctxt =
+fun test_exc_template ctxt c err_msg_end_c test_name_c =
let
- val c = "Cons"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the body of the type of the input must be bool"
- in UT_Test_Suite.assert_exception "not bool body" args (ERROR err_msg) end;
-
-fun test_exc_not_binders_2 ctxt =
- let
- val c = "Ex"
val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the type of the input must have more than two binders"
- in UT_Test_Suite.assert_exception "not two binders" args (ERROR err_msg) end;
-
-fun test_exc_not_binders_binrelT ctxt =
- let
- val c = "not_binders_binrelT"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "all of the binders associated with the type of the input" ^
- "except the last two must be the binary relation types"
+ val err_msg = mk_msg_ctr_relator err_msg_end_c
+ val exn_prop = Prop.expect_failure (ERROR err_msg) process_ctr_relator
in
- UT_Test_Suite.assert_exception
- "not binary relation types" args (ERROR err_msg)
- end;
-
-fun test_exc_no_dup_binrelT ctxt =
- let
- val c = "no_dup_binrelT"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the types of the binders of the binary relations associated " ^
- "with the type of the input must have no duplicates"
- in
- UT_Test_Suite.assert_exception
- "no duplicates in the binary relation types" args (ERROR err_msg)
+ check_list_unit
+ show_ctr_relator_in
+ [args]
+ test_name_c
+ exn_prop
end;
-fun test_exc_not_binders_binrelT_ftv_stv ctxt =
- let
- val c = "not_binders_binrelT_ftv_stv"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the types of the binders of the binary relation types associated " ^
- "with the input type must be either free type variables or " ^
- "schematic type variables"
- in
- UT_Test_Suite.assert_exception
- "not binrel type ftv or stv" args (ERROR err_msg)
- end;
-
-fun test_exc_not_type_constructor_lhs ctxt =
- let
- val c = "not_type_constructor_lhs"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the last two binders of the input type must be " ^
- "the results of an application of a type constructor"
- in
- UT_Test_Suite.assert_exception
- "not type constructor lhs" args (ERROR err_msg)
- end;
+fun test_exc_not_const ctxt _ = test_exc_template
+ ctxt "a + b" "the input must be a constant term" "not a constant";
-fun test_exc_not_type_constructor_rhs ctxt =
- let
- val c = "not_type_constructor_rhs"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the last two binders of the input type must be " ^
- "the results of an application of a type constructor"
- in
- UT_Test_Suite.assert_exception
- "not type constructor rhs" args (ERROR err_msg)
- end;
+fun test_exc_not_body_bool ctxt _ = test_exc_template
+ ctxt
+ "Cons"
+ "the body of the type of the input must be bool"
+ "not bool body";
-fun test_exc_not_identical_type_constructors_lhs ctxt =
- let
- val c = "not_identical_type_constructors_lhs"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the sequences of the input types to the type constructors that are " ^
- "associated with the last two binders of the input type must be " ^
- "identical to the sequences of the types formed by concatenating the " ^
- "type variables associated with the left hand side and the right " ^
- "hand side of the binary relation types, respectively"
- in
- UT_Test_Suite.assert_exception
- "not identical type constructors lhs" args (ERROR err_msg)
- end;
+fun test_exc_not_binders_2 ctxt _ = test_exc_template
+ ctxt
+ "Ex"
+ "the type of the input must have more than two binders"
+ "not two binders";
-fun test_exc_not_identical_type_constructors_rhs ctxt =
- let
- val c = "not_identical_type_constructors_rhs"
- val args = (c, ctxt)
- val err_msg = mk_msg_ctr_relator
- "the sequences of the input types to the type constructors that are " ^
- "associated with the last two binders of the input type must be " ^
- "identical to the sequences of the types formed by concatenating the " ^
- "type variables associated with the left hand side and the right " ^
- "hand side of the binary relation types, respectively"
- in
- UT_Test_Suite.assert_exception
- "not identical type constructors rhs" args (ERROR err_msg)
- end;
+fun test_exc_not_binders_binrelT ctxt _ = test_exc_template
+ ctxt
+ "not_binders_binrelT"
+ (
+ "all of the binders associated with the type of the input" ^
+ "except the last two must be the binary relation types"
+ )
+ "not binary relation types";
+
+fun test_exc_no_dup_binrelT ctxt _ = test_exc_template
+ ctxt
+ "no_dup_binrelT"
+ (
+ "the types of the binders of the binary relations associated " ^
+ "with the type of the input must have no duplicates"
+ )
+ "no duplicates in the binary relation types";
+
+fun test_exc_not_binders_binrelT_ftv_stv ctxt _ = test_exc_template
+ ctxt
+ "not_binders_binrelT_ftv_stv"
+ (
+ "the types of the binders of the binary relation types associated " ^
+ "with the input type must be either free type variables or " ^
+ "schematic type variables"
+ )
+ "not binrel type ftv or stv";
+
+fun test_exc_not_type_constructor_lhs ctxt _ = test_exc_template
+ ctxt
+ "not_type_constructor_lhs"
+ (
+ "the last two binders of the input type must be " ^
+ "the results of an application of a type constructor"
+ )
+ "not type constructor lhs";
+
+fun test_exc_not_type_constructor_rhs ctxt _ = test_exc_template
+ ctxt
+ "not_type_constructor_rhs"
+ (
+ "the last two binders of the input type must be " ^
+ "the results of an application of a type constructor"
+ )
+ "not type constructor rhs";
+
+fun test_exc_not_identical_type_constructors_lhs ctxt _ = test_exc_template
+ ctxt
+ "not_identical_type_constructors_lhs"
+ (
+ "the sequences of the input types to the type constructors that are " ^
+ "associated with the last two binders of the input type must be " ^
+ "identical to the sequences of the types formed by concatenating the " ^
+ "type variables associated with the left hand side and the right " ^
+ "hand side of the binary relation types, respectively"
+ )
+ "not identical type constructors lhs";
+
+fun test_exc_not_identical_type_constructors_rhs ctxt _ = test_exc_template
+ ctxt
+ "not_identical_type_constructors_rhs"
+ (
+ "the sequences of the input types to the type constructors that are " ^
+ "associated with the last two binders of the input type must be " ^
+ "identical to the sequences of the types formed by concatenating the " ^
+ "type variables associated with the left hand side and the right " ^
+ "hand side of the binary relation types, respectively"
+ )
+ "not identical type constructors rhs";
-(**** Test suite ****)
-
-local
-
-fun process_ctr_relator_string_of_input
- (process_ctr_relator_in : process_ctr_relator_in_type) =
- let
- val (c, _) = process_ctr_relator_in
- val name_c = "constant name: " ^ c
- val ctxt_c = "lthy: unknown context"
- val out_c = [name_c, ctxt_c] |> String.concatWith "\n"
- in out_c end;
-
-fun process_ctr_relator ((c, ctxt) : process_ctr_relator_in_type) =
- CTR_Relators.process_ctr_relator c ctxt;
-
-in
+(**** Test Suite ****)
-fun mk_test_suite_process_ctr_relator ctxt =
- let
- val test_suite_process_ctr_relator = UT_Test_Suite.init
- "process_ctr_relator"
- process_ctr_relator
- process_ctr_relator_string_of_input
- (fn _ => "lthy: unknown local theory")
- in
- test_suite_process_ctr_relator
- |> test_exc_not_const ctxt
- |> test_exc_not_body_bool ctxt
- |> test_exc_not_binders_2 ctxt
- |> test_exc_not_binders_binrelT ctxt
- |> test_exc_no_dup_binrelT ctxt
- |> test_exc_not_binders_binrelT_ftv_stv ctxt
- |> test_exc_not_type_constructor_lhs ctxt
- |> test_exc_not_type_constructor_rhs ctxt
- |> test_exc_not_identical_type_constructors_lhs ctxt
- |> test_exc_not_identical_type_constructors_rhs ctxt
- end;
-
-end;
-
-fun execute_test_suite_process_ctr_relator ctxt =
- UT_Test_Suite.execute (mk_test_suite_process_ctr_relator ctxt)
+fun test_suite ctxt s =
+ [
+ test_exc_not_const,
+ test_exc_not_body_bool,
+ test_exc_not_binders_2,
+ test_exc_not_binders_binrelT,
+ test_exc_no_dup_binrelT,
+ test_exc_not_binders_binrelT_ftv_stv,
+ test_exc_not_type_constructor_lhs,
+ test_exc_not_type_constructor_rhs,
+ test_exc_not_identical_type_constructors_lhs,
+ test_exc_not_identical_type_constructors_rhs
+ ]
+ |> map (fn f => f ctxt s)
+ |> Lecker.test_group ctxt s;
end;
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_RELATIVIZATION.ML b/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_RELATIVIZATION.ML
--- a/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_RELATIVIZATION.ML
+++ b/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_TEST_PROCESS_RELATIVIZATION.ML
@@ -1,307 +1,304 @@
(* Title: CTR/Tests/CTR_TEST_PROCESS_RELATIVIZATION.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
signature CTR_TEST_PROCESS_RELATIVIZATION =
sig
type process_relativization_in_type
-val execute_test_suite_process_relativization :
- Proof.context -> (process_relativization_in_type, ctr_pp_out)
- UT_Test_Suite.test_results_suite
+val test_suite : Proof.context -> unit -> unit
end;
structure ctr_test_process_relativization : CTR_TEST_PROCESS_RELATIVIZATION =
struct
+(**** Background ****)
+
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
+
+
+
(**** Auxiliary ****)
fun mk_msg_ctr_error msg = "ctr: " ^ msg
(*** Data ***)
type process_relativization_in_type =
(
(string * thm list option) option *
Element.context list *
(string * string) list *
((binding option * thm) * mixfix)
) * Proof.context;
(*** Relation ***)
local
fun map_const_name (oldc, newc) (Const (c, T)) =
if oldc = c then Const (newc, T) else Const (c, T)
| map_const_name eqc (Abs (c, T, t)) = Abs (c, T, map_const_name eqc t)
| map_const_name eqc (t $ u) = map_const_name eqc t $ map_const_name eqc u
| map_const_name _ t = t
in
fun process_relativization_test_eq
(PPRelativization args1, PPRelativization args2) =
let
val act_lthy = #2 args1
val exp_lthy = #2 args2
val (act_ow_def_t, act_tr_t) = args1
|> #1
|>> Local_Defs.meta_rewrite_rule act_lthy
|> apply2 Thm.full_prop_of
val (exp_ow_def_t, exp_tr_t) = args2
|> #1
|>> Local_Defs.meta_rewrite_rule act_lthy
|> apply2 Thm.full_prop_of
val act_ow_def_lhst = act_ow_def_t |> Logic.dest_equals |> #1
val exp_ow_def_lhst = exp_ow_def_t |> Logic.dest_equals |> #1
val thy = Proof_Context.theory_of exp_lthy
val mapc =
(
act_ow_def_lhst |> head_of |> dest_Const |> #1,
exp_ow_def_lhst |> head_of |> dest_Const |> #1
)
val act_ow_def_t' = map_const_name mapc act_ow_def_t
val act_tr_t' = map_const_name mapc act_tr_t
val act_ow_def_eq = Pattern.equiv thy (act_ow_def_t', exp_ow_def_t)
val tr_eq = Pattern.equiv thy (act_tr_t', exp_tr_t)
val _ = act_ow_def_eq |> Bool.toString |> writeln
in act_ow_def_eq andalso tr_eq end
| process_relativization_test_eq
(PPParametricity args1, PPParametricity args2) =
(*careful: not needed; hence, a usable implementation is not provided*)
Thm.eq_thm (fst args1, fst args2)
| process_relativization_test_eq (_, _) = false;
end;
-
-(**** Tests ****)
-
-
-
-(*** Valid inputs ***)
-
-fun test_eq_trivial ctxt =
- let
- (*input*)
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "mono_with" |> the
- val process_relativization_in =
- ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- (*output*)
- val process_relativization_out =
- PPRelativization ((@{thm mono_ow_def}, @{thm mono_ow_transfer'}), ctxt)
- in
- UT_Test_Suite.assert_brel
- "output equivalence"
- process_relativization_test_eq
- process_relativization_out
- process_relativization_in
- end;
-
-
-
-(*** Exceptions ***)
-
-fun test_exc_def ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "exI" |> the
- val args =
- ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error
- (
- Syntax.string_of_term ctxt (Thm.full_prop_of @{thm exI}) ^
- " is not a definition"
- )
- in UT_Test_Suite.assert_exception "not a definition" args (ERROR err_msg) end;
-
-fun test_exc_binrel ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "binrel" |> the
- val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error
- "trp: trp must consist of (stv, binary relation) pairs"
- in UT_Test_Suite.assert_exception "binary relation" args (ERROR err_msg) end;
-
-fun test_exc_binrel_ftv ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "binrel_ftv" |> the
- val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error
- "trp: the user-specified binary relations must " ^
- "be defined over type variables"
- in
- UT_Test_Suite.assert_exception "binary relation ftv" args (ERROR err_msg)
- end;
-
-fun test_exc_dup_stvs ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "dup_stvs" |> the
- val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error "trp: duplicate stvs"
- in UT_Test_Suite.assert_exception "duplicate stv" args (ERROR err_msg) end;
-
-fun test_exc_dup_binrel_ftvs ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "dup_binrel_ftvs" |> the
- val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error
- "trp: duplicate ftvs in the specification of the binary relations"
- in
- UT_Test_Suite.assert_exception "duplicate binrel ftvs" args (ERROR err_msg)
- end;
-
-fun test_exc_no_relator ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "no_relator" |> the
- val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error
- "no relator found for the type constructor CTR_Tests.K"
- in
- UT_Test_Suite.assert_exception "no relator" args (ERROR err_msg)
- end;
-
-fun test_exc_invalid_relator ctxt =
- let
- val {synthesis, elems, type_specs, thm_specs, ...} =
- ctr_test_data_of_proof ctxt "invalid_relator" |> the
- val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
- val err_msg = mk_msg_ctr_error
- (
- "the relator found for the type constructor CTR_Tests.L " ^
- "is not suitable (is there is a mismatch of type variables?)"
- )
- in
- UT_Test_Suite.assert_exception "no relator" args (ERROR err_msg)
- end;
-
-
-
-
-(**** Test suite ****)
-
-local
+(**** Visualization ****)
fun string_of_elem_ctxt_fixes args = "fixes: " ^
(
args
|> map (fn (x, y, _) => (x, y))
|> ML_Syntax.print_list
(ML_Syntax.print_pair Binding.print (ML_Syntax.print_option I))
);
fun string_of_elem_ctxt_assumes ctxt args =
let
val string_of_fst = ML_Syntax.print_pair
Binding.print
(ML_Syntax.print_list (Token.pretty_src ctxt #> Pretty.string_of))
val string_of_snd =
ML_Syntax.print_list (ML_Syntax.print_pair I (ML_Syntax.print_list I))
in
"assumes: " ^
ML_Syntax.print_list (ML_Syntax.print_pair string_of_fst string_of_snd) args
end;
fun string_of_elem_ctxt_constrains _ = "constrains: unknown constrains";
fun string_of_elem_ctxt_defines _ = "defines: unknown defines";
fun string_of_elem_ctxt_notes _ = "notes: unknown notes";
fun string_of_elem_ctxt_lazy_notes _ = "lazy notes: unknown lazy notes";
fun string_of_elem_ctxt _ (Element.Fixes args : Element.context) =
string_of_elem_ctxt_fixes args
| string_of_elem_ctxt _ (Element.Constrains args) =
string_of_elem_ctxt_constrains args
| string_of_elem_ctxt ctxt (Element.Assumes args) =
string_of_elem_ctxt_assumes ctxt args
| string_of_elem_ctxt _ (Element.Defines args) =
string_of_elem_ctxt_defines args
| string_of_elem_ctxt _ (Element.Notes args) =
string_of_elem_ctxt_notes args
| string_of_elem_ctxt _ (Element.Lazy_Notes args) =
string_of_elem_ctxt_lazy_notes args
-fun process_relativization_string_of_input ctxt
+fun show_relativization_in
+ ctxt
(process_relativization_in : process_relativization_in_type) =
let
val ((synthesis_opt, elems, type_specs, thm_spec), lthy) =
process_relativization_in
val synthesis_opt_c =
let val synthesis_c = "synthesis: "
in
case synthesis_opt of
SOME synthesis =>
(
case #2 synthesis of
SOME _ => synthesis_c ^ "user-defined simpset"
| NONE => synthesis_c ^ "default simpset"
)
| NONE => synthesis_c ^ "none"
end
val elems_c = "elements:" ^
(
if null elems
then " none"
else "\n" ^
(
elems
|> map (string_of_elem_ctxt ctxt)
|> map (fn c => "\t" ^ c)
|> String.concatWith "\n"
)
)
val type_specs_c = "type_specs: " ^
ML_Syntax.print_list (ML_Syntax.print_pair I I) type_specs
val thm_spec_c =
"definition: " ^ (thm_spec |> #1 |> #2 |> Thm.string_of_thm lthy)
val lthy_c = "lthy: unknown local theory"
val out_c = [synthesis_opt_c, elems_c, type_specs_c, thm_spec_c, lthy_c]
|> String.concatWith "\n"
- in out_c end;
+ in Pretty.str out_c end;
+
+fun show_relativization ctxt =
+ Show.zip (show_relativization_in ctxt) (Pretty.str o string_of_pp_out)
+
+
+
+
+(**** Tests ****)
+
+
+
+(*** Wrapper ***)
fun process_relativization
(
((synthesis, assms, type_specs, thm_spec), lthy) :
process_relativization_in_type
) = CTR.process_relativization synthesis assms type_specs thm_spec lthy;
-in
-fun mk_test_suite_process_relativization ctxt =
+
+(*** Valid inputs ***)
+
+fun test_eq_trivial ctxt _ =
let
- val test_suite_process_relativization = UT_Test_Suite.init
- "process_relativization"
- process_relativization
- (process_relativization_string_of_input ctxt)
- string_of_pp_out
+ (*input*)
+ val {synthesis, elems, type_specs, thm_specs, ...} =
+ ctr_test_data_of_proof ctxt "mono_with" |> the
+ val process_relativization_in : process_relativization_in_type =
+ ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
+ (*output*)
+ val process_relativization_out =
+ PPRelativization ((@{thm mono_ow_def}, @{thm mono_ow_transfer'}), ctxt)
in
- test_suite_process_relativization
- |> test_eq_trivial ctxt
- |> test_exc_def ctxt
- |> test_exc_binrel ctxt
- |> test_exc_binrel_ftv ctxt
- |> test_exc_dup_stvs ctxt
- |> test_exc_dup_binrel_ftvs ctxt
- |> test_exc_no_relator ctxt
- |> test_exc_invalid_relator ctxt
+ check_list_unit
+ (show_relativization ctxt)
+ [(process_relativization_in, process_relativization_out)]
+ "equality"
+ (
+ Prop.prop
+ (
+ fn (val_in, exp_out) =>
+ process_relativization_test_eq
+ (process_relativization val_in, exp_out)
+ )
+ )
end;
-end;
-fun execute_test_suite_process_relativization ctxt =
- UT_Test_Suite.execute (mk_test_suite_process_relativization ctxt)
+
+(*** Exceptions ***)
+
+fun test_exc_template ctxt input_name_c err_msg_end_c test_name_c =
+ let
+ val {synthesis, elems, type_specs, thm_specs, ...} =
+ ctr_test_data_of_proof ctxt input_name_c |> the
+ val args = ((synthesis, elems, type_specs, the_single thm_specs), ctxt)
+ val err_msg = mk_msg_ctr_error err_msg_end_c
+ val exn_prop = Prop.expect_failure (ERROR err_msg) process_relativization
+ in
+ check_list_unit
+ (show_relativization_in ctxt)
+ [args]
+ test_name_c
+ exn_prop
+ end;
+
+fun test_exc_def ctxt _ =
+ let
+ val err_msg_end_c =
+ (
+ Syntax.string_of_term ctxt (Thm.full_prop_of @{thm exI}) ^
+ " is not a definition"
+ )
+ in test_exc_template ctxt "exI" err_msg_end_c "not a definition" end;
+
+fun test_exc_binrel ctxt _ =
+ let val err_msg_end_c = "trp: trp must consist of (stv, binary relation) pairs"
+ in test_exc_template ctxt "binrel" err_msg_end_c "binary relation" end;
+
+fun test_exc_binrel_ftv ctxt _ =
+ let
+ val err_msg_end_c =
+ "trp: the user-specified binary relations must " ^
+ "be defined over type variables"
+ in test_exc_template ctxt "binrel_ftv" err_msg_end_c "binary relation ftv" end;
+
+fun test_exc_dup_stvs ctxt _ =
+ let val err_msg_end_c = "trp: duplicate stvs"
+ in test_exc_template ctxt "dup_stvs" err_msg_end_c "duplicate stv" end;
+
+fun test_exc_dup_binrel_ftvs ctxt _ =
+ let
+ val err_msg_end_c =
+ "trp: duplicate ftvs in the specification of the binary relations"
+ in
+ test_exc_template
+ ctxt "dup_binrel_ftvs" err_msg_end_c "duplicate binrel ftvs"
+ end;
+
+fun test_exc_no_relator ctxt _ =
+ let val err_msg_end_c = "no relator found for the type constructor CTR_Tests.K"
+ in test_exc_template ctxt "no_relator" err_msg_end_c "no relator" end;
+
+fun test_exc_invalid_relator ctxt _ =
+ let
+ val err_msg_end_c =
+ (
+ "the relator found for the type constructor CTR_Tests.L " ^
+ "is not suitable (is there is a mismatch of type variables?)"
+ )
+ in
+ test_exc_template ctxt "invalid_relator" err_msg_end_c "invalid relator"
+ end;
+
+
+
+
+(**** Test Suite ****)
+
+fun test_suite ctxt s =
+ [
+ test_eq_trivial,
+ test_exc_def,
+ test_exc_binrel,
+ test_exc_binrel_ftv,
+ test_exc_dup_stvs,
+ test_exc_dup_binrel_ftvs,
+ test_exc_no_relator,
+ test_exc_invalid_relator
+ ]
+ |> map (fn f => f ctxt s)
+ |> Lecker.test_group ctxt s;
end;
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_Tests.thy b/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_Tests.thy
--- a/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_Tests.thy
+++ b/thys/Conditional_Transfer_Rule/CTR/Tests/CTR_Tests.thy
@@ -1,256 +1,251 @@
(* Title: CTR/Tests/CTR_Tests.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
A test suite for the sub-framework CTR.
*)
section\<open>A test suite for CTR\<close>
theory CTR_Tests
imports
"../CTR"
"../../IML_UT/IML_UT"
+ SpecCheck.SpecCheck_Dynamic
Complex_Main
keywords "ctr_test" :: thy_defn
begin
subsection\<open>Background\<close>
ML\<open>
type ctr_test_data =
{
ctr_type : string,
synthesis : (string * thm list option) option,
elems: (string, string, Facts.ref) Element.ctxt list,
type_specs : (string * string) list,
thm_specs : ((binding option * thm) * mixfix) list
};
structure CTRTestData = Generic_Data
(
type T = ctr_test_data Symtab.table
val empty = Symtab.empty
val merge = Symtab.merge (K true)
);
val get_ctr_test_data_generic = CTRTestData.get;
val get_ctr_test_data_proof = Context.Proof #> get_ctr_test_data_generic;
val get_ctr_test_data_global = Context.Theory #> get_ctr_test_data_generic;
fun test_data_of_generic context = context
|> get_ctr_test_data_generic
|> Symtab.lookup;
val ctr_test_data_of_proof = Context.Proof #> test_data_of_generic;
(*oversimplified: to be used with care*)
fun update_ctr_test_data k ctr_test_data =
Local_Theory.declaration
{pervasive=true, syntax=false}
(fn _ => (k, ctr_test_data) |> Symtab.update |> CTRTestData.map);
fun process_ctr_test_data (k, args) (lthy : local_theory) =
let
fun preprocess_thm_specs lthy =
map (apfst (apsnd (singleton (Attrib.eval_thms lthy))))
fun process_ctrs_impl (CTR.ALG_PP _) (lthy : local_theory) = lthy
| process_ctrs_impl
(CTR.ALG_RP (((synthesis, elems), type_specs), thm_specs))
(lthy : local_theory) =
let
val thm_specs' = preprocess_thm_specs lthy thm_specs
val synthesis' = Option.map
(apsnd (Option.map ((single #> Attrib.eval_thms lthy))))
synthesis
val data : ctr_test_data =
{
ctr_type = "relativization",
synthesis = synthesis',
elems = elems,
type_specs = type_specs,
thm_specs = thm_specs'
}
in update_ctr_test_data k data lthy end
in process_ctrs_impl args lthy end;
val ctr_test_parser = Parse.string -- CTR.ctr_parser;
val _ =
Outer_Syntax.local_theory
\<^command_keyword>\<open>ctr_test\<close>
"test setup for the command ctr"
(ctr_test_parser >> process_ctr_test_data);
\<close>
ud \<open>order.mono\<close>
ud mono' \<open>mono\<close>
definition mono_ow ::
"'a set \<Rightarrow> ('b \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "mono_ow UB leb lea f \<equiv> \<forall>x\<in>UB. \<forall>y\<in>UB. lea x y \<longrightarrow> leb (f x) (f y)"
typedef 'a K = \<open>{xs::'a list. length xs = 2}\<close>
by (simp add: Ex_list_of_length)
definition KK :: "'a K \<Rightarrow> 'a K \<Rightarrow> bool"
where "KK k1 k2 \<equiv> k1 = k2"
typedef 'a L = \<open>{xs::'a list. length xs = 2}\<close>
by (simp add: Ex_list_of_length)
definition LL :: "'a L \<Rightarrow> 'a L \<Rightarrow> bool"
where "LL k1 k2 \<equiv> k1 = k2"
definition rel_L ::
- "('a::group_add \<Rightarrow> 'b::group_add \<Rightarrow> bool) \<Rightarrow> 'a::group_add L \<Rightarrow> 'b::group_add L \<Rightarrow> bool"
+ "('a::group_add \<Rightarrow> 'b::group_add \<Rightarrow> bool) \<Rightarrow>
+ 'a::group_add L \<Rightarrow>
+ 'b::group_add L \<Rightarrow>
+ bool"
where "rel_L A b c = True"
ctr_relator rel_L
definition not_binders_binrelT ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('c \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
where "not_binders_binrelT R1 R2 a b = True"
definition no_dup_binrelT ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('c \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
where "no_dup_binrelT R1 R2 a b = True"
definition not_binders_binrelT_ftv_stv ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> (nat \<Rightarrow> 'c \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
where "not_binders_binrelT_ftv_stv R1 R2 a b = True"
definition not_type_constructor_lhs ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('c \<Rightarrow> 'd \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'a K \<Rightarrow> bool"
where "not_type_constructor_lhs R1 R2 a b = True"
definition not_type_constructor_rhs ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('c \<Rightarrow> 'd \<Rightarrow> bool) \<Rightarrow> 'a K \<Rightarrow> 'e \<Rightarrow> bool"
where "not_type_constructor_rhs R1 R2 a b = True"
definition not_identical_type_constructors ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('c \<Rightarrow> 'd \<Rightarrow> bool) \<Rightarrow> 'a K \<Rightarrow> 'e L \<Rightarrow> bool"
where "not_identical_type_constructors R1 R2 a b = True"
definition not_identical_type_constructors_lhs ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('c \<Rightarrow> 'd \<Rightarrow> bool) \<Rightarrow> 'a K \<Rightarrow> 'b K \<Rightarrow> bool"
where "not_identical_type_constructors_lhs R1 R2 a b = True"
definition not_identical_type_constructors_rhs ::
"('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a K \<Rightarrow> 'c K \<Rightarrow> bool"
where "not_identical_type_constructors_rhs R1 a b = True"
subsection\<open>Test data\<close>
lemma mono_ow_transfer':
includes lifting_syntax
assumes [transfer_domain_rule, transfer_rule]: "Domainp B = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
shows
"((A ===> A ===> (=)) ===> (B ===> B ===> (=)) ===> (B ===> A) ===> (=))
(mono_ow UB) mono.with"
unfolding mono_ow_def mono.with_def
by (transfer_prover_start, transfer_step+) simp
ctr_test "mono_with" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b \<open>A::'a\<Rightarrow>'b\<Rightarrow>bool\<close>) and (?'a B)
in mono_ow': mono.with_def
ctr_test "exI" relativization
in mono_ow'': exI
ctr_test "binrel" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b A) and (?'a B)
in mono_ow': mono.with_def
ctr_test "binrel_ftv" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b \<open>A::nat\<Rightarrow>'b\<Rightarrow>bool\<close>) and (?'a B)
in mono_ow': mono.with_def
ctr_test "dup_stvs" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b \<open>A::'a\<Rightarrow>'b\<Rightarrow>bool\<close>) and (?'b B)
in mono_ow': mono.with_def
ctr_test "dup_binrel_ftvs" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b \<open>A::'a\<Rightarrow>'d\<Rightarrow>bool\<close>) and (?'a B)
in mono_ow': mono.with_def
ctr_test "no_relator" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b \<open>A::'a\<Rightarrow>'b\<Rightarrow>bool\<close>) and (?'a B)
in KK_def
ctr_test "invalid_relator" relativization
synthesis ctr_simps_Collect_mem_eq
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c\<Rightarrow>'d\<Rightarrow>bool) = (\<lambda>x. x \<in> UB)"
and [transfer_rule]: "right_total B"
trp (?'b \<open>A::'a\<Rightarrow>'b\<Rightarrow>bool\<close>) and (?'a B)
in LL_def
subsection\<open>Tests\<close>
subsubsection\<open>\<open>process_relativization\<close>\<close>
ML_file\<open>CTR_TEST_PROCESS_RELATIVIZATION.ML\<close>
context
includes lifting_syntax
begin
-
ML\<open>
-val ctr_test_process_relativization_test_results =
- ctr_test_process_relativization.execute_test_suite_process_relativization
- @{context}
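+(*run the process_relativization tests as a single Lecker test group*)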
+Lecker.test_group @{context} () [ctr_test_process_relativization.test_suite]
\<close>
-ML\<open>
-val _ = ctr_test_process_relativization_test_results
- |> UT_Test_Suite.output_test_results true
-\<close>
-
end
subsubsection\<open>\<open>process_ctr_relator\<close>\<close>
ML_file\<open>CTR_TEST_PROCESS_CTR_RELATOR.ML\<close>
+
+context
+ includes lifting_syntax
+begin
ML\<open>
-val ctr_test_process_ctr_relator_test_results =
- ctr_test_process_ctr_relator.execute_test_suite_process_ctr_relator
- @{context}
+Lecker.test_group @{context} () [ctr_test_process_ctr_relator.test_suite]
\<close>
-ML\<open>
-val _ = ctr_test_process_ctr_relator_test_results
- |> UT_Test_Suite.output_test_results true
-\<close>
+end
end
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/IML_UT/IML_UT.thy b/thys/Conditional_Transfer_Rule/IML_UT/IML_UT.thy
--- a/thys/Conditional_Transfer_Rule/IML_UT/IML_UT.thy
+++ b/thys/Conditional_Transfer_Rule/IML_UT/IML_UT.thy
@@ -1,13 +1,26 @@
(* Title: IML_UT/IML_UT.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
+
+A minor extension of SpecCheck: courtesy of Kevin Kappelmann.
*)
section\<open>\<open>IML_UT\<close>\<close>
theory IML_UT
- imports "../CTR_Tools/CTR_Tools"
+ imports SpecCheck.SpecCheck_Dynamic
begin
-ML_file \<open>UT_Test_Suite.ML\<close>
+ML\<open>
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
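+(*a custom output style built from writeln and error: judging by its name
+and its use below, a failing test is reported via error and therefore
+aborts the enclosing session build*)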
+fun style_error show_opt =
+ SpecCheck_Output_Style_Custom.style_custom writeln error show_opt
+
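+(*check a list of test cases with the style above; the trailing argument
+(the state threaded through by Lecker) is ignored and the result is
+discarded, so the function returns unit*)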
+fun check_list_unit show ts name prop ctxt _ =
+ check_list_style style_error (SOME show) ts name prop ctxt
+ |> K ();
+\<close>
end
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/IML_UT/UT_Test_Suite.ML b/thys/Conditional_Transfer_Rule/IML_UT/UT_Test_Suite.ML
deleted file mode 100644
--- a/thys/Conditional_Transfer_Rule/IML_UT/UT_Test_Suite.ML
+++ /dev/null
@@ -1,396 +0,0 @@
-(* Title: IML_UT/UT_Test_Suite.ML
- Author: Mihails Milehins
- Copyright 2021 (C) Mihails Milehins
-
-The implementation of a lightweight unit test framework IML_UT
-for Isabelle/ML code.
-
-Notes:
- - The framework IML_UT was developed before the official release of SpecCheck
-that serves a similar purpose (https://www.isa-afp.org/entries/SpecCheck.html).
-Nonetheless, it is not unlikely that the author would be willing to make
-the framework IML_UT obsolete, using SpecCheck in its place, in the future.
- - To a certain extent, this work was inspired by xUnit (https://xunit.net/).
-*)
-
-
-signature UT_TEST_SUITE =
-sig
-type ('x, 'y) test_suite
-type ('x, 'y) test_results_suite
-val init :
- string ->
- ('x -> 'y) ->
- ('x -> string) ->
- ('y -> string) ->
- ('x, 'y) test_suite
-val assert_brel :
- string ->
- ('y * 'y -> bool) ->
- 'y -> 'x ->
- ('x, 'y) test_suite ->
- ('x, 'y) test_suite
-val assert_exception :
- string -> 'x -> exn -> ('x, 'y) test_suite -> ('x, 'y) test_suite
-val execute : ('x, 'y) test_suite -> ('x, 'y) test_results_suite
-val output_test_results : bool -> ('a, 'b) test_results_suite -> unit
-end;
-
-
-structure UT_Test_Suite: UT_TEST_SUITE =
-struct
-
-exception BLANK;
-
-
-
-(**** Types ****)
-
-datatype 'y test_output = Exception of exn | Result of 'y;
-
-type 'y assertion_output =
- {
- expected_output : string,
- output : 'y test_output,
- status : bool
- };
-
-type ('x, 'y) test_result =
- {
- test_id : int,
- name : string,
- input : 'x,
- assertion_output : 'y assertion_output
- };
-
-type ('x, 'y) unit_test =
- {
- test_id : int,
- name : string,
- input : 'x,
- assertion : 'x -> 'y assertion_output
- };
-
-type ('x, 'y) test_results_suite =
- {
- id : string,
- function : 'x -> 'y,
- string_of_input : 'x -> string,
- string_of_output : 'y -> string,
- test_results : ('x, 'y) test_result Queue.T
- };
-
-type ('x, 'y) test_suite =
- {
- id : string,
- function : 'x -> 'y,
- string_of_input : 'x -> string,
- string_of_output : 'y -> string,
- tests : ('x, 'y) unit_test Queue.T
- };
-
-
-
-(**** Initialization ****)
-
-fun init id f string_of_input string_of_output : ('x, 'y) test_suite =
- {
- id = id,
- function = f,
- string_of_input = string_of_input,
- string_of_output = string_of_output,
- tests = Queue.empty
- };
-
-
-
-(**** Miscellaneous utilities ****)
-
-fun get_num_tests tests = length (Queue.content tests);
-
-fun is_empty_test_suite test_suite = Queue.is_empty (#tests test_suite);
-
-
-
-(**** Evaluation ****)
-
-(* The following function evolved from the function try from
-Pure/General/basics.ML of Isabelle2021 *)
-fun eval f x =
- let
- val y_opt = try f x
- val out = case y_opt of
- SOME y => Result y
- | NONE => Exception
- (
- let val _ = f x in BLANK end
- handle exn =>
- if Exn.is_interrupt exn then Exn.reraise exn else exn
- )
- in out end;
-
-
-
-(**** Assertion of a binary relation ****)
-
-local
-
-fun assert_brel_impl
- string_of_output brel test_id name y f x : ('x, 'y) unit_test =
- let
- fun assertion x : 'y assertion_output =
- let
- val output = eval f x
- val status = case output of
- Exception _ => false
- | Result y' => brel (y', y)
- in
- {
- expected_output = string_of_output y,
- output = output,
- status = status
- }
- end
- in {test_id = test_id, name = name, input = x, assertion = assertion} end;
-
-in
-
-fun assert_brel name brel y x (test_suite : ('x, 'y) test_suite) =
- let
- val f = #function test_suite
- val string_of_input = #string_of_input test_suite
- val string_of_output = #string_of_output test_suite
- val tests = #tests test_suite
- val test_id = get_num_tests tests
- val test = assert_brel_impl string_of_output brel test_id name y f x
- in
- {
- id = #id test_suite,
- function = f,
- string_of_input = string_of_input,
- string_of_output = string_of_output,
- tests = Queue.enqueue test tests
- } : ('x, 'y) test_suite
- end;
-
-end;
-
-
-
-(**** Assertion of an exception ****)
-
-local
-
-fun assert_exception_impl test_id name (exn : exn) f x : ('x, 'y) unit_test =
- let
- fun assertion x : 'y assertion_output =
- let
- val output = eval f x
- val status = case output of
- Exception exn' =>
- exnName exn' = exnName exn
- andalso exnMessage exn' = exnMessage exn
- | Result _ => false
- in
- {
- expected_output = "exception '" ^ exnMessage exn ^ "'",
- output = output,
- status = status
- }
- end
- in {test_id = test_id, name = name, input = x, assertion = assertion} end;
-
-in
-
-fun assert_exception name x (exn : exn) (test_suite : ('x, 'y) test_suite) =
- let
- val f = #function test_suite
- val string_of_input = #string_of_input test_suite
- val string_of_output = #string_of_output test_suite
- val tests = #tests test_suite
- val test_id = get_num_tests tests
- val test = assert_exception_impl test_id name exn f x
- in
- {
- id = #id test_suite,
- function = f,
- string_of_input = string_of_input,
- string_of_output = string_of_output,
- tests = Queue.enqueue test tests
- } : ('x, 'y) test_suite
- end;
-
-end;
-
-
-
-(**** Test execution ****)
-
-local
-
-fun execute_test (test_suite : ('x, 'y) test_suite) :
- (('x, 'y) test_result * ('x, 'y) test_suite) =
- let
- val {id, function, string_of_input, string_of_output, tests} = test_suite
- val ({test_id, name, input, assertion}, tests') = Queue.dequeue tests
- val assertion_output = assertion input
- val test_result =
- {
- test_id = test_id,
- name = name,
- input = input,
- assertion_output = assertion_output
- }
- val test_suite' =
- {
- id = id,
- function = function,
- string_of_input = string_of_input,
- string_of_output = string_of_output,
- tests = tests'
- }
- in (test_result, test_suite') end;
-
-in
-
-fun execute (test_suite : ('x, 'y) test_suite) =
- let
- fun execute_impl test_results test_suite =
- let
- val (test_result, test_suite') = execute_test test_suite
- val test_results' = Queue.enqueue test_result test_results
- in
- if is_empty_test_suite test_suite'
- then test_results'
- else execute_impl test_results' test_suite'
- end;
- val test_results =
- {
- id = #id test_suite,
- function = #function test_suite,
- string_of_input = #string_of_input test_suite,
- string_of_output = #string_of_output test_suite,
- test_results =
- if is_empty_test_suite test_suite
- then Queue.empty
- else execute_impl Queue.empty test_suite
- }
- in test_results : ('x, 'y) test_results_suite end;
-
-end;
-
-
-
-(**** Output ****)
-
-local
-
-fun mk_message
- string_of_input string_of_output (test_result : ('x, 'y) test_result) =
- let
- fun string_of_output' (Exception exn) =
- "execution failed with the exception '" ^ exnMessage exn ^ "'"
- | string_of_output' (Result y) = string_of_output y
- val test = "Test " ^ Int.toString (#test_id test_result) ^ "\n"
- val function = "Name: " ^ #name test_result ^ "\n"
- val input =
- "Input data:\n" ^
- tabulate (string_of_input (#input test_result)) ^
- "\n"
- val output =
- "Outcome:\n" ^
- tabulate (string_of_output' (#output (#assertion_output test_result))) ^
- "\n"
- val expected_output =
- "Expected outcome:\n" ^
- tabulate (#expected_output (#assertion_output test_result)) ^
- "\n"
- val test_pass_flag = #status (#assertion_output test_result)
- val status = "Test result: " ^ (if test_pass_flag then "pass" else "fail")
- val c = String.concatWith
- " " [test, function, input, output, expected_output, status]
- in (test_pass_flag, c) end;
-
-in
-
-fun output_test_results
- error_flag (test_results_suite : ('x, 'y) test_results_suite) =
- let
-
- val
- {
- id = id,
- string_of_input = string_of_input,
- string_of_output = string_of_output,
- test_results = test_results,
- ...
- } = test_results_suite
-
- fun mk_messages cs test_results =
- if Queue.is_empty test_results
- then rev cs
- else
- let
- val (test_result, test_results') = Queue.dequeue test_results
- val (test_pass_flag, c) =
- mk_message string_of_input string_of_output test_result
- in mk_messages ((test_pass_flag, c)::cs) test_results' end
-
- val test_results = mk_messages [] test_results
-
- val heading_c = "Test results for the test suite: " ^ id ^ "\n"
-
- val test_cs = map snd test_results
- val num_tests = length test_cs
- val test_cs = test_cs
- |> String.concatWith "\n\n"
- |> single
-
- val tests_failed = test_results
- |> map fst
- |> map not
- |> find_indices I
- val num_tests_failed = length tests_failed
- val num_tests_passed = num_tests - num_tests_failed
- val success_flag = num_tests_failed = 0
-
- val num_tests_c = "Number of tests: " ^ Int.toString num_tests
- val num_tests_passed_c =
- "Number of tests passed: " ^ Int.toString num_tests_passed
- val num_tests_failed_c =
- "Number of tests failed: " ^ Int.toString num_tests_failed
- val summary_c =
- "Test outcome: " ^
- (if success_flag then "success" else "failure")
- val failed_tests_c =
- if success_flag
- then "\n"
- else
- (
- "Failed tests: " ^
- ML_Syntax.print_list Int.toString tests_failed ^
- "\n"
- )
-
- val _ =
- (
- heading_c ::
- num_tests_c ::
- num_tests_passed_c ::
- num_tests_failed_c ::
- summary_c ::
- failed_tests_c ::
- test_cs
- )
- |> map Pretty.str
- |> Pretty.chunks
- |> Pretty.writeln
-
- val _ = (not error_flag orelse success_flag)
- orelse error ("at least one of the " ^ id ^ " tests failed")
-
- in () end;
-
-end;
-
-end;
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/ROOT b/thys/Conditional_Transfer_Rule/ROOT
--- a/thys/Conditional_Transfer_Rule/ROOT
+++ b/thys/Conditional_Transfer_Rule/ROOT
@@ -1,31 +1,32 @@
chapter AFP
session Conditional_Transfer_Rule (AFP) = HOL +
options [timeout=600]
sessions
"HOL-Library"
+ SpecCheck
directories
CTR_Tools
IML_UT
UD
CTR
"UD/Tests"
"CTR/Tests"
theories[document = false]
"CTR_Tools/CTR_Tools"
"IML_UT/IML_UT"
"UD/UD"
"CTR/CTR"
"UD/Tests/UD_Tests"
"CTR/Tests/CTR_Tests"
"Reference_Prerequisites"
theories
"CTR_Introduction"
"UD/UD_Reference"
"CTR/CTR_Reference"
document_files
"iman.sty"
"extra.sty"
"isar.sty"
"root.tex"
"root.bib"
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/UD/Tests/UD_TEST_UNOVERLOAD_DEFINITION.ML b/thys/Conditional_Transfer_Rule/UD/Tests/UD_TEST_UNOVERLOAD_DEFINITION.ML
--- a/thys/Conditional_Transfer_Rule/UD/Tests/UD_TEST_UNOVERLOAD_DEFINITION.ML
+++ b/thys/Conditional_Transfer_Rule/UD/Tests/UD_TEST_UNOVERLOAD_DEFINITION.ML
@@ -1,245 +1,275 @@
(* Title: UD/Tests/UD_TEST_UNOVERLOAD_DEFINITION.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
signature UD_TEST_UNOVERLOAD_DEFINITION =
sig
type unoverload_definition_in_type
-val execute_test_suite_unoverload_definition :
- theory ->
- (unoverload_definition_in_type, UD.ud_thm_out_type * theory)
- UT_Test_Suite.test_results_suite
+val test_suite : Proof.context -> unit -> unit
end;
structure ud_test_unoverload_definition : UD_TEST_UNOVERLOAD_DEFINITION =
struct
+(**** Background ****)
+
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
+
+
+
(**** Auxiliary ****)
fun mk_msg_unoverload_definition_error msg = "ud: " ^ msg
(*** Data ***)
type unoverload_definition_in_type =
(binding * mixfix) * (string * typ) * theory;
(*** Relation ***)
fun ud_test_eq_thm thy (act_c, exp_c) (act_ud_out, exp_ud_out) =
let
fun rel (act_thm, exp_thm) =
let
fun replace_const (Const (c, T)) =
if c = exp_c then Const (act_c, T) else Const (c, T)
| replace_const (t $ u) = replace_const t $ replace_const u
| replace_const (Abs (c, T, t)) = Abs (c, T, replace_const t)
| replace_const x = x
val act_t = Thm.full_prop_of act_thm
val exp_t = exp_thm |> Thm.full_prop_of |> replace_const
in Pattern.equiv thy (act_t, exp_t) end;
fun ud_test_eq_thm_impl ((UD.trivial act_thm), (UD.trivial exp_thm)) =
rel (act_thm, exp_thm)
| ud_test_eq_thm_impl ((UD.nontrivial act_thms), (UD.nontrivial exp_thms)) =
let
val (act_with_def_thm, act_with_thm) = act_thms
val (exp_with_def_thm, exp_with_thm) = exp_thms
in
rel (act_with_def_thm, exp_with_def_thm)
andalso rel (act_with_thm, exp_with_thm)
end
| ud_test_eq_thm_impl (_, _) = false
in ud_test_eq_thm_impl (fst act_ud_out, fst exp_ud_out) end;
+(**** Visualization ****)
+
+fun show_unoverload_definition_in
+ (unoverload_definition_in : unoverload_definition_in_type) =
+ let
+ val ((b, _), (c, T), thy) = unoverload_definition_in
+ val b_c = "binding: " ^ Binding.name_of b
+ val const_c =
+ "constant: " ^
+ c ^
+ " :: " ^
+ Syntax.string_of_typ (Proof_Context.init_global thy) T
+ val thy_c = "thy: unknown theory"
+ val out_c = [b_c, const_c, thy_c] |> String.concatWith "\n"
+ in Pretty.str out_c end;
+
+fun show_unoverload_definition_out (ud_thm_out, thy) =
+ let
+ val ctxt = Proof_Context.init_global thy
+ val ud_thm_c =
+ let
+ val with_thm_c = "with_thm: "
+ val with_def_thm_c = "with_def_thm: "
+ in
+ case ud_thm_out of
+ UD.trivial with_thm => with_thm_c ^ Thm.string_of_thm ctxt with_thm
+ | UD.nontrivial (with_def_thm, with_thm) =>
+ with_def_thm_c ^
+ Thm.string_of_thm ctxt with_def_thm ^
+ "\n" ^
+ with_thm_c ^
+ Thm.string_of_thm ctxt with_thm
+ end
+ val thy_c = "thy: unknown theory"
+ val out_c = [ud_thm_c, thy_c] |> String.concatWith "\n"
+ in Pretty.str out_c end;
+
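+(*show function for (input, expected output) pairs, as used by the
+equality tests below*)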
+val show_unoverload_definition =
+ Show.zip show_unoverload_definition_in show_unoverload_definition_out;
+
+
+
+
(**** Tests ****)
+(*** Wrapper ***)
+
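+(*uncurried variant of UD.unoverload_definition: it takes a single tuple,
+so that it can be applied directly inside the SpecCheck properties below*)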
+fun unoverload_definition
+ (((b, mixfix), (c, T), thy) : unoverload_definition_in_type) =
+ UD.unoverload_definition (b, mixfix) (c, T) thy;
+
+
+
(*** Valid inputs ***)
-fun test_eq_trivial thy =
+fun test_eq_trivial thy _ =
let
(*input*)
val act_c = "UD_Tests.closed'.with"
val exp_c = ""
val b = Binding.name "closed'"
val aT = TVar (("'a", 0), \<^sort>\<open>topological_space\<close>)
val T = HOLogic.mk_setT aT --> HOLogic.boolT
val closed_c = "Topological_Spaces.topological_space_class.closed"
val ud_in = ((b, NoSyn), (closed_c, T), thy)
(*output*)
val ud_out = (UD.trivial (@{thm closed_with}), thy)
in
- UT_Test_Suite.assert_brel
- "nontrivial output equivalence"
- (ud_test_eq_thm thy (act_c, exp_c))
- ud_out
- ud_in
+ check_list_unit
+ show_unoverload_definition
+ [(ud_in, ud_out)]
+ "trivial equality"
+ (
+ Prop.prop
+ (
+ fn (ud_in, ud_out) => ud_test_eq_thm
+ thy (act_c, exp_c) (unoverload_definition ud_in, ud_out)
+ )
+ )
end;
-fun test_eq_nontrivial thy =
+fun test_eq_nontrivial thy _ =
let
(*input*)
val act_c = "UD_Tests.closure.with"
val exp_c = "UD_Tests.closure_with"
val b = Binding.name "closure"
val aT = TVar (("'a", 0), \<^sort>\<open>topological_space\<close>)
val T = (HOLogic.mk_setT aT --> HOLogic.mk_setT aT)
val closure_c = "UD_Tests.closure"
val ud_in = ((b, NoSyn), (closure_c, T), thy)
(*output*)
val ud_out =
(UD.nontrivial (@{thm closure_with_def}, @{thm closure_with}), thy)
in
- UT_Test_Suite.assert_brel
- "nontrivial output equivalence"
- (ud_test_eq_thm thy (act_c, exp_c))
- ud_out
- ud_in
+ check_list_unit
+ show_unoverload_definition
+ [(ud_in, ud_out)]
+ "nontrivial equality"
+ (
+ Prop.prop
+ (
+ fn (ud_in, ud_out) => ud_test_eq_thm
+ thy (act_c, exp_c) (unoverload_definition ud_in, ud_out)
+ )
+ )
end;
(*** Exceptions ***)
-fun test_exc_extra_type_variables thy =
+fun test_exc_extra_type_variables thy _ =
let
val aT = TVar (("'a", 0), \<^sort>\<open>type\<close>)
val Sup_class_c = "Complete_Lattices.Sup_class"
val args : unoverload_definition_in_type =
(
(Binding.empty, NoSyn),
(Sup_class_c, Term.itselfT aT --> \<^typ>\<open>prop\<close>),
thy
)
val err_msg = mk_msg_unoverload_definition_error
"specification depends on extra type variables"
- in
- UT_Test_Suite.assert_exception
- "extra type variables" args (ERROR err_msg)
+ val exn_prop = Prop.expect_failure (ERROR err_msg) unoverload_definition
+ in
+ check_list_unit
+ show_unoverload_definition_in
+ [args]
+ "extra type variables"
+ exn_prop
end;
-fun test_exc_ud_ex thy =
+fun test_exc_ud_ex thy _ =
let
val aT = TVar (("'a", 0), \<^sort>\<open>type\<close>)
val T =
(HOLogic.mk_setT aT --> HOLogic.boolT) -->
HOLogic.mk_setT aT -->
HOLogic.boolT
val ts_c = "Topological_Spaces.topological_space.closed"
val args : unoverload_definition_in_type =
((Binding.empty, NoSyn), (ts_c, T), thy)
val err_msg = mk_msg_unoverload_definition_error
"unoverloaded constant already exists"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) unoverload_definition
in
- UT_Test_Suite.assert_exception
- "constant already exists" args (ERROR err_msg)
+ check_list_unit
+ show_unoverload_definition_in
+ [args]
+ "constant already exists"
+ exn_prop
end;
-fun test_exc_no_cids thy =
+fun test_exc_no_cids thy _ =
let
val aT = TVar (("'a", 0), \<^sort>\<open>type\<close>)
val T =
(HOLogic.mk_setT aT --> HOLogic.boolT) -->
HOLogic.mk_setT aT -->
HOLogic.boolT
val implies_c = "HOL.implies"
val args : unoverload_definition_in_type =
((Binding.empty, NoSyn), (implies_c, T), thy)
val err_msg = mk_msg_unoverload_definition_error
"no suitable constant-instance definitions"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) unoverload_definition
in
- UT_Test_Suite.assert_exception
- "no suitable CIs" args (ERROR err_msg)
+ check_list_unit
+ show_unoverload_definition_in
+ [args]
+ "no suitable CIs"
+ exn_prop
end;
-(**** Test suite ****)
-
-local
-
-fun unoverload_definition_string_of_input
- (unoverload_definition_in : unoverload_definition_in_type) =
- let
- val ((b, _), (c, T), thy) = unoverload_definition_in
- val b_c = "binding: " ^ Binding.name_of b
- val const_c =
- "constant: " ^
- c ^
- " :: " ^
- Syntax.string_of_typ (Proof_Context.init_global thy) T
- val thy_c = "thy: unknown theory"
- val out_c = [b_c, const_c, thy_c] |> String.concatWith "\n"
- in out_c end;
+(**** Test suite ****)
-fun unoverload_definition_string_of_output (ud_thm_out, thy) =
- let
- val ctxt = Proof_Context.init_global thy
- val ud_thm_c =
- let
- val with_thm_c = "with_thm: "
- val with_def_thm_c = "with_def_thm: "
- in
- case ud_thm_out of
- UD.trivial with_thm =>
- with_thm_c ^ Thm.string_of_thm ctxt with_thm
- | UD.nontrivial (with_def_thm, with_thm) =>
- with_def_thm_c ^
- Thm.string_of_thm ctxt with_def_thm ^
- "\n" ^
- with_thm_c ^
- Thm.string_of_thm ctxt with_thm
- end
- val thy_c = "thy: unknown theory"
- val out_c = [ud_thm_c, thy_c] |> String.concatWith "\n"
- in out_c end;
-
-fun unoverload_definition
- (((b, mixfix), (c, T), thy) : unoverload_definition_in_type) =
- UD.unoverload_definition (b, mixfix) (c, T) thy;
-
-in
-
-fun mk_test_suite_unoverload_definition thy =
- let
- val test_suite_unoverload_definition = UT_Test_Suite.init
- "unoverload_definition"
- unoverload_definition
- unoverload_definition_string_of_input
- unoverload_definition_string_of_output
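+(*instantiate each individual test with the background theory and run all
+of the tests as a single Lecker test group*)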
+fun test_suite ctxt s =
+ let val thy = Proof_Context.theory_of ctxt
in
- test_suite_unoverload_definition
- |> test_eq_trivial thy
- |> test_eq_nontrivial thy
- |> test_exc_extra_type_variables thy
- |> test_exc_ud_ex thy
- |> test_exc_no_cids thy
+ [
+ test_eq_trivial,
+ test_eq_nontrivial,
+ test_exc_extra_type_variables,
+ test_exc_ud_ex,
+ test_exc_no_cids
+ ]
+ |> map (fn f => f thy s)
+ |> Lecker.test_group ctxt s
end;
-end;
-
-fun execute_test_suite_unoverload_definition thy =
- UT_Test_Suite.execute (mk_test_suite_unoverload_definition thy)
-
end;
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/UD/Tests/UD_Tests.thy b/thys/Conditional_Transfer_Rule/UD/Tests/UD_Tests.thy
--- a/thys/Conditional_Transfer_Rule/UD/Tests/UD_Tests.thy
+++ b/thys/Conditional_Transfer_Rule/UD/Tests/UD_Tests.thy
@@ -1,58 +1,53 @@
(* Title: UD/Tests/UD_Tests.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
A test suite for the framework UD.
*)
section\<open>Test suite for UD\<close>
theory UD_Tests
imports
"../UD"
"../../IML_UT/IML_UT"
+ SpecCheck.SpecCheck_Dynamic
Complex_Main
begin
subsection\<open>Background\<close>
(*
The following two definitions were copied from
HOL-Analysis.Elementary_Topology with minor amendments
to avoid unnecessary dependencies
*)
definition (in topological_space) islimpt:: "'a \<Rightarrow> 'a set \<Rightarrow> bool"
where "islimpt x S \<longleftrightarrow> (\<forall>T. x\<in>T \<longrightarrow> open T \<longrightarrow> (\<exists>y\<in>S. y\<in>T \<and> y\<noteq>x))"
definition closure :: "('a::topological_space) set \<Rightarrow> 'a set"
where "closure S = S \<union> {x. islimpt x S}"
ud \<open>topological_space.closed\<close>
ud \<open>islimpt\<close>
lemma closed_with: "closed \<equiv> closed.with open"
unfolding closed_def closed.with_def .
definition closure_with :: "('a set \<Rightarrow> bool) \<Rightarrow> 'a set \<Rightarrow> 'a set"
where "closure_with \<tau> \<equiv> \<lambda>S. S \<union> {x. islimpt.with \<tau> x S}"
lemma closure_with: "closure \<equiv> closure_with open"
unfolding closure_def islimpt.with closure_with_def .
subsection\<open>Tests\<close>
ML_file\<open>UD_TEST_UNOVERLOAD_DEFINITION.ML\<close>
ML\<open>
-val ud_test_unoverload_definition_test_results =
- ud_test_unoverload_definition.execute_test_suite_unoverload_definition
- @{theory}
-\<close>
-ML\<open>
-val _ = ud_test_unoverload_definition_test_results
- |> UT_Test_Suite.output_test_results true
+Lecker.test_group @{context} () [ud_test_unoverload_definition.test_suite]
\<close>
end
\ No newline at end of file
diff --git a/thys/Conditional_Transfer_Rule/UD/UD_Reference.thy b/thys/Conditional_Transfer_Rule/UD/UD_Reference.thy
--- a/thys/Conditional_Transfer_Rule/UD/UD_Reference.thy
+++ b/thys/Conditional_Transfer_Rule/UD/UD_Reference.thy
@@ -1,346 +1,348 @@
(* Title: UD/UD_Reference.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
Reference manual for the UD.
*)
section\<open>UD\<close>
theory UD_Reference
imports
UD
"../Reference_Prerequisites"
begin
subsection\<open>Introduction\<close>
subsubsection\<open>Background\<close>
text\<open>
This section presents a reference manual for the sub-framework UD.
The UD can be used for the elimination of
\textit{sort constraints} (e.g., see \cite{altenkirch_constructive_2007})
and unoverloading of definitions in the object logic Isabelle/HOL of the
formal proof assistant Isabelle.
The UD evolved from the author's work on an extension of the
framework \textit{Types-To-Sets}
(see
\cite{blanchette_types_2016, kuncar_types_2019, immler_smooth_2019, immler_automation_2019},
for a description of the framework Types-To-Sets
and \cite{milehins_extension_2021} for a description of the author's extension)
and builds upon certain ideas expressed in \cite{kaufmann_mechanized_2010}.
\<close>
subsubsection\<open>Purpose and scope\<close>
text\<open>
The primary functionality of the framework is available via the Isabelle/Isar
command @{command ud}. This command automates the processes of the
elimination of sort constraints and unoverloading of definitions.
Thus, the command @{command ud} allows for the synthesis
of the convenience constants and theorems that are usually needed for the
application of the derivation step 2 of the original relativization algorithm
of Types-To-Sets (see subsection 5.4 in \cite{blanchette_types_2016}). However,
it is expected that the command can be useful for other purposes.
\<close>
subsubsection\<open>Related and previous work\<close>
text\<open>
The functionality provided by the command @{command ud} shares similarities
with the functionality provided by the algorithms for the elimination of
sort constraints and elimination of overloading that were
presented in \cite{kaufmann_mechanized_2010}
and with the algorithm associated with the command
\mbox{\textbf{unoverload\_definition}} that was proposed
in \cite{immler_automation_2019}.
Nonetheless, technically, unlike \mbox{\textbf{unoverload\_definition}},
the command @{command ud} does
not require the additional axiom UO associated with Types-To-Sets for
its operation (see \cite{blanchette_types_2016},
\cite{immler_automation_2019}), it uses
the \textit{definitional axioms} (e.g., see \cite{kaufmann_mechanized_2010})
instead of arbitrary theorems supplied by the user
and it is independent of the infrastructure associated with
the \textit{axiomatic type classes}
\cite{nipkow_type_1991,wenzel_type_1997,altenkirch_constructive_2007}.
It should also be mentioned that the Isabelle/ML code from the main
distribution of Isabelle was frequently reused during the
-development of the UD.
+development of the UD. Lastly, the framework SpecCheck
+\cite{kappelmann_speccheck_2021} was used for the unit testing of the
+framework UD.
\<close>
subsection\<open>Theory\label{sec:ud_theory}\<close>
text\<open>
The general references for this subsection are
\cite{kaufmann_mechanized_2010} and \cite{yang_comprehending_2017}.
The command @{command ud} relies
on a restricted (non-recursive) variant of the
\textit{classical overloading elimination algorithm}
that was originally proposed in \cite{kaufmann_mechanized_2010}.
It is assumed that there exists
a variable $ud_{\mathsf{with}}$ that stores theorems of the
form $c_{\tau} = c_{\mathsf{with}}\ \bar{*}$, where $c_{\tau}$ and
$c_{\mathsf{with}}$ are distinct \textit{constant-instances}
and $\bar{*}$ is a finite sequence of \textit{uninterpreted constant-instances},
such that, if $c_{\tau}$ depends on a type variable $\alpha_{\Upsilon}$,
with $\Upsilon$ being a \textit{type class}
\cite{nipkow_type_1991,wenzel_type_1997,altenkirch_constructive_2007}
that depends on the overloaded
constants $\bar{*'}$, then $\bar{*}$ contains $\bar{*'}$ as a subsequence.
Lastly, the binary operation $\cup$ is defined in a manner such that
for any sequences $\bar{*}$ and $\bar{*'}$, $\bar{*} \cup \bar{*'}$
is a sequence that consists of all elements of the union of the
elements of $\bar{*}$ and $\bar{*'}$ without duplication.
Assuming an underlying
\textit{well-formed definitional theory} $D$,
the input to the algorithm is a constant-instance $c_{\sigma}$.
Given the constant-instance $c_{\sigma}$,
there exists at most one definitional axiom
$c_{\tau} = \phi_{\tau}\left[\bar{*}\right]$
in $D$ such that $c_{\sigma} \leq c_{\tau}$: otherwise
the \textit{orthogonality} of $D$ and,
therefore, the \textit{well-formedness}
of $D$ are violated ($\phi$ is assumed to be parameterized by
the types that it can have with respect to the
type substitution operation,
and $\bar{*}$ in $c_{\tau} = \phi_{\tau}\left[\bar{*}\right]$
is a list of all uninterpreted constant-instances that
occur in $\phi_{\tau}\left[\bar{*}\right]$).
If a definitional axiom $c_{\tau}=\phi_{\tau}\left[\bar{*}\right]$
such that $c_{\sigma} \leq c_{\tau}$
exists for the constant-instance $c_{\sigma}$,
then the following derivation is applied to it by the algorithm
\[
\infer[(6)]
{\vdash c_{\sigma} = c_{\mathsf{with}}\ \left(\bar{*} \cup \bar{*'}\right)}
{
\infer[(5)]
{
\vdash c_{\mathsf{with}}\ \left(\bar{*} \cup \bar{*'}\right) =
\phi_{\mathsf{with}}\left[\bar{*} \cup \bar{*'}\right]
}
{
\infer[(4)]
{\vdash c_{\mathsf{with}}\ ?\bar{f} = \phi_{\mathsf{with}}\left[?\bar{f}\right]}
{
\infer[(3)]
{\vdash c_{\mathsf{with}} = (\lambda \bar{f}.\ \phi_{\mathsf{with}}\left[\bar{f}\right])}
{
\infer[(2)]
{\vdash c_{\sigma}=\phi_{\mathsf{with}}\left[\bar{*} \cup \bar{*'}\right]}
{
\infer[(1)]
{\vdash c_{\sigma}=\phi_{\sigma}\left[\bar{*}\right]}
{\vdash c_{\tau}=\phi_{\tau}\left[\bar{*}\right]}
}
}
}
}
}
\]
In step 1, the previously established
property $c_{\sigma} \leq c_{\tau}$ is used to create the
(extended variant of the) type substitution
map $\rho$ such that $\sigma = \rho \left( \tau \right)$
(see \cite{kuncar_types_2015}) and perform the type
substitution in $c_{\tau}=\phi_{\tau}\left[\bar{*}\right]$
to obtain $c_{\sigma}=\phi_{\sigma}\left[\bar{*}\right]$;
in step 2, the collection of theorems $ud_{\mathsf{with}}$ is unfolded,
using it as a term rewriting system, possibly introducing further uninterpreted
constants $\bar{*'}$; in step 3, the term on the right-hand side of the
theorem is processed by removing the sort constraints from all type
variables that occur in it, replacing every uninterpreted constant-instance
(this excludes all built-in constants of Isabelle/HOL) that occurs in it by a
fresh term variable, and applying the abstraction until the resulting term
is closed: this term forms the right-hand side of a new definitional axiom
of a fresh constant $c_{\mathsf{with}}$ (if the conditions associated with
the definitional principles of Isabelle/HOL \cite{yang_comprehending_2017}
are satisfied); step 4 is justified by the beta-contraction;
step 5 is a substitution of the uninterpreted constants $\bar{*} \cup \bar{*'}$;
step 6 follows trivially from the results of the application of steps 2 and 5.
The implementation of the command @{command ud} closely follows the steps of
the algorithm outlined above. Thus, at the end of the successful
execution, the command declares the constant $c_{\mathsf{with}}$ and stores the
constant-instance definition that is obtained at the end of step 3 of
the algorithm UD; furthermore, the command adds the theorem that is
obtained after the execution of step 6 of the algorithm
to $ud_{\mathsf{with}}$.
Unlike the classical overloading elimination algorithm,
the algorithm employed in the implementation
of the command @{command ud} is not recursive. Thus, the users are responsible
for maintaining an adequate collection of theorems $ud_{\mathsf{with}}$.
Nonetheless, in this case, the users can provide their own
unoverloaded constants $c_{\mathsf{with}}$ and the associated theorems
$c_{\sigma} = c_{\mathsf{with}}\ \bar{*}$ for any constant-instance $c_{\sigma}$.
From the perspective of the relativization algorithm associated with
Types-To-Sets this can be useful because there is no
guarantee that the automatically synthesized constants $c_{\mathsf{with}}$
will possess desirable parametricity characteristics
(e.g., see \cite{kuncar_types_2015} and \cite{immler_smooth_2019}).
Unfortunately, the implemented algorithm still suffers from the fundamental
limitation that was already outlined in \cite{kaufmann_mechanized_2010},
\cite{blanchette_types_2016} and \cite{kuncar_types_2019}:
it does not offer a solution for handling the
constants whose types contain occurrences of the type constructors whose
type definitions contain occurrences of unresolvable overloading.
\<close>
subsection\<open>Syntax\<close>
text\<open>
This subsection presents the syntactic categories that are associated with the
command @{command ud}. It is important to note that the presentation is
only approximate.
\<close>
text\<open>
\begin{matharray}{rcl}
@{command_def "ud"} & : & \<open>theory \<rightarrow> theory\<close>\\
\end{matharray}
\<^medskip>
\<^rail>\<open>@@{command ud} binding? const mixfix?\<close>
\<^descr> \<^theory_text>\<open>ud\<close> (\<open>b\<close>) \<open>const\<close> (\<open>mixfix\<close>) provides access to the algorithm for
the elimination of sort constraints and unoverloading of definitions
that was described in subsection \ref{sec:ud_theory}.
The optional binding \<open>b\<close> is used for the specification
of the names of the entities added by the command to the theory and the
optional argument \<open>mixfix\<close> is used for the specification
of the concrete inner syntax for the constant in the usual manner
(e.g., see \cite{wenzel_isabelle/isar_2019-1}).
If either \<open>b\<close> or \<open>mixfix\<close> is not specified by the user, then the command
introduces sensible defaults. Following the specification of the
definition of the constant, an additional theorem that establishes
the relationship between the newly introduced constant and the
constant provided by the user as an input is proved and added
to the dynamic fact @{thm [source] ud_with}.
\<close>
subsection\<open>Examples\label{sec:ud_ex}\<close>
text\<open>
In this subsection, some of the capabilities of the UD are
demonstrated by example. The examples that are presented in this subsection are
expected to be sufficient for beginning an independent exploration of the
framework, but do not cover the entire spectrum of the functionality
and the problems that one may encounter while using it.
\<close>
subsubsection\<open>Type classes\<close>
text\<open>
We begin the exploration of the capabilities of the framework by considering
the constant @{const mono}.
The constant @{const mono} is an overloaded constant that is defined
in the standard library of Isabelle/HOL \cite{noauthor_isabellehol_2020} as
\begin{center}
@{thm [names_short = true] mono_def[no_vars]}
\end{center}
for any @{term [show_sorts] "f::'a::order\<Rightarrow>'b::order"}.
Technically, there exist two distinct constants that are associated with
the definition of the constant @{const mono}
(see, e.g., \cite{berardi_local_2009}
for further details): @{const order.mono} that is provided by the
Isabelle system automatically and the constant @{const mono} itself.
The constants are unoverloaded successively using the command @{command ud}:
\<close>
ud \<open>order.mono\<close>
ud mono' \<open>mono\<close>
text\<open>
The invocation of the commands above declares the constant @{const mono.with}
that is defined as
\begin{center}
@{thm mono.with_def[no_vars]}
\end{center}
and provides the theorem @{thm [source] mono.with} given by
\begin{center}
@{thm mono.with[no_vars]}
\end{center}
and the theorem @{thm [source] mono'.with} given by
\begin{center}
@{thm mono'.with[no_vars]}.
\end{center}
The theorems establish the relationship between the unoverloaded constant
@{const mono.with} and the overloaded constant @{const mono}:
both theorems are automatically added to the dynamic fact
@{thm [source] ud_with}.
\<close>
subsubsection\<open>Low-level overloading\<close>
text\<open>
The following example closely follows Example 5 in section 5.2 of
\cite{kaufmann_mechanized_2010}.
\<close>
consts pls :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"
overloading
pls_nat \<equiv> "pls::nat \<Rightarrow> nat \<Rightarrow> nat"
pls_times \<equiv> "pls::'a \<times> 'b \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'a \<times> 'b"
begin
definition pls_nat :: "nat \<Rightarrow> nat \<Rightarrow> nat" where "pls_nat a b = a + b"
definition pls_times :: "'a \<times> 'b \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'a \<times> 'b"
where "pls_times \<equiv> \<lambda>x y. (pls (fst x) (fst y), pls (snd x) (snd y))"
end
ud pls_nat \<open>pls::nat \<Rightarrow> nat \<Rightarrow> nat\<close>
ud pls_times \<open>pls::'a \<times> 'b \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'a \<times> 'b\<close>
text\<open>
As expected, two new unoverloaded constants are produced via
the invocations of the command @{command ud} above. The first constant,
\<^const>\<open>pls_nat.with\<close>, corresponds to \<open>pls::nat \<Rightarrow> nat \<Rightarrow> nat\<close> and is given by
\begin{center}
@{thm pls_nat.with_def[no_vars]},
\end{center}
the second constant, \<^const>\<open>pls_times.with\<close>, corresponds to
\begin{center}
\<open>pls::'a \<times> 'b \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'a \<times> 'b\<close>
\end{center}
and is given by
\begin{center}
@{thm pls_times.with_def[no_vars]}.
\end{center}
The theorems that establish the relationship between the overloaded and
the unoverloaded constants are given by
\begin{center}
@{thm pls_nat.with}
\end{center}
and
\begin{center}
@{thm pls_times.with}.
\end{center}
The definitions of the constants \<^const>\<open>pls_nat.with\<close> and
\<^const>\<open>pls_times.with\<close> are consistent with the ones suggested in
\cite{kaufmann_mechanized_2010}. Nonetheless, of course, it is
important to keep in mind that the command @{command ud}
has a more restricted scope of applicability than the
algorithm suggested in \cite{kaufmann_mechanized_2010}.
\<close>
text\<open>\newpage\<close>
-end
\ No newline at end of file
+end
diff --git a/thys/Conditional_Transfer_Rule/document/root.bib b/thys/Conditional_Transfer_Rule/document/root.bib
--- a/thys/Conditional_Transfer_Rule/document/root.bib
+++ b/thys/Conditional_Transfer_Rule/document/root.bib
@@ -1,248 +1,254 @@
@incollection{blanchette_types_2016,
address = {Heidelberg},
title = {From {Types} to {Sets} by {Local} {Type} {Definitions} in {Higher}-{Order} {Logic}},
volume = {9807},
isbn = {978-3-319-43144-4},
booktitle = {Interactive {Theorem} {Proving}},
publisher = {Springer},
author = {Kun{\v c}ar, Ond{\v r}ej and Popescu, Andrei},
editor = {Blanchette, Jasmin Christian and Merz, Stephan},
year = {2016},
pages = {200--218},
}
@incollection{berardi_local_2009,
address = {Heidelberg},
title = {Local {Theory} {Specifications} in {Isabelle}/{Isar}},
volume = {5497},
isbn = {978-3-642-02443-6},
booktitle = {Types for {Proofs} and {Programs}},
publisher = {Springer},
author = {Haftmann, Florian and Wenzel, Makarius},
editor = {Berardi, Stefano and Damiani, Ferruccio and de{\textquoteright}Liguoro, Ugo},
year = {2009},
pages = {153--168},
}
@book{urban_isabelle_2019,
title = {The {Isabelle} {Cookbook}: {A} {Gentle} {Tutorial} for {Programming} {Isabelle}/{ML}},
author = {Urban, Christian},
collaborator = {Berghofer, Stefan and Blanchette, Jasmin and Bohme, Sascha and Bulwahn, Lukas and Dawson, Jeremy and Kolanski, Rafal and Krauss, Alexander and Nipkow, Tobias and Schropp, Andreas and Sternagel, Christian},
year = {2019},
}
@misc{noauthor_isabelle_nodate,
title = {Isabelle mailing-list},
url = {https://lists.cam.ac.uk/pipermail/cl-isabelle-users/},
journal = {The Cl-isabelle-users Archives},
}
@incollection{immler_smooth_2019,
address = {New York},
series = {{CPP} 2019},
title = {Smooth {Manifolds} and {Types} to {Sets} for {Linear} {Algebra} in {Isabelle}/{HOL}},
isbn = {978-1-4503-6222-1},
booktitle = {Proceedings of the 8th {ACM} {SIGPLAN} {International} {Conference} on {Certified} {Programs} and {Proofs}, {CPP} 2019, {Cascais}, {Portugal}},
publisher = {ACM},
author = {Immler, Fabian and Zhan, Bohua},
editor = {Mahboubi, Assia and Myreen, Magnus O.},
year = {2019},
keywords = {Formalization of Mathematics, Higher Order Logic, Isabelle, Manifolds},
pages = {65--77},
}
@misc{immler_automation_2019,
title = {Automation for unoverloading definitions},
url = {http://isabelle.in.tum.de/repos/isabelle/rev/ab5a8a2519b0},
journal = {isabelle/changeset},
author = {Immler, Fabian},
year = {2019},
}
@phdthesis{kuncar_types_2015,
address = {Munich},
title = {Types, {Abstraction} and {Parametric} {Polymorphism} in {Higher}-{Order} {Logic}},
school = {Technische Universit{\"a}t M{\"u}nchen},
author = {Kun{\v c}ar, Ond{\v r}ej},
year = {2015},
}
@unpublished{wenzel_isabelle/isar_2019,
title = {The {Isabelle}/{Isar} {Implementation}},
author = {Wenzel, Makarius},
collaborator = {Berghofer, Stefan and Haftmann, Florian and Paulson, Larry},
year = {2019},
}
@unpublished{wenzel_isabelle/isar_2019-1,
title = {The {Isabelle}/{Isar} {Reference} {Manual}},
author = {Wenzel, Makarius},
collaborator = {Ballarin, Clemens and Berghofer, Stefan and Blanchette, Jasmin and Bourke, Timothy and Bulwahn, Lukas and Chaieb, Amine and Dixon, Lucas and Haftmann, Florian and Huffman, Brian and Hupel, Lars and Klein, Gerwin and Krauss, Alexander and Kun{\v c}ar, Ond{\v r}ej and Lochbihler, Andreas and Nipkow, Tobias and Noschinski, Lars and Oheimb, David von and Paulson, Larry and Skalberg, Sebastian and Sternagel, Christian and Traytel, Dmitriy},
year = {2019},
}
@phdthesis{wenzel_isabelle/isar_2001,
address = {Munich},
title = {Isabelle/{Isar} {\textemdash} {A} {Versatile} {Environment} for {Human}-{Readable} {Formal} {Proof} {Documents}},
school = {Technische Universit{\"a}t M{\"u}nchen},
author = {Wenzel, Markus M.},
year = {2001},
}
@inproceedings{gilcher_conditional_2017,
address = {Bras{\'i}lia, Brazil},
title = {Conditional {Parametricity} in {Isabelle}/{HOL}},
booktitle = {{TABLEAUX}/{FroCoS}/{ITP}},
author = {Gilcher, Jan and Lochbihler, Andreas and Traytel, Dmitriy},
year = {2017},
}
@inproceedings{traytel_foundational_2012,
address = {Dubrovnik},
title = {Foundational, {Compositional} ({Co})datatypes for {Higher}-{Order} {Logic}: {Category} {Theory} {Applied} to {Theorem} {Proving}},
booktitle = {27th {Annual} {IEEE} {Symposium} on {Logic} in {Computer} {Science}},
publisher = {IEEE},
author = {Traytel, Dmitriy and Popescu, Andrei and Blanchette, Jasmin C.},
year = {2012},
keywords = {(co)datatypes, Abstracts, bounded natural functor, cardinals, categorical operation, category theory, Category theory, compositional datatype, datatype definition, definitional package, finitely branching tree, formal logic, foundational datatype, fully modular framework, high-level specification, higher-order logic, HOL Light, HOL4, interactive theorem prover, interactive theorem proving, internal datatype construction, Isabelle/HOL, logical primitive, mixed mutual recursion, nested recursion, Semantics, Set theory, Shape, Standards, theorem proving, trees (mathematics)},
pages = {596--605},
}
@misc{noauthor_isabellehol_2020,
title = {Isabelle/{HOL} {Standard} {Library}},
url = {https://isabelle.in.tum.de/website-Isabelle2020/dist/library/HOL/HOL/index.html},
journal = {Isabelle/HOL},
year = {2020},
}
@incollection{gonthier_lifting_2013,
address = {Heidelberg},
title = {Lifting and {Transfer}: {A} {Modular} {Design} for {Quotients} in {Isabelle}/{HOL}},
volume = {8307},
isbn = {978-3-319-03545-1},
booktitle = {Certified {Programs} and {Proofs}},
publisher = {Springer},
author = {Huffman, Brian and Kun{\v c}ar, Ond{\v r}ej},
editor = {Gonthier, Georges and Norrish, Michael},
year = {2013},
pages = {131--146},
}
@incollection{altenkirch_constructive_2007,
address = {Heidelberg},
title = {Constructive {Type} {Classes} in {Isabelle}},
volume = {4502},
isbn = {978-3-540-74464-1},
booktitle = {Types for {Proofs} and {Programs}},
publisher = {Springer},
author = {Haftmann, Florian and Wenzel, Makarius},
editor = {Altenkirch, Thorsten and McBride, Conor},
year = {2007},
pages = {160--174},
}
@article{blanchette_bindings_2019,
title = {Bindings as {Bounded} {Natural} {Functors}},
volume = {3},
number = {POPL},
journal = {Proceedings of the ACM on Programming Languages},
author = {Blanchette, Jasmin Christian and Gheri, Lorenzo and Popescu, Andrei and Traytel, Dmitriy},
year = {2019},
pages = {22:1--22:34},
}
@incollection{kaufmann_mechanized_2010,
address = {Heidelberg},
title = {A {Mechanized} {Translation} from {Higher}-{Order} {Logic} to {Set} {Theory}},
volume = {6172},
isbn = {978-3-642-14051-8},
booktitle = {Interactive {Theorem} {Proving}},
publisher = {Springer},
author = {Krauss, Alexander and Schropp, Andreas},
editor = {Kaufmann, Matt and Paulson, Lawrence C.},
year = {2010},
pages = {323--338},
}
@article{paulson_natural_1986,
title = {Natural {Deduction} as {Higher}-{Order} {Resolution}},
volume = {3},
number = {3},
journal = {The Journal of Logic Programming},
author = {Paulson, Lawrence C.},
year = {1986},
pages = {237--258},
}
@incollection{yang_comprehending_2017,
address = {Heidelberg},
title = {Comprehending {Isabelle}/{HOL}{\textquoteright}s {Consistency}},
volume = {10201},
isbn = {978-3-662-54433-4},
booktitle = {Programming {Languages} and {Systems}},
publisher = {Springer},
author = {Kun{\v c}ar, Ond{\v r}ej and Popescu, Andrei},
editor = {Yang, Hongseok},
year = {2017},
pages = {724--749},
}
@incollection{bertot_isar_1999,
address = {Heidelberg},
title = {Isar {\textemdash} {A} {Generic} {Interpretative} {Approach} to {Readable} {Formal} {Proof} {Documents}},
volume = {1690},
isbn = {978-3-540-66463-5},
booktitle = {Theorem {Proving} in {Higher} {Order} {Logics}},
publisher = {Springer},
author = {Wenzel, Markus},
editor = {Bertot, Yves and Dowek, Gilles and Th{\'e}ry, Laurent and Hirschowitz, Andr{\'e} and Paulin-Mohring, Christine},
year = {1999},
pages = {167--183},
}
@inproceedings{lammich_automatic_2013,
address = {Heidelberg},
title = {Automatic {Data} {Refinement}},
isbn = {978-3-642-39634-2},
booktitle = {Interactive {Theorem} {Proving}},
publisher = {Springer},
author = {Lammich, Peter},
editor = {Blazy, Sandrine and Paulin-Mohring, Christine and Pichardie, David},
year = {2013},
keywords = {Automatic Data, Executable Code, Side Condition, Synthesis Problem, Type Constructor},
pages = {84--99},
}
@article{kuncar_types_2019,
title = {From {Types} to {Sets} by {Local} {Type} {Definition} in {Higher}-{Order} {Logic}},
volume = {62},
number = {2},
journal = {Journal of Automated Reasoning},
author = {Kun{\v c}ar, Ond{\v r}ej and Popescu, Andrei},
year = {2019},
pages = {237--260},
}
@book{milner_definition_1997,
address = {Cambridge, Massachusetts},
title = {The {Definition} of {Standard} {ML} (revised)},
isbn = {978-0-262-63181-5},
publisher = {MIT Press},
author = {Milner, Robin and Tofte, Mads and Harper, Robert and MacQueen, David},
year = {1997},
keywords = {ML (Computer program language)},
}
@incollection{wenzel_type_1997,
address = {Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Type {Classes} and {Overloading} in {Higher}-{Order} {Logic}},
isbn = {978-3-540-69526-4},
booktitle = {Theorem {Proving} in {Higher} {Order} {Logics}},
publisher = {Springer},
author = {Wenzel, Markus},
editor = {Gunter, Elsa L. and Felty, Amy P.},
year = {1997},
keywords = {Type Constructor, Class Definition, Deductive System, Type Class, Type Definition},
pages = {307--322},
}
@incollection{nipkow_type_1991,
address = {Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Type {Classes} and {Overloading} {Resolution} via {Order}-{Sorted} {Unification}},
volume = {523},
isbn = {978-3-540-47599-6},
booktitle = {Functional {Programming} {Languages} and {Computer} {Architecture}},
publisher = {Springer},
author = {Nipkow, Tobias and Snelting, Gregor},
editor = {Hughes, John},
year = {1991},
pages = {1--14},
}
@misc{noauthor_stack_nodate,
title = {Stack {Exchange}},
url = {https://stackexchange.com/},
}
@article{milehins_extension_2021,
title = {Extension of {Types}-{To}-{Sets}},
journal = {Archive of Formal Proofs},
author = {Milehins, Mihails},
year = {2021},
}
+@article{kappelmann_speccheck_2021,
+ title = {{SpecCheck} - {Specification}-{Based} {Testing} for {Isabelle}/{ML}},
+ journal = {Archive of Formal Proofs},
+ author = {Kappelmann, Kevin and Bulwahn, Lukas and Willenbrink, Sebastian},
+ year = {2021},
+}
diff --git a/thys/Conditional_Transfer_Rule/document/root.tex b/thys/Conditional_Transfer_Rule/document/root.tex
--- a/thys/Conditional_Transfer_Rule/document/root.tex
+++ b/thys/Conditional_Transfer_Rule/document/root.tex
@@ -1,113 +1,116 @@
\PassOptionsToPackage{ngerman,main=english}{babel}
\documentclass[11pt,a4paper]{article}
\usepackage{iman,extra,isar}
\usepackage{isabelle,isabellesym}
\usepackage{railsetup}
\usepackage[margin={1in,1in}]{geometry}
\usepackage[T1]{fontenc}
\usepackage{lmodern}
\usepackage{babel}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{amsthm}
\usepackage{xspace}
\usepackage{MnSymbol}
\usepackage[utf8]{inputenc}
\usepackage{enumitem}
\usepackage{fontspec}
\usepackage{graphicx}
\usepackage{proof}
% bibliography
\usepackage[nottoc]{tocbibind}
\usepackage[square,numbers]{natbib}
\bibliographystyle{abbrvnat}
% this should be the last package used
\usepackage{pdfsetup}
% drop Isabelle tags
\isadroptag{theory}
% enumitem configuration
\setlist{noitemsep,topsep=0pt,parsep=0pt,partopsep=0pt}
% urls in roman style, theory text in math-similar italics
\urlstyle{rm}
\isabellestyle{it}
\begin{document}
\sloppy
\title{Conditional Transfer Rule: Reference Manual}
\author{Mihails Milehins}
\maketitle
\newpage
\begin{abstract}
The document presents a reference manual for the framework
\textit{Conditional Transfer Rule}: a collection of experimental utilities
for \textit{unoverloading}
\cite{kaufmann_mechanized_2010}
of definitions and synthesis
of \textit{conditional transfer rules} \cite{gonthier_lifting_2013}
for the object logic \textit{Isabelle/HOL}
(e.g., see \cite{yang_comprehending_2017})
of the formal proof assistant \textit{Isabelle} \cite{paulson_natural_1986}
written in \textit{Isabelle/ML}
\cite{milner_definition_1997, wenzel_isabelle/isar_2019}.
\end{abstract}
\newpage
\renewcommand{\abstractname}{Acknowledgements}
\begin{abstract}
The author would like to acknowledge the assistance that he received from
the users of the mailing list of Isabelle
\cite{noauthor_isabelle_nodate}
in the form of answers given to his general queries.
Special thanks go to Fabian Immler for the development and implementation
of the original algorithm for unoverloading of definitions
\cite{immler_automation_2019}, for suggesting the original idea for
the implementation of a framework for the relativization of definitions
(the idea evolved from \cite{immler_smooth_2019}) and for providing
an outline of the first feasible algorithm for this task (implemented as
-\textit{CTR II}), to Andrei Popescu for trying the software and providing feedback,
+\textit{CTR II}),
+to Andrei Popescu for trying the software and providing feedback,
+to Kevin Kappelmann for providing an explanation of certain aspects of
+\cite{kappelmann_speccheck_2021},
to Alexander Krauss for providing an explanation of certain aspects of
\cite{kaufmann_mechanized_2010},
to Andreas Lochbihler for providing an outline
of an improved algorithm for the relativization of definitions
(not currently implemented), to Andreas Lochbihler and Dmitriy Traytel
for providing an explanation of the existing functionality of the framework
Conditional Parametricity \cite{gilcher_conditional_2017}.
Furthermore, the author would like to acknowledge the positive
impact of \cite{urban_isabelle_2019} and
\cite{wenzel_isabelle/isar_2019} on his ability to code in Isabelle/ML.
Moreover, the author would like to acknowledge
the positive role that numerous Q\&A posted on the Stack Exchange network
\cite{noauthor_stack_nodate} played in the development of this work.
The author would also like to express gratitude to all members of his family
and friends for their continuous support.
\end{abstract}
\newpage
\tableofcontents
\newpage
\parindent 0pt\parskip 0.5ex
\input{session}
\newpage
\bibliography{root}
\end{document}
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/ETTS/Manual/ETTS_Introduction.thy b/thys/Types_To_Sets_Extension/ETTS/Manual/ETTS_Introduction.thy
--- a/thys/Types_To_Sets_Extension/ETTS/Manual/ETTS_Introduction.thy
+++ b/thys/Types_To_Sets_Extension/ETTS/Manual/ETTS_Introduction.thy
@@ -1,379 +1,382 @@
(* Title: ETTS/Manual/ETTS_Introduction.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
chapter\<open>ETTS: Reference Manual\<close>
section\<open>Introduction\<close>
theory ETTS_Introduction
imports
Manual_Prerequisites
"HOL-Library.Conditional_Parametricity"
begin
subsection\<open>Background\<close>
text\<open>
The \textit{standard library} that is associated with the
object logic Isabelle/HOL and provided as a part of
the standard distribution of Isabelle \cite{noauthor_isabellehol_2020}
contains a significant number of formalized results from a
variety of fields of mathematics (e.g., order theory, algebra, topology).
Nevertheless, using the argot that was promoted in the
original publication of Types-To-Sets \cite{blanchette_types_2016},
the formalization is performed using a type-based approach.
Thus, for example, the carrier sets associated with the algebraic structures
and the underlying sets of the topological spaces, effectively,
consist of all terms of an arbitrary type. This restriction can create
an inconvenience when working with mathematical objects induced on a
subset of the carrier set/underlying set associated with the original
object (e.g., see \cite{immler_smooth_2019}).
To address this limitation, several additional libraries were developed
upon the foundations of the standard library
(e.g., \textit{HOL-Algebra} \cite{ballarin_isabellehol_2020} and
\textit{HOL-Analysis} \cite{noauthor_isabellehol_2020-1}).
In terms of the argot associated with Types-To-Sets, these libraries provide
the set-based counterparts of many type-based theorems in the standard library,
along with a plethora of additional results. Nonetheless, the proofs of
the majority of the theorems that are available in the standard library
are restated explicitly in the libraries that contain the set-based results.
This unnecessary duplication of effort is one of the primary problems
that the framework Types-To-Sets is meant to address.
The framework Types-To-Sets offers a unified approach to structuring
mathematical knowledge formalized in Isabelle/HOL: potentially,
every type-based theorem can be converted to a set-based theorem in
a semi-automated manner and the relationship between such type-based
and set-based theorems can be articulated clearly and explicitly
\cite{blanchette_types_2016}.
In this document, we describe a particular implementation of the framework
Types-To-Sets in Isabelle/HOL that takes the form of a further extension
of the language Isabelle/Isar with several new commands and attributes
(e.g., see \cite{wenzel_isabelle/isar_2019-1}).
\<close>
subsection\<open>Prerequisites and conventions\<close>
text\<open>
A reader of this document is assumed to be familiar with
the proof assistant Isabelle, the proof language Isabelle/Isar,
the meta-logic Isabelle/Pure and
the object logic Isabelle/HOL, as described in
\cite{paulson_natural_1986, wenzel_isabelle/isar_2019-1},
\cite{bertot_isar_1999, wenzel_isabelleisar_2007, wenzel_isabelle/isar_2019-1},
\cite{paulson_foundation_1989, wenzel_isabelle/isar_2019-1} and
\cite{yang_comprehending_2017}, respectively. Familiarity with the
content of the original articles about Types-To-Sets
\cite{blanchette_types_2016,kuncar_types_2019} and
the first large-scale application of Types-To-Sets
(as described in \cite{immler_smooth_2019})
is not essential but can be useful.
The notational conventions that are used in this document are
approximately equivalent to the conventions that were suggested in
\cite{blanchette_types_2016}, \cite{yang_comprehending_2017} and
\cite{kuncar_types_2019}.
However, a notable difference is our use of explicit notation
for the \textit{schematic variables}. In Isabelle/HOL, free variables
that occur in the theorems at the top-level in the theory context
are generalized implicitly, which may be expressed by replacing
fixed variables by schematic variables \cite{wenzel_isabelle/isar_2001}.
In this document, the schematic variables will be prefixed with the
question mark ``$?$'', like so: $?a$. Nonetheless, explicit
quantification over the type variables at the top-level
is also allowed: $\forall \alpha. \phi\left[\alpha\right]$.
Lastly, the square brackets may be used either for the denotation
of substitution or to indicate that certain types/terms are a part
of a term: $t\left[?\alpha\right]$.
\<close>
subsection\<open>Previous work\<close>
subsubsection\<open>Relativization Algorithm\label{sec:ra}\<close>
text\<open>
Let ${}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$ denote
\[
\begin{aligned}
& (\forall x_{\beta}. \mathsf{Rep}\ x \in U)\ \wedge\\
& (\forall x_{\beta}. \mathsf{Abs}\ (\mathsf{Rep}\ x) = x)\ \wedge\\
& (\forall y_{\alpha}. y \in U \longrightarrow \mathsf{Rep}\ (\mathsf{Abs}\ y) = y)
\end{aligned},
\]
let $\rightsquigarrow$ denote the constant/type \textit{dependency relation}
(see subsection 2.3 in \cite{blanchette_types_2016}),
let $\rightsquigarrow^{\downarrow}$
be a \textit{substitutive closure} of the constant/type dependency relation,
let $R^{+}$ denote the transitive closure of
the binary relation $R$, let $\Delta_c$ denote the set of all types for which
$c$ is \textit{overloaded} and let $\sigma\not\leq S$ mean that $\sigma$ is not
an instance of any type in $S$ (see \cite{blanchette_types_2016} and
\cite{yang_comprehending_2017}).
The framework Types-To-Sets extends Isabelle/HOL with two axioms:
\textit{Local Typedef Rule} (LT) and the \textit{Unoverloading Rule} (UO).
The consistency of Isabelle/HOL augmented with the LT and
the UO is proved in Theorem 11 in \cite{yang_comprehending_2017}.
The LT is given by
\[
\begin{aligned}
\scalebox{1.0}{
\infer[\beta \not\in U, \phi, \Gamma]{\Gamma \vdash \phi}{%
\Gamma\vdash U \neq\emptyset
& \Gamma
\vdash
\left(
\exists \mathsf{Abs}\ \mathsf{Rep}. {}_{\sigma}(\beta\approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}
\right)
\longrightarrow\phi
}
}
\end{aligned}
\]
Thus, if $\beta$ is fresh for the non-empty set $U_{\sigma\ \mathsf{set}}$,
the formula $\phi$ and the context $\Gamma$, then $\phi$ can be proved in
$\Gamma$ by assuming the existence of a type $\beta$ isomorphic to $U$.
The UO is given by
\[
\infer[\text{$\forall u$ in $\phi$}. \neg(u\rightsquigarrow^{\downarrow+}c_{\sigma});\ \sigma\not\leq\Delta_c]{\forall x_{\sigma}. \phi\left[x_{\sigma}/c_{\sigma}\right]}{\phi}
\]
Thus, a \textit{constant-instance} $c_{\sigma}$ may be replaced by a universally
quantified variable $x_{\sigma}$ in $\phi$, provided that no type or
constant-instance in $\phi$ semantically depends on $c_{\sigma}$
through a chain of constant and type definitions and there is no
matching definition for $c_{\sigma}$.
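For example (a schematic illustration that is not part of the original
exposition), let $\sigma = \beta\Rightarrow\beta\Rightarrow\beta$ and suppose
that the overloaded constant $*_{\sigma}$ has no matching definition for the
type $\beta$ and that no type or constant-instance that occurs in $\phi$
depends on $*_{\sigma}$ through a chain of constant and type definitions.
Then the UO allows one to pass from $\vdash\phi\left[*_{\sigma}\right]$ to
$\vdash\forall f_{\sigma}.\ \phi\left[f_{\sigma}/*_{\sigma}\right]$.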
The statement of the \textit{original relativization algorithm} (ORA) can be
found in subsection 5.4 in \cite{blanchette_types_2016}. Here, we present
a variant of the algorithm that includes some of the amendments that were
introduced in \cite{immler_smooth_2019}, which will be referred to as the
\textit{relativization algorithm} (RA).
The differences between the ORA and
the RA are implementation-specific and have no effect on the output
of the algorithm, if applied to a conventional input.
Let $\bar{a}$ denote a finite sequence $a_1,\ldots,a_n$
for some positive integer $n$; let $\Upsilon$ be a \textit{type class}
\cite{nipkow_type_1991,wenzel_type_1997,altenkirch_constructive_2007}
that depends on the overloaded constants $\bar{*}$ and
let $U\downarrow\bar{f}$ be used
to state that $U$ is closed under the operations $\bar{f}$;
then the RA is given by
\[
\infer[(7)]
{
\vdash ?U_{?\alpha\ \mathsf{set}} \neq\emptyset\longrightarrow
?U\downarrow?\bar{f}\left[?\alpha\right]\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ ?U\ ?\bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[?\alpha,?U,?\bar{f}\right]
}
{
\infer[(6)]
{
U_{\alpha\ \mathsf{set}}\neq\emptyset
\vdash U\downarrow?\bar{f}\left[\alpha\right]\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ U\ ?\bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,U,?\bar{f}\right]
}
{
\infer[(5)]
{
U\neq\emptyset,{}_{\alpha}(\beta\approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}
\vdash U\downarrow?\bar{f}\left[\alpha\right]\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ U\ ?\bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,U,?\bar{f}\right]
}
{
\infer[(4)]
{
U\neq\emptyset,{}_{\alpha}(\beta\approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[\beta\right]\longrightarrow
\phi_{\mathsf{with}}\left[\beta,?\bar{f}\right]
}
{
\infer[(3)]
{
U\neq\emptyset,{}_{\alpha}(\beta\approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[?\alpha\right]\longrightarrow
\phi_{\mathsf{with}}\left[?\alpha,?\bar{f}\right]
}
{
\infer[(2)]
{
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[?\alpha\right]\longrightarrow
\phi_{\mathsf{with}}\left[?\alpha,?\bar{f}\right]
}
{
\infer[(1)]
{\vdash\phi_{\mathsf{with}}\left[?\alpha_{\Upsilon},\bar{*}\right]}
{\vdash\phi\left[?\alpha_{\Upsilon}\right]}
}
}
}
}
}
}
\]
The input to the RA is assumed to be a theorem
$\vdash\phi\left[?\alpha_{\Upsilon}\right]$.
Step 1 will be referred to as the first step of the dictionary
construction (subsection 5.2 in \cite{blanchette_types_2016});
step 2 as unoverloading of the type $?\alpha_{\Upsilon}$:
it includes class internalization (subsection 5.1 in \cite{blanchette_types_2016})
and the application of the UO (it corresponds to the application
of the attribute @{attribute unoverload_type} \cite{immler_smooth_2019});
step 3 provides the assumptions that are the prerequisites for
the application of the LT;
step 4 is reserved for the concrete type instantiation;
step 5 is the application of \textit{Transfer}
(section 6 in \cite{blanchette_types_2016});
step 6 refers to the application of the LT;
step 7 is the export of the theorem from the local
context \cite{wenzel_isabelle/isar_2019}.
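As an illustration (a schematic example that is not part of the original
exposition), suppose that the input to the RA is the associativity law of a
type class $\Upsilon$ of semigroups that depends on the overloaded constant
$*$, that is, the theorem
$\vdash (?a * ?b) * ?c = ?a * (?b * ?c)$ with $?a$, $?b$ and $?c$ of the
type $?\alpha_{\Upsilon}$. In this case, the output of the RA has the form
\[
\vdash ?U_{?\alpha\ \mathsf{set}}\neq\emptyset\longrightarrow
?U\downarrow ?f\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ ?U\ ?f\longrightarrow
\left(
\forall a\in ?U.\ \forall b\in ?U.\ \forall c\in ?U.\
?f\ (?f\ a\ b)\ c = ?f\ a\ (?f\ b\ c)
\right),
\]
where the schematic variable $?f$ replaces the overloaded constant $*$ and
$?U\downarrow ?f$ states that $?U$ is closed under $?f$.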
\<close>
subsubsection\<open>Implementation of Types-To-Sets\label{subsec:ITTS}\<close>
text\<open>
In \cite{blanchette_types_2016}, the authors extended the implementation
of Isabelle/HOL with the LT and UO. Also, they introduced the
attributes @{attribute internalize_sort},
@{attribute unoverload} and @{attribute cancel_type_definition}
that allowed for the execution of steps 1, 3 and 7 (respectively) of the ORA.
Other steps could be performed using the technology that already existed.
In \cite{immler_smooth_2019}, the implementation was augmented with the
attribute @{attribute unoverload_type},
which largely subsumed the functionality of the attributes
@{attribute internalize_sort} and @{attribute unoverload}.
The examples of the application of the ORA to theorems in
Isabelle/HOL that were developed in \cite{blanchette_types_2016}
already contained an implicit suggestion that the constants and theorems
needed for the first step of the dictionary construction in step 2 of
the ORA and the \textit{transfer rules} \cite{kuncar_types_2015}
needed for step 6 of the ORA can and should
be obtained prior to the application of the algorithm. Thus, using the notation
from subsection \ref{sec:ra}, for each constant-instance $c_{\sigma}$
that occurs in the type-based theorem
$\vdash\phi\left[?\alpha_{\Upsilon}\right]$
prior to the application of the ORA with respect to
${}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$,
the users were expected to provide
an \textit{unoverloaded} constant $c_{\mathsf{with}}$ such that
$c_{\sigma} = c_{\mathsf{with}}\ \bar{*}$,
and a \textit{relativized} constant
$c^{\mathsf{on}}_{\mathsf{with}}$
such that $R\left[T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}\right]
\ (c^{\mathsf{on}}_{\mathsf{with}}\ U_{\alpha\ \mathsf{set}})\ c_{\mathsf{with}}$
($\mathbb{B}$ denotes the built-in Isabelle/HOL type $bool$
\cite{kuncar_types_2015})
is a conditional transfer rule (e.g., see \cite{gonthier_lifting_2013}),
with $T$ being a binary
relation that is at least right-total and bi-unique,
assuming the default order on predicates
in Isabelle/HOL (see \cite{kuncar_types_2015}).
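For example (a schematic illustration), for a constant-instance $c_{\sigma}$
with $\sigma = ?\alpha\Rightarrow ?\alpha\Rightarrow\mathbb{B}$, such a
conditional transfer rule typically has the form
\[
\mathsf{Domainp}\ T = (\lambda x.\ x \in U)\longrightarrow
\mathsf{bi\_unique}\ T\longrightarrow
\mathsf{right\_total}\ T\longrightarrow
\left(T \Rrightarrow T \Rrightarrow =\right)
\left(c^{\mathsf{on}}_{\mathsf{with}}\ U\right)\ c_{\mathsf{with}},
\]
that is, $R\left[T\right] = \left(T\Rrightarrow T\Rrightarrow =\right)$,
where $\Rrightarrow$ denotes the function relator and $=$ denotes the
equality relation on $\mathbb{B}$.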
The unoverloading \cite{kaufmann_mechanized_2010}
and relativization of constants for the application
of the RA was performed manually in
\cite{blanchette_types_2016,kuncar_types_2019,immler_smooth_2019}.
Nonetheless, unoverloading could be performed using the
\textit{classical overloading elimination algorithm} proposed
in \cite{kaufmann_mechanized_2010}, but it is likely that an implementation
of this algorithm was not publicly available at the time of writing
of this document. In \cite{immler_automation_2019}, an alternative algorithm was
implemented and made available via the command
@{command unoverload_definition},
although it suffers from several limitations in comparison to the
algorithm in \cite{kaufmann_mechanized_2010}.
The transfer rules
for the constants that are conditionally parametric
can be synthesized automatically using the command
@{command parametric_constant}
\cite{gilcher_conditional_2017} from the standard distribution of Isabelle;
the framework \textit{autoref} \cite{lammich_automatic_2013} allows
for the synthesis of transfer rules $R\ t\ t'$,
including both the \textit{parametricity relation} \cite{kuncar_types_2015}
$R$ and the term $t$,
based on $t'$, under favorable conditions;
lastly, in \cite{lammich_automatic_2013} and \cite{immler_smooth_2019},
the authors suggest an outline of another feasible algorithm for the
synthesis of the transfer rules based on the functionality of the framework
Transfer \cite{gonthier_lifting_2013}, but do not provide an implementation.
Finally, the assumption
${}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$
can be stated using the constant @{const type_definition}
from the standard library of Isabelle/HOL as
\<^term>\<open>type_definition Rep Abs U\<close>;
the instantiation of types required in step 4 of the RA can
be performed using the standard attributes of Isabelle;
step 6 can be performed using the attribute
@{attribute cancel_type_definition} developed in
\cite{blanchette_types_2016}; step 7 is expected to be performed manually
by the user.
\<close>
subsection\<open>Purpose and scope\<close>
text\<open>
The extension of the framework Types-To-Sets that is described in this manual
adds a further layer of automation to the existing implementation
of the framework Types-To-Sets. The primary functionality of the extension
is available via the following Isar commands:
@{command tts_context}, @{command tts_lemmas} and @{command tts_lemma} (and the
synonymous commands @{command tts_corollary}, @{command tts_proposition} and
@{command tts_theorem}\footnote{In what follows, any reference to the
command @{command tts_lemma} should be viewed as a reference to the
entire family of the commands with the identical functionality.}).
The commands @{command tts_lemmas} and @{command tts_lemma}, when invoked inside
an appropriately defined @{command tts_context}, provide the
functionality that is approximately equivalent to the application of all
steps of the RA and several additional steps of
pre-processing of the input and post-processing of the result
(collectively referred to as the \textit{extended relativization algorithm}
-or ERA).
+or ERA).
As part of our work on the ETTS, we have also designed
the auxiliary framework \textit{Conditional Transfer Rule} (CTR).
The framework CTR provides the commands @{command ud} and @{command ctr} for
the automation of unoverloading of definitions and
synthesis of conditional transfer rules from definitions,
respectively. Further information about this framework can be found in its
reference manual \cite{milehins_conditional_2021}.
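For example (a sketch based on the test suite that accompanies this
extension), the invocation of the command @{command ud} on the constant
$\mathsf{set\_mult}$ introduces an unoverloaded constant
$\mathsf{set\_mult.with}$ together with the theorem
$\mathsf{set\_mult.with\_def}$; a subsequent invocation of the command
@{command ctr} under the premises
$\mathsf{Domainp}\ A = (\lambda x.\ x \in U)$, $\mathsf{bi\_unique}\ A$ and
$\mathsf{right\_total}\ A$ can be used to synthesize a relativized constant
$\mathsf{set\_mult\_ow}$ together with a conditional transfer rule that
relates $\mathsf{set\_mult\_ow}$ and $\mathsf{set\_mult.with}$.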
+In this context, we also mention that both the CTR and the ETTS were tested
+using the framework SpecCheck \cite{kappelmann_speccheck_2021}.
The extension was designed under a policy of non-intervention with the
existing implementation of the framework Types-To-Sets. Therefore, it does
not reduce the scope of the applicability of the framework.
However, the functionality that is provided by the commands associated with the
extension is a proper subset of the functionality provided by the existing
implementation. Nevertheless, the author of the extension believes that there
exist very few practical applications of the relativization algorithm that
can be solved using the original interface but cannot be solved using
the commands that are introduced within the scope of the
extension.
+
\<close>
text\<open>\newpage\<close>
end
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_AMEND_CTXT_DATA.ML b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_AMEND_CTXT_DATA.ML
--- a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_AMEND_CTXT_DATA.ML
+++ b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_AMEND_CTXT_DATA.ML
@@ -1,495 +1,541 @@
(* Title: ETTS/Tests/ETTS_TEST_AMEND_CTXT_DATA.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
signature ETTS_TEST_AMEND_CTXT_DATA =
sig
-
-val execute_test_suite_amend_context_data :
- Proof.context ->
- (
- ETTS_Context.amend_ctxt_data_input_type * Proof.context,
- ETTS_Context.ctxt_def_type * Proof.context
- ) UT_Test_Suite.test_results_suite
-
+val test_suite : Proof.context -> unit -> unit
end;
structure etts_test_amend_ctxt_data : ETTS_TEST_AMEND_CTXT_DATA =
struct
+
+(**** Background ****)
+
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
+
+
+
(**** Auxiliary ****)
fun mk_msg_tts_ctxt_error msg = "tts_context: " ^ msg
(*
approximate comparison of Token.src values: should not be made public
and should be used with great care
*)
local
val eq_eps_src_msg = "eq_eps_src: comparison is not possible"
in
fun eq_eps_src (src, src') =
let
val eq_name = Token.name_of_src src = Token.name_of_src src'
val eq_args = Token.args_of_src src ~~ Token.args_of_src src'
|> map eq_eps_token
|> List.all I
in eq_name andalso eq_args end
and eq_eps_token (token : Token.T, token' : Token.T) =
let
val eq_kind = Token.kind_of token = Token.kind_of token'
val eq_content = Token.content_of token = Token.content_of token'
val eq_source = Token.source_of token = Token.source_of token'
val eq_range =
Input.range_of (Token.input_of token) =
Input.range_of (Token.input_of token')
val eq_slot = (Token.get_value token, Token.get_value token')
|> eq_option eq_eps_value
in
eq_kind
andalso eq_content
andalso eq_source
andalso eq_range
andalso eq_slot
end
and eq_eps_value (Token.Source src, Token.Source src') = eq_eps_src (src, src')
| eq_eps_value (Token.Literal ltrl, Token.Literal ltrl') = ltrl = ltrl'
| eq_eps_value (Token.Name _, Token.Name _) = error eq_eps_src_msg
| eq_eps_value (Token.Typ T, Token.Typ T') = T = T'
| eq_eps_value (Token.Term t, Token.Term t') = t = t'
| eq_eps_value (Token.Fact (c_opt, thms), Token.Fact (c_opt', thms')) =
let
val eq_c = c_opt = c_opt'
val eq_thms = eq_list Thm.eq_thm (thms, thms')
in eq_c andalso eq_thms end
| eq_eps_value (Token.Attribute _, Token.Attribute _) =
error eq_eps_src_msg
| eq_eps_value (Token.Declaration _, Token.Declaration _) =
error eq_eps_src_msg
| eq_eps_value (Token.Files _, Token.Files _) =
error eq_eps_src_msg;
end;
(*
approximate comparison of ctxt_def_type: should not be made public
and should be used with great care
*)
fun eq_tts_ctxt_data
(
ctxt_data : ETTS_Context.ctxt_def_type,
ctxt_data' : ETTS_Context.ctxt_def_type
) =
let
fun eq_subst_thm (rsrc, rsrc') = fst rsrc = fst rsrc'
andalso eq_list eq_eps_src (snd rsrc, snd rsrc')
val _ = (#mpespc_opt ctxt_data, #mpespc_opt ctxt_data')
|> apply2 is_none
|> (fn (x, y) => x = true andalso y = true)
orelse error "eq_tts_ctxt_data: comparison is not possible"
val eq_rispec = #rispec ctxt_data = #rispec ctxt_data'
val eq_sbtspec = #sbtspec ctxt_data = #sbtspec ctxt_data'
val eq_subst_thms =
eq_list eq_subst_thm (#subst_thms ctxt_data, #subst_thms ctxt_data')
val eq_sbrr_opt = (#sbrr_opt ctxt_data, #sbrr_opt ctxt_data')
|> eq_option eq_subst_thm
val eq_attrbs = eq_list eq_eps_src (#attrbs ctxt_data, #attrbs ctxt_data')
in
eq_rispec
andalso eq_sbtspec
andalso eq_subst_thms
andalso eq_sbrr_opt
andalso eq_attrbs
end;
+(**** Visualization ****)
+
+fun show_amend_context_in (args, ctxt) =
+ let
+ val c =
+ "ctxt : unknown context\n" ^
+ ETTS_Context.string_of_amend_context_data_args ctxt args
+ in Pretty.str c end;
+
+fun show_amend_context_out (ctxt_data, ctxt) =
+ Pretty.str (ETTS_Context.string_of_tts_ctxt_data ctxt ctxt_data)
+
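+(*printer for (input, expected output) pairs, used by the equality test below*)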
+val show_amend_context = Show.zip show_amend_context_in show_amend_context_out
+
+
+
+
(**** Tests ****)
+(*** Wrapper ***)
+
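+(*function under test: an uncurried form of ETTS_Context.amend_context_data*)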
+val test_amend_context_data = uncurry ETTS_Context.amend_context_data;
+
+
+
(*** Valid inputs ***)
-fun test_eq_tts_context (ctxt : Proof.context) =
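+(*the extra (ignored) argument is the state value that Lecker passes to each test in a group*)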
+fun test_eq_tts_context (ctxt : Proof.context) _ =
let
(*input*)
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(\<le>)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(\<le>\<^sub>o\<^sub>w)"
val tbt_2 = "(<)::?'b::ord\<Rightarrow>?'b::ord\<Rightarrow>bool"
val sbt_2 = "(<\<^sub>o\<^sub>w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec, sbtspec), NONE), []), NONE), [])
(*output*)
val s_a_ix : indexname = ("'a", 0)
val s_a_T = TVar (s_a_ix, \<^sort>\<open>ord\<close>)
val aT = TFree ("'ao", \<^sort>\<open>type\<close>)
val U1 = Free ("UA", HOLogic.mk_setT aT)
val s_b_ix : indexname = ("'b", 0)
val s_b_T = TVar (s_b_ix, \<^sort>\<open>ord\<close>)
val bT = TFree ("'bo", \<^sort>\<open>type\<close>)
val U2 = Free ("UB", HOLogic.mk_setT bT)
val less_eqt =
Const (\<^const_name>\<open>less_eq\<close>, s_a_T --> s_a_T --> HOLogic.boolT)
val lesst =
Const (\<^const_name>\<open>less\<close>, s_b_T --> s_b_T --> HOLogic.boolT)
val leqt = Free ("le", aT --> aT --> HOLogic.boolT)
val lst = Free ("ls", bT --> bT --> HOLogic.boolT)
val rispec = [(s_a_ix, U1), (s_b_ix, U2)]
val sbtspec = [(less_eqt, leqt), (lesst, lst)]
val subst_thms = []
val sbrr_opt = NONE
val mpespc_opt = NONE
val attrbs = []
val tts_ctxt_data_out : ETTS_Context.ctxt_def_type =
{
rispec = rispec,
sbtspec = sbtspec,
subst_thms = subst_thms,
sbrr_opt = sbrr_opt,
mpespc_opt = mpespc_opt,
attrbs = attrbs
}
in
- UT_Test_Suite.assert_brel
+ check_list_unit
+ show_amend_context
+ [((args, ctxt), (tts_ctxt_data_out, ctxt))]
"equality"
- (eq_fst eq_tts_ctxt_data)
- (tts_ctxt_data_out, ctxt)
- (args, ctxt)
+ (
+ Prop.prop
+ (
+ fn (val_in, val_out) =>
+ eq_fst eq_tts_ctxt_data (test_amend_context_data val_in, val_out)
+ )
+ )
end;
(*** Exceptions ***)
(** General **)
-fun test_exc_tts_context_tts_context thy =
+fun test_exc_tts_context_tts_context thy _ =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val rispec1 = [(risstv1_c, U1_c)]
val args1 : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec1, []), NONE), []), NONE), [])
val ctxt' = ETTS_Context.amend_context_data args1 ctxt |> snd
val risstv2_c = "?'b"
val U2_c = "U2::'b set"
val rispec2 = [(risstv2_c, U2_c)]
val args2 : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec2, []), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error "nested tts contexts"
- in
- UT_Test_Suite.assert_exception
- "nested tts contexts" (args2, ctxt') (ERROR err_msg)
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
+
+ in
+ check_list_unit
+ (show_amend_context_in)
+ [(args2, ctxt')]
+ "nested tts contexts"
+ exn_prop
end;
(** tts **)
-fun test_exc_rispec_empty thy =
+fun test_exc_rispec_empty thy _ =
let
val ctxt = Proof_Context.init_global thy;
val args = ((((([], []), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error "rispec must not be empty"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception "rispec empty" (args, ctxt) (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "rispec empty"
+ exn_prop
end;
-fun test_exc_rispec_not_set thy =
+fun test_exc_rispec_not_set thy _ =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::('b list) set"
val risstv2_c = "?'b"
val U2_c = "U2::'a set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val args = (((((rispec, []), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"risset must be terms of the type of the form ?'a set or 'a set"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "risset are not all sets" (args, ctxt) (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "risset are not all sets"
+ exn_prop
end;
-fun test_exc_rispec_duplicate_risstvs thy =
+fun test_exc_rispec_duplicate_risstvs thy _ =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val risstv2_c = "?'b"
val U2_c = "U2::'b set"
val risstv3_c = "?'a"
val U3_c = "U3::'c set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c), (risstv3_c, U3_c)]
val args = (((((rispec, []), NONE), []), NONE), [])
+ val err_msg = "tts_context: risstvs must be distinct"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
"duplicate risstvs"
- (args, ctxt)
- (ERROR "tts_context: risstvs must be distinct")
+ exn_prop
end;
-fun test_exc_rispec_not_ds_dtv thy =
+fun test_exc_rispec_not_ds_dtv thy _ =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val risstv2_c = "?'b"
val U2_c = "U2::'b::{group_add, finite} set"
val risstv3_c = "?'c"
val U3_c = "U3::'c::{group_add, order} set"
val risstv4_c = "?'d"
val U4_c = "U4::'b::{group_add, order} set"
val rispec =
[(risstv1_c, U1_c), (risstv2_c, U2_c), (risstv3_c, U3_c), (risstv4_c, U4_c)]
val args = (((((rispec, []), NONE), []), NONE), [])
val err_msg =
"tts_context: risset: type variables with distinct sorts must be distinct"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "not distinct sorts \<longrightarrow> distinct types variables"
- (args, ctxt)
- (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "not distinct sorts \<longrightarrow> distinct types variables"
+ exn_prop
end;
-fun test_exc_rispec_not_dt_dv thy =
+fun test_exc_rispec_not_dt_dv thy _ =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val risstv2_c = "?'b"
val U2_c = "U2::'b::{group_add, finite} set"
val risstv3_c = "?'c"
val U3_c = "U3::'c::{group_add, order} set"
val risstv4_c = "?'d"
val U4_c = "U2::'c::{group_add, order} set"
val rispec =
[
(risstv1_c, U1_c),
(risstv2_c, U2_c),
(risstv3_c, U3_c),
(risstv4_c, U4_c)
]
val args = (((((rispec, []), NONE), []), NONE), [])
val err_msg =
"tts_context: risset: variables with distinct types must be distinct"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "not distinct types \<longrightarrow> distinct variables" (args, ctxt) (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "not distinct types \<longrightarrow> distinct variables"
+ exn_prop
end;
(** sbterms **)
-fun test_exc_distinct_sorts ctxt =
+fun test_exc_distinct_sorts ctxt _ =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(<\<^sub>o\<^sub>w)"
val tbt_2 = "?a::?'a::order\<Rightarrow>?'a::order\<Rightarrow>bool"
val sbt_2 = "f"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"tbts: a single stv should not have two distinct sorts associated with it"
- in
- UT_Test_Suite.assert_exception
- "tbts: an stv with distinct sorts" (args, ctxt) (ERROR err_msg)
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
+ in
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "tbts: an stv with distinct sorts"
+ exn_prop
end;
-fun test_exc_sbts_no_tis ctxt =
+fun test_exc_sbts_no_tis ctxt _ =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(<\<^sub>o\<^sub>w)"
val tbt_2 = "(\<le>)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_2 = "(\<le>\<^sub>o\<^sub>w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"\n\t-the types of the sbts must be equivalent " ^
"to the types of the tbts up to renaming of the type variables\n" ^
"\t-to each type variable that occurs among the tbts must correspond " ^
"exactly one type variable among all type " ^
"variables that occur among all of the sbts"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "sbts are not type instances of tbts" (args, ctxt) (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "sbts are not type instances of tbts"
+ exn_prop
end;
-fun test_exc_tbt_fixed ctxt =
+fun test_exc_tbt_fixed ctxt _ =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(<\<^sub>o\<^sub>w)"
val tbt_2 = "g::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_2 = "(<\<^sub>o\<^sub>w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"tbts must consist of constants and schematic variables"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "tbts are not constants and schematic variables"
- (args, ctxt)
- (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "tbts are not constants and schematic variables"
+ exn_prop
end;
-fun test_exc_sbts_not_registered ctxt =
+fun test_exc_sbts_not_registered ctxt _ =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(<\<^sub>o\<^sub>w)"
val tbt_2 = "(\<le>)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_2 = "g::'bo::type\<Rightarrow>'bo::type\<Rightarrow>bool"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"sbts must be registered using the command tts_register_sbts"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "sbts must be registered" (args, ctxt) (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "sbts must be registered"
+ exn_prop
end;
-fun test_exc_tbts_not_distinct ctxt =
+fun test_exc_tbts_not_distinct ctxt _ =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(\<le>)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(<\<^sub>o\<^sub>w)"
val tbt_2 = "(\<le>)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_2 = "(<\<^sub>o\<^sub>w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error "tbts must be distinct"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
"tbts must be distinct"
- (args, ctxt)
- (ERROR err_msg)
+ exn_prop
end;
-fun test_exc_sbterms_subset_rispec (ctxt : Proof.context) =
+fun test_exc_sbterms_subset_rispec (ctxt : Proof.context) _ =
let
(* input *)
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val rispec = [(risstv1_c, U1_c)]
val tbt_1 = "(\<le>)::?'a::ord\<Rightarrow>?'a::ord\<Rightarrow>bool"
val sbt_1 = "(\<le>\<^sub>o\<^sub>w)"
val tbt_2 = "(<)::?'b::ord\<Rightarrow>?'b::ord\<Rightarrow>bool"
val sbt_2 = "(<\<^sub>o\<^sub>w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"the collection of the (stv, ftv) pairs associated with the sbterms " ^
"must form a subset of the collection of the (stv, ftv) pairs " ^
"associated with the RI specification, provided that only the pairs " ^
"(stv, ftv) associated with the sbterms such that ftv occurs in a " ^
"premise of a theorem associated with an sbterm are taken into account"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) test_amend_context_data
in
- UT_Test_Suite.assert_exception
- "tbts "
- (args, ctxt)
- (ERROR err_msg)
+ check_list_unit
+ (show_amend_context_in)
+ [(args, ctxt)]
+ "stv-ftv subset"
+ exn_prop
end;
-(**** Test suite ****)
-
-local
-
-val test_amend_context_data = uncurry ETTS_Context.amend_context_data;
-
-fun test_amend_context_string_of_input (args, ctxt) =
- let
- val c =
- "ctxt : unknown context\n" ^
- ETTS_Context.string_of_amend_context_data_args ctxt args
- in c end;
-
-in
-fun mk_test_suite_amend_context_data ctxt =
- let
- fun string_of_tts_ctxt_data (ctxt_data, ctxt) =
- ETTS_Context.string_of_tts_ctxt_data ctxt ctxt_data
- val test_suite_amend_context_data = UT_Test_Suite.init
- "amend_context_data"
- test_amend_context_data
- test_amend_context_string_of_input
- string_of_tts_ctxt_data
- val thy = Proof_Context.theory_of ctxt
+(**** Test Suite ****)
+
+fun test_suite ctxt s =
+ let val thy = Proof_Context.theory_of ctxt
in
- test_suite_amend_context_data
- (*valid inputs*)
- |> test_eq_tts_context ctxt
- (*exceptions: general*)
- |> test_exc_tts_context_tts_context thy
- (*exceptions: rispec*)
- |> test_exc_rispec_empty thy
- |> test_exc_rispec_not_set thy
- |> test_exc_rispec_duplicate_risstvs thy
- |> test_exc_rispec_not_ds_dtv thy
- |> test_exc_rispec_not_dt_dv thy
- (*exceptions: sbtspec*)
- |> test_exc_distinct_sorts ctxt
- |> test_exc_sbts_no_tis ctxt
- |> test_exc_tbt_fixed ctxt
- |> test_exc_sbts_not_registered ctxt
- |> test_exc_tbts_not_distinct ctxt
- |> test_exc_sbterms_subset_rispec ctxt
+ [
+ test_eq_tts_context ctxt s,
+ test_exc_tts_context_tts_context thy s,
+ test_exc_rispec_empty thy s,
+ test_exc_rispec_not_set thy s,
+ test_exc_rispec_duplicate_risstvs thy s,
+ test_exc_rispec_not_ds_dtv thy s,
+ test_exc_rispec_not_dt_dv thy s,
+ test_exc_distinct_sorts ctxt s,
+ test_exc_sbts_no_tis ctxt s,
+ test_exc_tbt_fixed ctxt s,
+ test_exc_sbts_not_registered ctxt s,
+ test_exc_tbts_not_distinct ctxt s,
+ test_exc_sbterms_subset_rispec ctxt s
+ ]
+ |> Lecker.test_group ctxt s
end;
-fun execute_test_suite_amend_context_data ctxt =
- UT_Test_Suite.execute (mk_test_suite_amend_context_data ctxt)
-
-end;
-
end;
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_ALGORITHM.ML b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_ALGORITHM.ML
--- a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_ALGORITHM.ML
+++ b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_ALGORITHM.ML
@@ -1,393 +1,410 @@
(* Title: ETTS/Tests/ETTS_TEST_TTS_ALGORITHM.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
signature ETTS_TEST_TTS_ALGORITHM =
sig
-
type tts_algorithm_in_type
-val execute_test_suite_tts_algorithm :
- Proof.context ->
- (tts_algorithm_in_type,
- (thm * int list) * Proof.context)
- UT_Test_Suite.test_results_suite
-
+val test_suite : Proof.context -> unit -> unit
end;
structure etts_test_tts_algorithm : ETTS_TEST_TTS_ALGORITHM =
struct
+
+(**** Background ****)
+
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
+
+
+
(**** Auxiliary ****)
fun mk_msg_tts_algorithm_error msg = "tts_algorithm: " ^ msg
(*** Data ***)
type tts_algorithm_in_type =
Proof.context *
ETTS_Algorithm.etts_output_type *
int list *
(indexname * term) list *
(term * term) list *
(Facts.ref * Token.src list) option *
(Facts.ref * Token.src list) list *
(term list * (Proof.context -> tactic)) option *
Token.src list *
thm;
(*** String I/O ***)
fun string_of_writer writer = writer
|> ML_Syntax.print_list Int.toString
|> curry op^ "writer: "
(*** Relation for the outputs ***)
fun rel_tts_algorithm_out
(
act_out : (thm * int list) * Proof.context,
exp_out : (thm * int list) * Proof.context
) =
let
val ((thm_act_out, writer_act_out), _) = act_out
val ((thm_exp_out, writer_exp_out), _) = exp_out
val t_act_out = Thm.full_prop_of thm_act_out
val t_exp_out = Thm.full_prop_of thm_exp_out
in
t_act_out aconv t_exp_out
andalso writer_act_out = writer_exp_out
end;
-(**** Tests ****)
-
-
-
-(*** Valid inputs ***)
-
-fun test_eq_tts_algorithm (ctxt : Proof.context) =
- let
-
- (*input*)
- val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
- val rispec = #rispec tts_ctxt_data
- val sbtspec = #sbtspec tts_ctxt_data
- val sbrr_opt = #sbrr_opt tts_ctxt_data
- val subst_thms = #subst_thms tts_ctxt_data
- val mpespc_opt = #mpespc_opt tts_ctxt_data
- val attrbs = #attrbs tts_ctxt_data
- val tts_output_type = ETTS_Algorithm.default
- val writer_in = [1, 1, 1, 1]
- val in_thm = @{thm tta_semigroup_hom.tta_left_ideal_closed}
- val tts_algorithm_in : tts_algorithm_in_type =
- (
- ctxt,
- tts_output_type,
- writer_in,
- rispec,
- sbtspec,
- sbrr_opt,
- subst_thms,
- mpespc_opt,
- attrbs,
- in_thm
- )
-
- (*output*)
- val writer_out = [1, 3, 1, 1]
- val exp_tts_algorithm_out =
- ((@{thm tta_left_ideal_ow_closed}, writer_out), ctxt)
-
- in
- UT_Test_Suite.assert_brel
- "equality"
- rel_tts_algorithm_out
- exp_tts_algorithm_out
- tts_algorithm_in
- end;
-
-
-
-(*** Exceptions ***)
-
-fun test_exc_ftvs ctxt =
- let
- val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
- val rispec = #rispec tts_ctxt_data
- val sbtspec = #sbtspec tts_ctxt_data
- val sbrr_opt = #sbrr_opt tts_ctxt_data
- val subst_thms = #subst_thms tts_ctxt_data
- val mpespc_opt = #mpespc_opt tts_ctxt_data
- val attrbs = #attrbs tts_ctxt_data
- val tts_output_type = ETTS_Algorithm.default
- val writer_in = [1, 1, 1, 1]
- val in_thm = @{thm exI'[where 'a='a]}
- val tts_algorithm_in : tts_algorithm_in_type =
- (
- ctxt,
- tts_output_type,
- writer_in,
- rispec,
- sbtspec,
- sbrr_opt,
- subst_thms,
- mpespc_opt,
- attrbs,
- in_thm
- )
- val err_msg = mk_msg_tts_algorithm_error
- "fixed type variables must not occur in the type-based theorems"
- in
- UT_Test_Suite.assert_exception
- "fixed type variables"
- tts_algorithm_in
- (ERROR err_msg)
- end;
-
-fun test_exc_fvs ctxt =
- let
- val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
- val rispec = #rispec tts_ctxt_data
- val sbtspec = #sbtspec tts_ctxt_data
- val sbrr_opt = #sbrr_opt tts_ctxt_data
- val subst_thms = #subst_thms tts_ctxt_data
- val mpespc_opt = #mpespc_opt tts_ctxt_data
- val attrbs = #attrbs tts_ctxt_data
- val tts_output_type = ETTS_Algorithm.default
- val writer_in = [1, 1, 1, 1]
- val aT = TVar (("'a", 0), \<^sort>\<open>type\<close>)
- val xv = ("x", 0)
- val xt = Free ("x", aT) |> Thm.cterm_of ctxt
- val in_thm = Drule.instantiate_normalize
- (TVars.empty, Vars.make [((xv, aT), xt)]) @{thm exI'}
- val tts_algorithm_in : tts_algorithm_in_type =
- (
- ctxt,
- tts_output_type,
- writer_in,
- rispec,
- sbtspec,
- sbrr_opt,
- subst_thms,
- mpespc_opt,
- attrbs,
- in_thm
- )
- val err_msg = mk_msg_tts_algorithm_error
- "fixed variables must not occur in the type-based theorems"
- in
- UT_Test_Suite.assert_exception
- "fixed variables"
- tts_algorithm_in
- (ERROR err_msg)
- end;
-
-fun test_exc_not_risstv_subset ctxt =
- let
- val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
- val rispec = #rispec tts_ctxt_data
- val sbtspec = #sbtspec tts_ctxt_data
- val sbrr_opt = #sbrr_opt tts_ctxt_data
- val subst_thms = #subst_thms tts_ctxt_data
- val mpespc_opt = #mpespc_opt tts_ctxt_data
- val attrbs = #attrbs tts_ctxt_data
- val tts_output_type = ETTS_Algorithm.default
- val writer_in = [1, 1, 1, 1]
- val in_thm = @{thm tta_semigroup.tta_assoc}
- val tts_algorithm_in : tts_algorithm_in_type =
- (
- ctxt,
- tts_output_type,
- writer_in,
- rispec,
- sbtspec,
- sbrr_opt,
- subst_thms,
- mpespc_opt,
- attrbs,
- in_thm
- )
- val err_msg = mk_msg_tts_algorithm_error
- "risstv must be a subset of the schematic type " ^
- "variables that occur in the type-based theorems"
- in
- UT_Test_Suite.assert_exception
- "risstv is not a subset of the stvs of the type-based theorems"
- tts_algorithm_in
- (ERROR err_msg)
- end;
-
-fun test_not_tts_context thy =
- let
- val ctxt = Proof_Context.init_global thy
- val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
- val rispec = #rispec tts_ctxt_data
- val sbtspec = #sbtspec tts_ctxt_data
- val sbrr_opt = #sbrr_opt tts_ctxt_data
- val subst_thms = #subst_thms tts_ctxt_data
- val mpespc_opt = #mpespc_opt tts_ctxt_data
- val attrbs = #attrbs tts_ctxt_data
- val tts_output_type = ETTS_Algorithm.default
- val writer_in = [1, 1, 1, 1]
- val in_thm = @{thm tta_semigroup_hom.tta_left_ideal_closed}
- val tts_algorithm_in : tts_algorithm_in_type =
- (
- ctxt,
- tts_output_type,
- writer_in,
- rispec,
- sbtspec,
- sbrr_opt,
- subst_thms,
- mpespc_opt,
- attrbs,
- in_thm
- )
- val err_msg = mk_msg_tts_algorithm_error
- "ERA can only be invoked from an appropriately parameterized tts context"
- in
- UT_Test_Suite.assert_exception
- "not tts context"
- tts_algorithm_in
- (ERROR err_msg)
- end;
-
-
-
-
-(**** Test suite ****)
-
-local
+(**** Visualization ****)
fun string_of_rispec ctxt =
ML_Syntax.print_pair (Term.string_of_vname) (Syntax.string_of_term ctxt)
|> ML_Syntax.print_list;
fun string_of_sbtspec ctxt =
let val string_of_term = Syntax.string_of_term ctxt
in
ML_Syntax.print_pair string_of_term string_of_term
|> ML_Syntax.print_list
end;
-fun tts_algorithm_string_of_input (tts_algorithm_in : tts_algorithm_in_type) =
+fun show_tts_algorithm_in (tts_algorithm_in : tts_algorithm_in_type) =
let
val
(
ctxt,
tts_output_type,
writer,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
thm
) = tts_algorithm_in
val ctxt_c = "ctxt: unknown context"
val tts_output_type_c =
ETTS_Algorithm.string_of_etts_output_type tts_output_type
val writer_c = string_of_writer writer
val rispec_c = rispec |> string_of_rispec ctxt |> curry op^ "rispec: "
val sbtspec_c = sbtspec |> string_of_sbtspec ctxt |> curry op^ "sbtspec: "
val sbrr_opt_c = sbrr_opt
|> ETTS_Context.string_of_sbrr_opt
|> curry op^ "sbrr_opt: "
val subst_thms_c = subst_thms
|> ETTS_Context.string_of_subst_thms
|> curry op^ "subst_thms: "
val mpespc_opt_c = mpespc_opt
|> ETTS_Context.string_of_mpespc_opt ctxt
|> curry op^ "mpespc_opt: "
val attrbs_c = attrbs |> string_of_token_src_list |> curry op^ "attrbs: "
val thm_c = thm |> Thm.string_of_thm ctxt |> curry op^ "in_thm: "
val out_c =
[
ctxt_c,
tts_output_type_c,
writer_c,
rispec_c,
sbtspec_c,
sbrr_opt_c,
subst_thms_c,
mpespc_opt_c,
attrbs_c,
thm_c
]
|> String.concatWith "\n"
- in out_c end;
+ in Pretty.str out_c end;
-fun tts_algorithm_string_of_output
+fun show_tts_algorithm_out
(((thm, writer), ctxt) : (thm * int list) * Proof.context) =
let
val ctxt_c = "ctxt: unknown context"
val thm_c = Thm.string_of_thm ctxt thm
val writer_c = ML_Syntax.print_list Int.toString writer
val out_c = [ctxt_c, thm_c, writer_c] |> String.concatWith "\n"
- in out_c end;
+ in Pretty.str out_c end;
+
+val show_tts_algorithm = Show.zip show_tts_algorithm_in show_tts_algorithm_out
+
+
+
+
+(**** Tests ****)
+
+
+
+(*** Wrapper ***)
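+(*unpacks a single input tuple and applies ETTS_Algorithm.etts_algorithm to its components*)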
fun tts_algorithm (tts_algorithm_in : tts_algorithm_in_type) =
let
val
(
ctxt,
tts_output_type,
writer,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
thm
) = tts_algorithm_in
val tts_algorithm_out =
ETTS_Algorithm.etts_algorithm
ctxt
tts_output_type
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
- attrbs
+ attrbs
thm
in tts_algorithm_out end;
-in
-fun mk_test_suite_tts_algorithm ctxt =
+
+(*** Valid inputs ***)
+
+fun test_eq_tts_algorithm (ctxt : Proof.context) _ =
let
- val test_suite_tts_algorithm = UT_Test_Suite.init
- "tts_algorithm"
- tts_algorithm
- tts_algorithm_string_of_input
- tts_algorithm_string_of_output
+
+ (*input*)
+ val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
+ val rispec = #rispec tts_ctxt_data
+ val sbtspec = #sbtspec tts_ctxt_data
+ val sbrr_opt = #sbrr_opt tts_ctxt_data
+ val subst_thms = #subst_thms tts_ctxt_data
+ val mpespc_opt = #mpespc_opt tts_ctxt_data
+ val attrbs = #attrbs tts_ctxt_data
+ val tts_output_type = ETTS_Algorithm.default
+ val writer_in = [1, 1, 1, 1]
+ val in_thm = @{thm tta_semigroup_hom.tta_left_ideal_closed}
+ val tts_algorithm_in : tts_algorithm_in_type =
+ (
+ ctxt,
+ tts_output_type,
+ writer_in,
+ rispec,
+ sbtspec,
+ sbrr_opt,
+ subst_thms,
+ mpespc_opt,
+ attrbs,
+ in_thm
+ )
+
+ (*output*)
+ val writer_out = [1, 3, 1, 1]
+ val exp_tts_algorithm_out =
+ ((@{thm tta_left_ideal_ow_closed}, writer_out), ctxt)
+
in
- test_suite_tts_algorithm
- |> test_eq_tts_algorithm ctxt
- |> test_exc_ftvs ctxt
- |> test_exc_fvs ctxt
- |> test_exc_not_risstv_subset ctxt
- |> test_not_tts_context (Proof_Context.theory_of ctxt)
+ check_list_unit
+ show_tts_algorithm
+ [(tts_algorithm_in, exp_tts_algorithm_out)]
+ "equality"
+ (
+ Prop.prop
+ (
+ fn (val_in, val_out) =>
+ rel_tts_algorithm_out (tts_algorithm val_in, val_out)
+ )
+ )
end;
-end;
-fun execute_test_suite_tts_algorithm ctxt =
- UT_Test_Suite.execute (mk_test_suite_tts_algorithm ctxt);
+
+(*** Exceptions ***)
+
+fun test_exc_ftvs ctxt _ =
+ let
+ val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
+ val rispec = #rispec tts_ctxt_data
+ val sbtspec = #sbtspec tts_ctxt_data
+ val sbrr_opt = #sbrr_opt tts_ctxt_data
+ val subst_thms = #subst_thms tts_ctxt_data
+ val mpespc_opt = #mpespc_opt tts_ctxt_data
+ val attrbs = #attrbs tts_ctxt_data
+ val tts_output_type = ETTS_Algorithm.default
+ val writer_in = [1, 1, 1, 1]
+ val in_thm = @{thm exI'[where 'a='a]}
+ val tts_algorithm_in : tts_algorithm_in_type =
+ (
+ ctxt,
+ tts_output_type,
+ writer_in,
+ rispec,
+ sbtspec,
+ sbrr_opt,
+ subst_thms,
+ mpespc_opt,
+ attrbs,
+ in_thm
+ )
+ val err_msg = mk_msg_tts_algorithm_error
+ "fixed type variables must not occur in the type-based theorems"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) tts_algorithm
+ in
+ check_list_unit
+ show_tts_algorithm_in
+ [tts_algorithm_in]
+ "fixed type variables"
+ exn_prop
+ end;
+
+fun test_exc_fvs ctxt _ =
+ let
+ val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
+ val rispec = #rispec tts_ctxt_data
+ val sbtspec = #sbtspec tts_ctxt_data
+ val sbrr_opt = #sbrr_opt tts_ctxt_data
+ val subst_thms = #subst_thms tts_ctxt_data
+ val mpespc_opt = #mpespc_opt tts_ctxt_data
+ val attrbs = #attrbs tts_ctxt_data
+ val tts_output_type = ETTS_Algorithm.default
+ val writer_in = [1, 1, 1, 1]
+ val aT = TVar (("'a", 0), \<^sort>\<open>type\<close>)
+ val xv = ("x", 0)
+ val xt = Free ("x", aT) |> Thm.cterm_of ctxt
+ val in_thm = Drule.instantiate_normalize
+ (TVars.empty, Vars.make [((xv, aT), xt)]) @{thm exI'}
+ val tts_algorithm_in : tts_algorithm_in_type =
+ (
+ ctxt,
+ tts_output_type,
+ writer_in,
+ rispec,
+ sbtspec,
+ sbrr_opt,
+ subst_thms,
+ mpespc_opt,
+ attrbs,
+ in_thm
+ )
+ val err_msg = mk_msg_tts_algorithm_error
+ "fixed variables must not occur in the type-based theorems"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) tts_algorithm
+ in
+ check_list_unit
+ show_tts_algorithm_in
+ [tts_algorithm_in]
+ "fixed variables"
+ exn_prop
+ end;
+
+fun test_exc_not_risstv_subset ctxt _ =
+ let
+ val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
+ val rispec = #rispec tts_ctxt_data
+ val sbtspec = #sbtspec tts_ctxt_data
+ val sbrr_opt = #sbrr_opt tts_ctxt_data
+ val subst_thms = #subst_thms tts_ctxt_data
+ val mpespc_opt = #mpespc_opt tts_ctxt_data
+ val attrbs = #attrbs tts_ctxt_data
+ val tts_output_type = ETTS_Algorithm.default
+ val writer_in = [1, 1, 1, 1]
+ val in_thm = @{thm tta_semigroup.tta_assoc}
+ val tts_algorithm_in : tts_algorithm_in_type =
+ (
+ ctxt,
+ tts_output_type,
+ writer_in,
+ rispec,
+ sbtspec,
+ sbrr_opt,
+ subst_thms,
+ mpespc_opt,
+ attrbs,
+ in_thm
+ )
+ val err_msg = mk_msg_tts_algorithm_error
+ "risstv must be a subset of the schematic type " ^
+ "variables that occur in the type-based theorems"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) tts_algorithm
+ in
+ check_list_unit
+ show_tts_algorithm_in
+ [tts_algorithm_in]
+ "risstv is not a subset of the stvs of the type-based theorems"
+ exn_prop
+ end;
+
+fun test_not_tts_context thy _ =
+ let
+ val ctxt = Proof_Context.init_global thy
+ val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
+ val rispec = #rispec tts_ctxt_data
+ val sbtspec = #sbtspec tts_ctxt_data
+ val sbrr_opt = #sbrr_opt tts_ctxt_data
+ val subst_thms = #subst_thms tts_ctxt_data
+ val mpespc_opt = #mpespc_opt tts_ctxt_data
+ val attrbs = #attrbs tts_ctxt_data
+ val tts_output_type = ETTS_Algorithm.default
+ val writer_in = [1, 1, 1, 1]
+ val in_thm = @{thm tta_semigroup_hom.tta_left_ideal_closed}
+ val tts_algorithm_in : tts_algorithm_in_type =
+ (
+ ctxt,
+ tts_output_type,
+ writer_in,
+ rispec,
+ sbtspec,
+ sbrr_opt,
+ subst_thms,
+ mpespc_opt,
+ attrbs,
+ in_thm
+ )
+ val err_msg = mk_msg_tts_algorithm_error
+ "ERA can only be invoked from an appropriately parameterized tts context"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) tts_algorithm
+ in
+ check_list_unit
+ show_tts_algorithm_in
+ [tts_algorithm_in]
+ "not tts context"
+ exn_prop
+ end;
+
+
+
+
+(**** Test Suite ****)
+
+fun test_suite ctxt s =
+ let val thy = Proof_Context.theory_of ctxt
+ in
+ [
+ test_eq_tts_algorithm ctxt s,
+ test_exc_ftvs ctxt s,
+ test_exc_fvs ctxt s,
+ test_exc_not_risstv_subset ctxt s,
+ test_not_tts_context thy s
+ ]
+ |> Lecker.test_group ctxt s
+ end;
end;
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_REGISTER_SBTS.ML b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_REGISTER_SBTS.ML
--- a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_REGISTER_SBTS.ML
+++ b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_TEST_TTS_REGISTER_SBTS.ML
@@ -1,126 +1,127 @@
(* Title: ETTS/Tests/ETTS_TEST_TTS_REGISTER_SBTS.ML
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
*)
signature ETTS_TEST_TTS_REGISTER_SBTS =
sig
-
type tts_register_sbts_in_type
-val execute_test_suite_tts_register_sbts :
- Proof.context ->
- (tts_register_sbts_in_type, Proof.state)
- UT_Test_Suite.test_results_suite
-
+val test_suite : Proof.context -> unit -> unit
end;
structure etts_test_tts_register_sbts : ETTS_TEST_TTS_REGISTER_SBTS =
struct
+(**** Background ****)
+
+open SpecCheck;
+structure Prop = SpecCheck_Property;
+structure Show = SpecCheck_Show;
+
+
+
+
(**** Auxiliary ****)
fun mk_msg_tts_register_sbts_error msg = "tts_register_sbts: " ^ msg
(*** Data ***)
type tts_register_sbts_in_type =
(string * string list) * Proof.context
+
+(**** Visualization ****)
+
+fun show_tts_register_sbts_in
+ (tts_register_sbts_in : tts_register_sbts_in_type) =
+ let
+ val ((sbt, risset), _) = tts_register_sbts_in
+ val ctxt_c = "ctxt: unknown context"
+ val sbt_c = "sbt: " ^ sbt
+ val risset_c = "risset: " ^ ML_Syntax.print_list I risset
+ val out_c = [ctxt_c, sbt_c, risset_c] |> String.concatWith "\n"
+ in Pretty.str out_c end;
+
+
+
+
+
+(**** Tests ****)
+
+
+
+(*** Wrapper ***)
+
+fun tts_register_sbts ((args, ctxt) : tts_register_sbts_in_type) =
+ ETTS_Substitution.process_tts_register_sbts args ctxt;
+
+
+
(*** Exceptions ***)
-fun test_exc_fvs ctxt =
+fun test_exc_fvs ctxt _ =
let
val sbt = "g::'q"
val UA_c = "UA::'a set"
val UB_c = "UB::'b set"
val rissest = [UA_c, UB_c]
val tts_register_sbts_in : tts_register_sbts_in_type =
((sbt, rissest), ctxt)
val err_msg = mk_msg_tts_register_sbts_error
"all fixed variables that occur in the sbterm " ^
"must be fixed in the context"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) tts_register_sbts
+
in
- UT_Test_Suite.assert_exception
+ check_list_unit
+ (show_tts_register_sbts_in)
+ [tts_register_sbts_in]
"variables not fixed in the context"
- tts_register_sbts_in
- (ERROR err_msg)
+ exn_prop
end;
-fun test_exc_repeated_risset ctxt =
+fun test_exc_repeated_risset ctxt _ =
let
val sbt = "f"
val UA_c = "UA::'a set"
val UB_c = "UA::'a set"
val rissest = [UA_c, UB_c]
val tts_register_sbts_in : tts_register_sbts_in_type =
((sbt, rissest), ctxt)
val err_msg = mk_msg_tts_register_sbts_error
"the type variables associated with the risset must be distinct"
+ val exn_prop = Prop.expect_failure (ERROR err_msg) tts_register_sbts
+
in
- UT_Test_Suite.assert_exception
+ check_list_unit
+ show_tts_register_sbts_in
+ [tts_register_sbts_in]
"repeated risset"
- tts_register_sbts_in
- (ERROR err_msg)
+ exn_prop
end;
-(**** Test suite ****)
-
-local
-
-fun tts_register_sbts_string_of_input
- (tts_register_sbts_in : tts_register_sbts_in_type) =
- let
- val ((sbt, risset), _) = tts_register_sbts_in
- val ctxt_c = "ctxt: unknown context"
- val sbt_c = "sbt: " ^ sbt
- val risset_c = "risset: " ^ ML_Syntax.print_list I risset
- val out_c = [ctxt_c, sbt_c, risset_c]
- |> String.concatWith "\n"
- in out_c end;
-
-fun tts_register_sbts_string_of_output (_ : Proof.state) =
- let val st_c = "st: unknown state"
- in st_c end;
+(**** Test Suite ****)
-fun tts_register_sbts ((args, ctxt) : tts_register_sbts_in_type) =
- ETTS_Substitution.process_tts_register_sbts args ctxt;
-
-in
-
-fun mk_test_suite_tts_register_sbts ctxt =
- let
- val test_suite_tts_register_sbts = UT_Test_Suite.init
- "tts_register_sbts"
- tts_register_sbts
- tts_register_sbts_string_of_input
- tts_register_sbts_string_of_output
- in
- test_suite_tts_register_sbts
- |> test_exc_fvs ctxt
- |> test_exc_repeated_risset ctxt
- end;
-
-end;
-
-fun execute_test_suite_tts_register_sbts ctxt =
- UT_Test_Suite.execute (mk_test_suite_tts_register_sbts ctxt);
+fun test_suite ctxt s = [test_exc_fvs ctxt s, test_exc_repeated_risset ctxt s]
+ |> Lecker.test_group ctxt s;
end;
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_Tests.thy b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_Tests.thy
--- a/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_Tests.thy
+++ b/thys/Types_To_Sets_Extension/ETTS/Tests/ETTS_Tests.thy
@@ -1,385 +1,366 @@
(* Title: ETTS/Tests/ETTS_Tests.thy
Author: Mihails Milehins
Copyright 2021 (C) Mihails Milehins
A test suite for the ETTS.
*)
section\<open>A test suite for ETTS\<close>
theory ETTS_Tests
imports
"../ETTS_Auxiliary"
Conditional_Transfer_Rule.IML_UT
+ SpecCheck.SpecCheck_Dynamic
begin
subsection\<open>\<open>amend_ctxt_data\<close>\<close>
ML_file\<open>ETTS_TEST_AMEND_CTXT_DATA.ML\<close>
locale test_amend_ctxt_data =
fixes UA :: "'ao set" and UB :: "'bo set" and UC :: "'co set"
and le :: "['ao, 'ao] \<Rightarrow> bool" (infix \<open>\<le>\<^sub>o\<^sub>w\<close> 50)
and ls :: "['bo, 'bo] \<Rightarrow> bool" (infix \<open><\<^sub>o\<^sub>w\<close> 50)
and f :: "['ao, 'bo] \<Rightarrow> 'co"
- assumes closed_f: "a \<in> UA \<Longrightarrow> b \<in> UB \<Longrightarrow> f a b \<in> UC"
+ assumes closed_f: "\<lbrakk> a \<in> UA; b \<in> UB \<rbrakk> \<Longrightarrow> f a b \<in> UC"
begin
notation le (\<open>'(\<le>\<^sub>o\<^sub>w')\<close>)
and le (infix \<open>\<le>\<^sub>o\<^sub>w\<close> 50)
and ls (\<open>'(<\<^sub>o\<^sub>w')\<close>)
and ls (infix \<open><\<^sub>o\<^sub>w\<close> 50)
tts_register_sbts \<open>(\<le>\<^sub>o\<^sub>w)\<close> | UA
proof-
assume "Domainp AOA = (\<lambda>x. x \<in> UA)" "bi_unique AOA" "right_total AOA"
from tts_AA_eq_transfer[OF this] show ?thesis by auto
qed
tts_register_sbts \<open>(<\<^sub>o\<^sub>w)\<close> | UB
proof-
assume "Domainp BOA = (\<lambda>x. x \<in> UB)" "bi_unique BOA" "right_total BOA"
from tts_AA_eq_transfer[OF this] show ?thesis by auto
qed
tts_register_sbts f | UA and UB and UC
proof-
assume
"Domainp AOC = (\<lambda>x. x \<in> UA)" "bi_unique AOC" "right_total AOC"
"Domainp BOB = (\<lambda>x. x \<in> UB)" "bi_unique BOB" "right_total BOB"
"Domainp COA = (\<lambda>x. x \<in> UC)" "bi_unique COA" "right_total COA"
from tts_AB_C_transfer[OF closed_f this] show ?thesis by auto
qed
end
context test_amend_ctxt_data
begin
-
ML\<open>
-val tts_test_amend_ctxt_data_test_results =
- etts_test_amend_ctxt_data.execute_test_suite_amend_context_data @{context}
+Lecker.test_group @{context} () [etts_test_amend_ctxt_data.test_suite]
\<close>
-ML\<open>
-val _ = tts_test_amend_ctxt_data_test_results
- |> UT_Test_Suite.output_test_results true
-\<close>
-
end
subsection\<open>\<open>tts_algorithm\<close>\<close>
text\<open>
Some of the content of this section is based on \cite{cain_nine_2019}.
\<close>
(*the following theorem is restated for forward compatibility*)
lemma exI': "P x \<Longrightarrow> \<exists>x. P x" by auto
class tta_mult =
fixes tta_mult :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "*\<^sub>t\<^sub>t\<^sub>a" 65)
class tta_semigroup = tta_mult +
assumes tta_assoc[simp]: "(a *\<^sub>t\<^sub>t\<^sub>a b) *\<^sub>t\<^sub>t\<^sub>a c = a *\<^sub>t\<^sub>t\<^sub>a (b *\<^sub>t\<^sub>t\<^sub>a c)"
definition set_mult :: "'a::tta_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixl "\<^bold>*\<^sub>t\<^sub>t\<^sub>a" 65)
where "set_mult S T = {s *\<^sub>t\<^sub>t\<^sub>a t | s t. s \<in> S \<and> t \<in> T}"
definition left_ideal :: "'a::tta_mult set \<Rightarrow> bool"
where "left_ideal T \<longleftrightarrow> (\<forall>s. \<forall>t\<in>T. s *\<^sub>t\<^sub>t\<^sub>a t \<in> T)"
lemma left_idealI[intro]:
assumes "\<And>s t. t \<in> T \<Longrightarrow> s *\<^sub>t\<^sub>t\<^sub>a t \<in> T"
shows "left_ideal T"
using assms unfolding left_ideal_def by simp
lemma left_idealE[elim]:
assumes "left_ideal T"
obtains "\<And>s t. t \<in> T \<Longrightarrow> s *\<^sub>t\<^sub>t\<^sub>a t \<in> T"
using assms unfolding left_ideal_def by simp
lemma left_ideal_set_mult_iff: "left_ideal T \<longleftrightarrow> UNIV \<^bold>*\<^sub>t\<^sub>t\<^sub>a T \<subseteq> T"
unfolding left_ideal_def set_mult_def by auto
ud \<open>set_mult\<close>
ud \<open>left_ideal\<close>
ctr relativization
synthesis ctr_simps
assumes [transfer_domain_rule]: "Domainp A = (\<lambda>x. x \<in> U)"
and [transfer_rule]: "bi_unique A" "right_total A"
trp (?'a A)
in set_mult_ow: set_mult.with_def
and left_ideal_ow: left_ideal.with_def
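(*
A sketch of the kind of set-based constant that the ctr invocation above is
expected to synthesize; the exact parameterization and side conditions are
determined by ctr, so the form below is illustrative only and is inferred
from how left_ideal_ow is unfolded later in this theory:

  left_ideal_ow U times T \<longleftrightarrow> (\<forall>s\<in>U. \<forall>t\<in>T. times s t \<in> T)

In addition, ctr is expected to provide transfer rules relating the
relativized constants to set_mult.with and left_ideal.with under the
relation A.
*)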
locale tta_semigroup_hom =
fixes f :: "'a::tta_semigroup \<Rightarrow> 'b::tta_semigroup"
assumes hom: "f (a *\<^sub>t\<^sub>t\<^sub>a b) = f a *\<^sub>t\<^sub>t\<^sub>a f b"
context tta_semigroup_hom
begin
lemma tta_left_ideal_closed:
assumes "left_ideal T" and "surj f"
shows "left_ideal (f ` T)"
unfolding left_ideal_def
proof(intro allI ballI)
fix fs ft assume prems: "ft \<in> f ` T"
then obtain t where t: "t \<in> T" and ft_def: "ft = f t" by clarsimp
from assms(2) obtain s where fs_def: "fs = f s" by auto
from assms have "t \<in> T \<Longrightarrow> s *\<^sub>t\<^sub>t\<^sub>a t \<in> T" for s t by auto
with t show "fs *\<^sub>t\<^sub>t\<^sub>a ft \<in> f ` T"
unfolding ft_def fs_def hom[symmetric] by simp
qed
end
locale semigroup_mult_hom_with =
dom: tta_semigroup times_a + cod: tta_semigroup times_b
for times_a (infixl \<open>*\<^sub>o\<^sub>w\<^sub>.\<^sub>a\<close> 70) and times_b (infixl \<open>*\<^sub>o\<^sub>w\<^sub>.\<^sub>b\<close> 70) +
fixes f :: "'a \<Rightarrow> 'b"
assumes f_hom: "f (a *\<^sub>o\<^sub>w\<^sub>.\<^sub>a b) = f a *\<^sub>o\<^sub>w\<^sub>.\<^sub>b f b"
lemma semigroup_mult_hom_with[ud_with]:
"tta_semigroup_hom = semigroup_mult_hom_with (*\<^sub>t\<^sub>t\<^sub>a) (*\<^sub>t\<^sub>t\<^sub>a)"
unfolding
semigroup_mult_hom_with_def tta_semigroup_hom_def
class.tta_semigroup_def semigroup_mult_hom_with_axioms_def
by auto
locale semigroup_ow =
fixes U :: "'ag set" and f :: "['ag, 'ag] \<Rightarrow> 'ag" (infixl \<open>\<^bold>*\<^sub>o\<^sub>w\<close> 70)
assumes f_closed: "\<lbrakk> a \<in> U; b \<in> U \<rbrakk> \<Longrightarrow> a \<^bold>*\<^sub>o\<^sub>w b \<in> U"
and assoc: "\<lbrakk> a \<in> U; b \<in> U; c \<in> U \<rbrakk> \<Longrightarrow> a \<^bold>*\<^sub>o\<^sub>w b \<^bold>*\<^sub>o\<^sub>w c = a \<^bold>*\<^sub>o\<^sub>w (b \<^bold>*\<^sub>o\<^sub>w c)"
begin
notation f (infixl \<open>\<^bold>*\<^sub>o\<^sub>w\<close> 70)
lemma f_closed'[simp]: "\<forall>x\<in>U. \<forall>y\<in>U. x \<^bold>*\<^sub>o\<^sub>w y \<in> U" by (simp add: f_closed)
tts_register_sbts \<open>(\<^bold>*\<^sub>o\<^sub>w)\<close> | U by (rule tts_AA_A_transfer[OF f_closed])
end
locale times_ow =
fixes U :: "'ag set" and times :: "['ag, 'ag] \<Rightarrow> 'ag" (infixl \<open>*\<^sub>o\<^sub>w\<close> 70)
assumes times_closed[simp, intro]: "\<lbrakk> a \<in> U; b \<in> U \<rbrakk> \<Longrightarrow> a *\<^sub>o\<^sub>w b \<in> U"
begin
notation times (infixl \<open>*\<^sub>o\<^sub>w\<close> 70)
lemma times_closed'[simp]: "\<forall>x\<in>U. \<forall>y\<in>U. x *\<^sub>o\<^sub>w y \<in> U" by simp
tts_register_sbts \<open>(*\<^sub>o\<^sub>w)\<close> | U by (rule tts_AA_A_transfer[OF times_closed])
end
locale semigroup_mult_ow = times_ow U times
for U :: "'ag set" and times +
assumes mult_assoc:
"\<lbrakk> a \<in> U; b \<in> U; c \<in> U \<rbrakk> \<Longrightarrow> a *\<^sub>o\<^sub>w b *\<^sub>o\<^sub>w c = a *\<^sub>o\<^sub>w (b *\<^sub>o\<^sub>w c)"
begin
sublocale mult: semigroup_ow U \<open>(*\<^sub>o\<^sub>w)\<close>
by unfold_locales (auto simp: times_closed' mult_assoc)
end
locale semigroup_mult_hom_ow =
dom: semigroup_mult_ow UA times_a +
cod: semigroup_mult_ow UB times_b
for UA :: "'a set"
and UB :: "'b set"
and times_a (infixl \<open>*\<^sub>o\<^sub>w\<^sub>.\<^sub>a\<close> 70)
and times_b (infixl \<open>*\<^sub>o\<^sub>w\<^sub>.\<^sub>b\<close> 70) +
fixes f :: "'a \<Rightarrow> 'b"
assumes f_closed[simp]: "a \<in> UA \<Longrightarrow> f a \<in> UB"
and f_hom: "\<lbrakk> a \<in> UA; b \<in> UA \<rbrakk> \<Longrightarrow> f (a *\<^sub>o\<^sub>w\<^sub>.\<^sub>a b) = f a *\<^sub>o\<^sub>w\<^sub>.\<^sub>b f b"
begin
lemma f_closed'[simp]: "f ` UA \<subseteq> UB" by auto
tts_register_sbts f | UA and UB by (rule tts_AB_transfer[OF f_closed'])
end
context semigroup_mult_hom_ow
begin
lemma tta_left_ideal_ow_closed:
assumes "T \<subseteq> UA"
and "left_ideal_ow UA (*\<^sub>o\<^sub>w\<^sub>.\<^sub>a) T"
and "f ` UA = UB"
shows "left_ideal_ow UB (*\<^sub>o\<^sub>w\<^sub>.\<^sub>b) (f ` T)"
unfolding left_ideal_ow_def
proof(intro ballI)
fix fs ft assume ft: "ft \<in> f ` T" and fs: "fs \<in> UB"
then obtain t where t: "t \<in> T" and ft_def: "ft = f t" by auto
from assms(3) fs obtain s where fs_def: "fs = f s" and s: "s \<in> UA" by auto
from assms have "\<lbrakk> s \<in> UA; t \<in> T \<rbrakk> \<Longrightarrow> s *\<^sub>o\<^sub>w\<^sub>.\<^sub>a t \<in> T" for s t
unfolding left_ideal_ow_def by simp
with assms(1) s t fs show "fs *\<^sub>o\<^sub>w\<^sub>.\<^sub>b ft \<in> f ` T"
using f_hom[symmetric, OF s] unfolding ft_def fs_def by auto
qed
end
lemma semigroup_mult_ow: "class.tta_semigroup = semigroup_mult_ow UNIV"
unfolding
class.tta_semigroup_def semigroup_mult_ow_def
semigroup_mult_ow_axioms_def times_ow_def
by simp
lemma semigroup_mult_hom_ow:
"tta_semigroup_hom = semigroup_mult_hom_ow UNIV UNIV (*\<^sub>t\<^sub>t\<^sub>a) (*\<^sub>t\<^sub>t\<^sub>a)"
unfolding
tta_semigroup_hom_def semigroup_mult_hom_ow_axioms_def
semigroup_mult_hom_ow_def semigroup_mult_ow_def
semigroup_mult_ow_axioms_def times_ow_def
by simp
context
includes lifting_syntax
begin
lemma semigroup_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> A) ===> (=))
(semigroup_ow (Collect (Domainp A))) semigroup"
proof-
let ?P = "((A ===> A ===> A) ===> (=))"
let ?semigroup_ow = "semigroup_ow (Collect (Domainp A))"
let ?rf_UNIV =
"(\<lambda>f::['b, 'b] \<Rightarrow> 'b. (\<forall>x y. x \<in> UNIV \<longrightarrow> y \<in> UNIV \<longrightarrow> f x y \<in> UNIV))"
have "?P ?semigroup_ow (\<lambda>f. ?rf_UNIV f \<and> semigroup f)"
unfolding semigroup_ow_def semigroup_def
apply transfer_prover_start
apply transfer_step+
by simp
thus ?thesis by simp
qed
lemma semigroup_mult_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> A) ===> (=))
(semigroup_mult_ow (Collect (Domainp A)))
class.tta_semigroup"
proof -
let ?P = \<open>((A ===> A ===> A) ===> (=))\<close>
and ?semigroup_mult_ow =
\<open>(\<lambda>f. semigroup_mult_ow (Collect (Domainp A)) f)\<close>
and ?rf_UNIV =
\<open>(\<lambda>f::['b, 'b] \<Rightarrow> 'b. (\<forall>x y. x \<in> UNIV \<longrightarrow> y \<in> UNIV \<longrightarrow> f x y \<in> UNIV))\<close>
have "?P ?semigroup_mult_ow (\<lambda>f. ?rf_UNIV f \<and> class.tta_semigroup f)"
unfolding
semigroup_mult_ow_def class.tta_semigroup_def
semigroup_mult_ow_axioms_def times_ow_def
apply transfer_prover_start
apply transfer_step+
by simp
thus ?thesis by simp
qed
lemma semigroup_mult_hom_transfer[transfer_rule]:
assumes [transfer_rule]:
"bi_unique A" "right_total A" "bi_unique B" "right_total B"
shows
"((A ===> A ===> A) ===> (B ===> B ===> B) ===> (A ===> B) ===> (=))
(semigroup_mult_hom_ow (Collect (Domainp A)) (Collect (Domainp B)))
semigroup_mult_hom_with"
proof-
let ?PA = "A ===> A ===> A"
and ?PB = "B ===> B ===> B"
and ?semigroup_mult_hom_ow =
\<open>
\<lambda>times_a times_b f. semigroup_mult_hom_ow
(Collect (Domainp A)) (Collect (Domainp B)) times_a times_b f
\<close>
let ?closed = \<open>\<lambda>f::'b\<Rightarrow>'d . \<forall>a. a \<in> UNIV \<longrightarrow> f a \<in> UNIV\<close>
have
"(?PA ===> ?PB ===> (A ===> B) ===> (=))
?semigroup_mult_hom_ow
(
\<lambda>times_a times_b f.
?closed f \<and> semigroup_mult_hom_with times_a times_b f
)"
unfolding
times_ow_def
semigroup_mult_hom_ow_def
semigroup_mult_hom_ow_axioms_def
semigroup_mult_hom_with_def
semigroup_mult_hom_with_axioms_def
apply transfer_prover_start
apply transfer_step+
by blast
thus ?thesis by simp
qed
end
+
context semigroup_mult_hom_ow
begin
ML_file\<open>ETTS_TEST_TTS_ALGORITHM.ML\<close>
named_theorems semigroup_mult_hom_ow_test_simps
lemmas [semigroup_mult_hom_ow_test_simps] =
ctr_simps_Collect_mem_eq
ctr_simps_in_iff
tts_context
tts: (?'a to UA) and (?'b to UB)
sbterms: (\<open>(*\<^sub>t\<^sub>t\<^sub>a)::[?'a::tta_semigroup, ?'a] \<Rightarrow> ?'a\<close> to \<open>(*\<^sub>o\<^sub>w\<^sub>.\<^sub>a)\<close>)
and (\<open>(*\<^sub>t\<^sub>t\<^sub>a)::[?'b::tta_semigroup, ?'b] \<Rightarrow> ?'b\<close> to \<open>(*\<^sub>o\<^sub>w\<^sub>.\<^sub>b)\<close>)
and (\<open>?f::?'a::tta_semigroup \<Rightarrow> ?'b::tta_semigroup\<close> to f)
rewriting semigroup_mult_hom_ow_test_simps
substituting semigroup_mult_hom_ow_axioms
and dom.semigroup_mult_ow_axioms
and cod.semigroup_mult_ow_axioms
eliminating \<open>UA \<noteq> {}\<close> and \<open>UB \<noteq> {}\<close>
through (auto simp only: left_ideal_ow_def)
begin
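(*
Inside a tts_context such as the one above, a type-based fact would normally
be relativized with the tts_lemma command; the test suite below exercises
this machinery programmatically instead. An illustrative sketch, roughly of
the form used elsewhere in this development (the name, exact syntax and
premises are assumptions, not part of the test suite):

  tts_lemma tta_left_ideal_ow_closed':
    assumes "T \<subseteq> UA" and "f ` UA = UB" and "left_ideal_ow UA ( *\<^sub>o\<^sub>w\<^sub>.\<^sub>a) T"
    shows "left_ideal_ow UB ( *\<^sub>o\<^sub>w\<^sub>.\<^sub>b) (f ` T)"
    is tta_semigroup_hom.tta_left_ideal_closed.
*)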
-
ML\<open>
-val tts_test_amend_ctxt_data_test_results =
- etts_test_tts_algorithm.execute_test_suite_tts_algorithm @{context}
+Lecker.test_group @{context} () [etts_test_tts_algorithm.test_suite]
\<close>
-ML\<open>
-val _ = tts_test_amend_ctxt_data_test_results
- |> UT_Test_Suite.output_test_results true
-\<close>
-
end
end
subsection\<open>\<open>tts_register_sbts\<close>\<close>
-context
+context
fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'c"
and UA :: "'a set"
begin
-
ML_file\<open>ETTS_TEST_TTS_REGISTER_SBTS.ML\<close>
ML\<open>
-val tts_test_tts_register_sbts_test_results =
- etts_test_tts_register_sbts.execute_test_suite_tts_register_sbts @{context}
+Lecker.test_group @{context} () [etts_test_tts_register_sbts.test_suite]
\<close>
-ML\<open>
-val _ = tts_test_tts_register_sbts_test_results
- |> UT_Test_Suite.output_test_results true
-\<close>
-
end
end
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/ROOT b/thys/Types_To_Sets_Extension/ROOT
--- a/thys/Types_To_Sets_Extension/ROOT
+++ b/thys/Types_To_Sets_Extension/ROOT
@@ -1,44 +1,45 @@
chapter AFP
session Types_To_Sets_Extension (AFP) = Conditional_Transfer_Rule +
options [timeout = 1200]
sessions
"HOL-Types_To_Sets"
"HOL-Eisbach"
"HOL-Analysis"
+ SpecCheck
directories
"ETTS"
"ETTS/ETTS_Tools"
"ETTS/Manual"
"ETTS/Tests"
"Examples"
"Examples/TTS_Foundations"
"Examples/TTS_Foundations/Algebra"
"Examples/TTS_Foundations/Foundations"
"Examples/TTS_Foundations/Orders"
"Examples/SML_Relativization"
"Examples/SML_Relativization/Algebra"
"Examples/SML_Relativization/Foundations"
"Examples/SML_Relativization/Lattices"
"Examples/SML_Relativization/Simple_Orders"
"Examples/SML_Relativization/Topology"
"Examples/Vector_Spaces"
theories [document = false]
"ETTS/ETTS_Tools/ETTS_Tools"
"ETTS/ETTS"
"ETTS/ETTS_Auxiliary"
"ETTS/Manual/Manual_Prerequisites"
"ETTS/Tests/ETTS_Tests"
theories
"ETTS/Manual/ETTS_CR"
"Examples/Introduction"
"Examples/SML_Relativization/SML_Conclusions"
"Examples/Vector_Spaces/VS_Conclusions"
"Examples/TTS_Foundations/FNDS_Conclusions"
document_files
"iman.sty"
"extra.sty"
"isar.sty"
"style.sty"
"root.tex"
"root.bib"
\ No newline at end of file
diff --git a/thys/Types_To_Sets_Extension/document/root.bib b/thys/Types_To_Sets_Extension/document/root.bib
--- a/thys/Types_To_Sets_Extension/document/root.bib
+++ b/thys/Types_To_Sets_Extension/document/root.bib
@@ -1,302 +1,308 @@
@incollection{blanchette_types_2016,
address = {Heidelberg},
title = {From {Types} to {Sets} by {Local} {Type} {Definitions} in {Higher}-{Order} {Logic}},
volume = {9807},
isbn = {978-3-319-43144-4},
booktitle = {Interactive {Theorem} {Proving}},
publisher = {Springer},
author = {Kun{\v c}ar, Ond{\v r}ej and Popescu, Andrei},
editor = {Blanchette, Jasmin Christian and Merz, Stephan},
year = {2016},
pages = {200--218},
}
@book{urban_isabelle_2019,
title = {The {Isabelle} {Cookbook}: {A} {Gentle} {Tutorial} for {Programming} {Isabelle}/{ML}},
author = {Urban, Christian},
collaborator = {Berghofer, Stefan and Blanchette, Jasmin and B{\"o}hme, Sascha and Bulwahn, Lukas and Dawson, Jeremy and Kolanski, Rafal and Krauss, Alexander and Nipkow, Tobias and Schropp, Andreas and Sternagel, Christian},
year = {2019},
}
@incollection{immler_smooth_2019,
address = {New York},
series = {{CPP} 2019},
title = {Smooth {Manifolds} and {Types} to {Sets} for {Linear} {Algebra} in {Isabelle}/{HOL}},
isbn = {978-1-4503-6222-1},
booktitle = {Proceedings of the 8th {ACM} {SIGPLAN} {International} {Conference} on {Certified} {Programs} and {Proofs}, {CPP} 2019, {Cascais}, {Portugal}},
publisher = {ACM},
author = {Immler, Fabian and Zhan, Bohua},
editor = {Mahboubi, Assia and Myreen, Magnus O.},
year = {2019},
keywords = {Formalization of Mathematics, Higher Order Logic, Isabelle, Manifolds},
pages = {65--77},
}
@misc{immler_automation_2019,
title = {Automation for unoverloading definitions},
url = {http://isabelle.in.tum.de/repos/isabelle/rev/ab5a8a2519b0},
journal = {isabelle/changeset},
author = {Immler, Fabian},
year = {2019},
}
@phdthesis{kuncar_types_2015,
address = {Munich},
title = {Types, {Abstraction} and {Parametric} {Polymorphism} in {Higher}-{Order} {Logic}},
school = {Technische Universit{\"a}t M{\"u}nchen},
author = {Kun{\v c}ar, Ond{\v r}ej},
year = {2015},
}
@unpublished{wenzel_isabelle/isar_2019,
title = {The {Isabelle}/{Isar} {Implementation}},
author = {Wenzel, Makarius},
collaborator = {Berghofer, Stefan and Haftmann, Florian and Paulson, Larry},
year = {2019},
}
@unpublished{wenzel_isabelle/isar_2019-1,
title = {The {Isabelle}/{Isar} {Reference} {Manual}},
author = {Wenzel, Makarius},
collaborator = {Ballarin, Clemens and Berghofer, Stefan and Blanchette, Jasmin and Bourke, Timothy and Bulwahn, Lukas and Chaieb, Amine and Dixon, Lucas and Haftmann, Florian and Huffman, Brian and Hupel, Lars and Klein, Gerwin and Krauss, Alexander and Kun{\v c}ar, Ond{\v r}ej and Lochbihler, Andreas and Nipkow, Tobias and Noschinski, Lars and Oheimb, David von and Paulson, Larry and Skalberg, Sebastian and Sternagel, Christian and Traytel, Dmitriy},
year = {2019},
}
@phdthesis{wenzel_isabelle/isar_2001,
address = {Munich},
title = {Isabelle/{Isar} {\textemdash} {A} {Versatile} {Environment} for {Human}-{Readable} {Formal} {Proof} {Documents}},
school = {Technische Universit{\"a}t M{\"u}nchen},
author = {Wenzel, Markus M.},
year = {2001},
}
@inproceedings{gilcher_conditional_2017,
address = {Bras{\'i}lia, Brazil},
title = {Conditional {Parametricity} in {Isabelle}/{HOL}},
booktitle = {{TABLEAUX}/{FroCoS}/{ITP}},
author = {Gilcher, Jan and Lochbihler, Andreas and Traytel, Dmitriy},
year = {2017},
}
@article{divason_perron-frobenius_2016,
title = {Perron-{Frobenius} {Theorem} for {Spectral} {Radius} {Analysis}},
journal = {Archive of Formal Proofs},
author = {Divas{\'o}n, Jose and Kun{\v c}ar, Ond{\v r}ej and Thiemann, Ren{\'e} and Yamada, Akihisa},
year = {2016},
}
@article{maletzky_hilberts_2019,
title = {Hilbert's {Nullstellensatz}},
journal = {Archive of Formal Proofs},
author = {Maletzky, Alexander},
year = {2019},
}
@incollection{berardi_locales_2004,
address = {Heidelberg},
title = {Locales and {Locale} {Expressions} in {Isabelle}/{Isar}},
volume = {3085},
isbn = {978-3-540-22164-7},
booktitle = {Types for {Proofs} and {Programs}},
publisher = {Springer},
author = {Ballarin, Clemens},
editor = {Berardi, Stefano and Coppo, Mario and Damiani, Ferruccio},
year = {2004},
pages = {34--50},
}
@book{ballarin_isabellehol_2020,
title = {The {Isabelle}/{HOL} {Algebra} {Library}},
url = {https://isabelle.in.tum.de/website-Isabelle2020/dist/library/HOL/HOL-Algebra/document.pdf},
editor = {Ballarin, Clemens},
year = {2020},
}
@misc{noauthor_isabellehol_2020,
title = {Isabelle/{HOL} {Standard} {Library}},
url = {https://isabelle.in.tum.de/website-Isabelle2020/dist/library/HOL/HOL/index.html},
journal = {Isabelle/HOL},
year = {2020},
}
@misc{noauthor_isabellehol_2020-1,
title = {Isabelle/{HOL} {Analysis}},
url = {https://isabelle.in.tum.de/website-Isabelle2020/dist/library/HOL/HOL-Analysis/index.html},
journal = {Isabelle/HOL},
year = {2020},
}
@incollection{gonthier_lifting_2013,
address = {Heidelberg},
title = {Lifting and {Transfer}: {A} {Modular} {Design} for {Quotients} in {Isabelle}/{HOL}},
volume = {8307},
isbn = {978-3-319-03545-1},
booktitle = {Certified {Programs} and {Proofs}},
publisher = {Springer},
author = {Huffman, Brian and Kun{\v c}ar, Ond{\v r}ej},
editor = {Gonthier, Georges and Norrish, Michael},
year = {2013},
pages = {131--146},
}
@incollection{altenkirch_constructive_2007,
address = {Heidelberg},
title = {Constructive {Type} {Classes} in {Isabelle}},
volume = {4502},
isbn = {978-3-540-74464-1},
booktitle = {Types for {Proofs} and {Programs}},
publisher = {Springer},
author = {Haftmann, Florian and Wenzel, Makarius},
editor = {Altenkirch, Thorsten and McBride, Conor},
year = {2007},
pages = {160--174},
}
@article{ballarin_locales_2014,
title = {Locales: {A} {Module} {System} for {Mathematical} {Theories}},
volume = {52},
number = {2},
journal = {Journal of Automated Reasoning},
author = {Ballarin, Clemens},
year = {2014},
pages = {123--153},
}
@incollection{kaufmann_mechanized_2010,
address = {Heidelberg},
title = {A {Mechanized} {Translation} from {Higher}-{Order} {Logic} to {Set} {Theory}},
volume = {6172},
isbn = {978-3-642-14051-8},
booktitle = {Interactive {Theorem} {Proving}},
publisher = {Springer},
author = {Krauss, Alexander and Schropp, Andreas},
editor = {Kaufmann, Matt and Paulson, Lawrence C.},
year = {2010},
pages = {323--338},
}
@article{paulson_natural_1986,
title = {Natural {Deduction} as {Higher}-{Order} {Resolution}},
volume = {3},
number = {3},
journal = {The Journal of Logic Programming},
author = {Paulson, Lawrence C.},
year = {1986},
pages = {237--258},
}
@incollection{yang_comprehending_2017,
address = {Heidelberg},
title = {Comprehending {Isabelle}/{HOL}{\textquoteright}s {Consistency}},
volume = {10201},
isbn = {978-3-662-54433-4},
booktitle = {Programming {Languages} and {Systems}},
publisher = {Springer},
author = {Kun{\v c}ar, Ond{\v r}ej and Popescu, Andrei},
editor = {Yang, Hongseok},
year = {2017},
pages = {724--749},
}
@incollection{bertot_isar_1999,
address = {Heidelberg},
title = {Isar {\textemdash} {A} {Generic} {Interpretative} {Approach} to {Readable} {Formal} {Proof} {Documents}},
volume = {1690},
isbn = {978-3-540-66463-5},
booktitle = {Theorem {Proving} in {Higher} {Order} {Logics}},
publisher = {Springer},
author = {Wenzel, Markus},
editor = {Bertot, Yves and Dowek, Gilles and Th{\'e}ry, Laurent and Hirschowitz, Andr{\'e} and Paulin-Mohring, Christine},
year = {1999},
pages = {167--183},
}
@article{wenzel_isabelleisar_2007,
title = {Isabelle/{Isar} {\textemdash} a {Generic} {Framework} for {Human}-{Readable} {Proof} {Documents}},
volume = {10},
number = {23},
journal = {Studies in Logic, Grammar and Rhetoric},
author = {Wenzel, Makarius},
year = {2007},
pages = {277--297},
}
@incollection{kauers_context_2007,
address = {Heidelberg},
title = {Context {Aware} {Calculation} and {Deduction}: {Ring} {Equalities} {Via} {Gr{\"o}bner} {Bases} in {Isabelle}},
volume = {4573},
isbn = {978-3-540-73083-5},
booktitle = {Towards {Mechanized} {Mathematical} {Assistants}},
publisher = {Springer},
author = {Chaieb, Amine and Wenzel, Makarius},
editor = {Kauers, Manuel and Kerber, Manfred and Miner, Robert and Windsteiger, Wolfgang},
year = {2007},
pages = {27--39},
}
@inproceedings{lammich_automatic_2013,
address = {Heidelberg},
title = {Automatic {Data} {Refinement}},
isbn = {978-3-642-39634-2},
booktitle = {Interactive {Theorem} {Proving}},
publisher = {Springer},
author = {Lammich, Peter},
editor = {Blazy, Sandrine and Paulin-Mohring, Christine and Pichardie, David},
year = {2013},
keywords = {Automatic Data, Executable Code, Side Condition, Synthesis Problem, Type Constructor},
pages = {84--99},
}
@article{kuncar_types_2019,
title = {From {Types} to {Sets} by {Local} {Type} {Definition} in {Higher}-{Order} {Logic}},
volume = {62},
number = {2},
journal = {Journal of Automated Reasoning},
author = {Kun{\v c}ar, Ond{\v r}ej and Popescu, Andrei},
year = {2019},
pages = {237--260},
}
@article{paulson_foundation_1989,
title = {The {Foundation} of a {Generic} {Theorem} {Prover}},
volume = {5},
number = {3},
journal = {Journal of Automated Reasoning},
author = {Paulson, Lawrence C.},
year = {1989},
pages = {363--397},
}
@book{milner_definition_1997,
address = {Cambridge, Massachusetts},
title = {The {Definition} of {Standard} {ML} (revised)},
isbn = {978-0-262-63181-5},
publisher = {MIT Press},
author = {Milner, Robin and Tofte, Mads and Harper, Robert and MacQueen, David},
year = {1997},
keywords = {ML (Computer program language)},
}
@inproceedings{kammuller_locales_1999,
address = {Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Locales {\textemdash} {A} {Sectioning} {Concept} for {Isabelle}},
isbn = {978-3-540-48256-7},
booktitle = {Theorem {Proving} in {Higher} {Order} {Logics}},
publisher = {Springer},
author = {Kamm{\"u}ller, Florian and Wenzel, Markus and Paulson, Lawrence C.},
editor = {Bertot, Yves and Dowek, Gilles and Th{\'e}ry, Laurent and Hirschowitz, Andr{\'e} and Paulin-Mohring, Christine},
year = {1999},
pages = {149--165},
}
@misc{immler_re_2019,
title = {Re: [isabelle] {Several} questions in relation to a use case for "types to sets"},
url = {https://lists.cam.ac.uk/pipermail/cl-isabelle-users/2019-January/msg00072.html},
journal = {The Cl-isabelle-users Archives},
author = {Immler, Fabian},
year = {2019},
}
@incollection{wenzel_type_1997,
address = {Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Type {Classes} and {Overloading} in {Higher}-{Order} {Logic}},
isbn = {978-3-540-69526-4},
booktitle = {Theorem {Proving} in {Higher} {Order} {Logics}},
publisher = {Springer},
author = {Wenzel, Markus},
editor = {Gunter, Elsa L. and Felty, Amy P.},
year = {1997},
keywords = {Type Constructor, Class Definition, Deductive System, Type Class, Type Definition},
pages = {307--322},
}
@incollection{nipkow_type_1991,
address = {Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Type {Classes} and {Overloading} {Resolution} via {Order}-{Sorted} {Unification}},
volume = {523},
isbn = {978-3-540-47599-6},
booktitle = {Functional {Programming} {Languages} and {Computer} {Architecture}},
publisher = {Springer},
author = {Nipkow, Tobias and Snelting, Gregor},
editor = {Hughes, John},
year = {1991},
pages = {1--14},
}
@article{milehins_conditional_2021,
title = {Conditional {Transfer} {Rule}},
journal = {Archive of Formal Proofs},
author = {Milehins, Mihails},
year = {2021},
}
+@article{kappelmann_speccheck_2021,
+ title = {{SpecCheck} - {Specification}-{Based} {Testing} for {Isabelle}/{ML}},
+ journal = {Archive of Formal Proofs},
+ author = {Kappelmann, Kevin and Bulwahn, Lukas and Willenbrink, Sebastian},
+ year = {2021},
+}
diff --git a/thys/Types_To_Sets_Extension/document/root.tex b/thys/Types_To_Sets_Extension/document/root.tex
--- a/thys/Types_To_Sets_Extension/document/root.tex
+++ b/thys/Types_To_Sets_Extension/document/root.tex
@@ -1,128 +1,129 @@
\PassOptionsToPackage{ngerman,main=english}{babel}
\documentclass[11pt,a4paper,fleqn]{report}
\usepackage{iman,extra,isar}
\usepackage{isabelle,isabellesym}
\usepackage{railsetup}
\usepackage[margin={1in,1in}]{geometry}
\usepackage[T1]{fontenc}
\usepackage{lmodern}
\usepackage{babel}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{amsthm}
\usepackage{xspace}
\usepackage{MnSymbol}
\usepackage[utf8]{inputenc}
\usepackage{enumitem}
\usepackage{fontspec}
\usepackage{graphicx}
\usepackage{caption}
\usepackage{proof}
\usepackage{algorithm,algpseudocode}
% bibliography
\usepackage[nottoc]{tocbibind}
\usepackage[square,numbers]{natbib}
\bibliographystyle{abbrvnat}
\usepackage{style}
% this should be the last package used
\usepackage{pdfsetup}
% drop Isabelle tags
\isadroptag{theory}
% enumitem configuration
\setlist{noitemsep,topsep=0pt,parsep=0pt,partopsep=0pt}
% urls in roman style, theory text in math-similar italics
\urlstyle{rm}
\isabellestyle{it}
\begin{document}
\sloppy
\title{
Introduction to the Extension of the Framework
Types-To-Sets for Isabelle/HOL
}
\author{Mihails Milehins}
\maketitle
\newpage
\begin{abstract}
In \cite{blanchette_types_2016, kuncar_types_2019},
Ondřej Kunčar and Andrei Popescu propose an
extension of the logic \textit{Isabelle/HOL} and an associated algorithm for the
relativization of \textit{type-based theorems} to more flexible
\textit{set-based theorems}, collectively referred to as
\textit{Types-To-Sets}. One of the aims of their work was to open an
opportunity for the development of a
software tool for applied relativization
in the implementation of the logic
Isabelle/HOL in the proof assistant \textit{Isabelle}
\cite{paulson_natural_1986}. In this document,
we provide a prototype of a software framework for the interactive
automated relativization
of definitions and theorems in Isabelle/HOL, developed as an extension of
the proof language \textit{Isabelle/Isar}
\cite{bertot_isar_1999,wenzel_isabelleisar_2007}.
The software framework incorporates the implementation of the
proposed extension of the logic and associated tools provided in
\cite{blanchette_types_2016} and improved further in \cite{immler_smooth_2019}
by Fabian Immler and Bohua Zhan, and builds upon
some of the ideas for further work expressed in \cite{immler_smooth_2019}
and \cite{kuncar_types_2019}.
\end{abstract}
\newpage
\renewcommand{\abstractname}{Acknowledgements}
\begin{abstract}
The author would like to acknowledge the assistance that he received from
the users of the
\href{https://lists.cam.ac.uk/mailman/listinfo/cl-isabelle-users}{mailing list of Isabelle}
in the form of answers given to his general queries and
the persons responsible for the development of
Types-To-Sets \cite{blanchette_types_2016, kuncar_types_2019}
and its official extensions
\cite{immler_smooth_2019, immler_automation_2019}
for providing explanations of the existing functionality of the framework
and some of the ideas that laid the foundation of this work.
Special thanks go to Fabian Immler for the conceptual design of the
commands \textbf{tts\_context},
-\textbf{tts\_lemmas} and \textbf{tts\_lemma}.
-Special thanks also go to Andrei Popescu for trying the software and
-providing feedback.
+\textbf{tts\_lemmas} and \textbf{tts\_lemma},
+to Andrei Popescu for trying the software and
+providing feedback, and to Kevin Kappelmann for explaining certain aspects of
+\cite{kappelmann_speccheck_2021}.
Furthermore, the author would like to acknowledge the positive
impact of \cite{urban_isabelle_2019} and
\cite{wenzel_isabelle/isar_2019} on his ability to code in Isabelle/ML
\cite{milner_definition_1997,wenzel_isabelle/isar_2019}.
The author would also like to acknowledge
the positive role that the numerous Q\&A posted on the Stack Exchange network
(especially Stack Overflow and TeX Stack Exchange) played in the
development of this work.
Finally, the author would like to express gratitude to all members of his family
and friends for their continuous support.
\end{abstract}
\newpage
\tableofcontents
\newpage
\parindent 0pt\parskip 0.5ex
\input{session}
\newpage
\bibliography{root}
\end{document}
\ No newline at end of file
