diff --git a/doc-src/Nitpick/nitpick.tex b/doc-src/Nitpick/nitpick.tex --- a/doc-src/Nitpick/nitpick.tex +++ b/doc-src/Nitpick/nitpick.tex @@ -1,2512 +1,2723 @@ \documentclass[a4paper,12pt]{article} \usepackage[T1]{fontenc} \usepackage{amsmath} \usepackage{amssymb} \usepackage[english,french]{babel} \usepackage{color} \usepackage{graphicx} %\usepackage{mathpazo} \usepackage{multicol} \usepackage{stmaryrd} %\usepackage[scaled=.85]{beramono} \usepackage{../iman,../pdfsetup} %\oddsidemargin=4.6mm %\evensidemargin=4.6mm %\textwidth=150mm %\topmargin=4.6mm %\headheight=0mm %\headsep=0mm %\textheight=234mm \def\Colon{\mathord{:\mkern-1.5mu:}} %\def\lbrakk{\mathopen{\lbrack\mkern-3.25mu\lbrack}} %\def\rbrakk{\mathclose{\rbrack\mkern-3.255mu\rbrack}} \def\lparr{\mathopen{(\mkern-4mu\mid}} \def\rparr{\mathclose{\mid\mkern-4mu)}} -\def\undef{\textit{undefined}} \def\unk{{?}} +\def\undef{(\lambda x.\; \unk)} %\def\unr{\textit{others}} \def\unr{\ldots} \def\Abs#1{\hbox{\rm{\flqq}}{\,#1\,}\hbox{\rm{\frqq}}} \def\Q{{\smash{\lower.2ex\hbox{$\scriptstyle?$}}}} \hyphenation{Mini-Sat size-change First-Steps grand-parent nit-pick counter-example counter-examples data-type data-types co-data-type co-data-types in-duc-tive co-in-duc-tive} \urlstyle{tt} \begin{document} \selectlanguage{english} \title{\includegraphics[scale=0.5]{isabelle_nitpick} \\[4ex] Picking Nits \\[\smallskipamount] \Large A User's Guide to Nitpick for Isabelle/HOL} \author{\hbox{} \\ Jasmin Christian Blanchette \\ {\normalsize Institut f\"ur Informatik, Technische Universit\"at M\"unchen} \\ \hbox{}} \maketitle \tableofcontents \setlength{\parskip}{.7em plus .2em minus .1em} \setlength{\parindent}{0pt} \setlength{\abovedisplayskip}{\parskip} \setlength{\abovedisplayshortskip}{.9\parskip} \setlength{\belowdisplayskip}{\parskip} \setlength{\belowdisplayshortskip}{.9\parskip} % General-purpose enum environment with correct spacing \newenvironment{enum}% {\begin{list}{}{% \setlength{\topsep}{.1\parskip}% \setlength{\partopsep}{.1\parskip}% \setlength{\itemsep}{\parskip}% \advance\itemsep by-\parsep}} {\end{list}} \def\pre{\begingroup\vskip0pt plus1ex\advance\leftskip by\leftmargin \advance\rightskip by\leftmargin} \def\post{\vskip0pt plus1ex\endgroup} \def\prew{\pre\advance\rightskip by-\leftmargin} \def\postw{\post} \section{Introduction} \label{introduction} Nitpick \cite{blanchette-nipkow-2009} is a counterexample generator for Isabelle/HOL \cite{isa-tutorial} that is designed to handle formulas combining (co)in\-duc\-tive datatypes, (co)in\-duc\-tively defined predicates, and quantifiers. It builds on Kodkod \cite{torlak-jackson-2007}, a highly optimized first-order relational model finder developed by the Software Design Group at MIT. It is conceptually similar to Refute \cite{weber-2008}, from which it borrows many ideas and code fragments, but it benefits from Kodkod's optimizations and a new encoding scheme. The name Nitpick is shamelessly appropriated from a now retired Alloy precursor. Nitpick is easy to use---you simply enter \textbf{nitpick} after a putative theorem and wait a few seconds. Nonetheless, there are situations where knowing how it works under the hood and how it reacts to various options helps increase the test coverage. This manual also explains how to install the tool on your workstation. Should the motivation fail you, think of the many hours of hard work Nitpick will save you. Proving non-theorems is \textsl{hard work}. 
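For a first taste of the interaction, here is a minimal session sketch; the putative lemma is chosen arbitrarily for illustration, and the output is abridged:

\prew
\textbf{lemma}~``$\textit{rev}~\textit{xs} = \textit{xs}$'' \\
\textbf{nitpick} \\[2\smallskipamount]
{\slshape Nitpick found a counterexample \,\ldots}
\postw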
Another common use of Nitpick is to find out whether the axioms of a locale are satisfiable, while the locale is being developed. To check this, it suffices to write \prew \textbf{lemma}~``$\textit{False}$'' \\ \textbf{nitpick}~[\textit{show\_all}] \postw after the locale's \textbf{begin} keyword. To falsify \textit{False}, Nitpick must find a model for the axioms. If it finds no model, we have an indication that the axioms might be unsatisfiable. Nitpick requires the Kodkodi package for Isabelle as well as a Java 1.5 virtual machine called \texttt{java}. The examples presented in this manual can be found in Isabelle's \texttt{src/HOL/Nitpick\_Examples/Manual\_Nits.thy} theory. Throughout this manual, we will explicitly invoke the \textbf{nitpick} command. Nitpick also provides an automatic mode that can be enabled using the ``Auto Nitpick'' option from the ``Isabelle'' menu in Proof General. In this mode, Nitpick is run on every newly entered theorem, much like Auto Quickcheck. The collective time limit for Auto Nitpick and Auto Quickcheck can be set using the ``Auto Counterexample Time Limit'' option. \newbox\boxA \setbox\boxA=\hbox{\texttt{nospam}} The known bugs and limitations at the time of writing are listed in \S\ref{known-bugs-and-limitations}. Comments and bug reports concerning Nitpick or this manual should be directed to \texttt{blan{\color{white}nospam}\kern-\wd\boxA{}chette@\allowbreak in.\allowbreak tum.\allowbreak de}. \vskip2.5\smallskipamount \textbf{Acknowledgment.} The author would like to thank Mark Summerfield for suggesting several textual improvements. % and Perry James for reporting a typo. \section{First Steps} \label{first-steps} This section introduces Nitpick by presenting small examples. If possible, you should try out the examples on your workstation. Your theory file should start the standard way: \prew \textbf{theory}~\textit{Scratch} \\ \textbf{imports}~\textit{Main} \\ \textbf{begin} \postw The results presented here were obtained using the JNI version of MiniSat and with multithreading disabled to reduce nondeterminism. This was done by adding the line \prew \textbf{nitpick\_params} [\textit{sat\_solver}~= \textit{MiniSatJNI}, \,\textit{max\_threads}~= 1] \postw after the \textbf{begin} keyword. The JNI version of MiniSat is bundled with Kodkodi and is precompiled for the major platforms. Other SAT solvers can also be installed, as explained in \S\ref{optimizations}. If you have already configured SAT solvers in Isabelle (e.g., for Refute), these will also be available to Nitpick. \subsection{Propositional Logic} \label{propositional-logic} Let's start with a trivial example from propositional logic: \prew \textbf{lemma}~``$P \longleftrightarrow Q$'' \\ \textbf{nitpick} \postw You should get the following output: \prew \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \textit{True}$ \\ \hbox{}\qquad\qquad $Q = \textit{False}$ \postw Nitpick can also be invoked on individual subgoals, as in the example below: \prew \textbf{apply}~\textit{auto} \\[2\smallskipamount] {\slshape goal (2 subgoals): \\ -\ 1. $P\,\Longrightarrow\, Q$ \\ -\ 2. $Q\,\Longrightarrow\, P$} \\[2\smallskipamount] +\phantom{0}1. $P\,\Longrightarrow\, Q$ \\ +\phantom{0}2. 
$Q\,\Longrightarrow\, P$} \\[2\smallskipamount] \textbf{nitpick}~1 \\[2\smallskipamount] {\slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \textit{True}$ \\ \hbox{}\qquad\qquad $Q = \textit{False}$} \\[2\smallskipamount] \textbf{nitpick}~2 \\[2\smallskipamount] {\slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \textit{False}$ \\ \hbox{}\qquad\qquad $Q = \textit{True}$} \\[2\smallskipamount] \textbf{oops} \postw \subsection{Type Variables} \label{type-variables} If you are left unimpressed by the previous example, don't worry. The next one is more mind- and computer-boggling: \prew \textbf{lemma} ``$P~x\,\Longrightarrow\, P~(\textrm{THE}~y.\;P~y)$'' \postw \pagebreak[2] %% TYPESETTING The putative lemma involves the definite description operator, {THE}, presented in section 5.10.1 of the Isabelle tutorial \cite{isa-tutorial}. The operator is defined by the axiom $(\textrm{THE}~x.\; x = a) = a$. The putative lemma is merely asserting the indefinite description operator axiom with {THE} substituted for {SOME}. The free variable $x$ and the bound variable $y$ have type $'a$. For formulas containing type variables, Nitpick enumerates the possible domains for each type variable, up to a given cardinality (8 by default), looking for a finite countermodel: \prew \textbf{nitpick} [\textit{verbose}] \\[2\smallskipamount] \slshape Trying 8 scopes: \nopagebreak \\ \hbox{}\qquad \textit{card}~$'a$~= 1; \\ \hbox{}\qquad \textit{card}~$'a$~= 2; \\ \hbox{}\qquad $\qquad\vdots$ \\[.5\smallskipamount] \hbox{}\qquad \textit{card}~$'a$~= 8. \\[2\smallskipamount] Nitpick found a counterexample for \textit{card} $'a$~= 3: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \{a_2,\, a_3\}$ \\ \hbox{}\qquad\qquad $x = a_3$ \\[2\smallskipamount] Total time: 580 ms. \postw Nitpick found a counterexample in which $'a$ has cardinality 3. (For cardinalities 1 and 2, the formula holds.) In the counterexample, the three values of type $'a$ are written $a_1$, $a_2$, and $a_3$. The message ``Trying $n$ scopes: {\ldots}''\ is shown only if the option \textit{verbose} is enabled. You can specify \textit{verbose} each time you invoke \textbf{nitpick}, or you can set it globally using the command \prew \textbf{nitpick\_params} [\textit{verbose}] \postw This command also displays the current default values for all of the options supported by Nitpick. The options are listed in \S\ref{option-reference}. \subsection{Constants} \label{constants} By just looking at Nitpick's output, it might not be clear why the counterexample in \S\ref{type-variables} is genuine. Let's invoke Nitpick again, this time telling it to show the values of the constants that occur in the formula: \prew \textbf{lemma}~``$P~x\,\Longrightarrow\, P~(\textrm{THE}~y.\;P~y)$'' \\ \textbf{nitpick}~[\textit{show\_consts}] \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 3: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \{a_2,\, a_3\}$ \\ \hbox{}\qquad\qquad $x = a_3$ \\ \hbox{}\qquad Constant: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{The}~\textsl{fallback} = a_1$ \postw We can see more clearly now. Since the predicate $P$ isn't true for a unique value, $\textrm{THE}~y.\;P~y$ can denote any value of type $'a$, even $a_1$. 
Since $P~a_1$ is false, the entire formula is falsified. As an optimization, Nitpick's preprocessor introduced the special constant ``\textit{The} fallback'' corresponding to $\textrm{THE}~y.\;P~y$ (i.e., $\mathit{The}~(\lambda y.\;P~y)$) when there doesn't exist a unique $y$ satisfying $P~y$. We disable this optimization by passing the \textit{full\_descrs} option: \prew \textbf{nitpick}~[\textit{full\_descrs},\, \textit{show\_consts}] \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 3: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \{a_2,\, a_3\}$ \\ \hbox{}\qquad\qquad $x = a_3$ \\ \hbox{}\qquad Constant: \nopagebreak \\ \hbox{}\qquad\qquad $\hbox{\slshape THE}~y.\;P~y = a_1$ \postw As the result of another optimization, Nitpick directly assigned a value to the subterm $\textrm{THE}~y.\;P~y$, rather than to the \textit{The} constant. If we disable this second optimization by using the command \prew \textbf{nitpick}~[\textit{dont\_specialize},\, \textit{full\_descrs},\, \textit{show\_consts}] \postw we finally get \textit{The}: \prew \slshape Constant: \nopagebreak \\ \hbox{}\qquad $\mathit{The} = \undef{} (\!\begin{aligned}[t]% & \{\} := a_3,\> \{a_3\} := a_3,\> \{a_2\} := a_2, \\[-2pt] %% TYPESETTING & \{a_2, a_3\} := a_1,\> \{a_1\} := a_1,\> \{a_1, a_3\} := a_3, \\[-2pt] & \{a_1, a_2\} := a_3,\> \{a_1, a_2, a_3\} := a_3)\end{aligned}$ \postw Notice that $\textit{The}~(\lambda y.\;P~y) = \textit{The}~\{a_2, a_3\} = a_1$, -just like before.\footnote{The \undef{} symbol's presence is explained as -follows: In higher-order logic, any function can be built from the undefined -function using repeated applications of the function update operator $f(x := -y)$, just like any list can be built from the empty list using $x \mathbin{\#} -xs$.} +just like before.\footnote{The Isabelle/HOL notation $f(x := +y)$ denotes the function that maps $x$ to $y$ and that otherwise behaves like +$f$.} Our misadventures with THE suggest adding `$\exists!x{.}$' (``there exists a unique $x$ such that'') at the front of our putative lemma's assumption: \prew \textbf{lemma}~``$\exists {!}x.\; P~x\,\Longrightarrow\, P~(\textrm{THE}~y.\;P~y)$'' \postw The fix appears to work: \prew \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found no counterexample. \postw We can further increase our confidence in the formula by exhausting all cardinalities up to 50: \prew \textbf{nitpick} [\textit{card} $'a$~= 1--50]\footnote{The symbol `--' can be entered as \texttt{-} (hyphen) or \texttt{\char`\\\char`\}.} \\[2\smallskipamount] \slshape Nitpick found no counterexample. \postw Let's see if Sledgehammer \cite{sledgehammer-2009} can find a proof: \prew \textbf{sledgehammer} \\[2\smallskipamount] {\slshape Sledgehammer: external prover ``$e$'' for subgoal 1: \\ $\exists{!}x.\; P~x\,\Longrightarrow\, P~(\hbox{\slshape THE}~y.\; P~y)$ \\ Try this command: \textrm{apply}~(\textit{metis~the\_equality})} \\[2\smallskipamount] \textbf{apply}~(\textit{metis~the\_equality\/}) \nopagebreak \\[2\smallskipamount] {\slshape No subgoals!}% \\[2\smallskipamount] %\textbf{done} \postw This must be our lucky day. \subsection{Skolemization} \label{skolemization} Are all invertible functions onto? 
Let's find out: \prew \textbf{lemma} ``$\exists g.\; \forall x.~g~(f~x) = x \,\Longrightarrow\, \forall y.\; \exists x.~y = f~x$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 2 and \textit{card} $'b$~=~1: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $f = \undef{}(b_1 := a_1)$ \\ \hbox{}\qquad Skolem constants: \nopagebreak \\ \hbox{}\qquad\qquad $g = \undef{}(a_1 := b_1,\> a_2 := b_1)$ \\ \hbox{}\qquad\qquad $y = a_2$ \postw Although $f$ is the only free variable occurring in the formula, Nitpick also displays values for the bound variables $g$ and $y$. These values are available to Nitpick because it performs skolemization as a preprocessing step. In the previous example, skolemization only affected the outermost quantifiers. This is not always the case, as illustrated below: \prew \textbf{lemma} ``$\exists x.\; \forall f.\; f~x = x$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 2: \\[2\smallskipamount] \hbox{}\qquad Skolem constant: \nopagebreak \\ \hbox{}\qquad\qquad $\lambda x.\; f = \undef{}(\!\begin{aligned}[t] & a_1 := \undef{}(a_1 := a_2,\> a_2 := a_1), \\[-2pt] & a_2 := \undef{}(a_1 := a_1,\> a_2 := a_1))\end{aligned}$ \postw The variable $f$ is bound within the scope of $x$; therefore, $f$ depends on $x$, as suggested by the notation $\lambda x.\,f$. If $x = a_1$, then $f$ is the function that maps $a_1$ to $a_2$ and vice versa; otherwise, $x = a_2$ and $f$ maps both $a_1$ and $a_2$ to $a_1$. In both cases, $f~x \not= x$. The source of the Skolem constants is sometimes more obscure: \prew \textbf{lemma} ``$\mathit{refl}~r\,\Longrightarrow\, \mathit{sym}~r$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 2: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $r = \{(a_1, a_1),\, (a_2, a_1),\, (a_2, a_2)\}$ \\ \hbox{}\qquad Skolem constants: \nopagebreak \\ \hbox{}\qquad\qquad $\mathit{sym}.x = a_2$ \\ \hbox{}\qquad\qquad $\mathit{sym}.y = a_1$ \postw What happened here is that Nitpick expanded the \textit{sym} constant to its definition: \prew $\mathit{sym}~r \,\equiv\, \forall x\> y.\,\> (x, y) \in r \longrightarrow (y, x) \in r.$ \postw As their names suggest, the Skolem constants $\mathit{sym}.x$ and $\mathit{sym}.y$ are simply the bound variables $x$ and $y$ from \textit{sym}'s definition. Although skolemization is a useful optimization, you can disable it by invoking Nitpick with \textit{dont\_skolemize}. See \S\ref{optimizations} for details. \subsection{Natural Numbers and Integers} \label{natural-numbers-and-integers} Because of the axiom of infinity, the type \textit{nat} does not admit any finite models. To deal with this, Nitpick's approach is to consider finite subsets $N$ of \textit{nat} and maps all numbers $\notin N$ to the undefined value (displayed as `$\unk$'). The type \textit{int} is handled similarly. Internally, undefined values lead to a three-valued logic. 
Here is an example involving \textit{int}: \prew \textbf{lemma} ``$\lbrakk i \le j;\> n \le (m{\Colon}\mathit{int})\rbrakk \,\Longrightarrow\, i * n + j * m \le i * m + j * n$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $i = 0$ \\ \hbox{}\qquad\qquad $j = 1$ \\ \hbox{}\qquad\qquad $m = 1$ \\ \hbox{}\qquad\qquad $n = 0$ \postw Internally, Nitpick uses either a unary or a binary representation of numbers. The unary representation is more efficient but only suitable for numbers very close to zero. By default, Nitpick attempts to choose the more appropriate encoding by inspecting the formula at hand. This behavior can be overridden by passing either \textit{unary\_ints} or \textit{binary\_ints} as option. For binary notation, the number of bits to use can be specified using the \textit{bits} option. For example: \prew \textbf{nitpick} [\textit{binary\_ints}, \textit{bits}${} = 16$] \postw With infinite types, we don't always have the luxury of a genuine counterexample and must often content ourselves with a potential one. The tedious task of finding out whether the potential counterexample is in fact genuine can be outsourced to \textit{auto} by passing \textit{check\_potential}. For example: \prew \textbf{lemma} ``$\forall n.\; \textit{Suc}~n \mathbin{\not=} n \,\Longrightarrow\, P$'' \\ \textbf{nitpick} [\textit{card~nat}~= 100, \textit{check\_potential}] \\[2\smallskipamount] \slshape Nitpick found a potential counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $P = \textit{False}$ \\[2\smallskipamount] Confirmation by ``\textit{auto}'': The above counterexample is genuine. \postw You might wonder why the counterexample is first reported as potential. The root of the problem is that the bound variable in $\forall n.\; \textit{Suc}~n \mathbin{\not=} n$ ranges over an infinite type. If Nitpick finds an $n$ such that $\textit{Suc}~n \mathbin{=} n$, it evaluates the assumption to \textit{False}; but otherwise, it does not know anything about values of $n \ge \textit{card~nat}$ and must therefore evaluate the assumption to $\unk$, not \textit{True}. Since the assumption can never be satisfied, the putative lemma can never be falsified. Incidentally, if you distrust the so-called genuine counterexamples, you can enable \textit{check\_\allowbreak genuine} to verify them as well. However, be aware that \textit{auto} will usually fail to prove that the counterexample is genuine or spurious. Some conjectures involving elementary number theory make Nitpick look like a giant with feet of clay: \prew \textbf{lemma} ``$P~\textit{Suc}$'' \\ \textbf{nitpick} [\textit{card} = 1--6] \\[2\smallskipamount] \slshape Nitpick found no counterexample. \postw On any finite set $N$, \textit{Suc} is a partial function; for example, if $N = \{0, 1, \ldots, k\}$, then \textit{Suc} is $\{0 \mapsto 1,\, 1 \mapsto 2,\, \ldots,\, k \mapsto \unk\}$, which evaluates to $\unk$ when passed as argument to $P$. As a result, $P~\textit{Suc}$ is always $\unk$. 
The next example is similar: \prew \textbf{lemma} ``$P~(\textit{op}~{+}\Colon \textit{nat}\mathbin{\Rightarrow}\textit{nat}\mathbin{\Rightarrow}\textit{nat})$'' \\ \textbf{nitpick} [\textit{card nat} = 1] \\[2\smallskipamount] {\slshape Nitpick found a counterexample:} \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $P = \{\}$ \\[2\smallskipamount] \textbf{nitpick} [\textit{card nat} = 2] \\[2\smallskipamount] {\slshape Nitpick found no counterexample.} \postw The problem here is that \textit{op}~+ is total when \textit{nat} is taken to be $\{0\}$ but becomes partial as soon as we add $1$, because $1 + 1 \notin \{0, 1\}$. Because numbers are infinite and are approximated using a three-valued logic, there is usually no need to systematically enumerate domain sizes. If Nitpick cannot find a genuine counterexample for \textit{card~nat}~= $k$, it is very unlikely that one could be found for smaller domains. (The $P~(\textit{op}~{+})$ example above is an exception to this principle.) Nitpick nonetheless enumerates all cardinalities from 1 to 8 for \textit{nat}, mainly because smaller cardinalities are fast to handle and give rise to simpler counterexamples. This is explained in more detail in \S\ref{scope-monotonicity}. \subsection{Inductive Datatypes} \label{inductive-datatypes} Like natural numbers and integers, inductive datatypes with recursive constructors admit no finite models and must be approximated by a subterm-closed subset. For example, using a cardinality of 10 for ${'}a~\textit{list}$, Nitpick looks for all counterexamples that can be built using at most 10 different lists. Let's see with an example involving \textit{hd} (which returns the first element of a list) and $@$ (which concatenates two lists): \prew \textbf{lemma} ``$\textit{hd}~(\textit{xs} \mathbin{@} [y, y]) = \textit{hd}~\textit{xs}$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 3: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{xs} = []$ \\ \hbox{}\qquad\qquad $\textit{y} = a_3$ \postw To see why the counterexample is genuine, we enable \textit{show\_consts} and \textit{show\_\allowbreak datatypes}: \prew {\slshape Datatype:} \\ \hbox{}\qquad $'a$~\textit{list}~= $\{[],\, [a_3, a_3],\, [a_3],\, \unr\}$ \\ {\slshape Constants:} \\ -\hbox{}\qquad $\lambda x_1.\; x_1 \mathbin{@} [y, y] = \undef([] := [a_3, a_3],\> [a_3, a_3] := \unk,\> [a_3] := \unk)$ \\ +\hbox{}\qquad $\lambda x_1.\; x_1 \mathbin{@} [y, y] = \undef([] := [a_3, a_3])$ \\ \hbox{}\qquad $\textit{hd} = \undef([] := a_2,\> [a_3, a_3] := a_3,\> [a_3] := a_3)$ \postw Since $\mathit{hd}~[]$ is undefined in the logic, it may be given any value, including $a_2$. The second constant, $\lambda x_1.\; x_1 \mathbin{@} [y, y]$, is simply the append operator whose second argument is fixed to be $[y, y]$. Appending $[a_3, a_3]$ to $[a_3]$ would normally give $[a_3, a_3, a_3]$, but this value is not representable in the subset of $'a$~\textit{list} considered by Nitpick, which is shown under the ``Datatype'' heading; hence the result is $\unk$. Similarly, appending $[a_3, a_3]$ to itself gives $\unk$. 
Given \textit{card}~$'a = 3$ and \textit{card}~$'a~\textit{list} = 3$, Nitpick considers the following subsets: \kern-.5\smallskipamount %% TYPESETTING \prew \begin{multicols}{3} $\{[],\, [a_1],\, [a_2]\}$; \\ $\{[],\, [a_1],\, [a_3]\}$; \\ $\{[],\, [a_2],\, [a_3]\}$; \\ $\{[],\, [a_1],\, [a_1, a_1]\}$; \\ $\{[],\, [a_1],\, [a_2, a_1]\}$; \\ $\{[],\, [a_1],\, [a_3, a_1]\}$; \\ $\{[],\, [a_2],\, [a_1, a_2]\}$; \\ $\{[],\, [a_2],\, [a_2, a_2]\}$; \\ $\{[],\, [a_2],\, [a_3, a_2]\}$; \\ $\{[],\, [a_3],\, [a_1, a_3]\}$; \\ $\{[],\, [a_3],\, [a_2, a_3]\}$; \\ $\{[],\, [a_3],\, [a_3, a_3]\}$. \end{multicols} \postw \kern-2\smallskipamount %% TYPESETTING All subterm-closed subsets of $'a~\textit{list}$ consisting of three values are listed and only those. As an example of a non-subterm-closed subset, consider $\mathcal{S} = \{[],\, [a_1],\,\allowbreak [a_1, a_3]\}$, and observe that $[a_1, a_3]$ (i.e., $a_1 \mathbin{\#} [a_3]$) has $[a_3] \notin \mathcal{S}$ as a subterm. Here's another m\"ochtegern-lemma that Nitpick can refute without a blink: \prew \textbf{lemma} ``$\lbrakk \textit{length}~\textit{xs} = 1;\> \textit{length}~\textit{ys} = 1 \rbrakk \,\Longrightarrow\, \textit{xs} = \textit{ys}$'' \\ \textbf{nitpick} [\textit{show\_datatypes}] \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 3: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{xs} = [a_2]$ \\ \hbox{}\qquad\qquad $\textit{ys} = [a_3]$ \\ \hbox{}\qquad Datatypes: \\ \hbox{}\qquad\qquad $\textit{nat} = \{0,\, 1,\, 2,\, \unr\}$ \\ \hbox{}\qquad\qquad $'a$~\textit{list} = $\{[],\, [a_3],\, [a_2],\, \unr\}$ \postw Because datatypes are approximated using a three-valued logic, there is usually no need to systematically enumerate cardinalities: If Nitpick cannot find a genuine counterexample for \textit{card}~$'a~\textit{list}$~= 10, it is very unlikely that one could be found for smaller cardinalities. \subsection{Typedefs, Records, Rationals, and Reals} \label{typedefs-records-rationals-and-reals} Nitpick generally treats types declared using \textbf{typedef} as datatypes whose single constructor is the corresponding \textit{Abs\_\kern.1ex} function. For example: \prew \textbf{typedef}~\textit{three} = ``$\{0\Colon\textit{nat},\, 1,\, 2\}$'' \\ \textbf{by}~\textit{blast} \\[2\smallskipamount] \textbf{definition}~$A \mathbin{\Colon} \textit{three}$ \textbf{where} ``\kern-.1em$A \,\equiv\, \textit{Abs\_\allowbreak three}~0$'' \\ \textbf{definition}~$B \mathbin{\Colon} \textit{three}$ \textbf{where} ``$B \,\equiv\, \textit{Abs\_three}~1$'' \\ \textbf{definition}~$C \mathbin{\Colon} \textit{three}$ \textbf{where} ``$C \,\equiv\, \textit{Abs\_three}~2$'' \\[2\smallskipamount] \textbf{lemma} ``$\lbrakk P~A;\> P~B\rbrakk \,\Longrightarrow\, P~x$'' \\ \textbf{nitpick} [\textit{show\_datatypes}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $P = \{\Abs{1},\, \Abs{0}\}$ \\ \hbox{}\qquad\qquad $x = \Abs{2}$ \\ \hbox{}\qquad Datatypes: \\ \hbox{}\qquad\qquad $\textit{nat} = \{0,\, 1,\, 2,\, \unr\}$ \\ \hbox{}\qquad\qquad $\textit{three} = \{\Abs{2},\, \Abs{1},\, \Abs{0},\, \unr\}$ \postw %% MARK In the output above, $\Abs{n}$ abbreviates $\textit{Abs\_three}~n$. 
%% MARK Records, which are implemented as \textbf{typedef}s behind the scenes, are handled in much the same way: \prew \textbf{record} \textit{point} = \\ \hbox{}\quad $\textit{Xcoord} \mathbin{\Colon} \textit{int}$ \\ \hbox{}\quad $\textit{Ycoord} \mathbin{\Colon} \textit{int}$ \\[2\smallskipamount] \textbf{lemma} ``$\textit{Xcoord}~(p\Colon\textit{point}) = \textit{Xcoord}~(q\Colon\textit{point})$'' \\ \textbf{nitpick} [\textit{show\_datatypes}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $p = \lparr\textit{Xcoord} = 0,\> \textit{Ycoord} = 0\rparr$ \\ \hbox{}\qquad\qquad $q = \lparr\textit{Xcoord} = 1,\> \textit{Ycoord} = 1\rparr$ \\ \hbox{}\qquad Datatypes: \\ \hbox{}\qquad\qquad $\textit{int} = \{0,\, 1,\, \unr\}$ \\ \hbox{}\qquad\qquad $\textit{point} = \{\lparr\textit{Xcoord} = 1,\> \textit{Ycoord} = 1\rparr,\> \lparr\textit{Xcoord} = 0,\> \textit{Ycoord} = 0\rparr,\, \unr\}$\kern-1pt %% QUIET \postw Finally, Nitpick provides rudimentary support for rationals and reals using a similar approach: \prew \textbf{lemma} ``$4 * x + 3 * (y\Colon\textit{real}) \not= 1/2$'' \\ \textbf{nitpick} [\textit{show\_datatypes}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $x = 1/2$ \\ \hbox{}\qquad\qquad $y = -1/2$ \\ \hbox{}\qquad Datatypes: \\ \hbox{}\qquad\qquad $\textit{nat} = \{0,\, 1,\, 2,\, 3,\, 4,\, 5,\, 6,\, 7,\, \unr\}$ \\ \hbox{}\qquad\qquad $\textit{int} = \{0,\, 1,\, 2,\, 3,\, 4,\, -3,\, -2,\, -1,\, \unr\}$ \\ \hbox{}\qquad\qquad $\textit{real} = \{1,\, 0,\, 4,\, -3/2,\, 3,\, 2,\, 1/2,\, -1/2,\, \unr\}$ \postw \subsection{Inductive and Coinductive Predicates} \label{inductive-and-coinductive-predicates} Inductively defined predicates (and sets) are particularly problematic for counterexample generators. They can make Quickcheck~\cite{berghofer-nipkow-2004} loop forever and Refute~\cite{weber-2008} run out of resources. The crux of the problem is that they are defined using a least fixed point construction. Nitpick's philosophy is that not all inductive predicates are equal. Consider the \textit{even} predicate below: \prew \textbf{inductive}~\textit{even}~\textbf{where} \\ ``\textit{even}~0'' $\,\mid$ \\ ``\textit{even}~$n\,\Longrightarrow\, \textit{even}~(\textit{Suc}~(\textit{Suc}~n))$'' \postw This predicate enjoys the desirable property of being well-founded, which means that the introduction rules don't give rise to infinite chains of the form \prew $\cdots\,\Longrightarrow\, \textit{even}~k'' \,\Longrightarrow\, \textit{even}~k' \,\Longrightarrow\, \textit{even}~k.$ \postw For \textit{even}, this is obvious: Any chain ending at $k$ will be of length $k/2 + 1$: \prew $\textit{even}~0\,\Longrightarrow\, \textit{even}~2\,\Longrightarrow\, \cdots \,\Longrightarrow\, \textit{even}~(k - 2) \,\Longrightarrow\, \textit{even}~k.$ \postw Wellfoundedness is desirable because it enables Nitpick to use a very efficient fixed point computation.% \footnote{If an inductive predicate is well-founded, then it has exactly one fixed point, which is simultaneously the least and the greatest fixed point. 
In these circumstances, the computation of the least fixed point amounts to the computation of an arbitrary fixed point, which can be performed using a straightforward recursive equation.} Moreover, Nitpick can prove wellfoundedness of most well-founded predicates, just as Isabelle's \textbf{function} package usually discharges termination proof obligations automatically. Let's try an example: \prew \textbf{lemma} ``$\exists n.\; \textit{even}~n \mathrel{\land} \textit{even}~(\textit{Suc}~n)$'' \\ \textbf{nitpick}~[\textit{card nat}~= 100, \textit{unary\_ints}, \textit{verbose}] \\[2\smallskipamount] \slshape The inductive predicate ``\textit{even}'' was proved well-founded. Nitpick can compute it efficiently. \\[2\smallskipamount] Trying 1 scope: \\ \hbox{}\qquad \textit{card nat}~= 100. \\[2\smallskipamount] Nitpick found a potential counterexample for \textit{card nat}~= 100: \\[2\smallskipamount] \hbox{}\qquad Empty assignment \\[2\smallskipamount] Nitpick could not find a better counterexample. \\[2\smallskipamount] Total time: 2274 ms. \postw No genuine counterexample is possible because Nitpick cannot rule out the existence of a natural number $n \ge 100$ such that both $\textit{even}~n$ and $\textit{even}~(\textit{Suc}~n)$ are true. To help Nitpick, we can bound the existential quantifier: \prew \textbf{lemma} ``$\exists n \mathbin{\le} 99.\; \textit{even}~n \mathrel{\land} \textit{even}~(\textit{Suc}~n)$'' \\ \textbf{nitpick}~[\textit{card nat}~= 100, \textit{unary\_ints}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Empty assignment \postw So far we were blessed by the wellfoundedness of \textit{even}. What happens if we use the following definition instead? \prew \textbf{inductive} $\textit{even}'$ \textbf{where} \\ ``$\textit{even}'~(0{\Colon}\textit{nat})$'' $\,\mid$ \\ ``$\textit{even}'~2$'' $\,\mid$ \\ ``$\lbrakk\textit{even}'~m;\> \textit{even}'~n\rbrakk \,\Longrightarrow\, \textit{even}'~(m + n)$'' \postw This definition is not well-founded: From $\textit{even}'~0$ and $\textit{even}'~0$, we can derive that $\textit{even}'~0$. Nonetheless, the predicates $\textit{even}$ and $\textit{even}'$ are equivalent. Let's check a property involving $\textit{even}'$. To make up for the foreseeable computational hurdles entailed by non-wellfoundedness, we decrease \textit{nat}'s cardinality to a mere 10: \prew \textbf{lemma}~``$\exists n \in \{0, 2, 4, 6, 8\}.\; \lnot\;\textit{even}'~n$'' \\ \textbf{nitpick}~[\textit{card nat}~= 10,\, \textit{verbose},\, \textit{show\_consts}] \\[2\smallskipamount] \slshape The inductive predicate ``$\textit{even}'\!$'' could not be proved well-founded. Nitpick might need to unroll it. \\[2\smallskipamount] Trying 6 scopes: \\ \hbox{}\qquad \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 0; \\ \hbox{}\qquad \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 1; \\ \hbox{}\qquad \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 2; \\ \hbox{}\qquad \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 4; \\ \hbox{}\qquad \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 8; \\ \hbox{}\qquad \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 9. 
\\[2\smallskipamount] Nitpick found a counterexample for \textit{card nat}~= 10 and \textit{iter} $\textit{even}'$~= 2: \\[2\smallskipamount] \hbox{}\qquad Constant: \nopagebreak \\ \hbox{}\qquad\qquad $\lambda i.\; \textit{even}'$ = $\undef(\!\begin{aligned}[t] & 2 := \{0, 2, 4, 6, 8, 1^\Q, 3^\Q, 5^\Q, 7^\Q, 9^\Q\}, \\[-2pt] & 1 := \{0, 2, 4, 1^\Q, 3^\Q, 5^\Q, 6^\Q, 7^\Q, 8^\Q, 9^\Q\}, \\[-2pt] & 0 := \{0, 2, 1^\Q, 3^\Q, 4^\Q, 5^\Q, 6^\Q, 7^\Q, 8^\Q, 9^\Q\})\end{aligned}$ \\[2\smallskipamount] Total time: 1140 ms. \postw Nitpick's output is very instructive. First, it tells us that the predicate is unrolled, meaning that it is computed iteratively from the empty set. Then it lists six scopes specifying different bounds on the numbers of iterations:\ 0, 1, 2, 4, 8, and~9. The output also shows how each iteration contributes to $\textit{even}'$. The notation $\lambda i.\; \textit{even}'$ indicates that the value of the predicate depends on an iteration counter. Iteration 0 provides the basis elements, $0$ and $2$. Iteration 1 contributes $4$ ($= 2 + 2$). Iteration 2 throws $6$ ($= 2 + 4 = 4 + 2$) and $8$ ($= 4 + 4$) into the mix. Further iterations would not contribute any new elements. Some values are marked with superscripted question marks~(`\lower.2ex\hbox{$^\Q$}'). These are the elements for which the predicate evaluates to $\unk$. Thus, $\textit{even}'$ evaluates to either \textit{True} or $\unk$, never \textit{False}. When unrolling a predicate, Nitpick tries 0, 1, 2, 4, 8, 12, 16, and 24 iterations. However, these numbers are bounded by the cardinality of the predicate's domain. With \textit{card~nat}~= 10, no more than 9 iterations are ever needed to compute the value of a \textit{nat} predicate. You can specify the number of iterations using the \textit{iter} option, as explained in \S\ref{scope-of-search}. In the next formula, $\textit{even}'$ occurs both positively and negatively: \prew \textbf{lemma} ``$\textit{even}'~(n - 2) \,\Longrightarrow\, \textit{even}'~n$'' \\ \textbf{nitpick} [\textit{card nat} = 10, \textit{show\_consts}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $n = 1$ \\ \hbox{}\qquad Constants: \nopagebreak \\ \hbox{}\qquad\qquad $\lambda i.\; \textit{even}'$ = $\undef(\!\begin{aligned}[t] & 0 := \{0, 2, 1^\Q, 3^\Q, 4^\Q, 5^\Q, 6^\Q, 7^\Q, 8^\Q, 9^\Q\})\end{aligned}$ \\ \hbox{}\qquad\qquad $\textit{even}' \subseteq \{0, 2, 4, 6, 8, \unr\}$ \postw Notice the special constraint $\textit{even}' \subseteq \{0,\, 2,\, 4,\, 6,\, 8,\, \unr\}$ in the output, whose right-hand side represents an arbitrary fixed point (not necessarily the least one). It is used to falsify $\textit{even}'~n$. In contrast, the unrolled predicate is used to satisfy $\textit{even}'~(n - 2)$. Coinductive predicates are handled dually. 
For example: \prew \textbf{coinductive} \textit{nats} \textbf{where} \\ ``$\textit{nats}~(x\Colon\textit{nat}) \,\Longrightarrow\, \textit{nats}~x$'' \\[2\smallskipamount] \textbf{lemma} ``$\textit{nats} = \{0, 1, 2, 3, 4\}$'' \\ \textbf{nitpick}~[\textit{card nat} = 10,\, \textit{show\_consts}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Constants: \nopagebreak \\ \hbox{}\qquad\qquad $\lambda i.\; \textit{nats} = \undef(0 := \{\!\begin{aligned}[t] & 0^\Q, 1^\Q, 2^\Q, 3^\Q, 4^\Q, 5^\Q, 6^\Q, 7^\Q, 8^\Q, 9^\Q, \\[-2pt] & \unr\})\end{aligned}$ \\ \hbox{}\qquad\qquad $\textit{nats} \supseteq \{9, 5^\Q, 6^\Q, 7^\Q, 8^\Q, \unr\}$ \postw As a special case, Nitpick uses Kodkod's transitive closure operator to encode negative occurrences of non-well-founded ``linear inductive predicates,'' i.e., inductive predicates for which the predicate occurs in at most one assumption of each introduction rule. For example: \prew \textbf{inductive} \textit{odd} \textbf{where} \\ ``$\textit{odd}~1$'' $\,\mid$ \\ ``$\lbrakk \textit{odd}~m;\>\, \textit{even}~n\rbrakk \,\Longrightarrow\, \textit{odd}~(m + n)$'' \\[2\smallskipamount] \textbf{lemma}~``$\textit{odd}~n \,\Longrightarrow\, \textit{odd}~(n - 2)$'' \\ \textbf{nitpick}~[\textit{card nat} = 10,\, \textit{show\_consts}] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $n = 1$ \\ \hbox{}\qquad Constants: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{even} = \{0, 2, 4, 6, 8, \unr\}$ \\ \hbox{}\qquad\qquad $\textit{odd}_{\textsl{base}} = \{1, \unr\}$ \\ \hbox{}\qquad\qquad $\textit{odd}_{\textsl{step}} = \! \!\begin{aligned}[t] & \{(0, 0), (0, 2), (0, 4), (0, 6), (0, 8), (1, 1), (1, 3), (1, 5), \\[-2pt] & \phantom{\{} (1, 7), (1, 9), (2, 2), (2, 4), (2, 6), (2, 8), (3, 3), (3, 5), \\[-2pt] & \phantom{\{} (3, 7), (3, 9), (4, 4), (4, 6), (4, 8), (5, 5), (5, 7), (5, 9), \\[-2pt] & \phantom{\{} (6, 6), (6, 8), (7, 7), (7, 9), (8, 8), (9, 9), \unr\}\end{aligned}$ \\ \hbox{}\qquad\qquad $\textit{odd} \subseteq \{1, 3, 5, 7, 9, 8^\Q, \unr\}$ \postw \noindent In the output, $\textit{odd}_{\textrm{base}}$ represents the base elements and $\textit{odd}_{\textrm{step}}$ is a transition relation that computes new elements from known ones. The set $\textit{odd}$ consists of all the values reachable through the reflexive transitive closure of $\textit{odd}_{\textrm{step}}$ starting with any element from $\textit{odd}_{\textrm{base}}$, namely 1, 3, 5, 7, and 9. Using Kodkod's transitive closure to encode linear predicates is normally either more thorough or more efficient than unrolling (depending on the value of \textit{iter}), but for those cases where it isn't, you can disable it by passing the \textit{dont\_star\_linear\_preds} option. \subsection{Coinductive Datatypes} \label{coinductive-datatypes} While Isabelle regrettably lacks a high-level mechanism for defining coinductive datatypes, the \textit{Coinductive\_List} theory provides a coinductive ``lazy list'' datatype, $'a~\textit{llist}$, defined the hard way. Nitpick supports these lazy lists seamlessly and provides a hook, described in \S\ref{registration-of-coinductive-datatypes}, to register custom coinductive datatypes. (Co)intuitively, a coinductive datatype is similar to an inductive datatype but allows infinite objects.
Thus, the infinite lists $\textit{ps}$ $=$ $[a, a, a, \ldots]$, $\textit{qs}$ $=$ $[a, b, a, b, \ldots]$, and $\textit{rs}$ $=$ $[0, 1, 2, 3, \ldots]$ can be defined as lazy lists using the $\textit{LNil}\mathbin{\Colon}{'}a~\textit{llist}$ and $\textit{LCons}\mathbin{\Colon}{'}a \mathbin{\Rightarrow} {'}a~\textit{llist} \mathbin{\Rightarrow} {'}a~\textit{llist}$ constructors. Although it is otherwise no friend of infinity, Nitpick can find counterexamples involving cyclic lists such as \textit{ps} and \textit{qs} above as well as finite lists: \prew \textbf{lemma} ``$\textit{xs} \not= \textit{LCons}~a~\textit{xs}$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for {\itshape card}~$'a$ = 1: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{a} = a_1$ \\ \hbox{}\qquad\qquad $\textit{xs} = \textsl{THE}~\omega.\; \omega = \textit{LCons}~a_1~\omega$ \postw The notation $\textrm{THE}~\omega.\; \omega = t(\omega)$ stands for the infinite term $t(t(t(\ldots)))$. Hence, \textit{xs} is simply the infinite list $[a_1, a_1, a_1, \ldots]$. The next example is more interesting: \prew \textbf{lemma}~``$\lbrakk\textit{xs} = \textit{LCons}~a~\textit{xs};\>\, \textit{ys} = \textit{iterates}~(\lambda b.\> a)~b\rbrakk \,\Longrightarrow\, \textit{xs} = \textit{ys}$'' \\ \textbf{nitpick} [\textit{verbose}] \\[2\smallskipamount] \slshape The type ``\kern1pt$'a$'' passed the monotonicity test. Nitpick might be able to skip some scopes. \\[2\smallskipamount] Trying 8 scopes: \\ \hbox{}\qquad \textit{card} $'a$~= 1, \textit{card} ``\kern1pt$'a~\textit{list}$''~= 1, and \textit{bisim\_depth}~= 0. \\ \hbox{}\qquad $\qquad\vdots$ \\[.5\smallskipamount] \hbox{}\qquad \textit{card} $'a$~= 8, \textit{card} ``\kern1pt$'a~\textit{list}$''~= 8, and \textit{bisim\_depth}~= 7. \\[2\smallskipamount] Nitpick found a counterexample for {\itshape card}~$'a$ = 2, \textit{card}~``\kern1pt$'a~\textit{list}$''~= 2, and \textit{bisim\_\allowbreak depth}~= 1: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{a} = a_2$ \\ \hbox{}\qquad\qquad $\textit{b} = a_1$ \\ \hbox{}\qquad\qquad $\textit{xs} = \textsl{THE}~\omega.\; \omega = \textit{LCons}~a_2~\omega$ \\ \hbox{}\qquad\qquad $\textit{ys} = \textit{LCons}~a_1~(\textsl{THE}~\omega.\; \omega = \textit{LCons}~a_2~\omega)$ \\[2\smallskipamount] Total time: 726 ms. \postw The lazy list $\textit{xs}$ is simply $[a_2, a_2, a_2, \ldots]$, whereas $\textit{ys}$ is $[a_1, a_2, a_2, a_2, \ldots]$, i.e., a lasso-shaped list with $[a_1]$ as its stem and $[a_2]$ as its cycle. In general, the list segment within the scope of the {THE} binder corresponds to the lasso's cycle, whereas the segment leading to the binder is the stem. A salient property of coinductive datatypes is that two objects are considered equal if and only if they lead to the same observations. For example, the lazy lists $\textrm{THE}~\omega.\; \omega = \textit{LCons}~a~(\textit{LCons}~b~\omega)$ and $\textit{LCons}~a~(\textrm{THE}~\omega.\; \omega = \textit{LCons}~b~(\textit{LCons}~a~\omega))$ are identical, because both lead to the sequence of observations $a$, $b$, $a$, $b$, \hbox{\ldots} (or, equivalently, both encode the infinite list $[a, b, a, b, \ldots]$). This concept of equality for coinductive datatypes is called bisimulation and is defined coinductively. 
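To give a concrete flavor of such a definition, here is a rough sketch of how a bisimulation-style equality on lazy lists could be introduced coinductively; this is only an illustration, not the actual definition used in the \textit{Coinductive\_List} theory:

\prew
\textbf{coinductive} \textit{bisim} \textbf{where} \\
``$\textit{bisim}~\textit{LNil}~\textit{LNil\/}$'' $\,\mid$ \\
``$\textit{bisim}~\textit{xs}~\textit{ys} \,\Longrightarrow\, \textit{bisim}~(\textit{LCons}~a~\textit{xs})~(\textit{LCons}~a~\textit{ys})$''
\postw

Because \textit{bisim} is interpreted as a greatest fixed point, it relates two lazy lists whenever they yield the same observations forever, including the case where both lists are infinite.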
Internally, Nitpick encodes the coinductive bisimilarity predicate as part of the Kodkod problem to ensure that distinct objects lead to different observations. This precaution is somewhat expensive and often unnecessary, so it can be disabled by setting the \textit{bisim\_depth} option to $-1$. The bisimilarity check is then performed \textsl{after} the counterexample has been found to ensure correctness. If this after-the-fact check fails, the counterexample is tagged as ``likely genuine'' and Nitpick recommends to try again with \textit{bisim\_depth} set to a nonnegative integer. Disabling the check for the previous example saves approximately 150~milli\-seconds; the speed gains can be more significant for larger scopes. The next formula illustrates the need for bisimilarity (either as a Kodkod predicate or as an after-the-fact check) to prevent spurious counterexamples: \prew \textbf{lemma} ``$\lbrakk xs = \textit{LCons}~a~\textit{xs};\>\, \textit{ys} = \textit{LCons}~a~\textit{ys}\rbrakk \,\Longrightarrow\, \textit{xs} = \textit{ys}$'' \\ \textbf{nitpick} [\textit{bisim\_depth} = $-1$, \textit{show\_datatypes}] \\[2\smallskipamount] \slshape Nitpick found a likely genuine counterexample for $\textit{card}~'a$ = 2: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $a = a_2$ \\ \hbox{}\qquad\qquad $\textit{xs} = \textsl{THE}~\omega.\; \omega = \textit{LCons}~a_2~\omega$ \\ \hbox{}\qquad\qquad $\textit{ys} = \textsl{THE}~\omega.\; \omega = \textit{LCons}~a_2~\omega$ \\ \hbox{}\qquad Codatatype:\strut \nopagebreak \\ \hbox{}\qquad\qquad $'a~\textit{llist} = \{\!\begin{aligned}[t] & \textsl{THE}~\omega.\; \omega = \textit{LCons}~a_2~\omega, \\[-2pt] & \textsl{THE}~\omega.\; \omega = \textit{LCons}~a_2~\omega,\> \unr\}\end{aligned}$ \\[2\smallskipamount] Try again with ``\textit{bisim\_depth}'' set to a nonnegative value to confirm that the counterexample is genuine. \\[2\smallskipamount] {\upshape\textbf{nitpick}} \\[2\smallskipamount] \slshape Nitpick found no counterexample. \postw In the first \textbf{nitpick} invocation, the after-the-fact check discovered that the two known elements of type $'a~\textit{llist}$ are bisimilar. A compromise between leaving out the bisimilarity predicate from the Kodkod problem and performing the after-the-fact check is to specify a lower nonnegative \textit{bisim\_depth} value than the default one provided by Nitpick. In general, a value of $K$ means that Nitpick will require all lists to be distinguished from each other by their prefixes of length $K$. Be aware that setting $K$ to a too low value can overconstrain Nitpick, preventing it from finding any counterexamples. \subsection{Boxing} \label{boxing} Nitpick normally maps function and product types directly to the corresponding Kodkod concepts. As a consequence, if $'a$ has cardinality 3 and $'b$ has cardinality 4, then $'a \times {'}b$ has cardinality 12 ($= 4 \times 3$) and $'a \Rightarrow {'}b$ has cardinality 64 ($= 4^3$). In some circumstances, it pays off to treat these types in the same way as plain datatypes, by approximating them by a subset of a given cardinality. This technique is called ``boxing'' and is particularly useful for functions passed as arguments to other functions, for high-arity functions, and for large tuples. Under the hood, boxing involves wrapping occurrences of the types $'a \times {'}b$ and $'a \Rightarrow {'}b$ in isomorphic datatypes, as can be seen by enabling the \textit{debug} option. 
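Boxing can also be requested or forbidden for individual types. The invocation below is only a sketch, with arbitrary illustrative values; the exact syntax of the \textit{box} option is given in the option reference (\S\ref{option-reference}):

\prew
\textbf{nitpick} [\textit{box}~``$'a \Rightarrow {'}b$''~= \textit{true},\, \textit{box}~``$'a \times {'}b$''~= \textit{false}]
\postw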
To illustrate boxing, we consider a formalization of $\lambda$-terms represented using de Bruijn's notation: \prew \textbf{datatype} \textit{tm} = \textit{Var}~\textit{nat}~$\mid$~\textit{Lam}~\textit{tm} $\mid$ \textit{App~tm~tm} \postw The $\textit{lift}~t~k$ function increments all variables with indices greater than or equal to $k$ by one: \prew \textbf{primrec} \textit{lift} \textbf{where} \\ ``$\textit{lift}~(\textit{Var}~j)~k = \textit{Var}~(\textrm{if}~j < k~\textrm{then}~j~\textrm{else}~j + 1)$'' $\mid$ \\ ``$\textit{lift}~(\textit{Lam}~t)~k = \textit{Lam}~(\textit{lift}~t~(k + 1))$'' $\mid$ \\ ``$\textit{lift}~(\textit{App}~t~u)~k = \textit{App}~(\textit{lift}~t~k)~(\textit{lift}~u~k)$'' \postw The $\textit{loose}~t~k$ predicate returns \textit{True} if and only if term $t$ has a loose variable with index $k$ or more: \prew \textbf{primrec}~\textit{loose} \textbf{where} \\ ``$\textit{loose}~(\textit{Var}~j)~k = (j \ge k)$'' $\mid$ \\ ``$\textit{loose}~(\textit{Lam}~t)~k = \textit{loose}~t~(\textit{Suc}~k)$'' $\mid$ \\ ``$\textit{loose}~(\textit{App}~t~u)~k = (\textit{loose}~t~k \mathrel{\lor} \textit{loose}~u~k)$'' \postw Next, the $\textit{subst}~\sigma~t$ function applies the substitution $\sigma$ on $t$: \prew \textbf{primrec}~\textit{subst} \textbf{where} \\ ``$\textit{subst}~\sigma~(\textit{Var}~j) = \sigma~j$'' $\mid$ \\ ``$\textit{subst}~\sigma~(\textit{Lam}~t) = {}$\phantom{''} \\ \phantom{``}$\textit{Lam}~(\textit{subst}~(\lambda n.\> \textrm{case}~n~\textrm{of}~0 \Rightarrow \textit{Var}~0 \mid \textit{Suc}~m \Rightarrow \textit{lift}~(\sigma~m)~1)~t)$'' $\mid$ \\ ``$\textit{subst}~\sigma~(\textit{App}~t~u) = \textit{App}~(\textit{subst}~\sigma~t)~(\textit{subst}~\sigma~u)$'' \postw A substitution is a function that maps variable indices to terms. Observe that $\sigma$ is a function passed as argument and that Nitpick can't optimize it away, because the recursive call for the \textit{Lam} case involves an altered version. Also notice the \textit{lift} call, which increments the variable indices when moving under a \textit{Lam}. A reasonable property to expect of substitution is that it should leave closed terms unchanged. Alas, even this simple property does not hold: \pre \textbf{lemma}~``$\lnot\,\textit{loose}~t~0 \,\Longrightarrow\, \textit{subst}~\sigma~t = t$'' \\ \textbf{nitpick} [\textit{verbose}] \\[2\smallskipamount] \slshape Trying 8 scopes: \nopagebreak \\ \hbox{}\qquad \textit{card~nat}~= 1, \textit{card tm}~= 1, and \textit{card} ``$\textit{nat} \Rightarrow \textit{tm}$'' = 1; \\ \hbox{}\qquad \textit{card~nat}~= 2, \textit{card tm}~= 2, and \textit{card} ``$\textit{nat} \Rightarrow \textit{tm}$'' = 2; \\ \hbox{}\qquad $\qquad\vdots$ \\[.5\smallskipamount] \hbox{}\qquad \textit{card~nat}~= 8, \textit{card tm}~= 8, and \textit{card} ``$\textit{nat} \Rightarrow \textit{tm}$'' = 8. \\[2\smallskipamount] Nitpick found a counterexample for \textit{card~nat}~= 6, \textit{card~tm}~= 6, and \textit{card}~``$\textit{nat} \Rightarrow \textit{tm}$''~= 6: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $\sigma = \undef(\!\begin{aligned}[t] & 0 := \textit{Var}~0,\> 1 := \textit{Var}~0,\> 2 := \textit{Var}~0, \\[-2pt] & 3 := \textit{Var}~0,\> 4 := \textit{Var}~0,\> 5 := \textit{Var}~0)\end{aligned}$ \\ \hbox{}\qquad\qquad $t = \textit{Lam}~(\textit{Lam}~(\textit{Var}~1))$ \\[2\smallskipamount] Total time: $4679$ ms. 
\postw Using \textit{eval}, we find out that $\textit{subst}~\sigma~t = \textit{Lam}~(\textit{Lam}~(\textit{Var}~0))$. Using the traditional $\lambda$-term notation, $t$~is $\lambda x\, y.\> x$ whereas $\textit{subst}~\sigma~t$ is $\lambda x\, y.\> y$. The bug is in \textit{subst}: The $\textit{lift}~(\sigma~m)~1$ call should be replaced with $\textit{lift}~(\sigma~m)~0$. An interesting aspect of Nitpick's verbose output is that it assigned increasing cardinalities from 1 to 8 to the type $\textit{nat} \Rightarrow \textit{tm}$. For the formula of interest, knowing 6 values of that type was enough to find the counterexample. Without boxing, $46\,656$ ($= 6^6$) values must be considered, a hopeless undertaking: \prew \textbf{nitpick} [\textit{dont\_box}] \\[2\smallskipamount] {\slshape Nitpick ran out of time after checking 4 of 8 scopes.} \postw {\looseness=-1 Boxing can be enabled or disabled globally or on a per-type basis using the \textit{box} option. Moreover, setting the cardinality of a function or product type implicitly enables boxing for that type. Nitpick usually makes reasonable choices about which types should be boxed, but option tweaking sometimes helps. } \subsection{Scope Monotonicity} \label{scope-monotonicity} The \textit{card} option (together with \textit{iter}, \textit{bisim\_depth}, and \textit{max}) controls which scopes are actually tested. In general, to exhaust all models below a certain cardinality bound, the number of scopes that Nitpick must consider increases exponentially with the number of type variables (and \textbf{typedecl}'d types) occurring in the formula. Given the default cardinality specification of 1--8, no fewer than $8^4 = 4096$ scopes must be considered for a formula involving $'a$, $'b$, $'c$, and $'d$. Fortunately, many formulas exhibit a property called \textsl{scope monotonicity}, meaning that if the formula is falsifiable for a given scope, it is also falsifiable for all larger scopes \cite[p.~165]{jackson-2006}. Consider the formula \prew \textbf{lemma}~``$\textit{length~xs} = \textit{length~ys} \,\Longrightarrow\, \textit{rev}~(\textit{zip~xs~ys}) = \textit{zip~xs}~(\textit{rev~ys})$'' \postw where \textit{xs} is of type $'a~\textit{list}$ and \textit{ys} is of type $'b~\textit{list}$. A priori, Nitpick would need to consider 512 scopes to exhaust the specification \textit{card}~= 1--8. However, our intuition tells us that any counterexample found with a small scope would still be a counterexample in a larger scope---by simply ignoring the fresh $'a$ and $'b$ values provided by the larger scope. Nitpick comes to the same conclusion after a careful inspection of the formula and the relevant definitions: \prew \textbf{nitpick}~[\textit{verbose}] \\[2\smallskipamount] \slshape The types ``\kern1pt$'a$'' and ``\kern1pt$'b$'' passed the monotonicity test. Nitpick might be able to skip some scopes. \\[2\smallskipamount] Trying 8 scopes: \\ \hbox{}\qquad \textit{card} $'a$~= 1, \textit{card} $'b$~= 1, \textit{card} \textit{nat}~= 1, \textit{card} ``$('a \times {'}b)$ \textit{list}''~= 1, \\ \hbox{}\qquad\quad \textit{card} ``\kern1pt$'a$ \textit{list}''~= 1, and \textit{card} ``\kern1pt$'b$ \textit{list}''~= 1. \\ \hbox{}\qquad \textit{card} $'a$~= 2, \textit{card} $'b$~= 2, \textit{card} \textit{nat}~= 2, \textit{card} ``$('a \times {'}b)$ \textit{list}''~= 2, \\ \hbox{}\qquad\quad \textit{card} ``\kern1pt$'a$ \textit{list}''~= 2, and \textit{card} ``\kern1pt$'b$ \textit{list}''~= 2.
\\ \hbox{}\qquad $\qquad\vdots$ \\[.5\smallskipamount] \hbox{}\qquad \textit{card} $'a$~= 8, \textit{card} $'b$~= 8, \textit{card} \textit{nat}~= 8, \textit{card} ``$('a \times {'}b)$ \textit{list}''~= 8, \\ \hbox{}\qquad\quad \textit{card} ``\kern1pt$'a$ \textit{list}''~= 8, and \textit{card} ``\kern1pt$'b$ \textit{list}''~= 8. \\[2\smallskipamount] Nitpick found a counterexample for \textit{card} $'a$~= 5, \textit{card} $'b$~= 5, \textit{card} \textit{nat}~= 5, \textit{card} ``$('a \times {'}b)$ \textit{list}''~= 5, \textit{card} ``\kern1pt$'a$ \textit{list}''~= 5, and \textit{card} ``\kern1pt$'b$ \textit{list}''~= 5: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $\textit{xs} = [a_4, a_5]$ \\ \hbox{}\qquad\qquad $\textit{ys} = [b_3, b_3]$ \\[2\smallskipamount] Total time: 1636 ms. \postw In theory, it should be sufficient to test a single scope: \prew \textbf{nitpick}~[\textit{card}~= 8] \postw However, this is often less efficient in practice and may lead to overly complex counterexamples. If the monotonicity check fails but we believe that the formula is monotonic (or we don't mind missing some counterexamples), we can pass the \textit{mono} option. To convince yourself that this option is risky, simply consider this example from \S\ref{skolemization}: \prew \textbf{lemma} ``$\exists g.\; \forall x\Colon 'b.~g~(f~x) = x \,\Longrightarrow\, \forall y\Colon {'}a.\; \exists x.~y = f~x$'' \\ \textbf{nitpick} [\textit{mono}] \\[2\smallskipamount] {\slshape Nitpick found no counterexample.} \\[2\smallskipamount] \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$~= 2 and \textit{card} $'b$~=~1: \\ \hbox{}\qquad $\vdots$ \postw (It turns out the formula holds if and only if $\textit{card}~'a \le \textit{card}~'b$.) Although this is rarely advisable, the automatic monotonicity checks can be disabled by passing \textit{non\_mono} (\S\ref{optimizations}). As insinuated in \S\ref{natural-numbers-and-integers} and \S\ref{inductive-datatypes}, \textit{nat}, \textit{int}, and inductive datatypes are normally monotonic and treated as such. The same is true for record types, \textit{rat}, \textit{real}, and some \textbf{typedef}'d types. Thus, given the cardinality specification 1--8, a formula involving \textit{nat}, \textit{int}, \textit{int~list}, \textit{rat}, and \textit{rat~list} will lead Nitpick to consider only 8~scopes instead of $32\,768$. +\subsection{Inductive Properties} +\label{inductive-properties} + +Inductive properties are a particular pain to prove, because the failure to +establish an induction step can mean several things: +% +\begin{enumerate} +\item The property is invalid. +\item The property is valid but is too weak to support the induction step. +\item The property is valid and strong enough; it's just that we haven't found +the proof yet. +\end{enumerate} +% +Depending on which scenario applies, we would take the appropriate course of +action: +% +\begin{enumerate} +\item Repair the statement of the property so that it becomes valid. +\item Generalize the property and/or prove auxiliary properties. +\item Work harder on a proof. +\end{enumerate} +% +How can we distinguish between the three scenarios? Nitpick's normal mode of +operation can often detect scenario 1, and Isabelle's automatic tactics help with +scenario 3. Using appropriate techniques, it is also often possible to use +Nitpick to identify scenario 2. 
Consider the following transition system, +in which natural numbers represent states: + +\prew +\textbf{inductive\_set}~\textit{reach}~\textbf{where} \\ +``$(4\Colon\textit{nat}) \in \textit{reach\/}$'' $\mid$ \\ +``$\lbrakk n < 4;\> n \in \textit{reach\/}\rbrakk \,\Longrightarrow\, 3 * n + 1 \in \textit{reach\/}$'' $\mid$ \\ +``$n \in \textit{reach} \,\Longrightarrow\, n + 2 \in \textit{reach\/}$'' +\postw + +We will try to prove that only even numbers are reachable: + +\prew +\textbf{lemma}~``$n \in \textit{reach} \,\Longrightarrow\, 2~\textrm{dvd}~n$'' +\postw + +Does this property hold? Nitpick cannot find a counterexample within 30 seconds, +so let's attempt a proof by induction: + +\prew +\textbf{apply}~(\textit{induct~set}{:}~\textit{reach\/}) \\ +\textbf{apply}~\textit{auto} +\postw + +This leaves us in the following proof state: + +\prew +{\slshape goal (2 subgoals): \\ +\phantom{0}1. ${\bigwedge}n.\;\, \lbrakk n \in \textit{reach\/};\, n < 4;\, 2~\textsl{dvd}~n\rbrakk \,\Longrightarrow\, 2~\textsl{dvd}~\textit{Suc}~(3 * n)$ \\ +\phantom{0}2. ${\bigwedge}n.\;\, \lbrakk n \in \textit{reach\/};\, 2~\textsl{dvd}~n\rbrakk \,\Longrightarrow\, 2~\textsl{dvd}~\textit{Suc}~(\textit{Suc}~n)$ +} +\postw + +If we run Nitpick on the first subgoal, it still won't find any +counterexample; and yet, \textit{auto} fails to go further, and \textit{arith} +is helpless. However, notice the $n \in \textit{reach}$ assumption, which +strengthens the induction hypothesis but is not immediately usable in the proof. +If we remove it and invoke Nitpick, this time we get a counterexample: + +\prew +\textbf{apply}~(\textit{thin\_tac}~``$n \in \textit{reach\/}$'') \\ +\textbf{nitpick} \\[2\smallskipamount] +\slshape Nitpick found a counterexample: \\[2\smallskipamount] +\hbox{}\qquad Skolem constant: \nopagebreak \\ +\hbox{}\qquad\qquad $n = 0$ +\postw + +Indeed, 0 < 4, 2 divides 0, but 2 does not divide 1. We can use this information +to strengthen the lemma: + +\prew +\textbf{lemma}~``$n \in \textit{reach} \,\Longrightarrow\, 2~\textrm{dvd}~n \mathrel{\lor} n \not= 0$'' +\postw + +Unfortunately, the proof by induction still gets stuck, except that Nitpick now +finds the counterexample $n = 2$. We generalize the lemma further to + +\prew +\textbf{lemma}~``$n \in \textit{reach} \,\Longrightarrow\, 2~\textrm{dvd}~n \mathrel{\lor} n \ge 4$'' +\postw + +and this time \textit{arith} can finish off the subgoals. + +A similar technique can be employed for structural induction. The +following mini-formalization of full binary trees will serve as illustration: + +\prew +\textbf{datatype} $\kern1pt'a$~\textit{bin\_tree} = $\textit{Leaf}~{\kern1pt'a}$ $\mid$ $\textit{Branch}$ ``\kern1pt$'a$ \textit{bin\_tree}'' ``\kern1pt$'a$ \textit{bin\_tree}'' \\[2\smallskipamount] +\textbf{primrec}~\textit{labels}~\textbf{where} \\ +``$\textit{labels}~(\textit{Leaf}~a) = \{a\}$'' $\mid$ \\ +``$\textit{labels}~(\textit{Branch}~t~u) = \textit{labels}~t \mathrel{\cup} \textit{labels}~u$'' \\[2\smallskipamount] +\textbf{primrec}~\textit{swap}~\textbf{where} \\ +``$\textit{swap}~(\textit{Leaf}~c)~a~b =$ \\ +\phantom{``}$(\textrm{if}~c = a~\textrm{then}~\textit{Leaf}~b~\textrm{else~if}~c = b~\textrm{then}~\textit{Leaf}~a~\textrm{else}~\textit{Leaf}~c)$'' $\mid$ \\ +``$\textit{swap}~(\textit{Branch}~t~u)~a~b = \textit{Branch}~(\textit{swap}~t~a~b)~(\textit{swap}~u~a~b)$'' +\postw + +The \textit{labels} function returns the set of labels occurring on leaves of a +tree, and \textit{swap} exchanges two labels.
Intuitively, if two distinct +labels $a$ and $b$ occur in a tree $t$, they should also occur in the tree +obtained by swapping $a$ and $b$: + +\prew +\textbf{lemma} $``\lbrakk a \in \textit{labels}~t;\, b \in \textit{labels}~t;\, a \not= b\rbrakk {}$ \\ +\phantom{\textbf{lemma} ``}$\,{\Longrightarrow}{\;\,} \textit{labels}~(\textit{swap}~t~a~b) = \textit{labels}~t$'' +\postw + +Nitpick can't find any counterexample, so we proceed with induction +(this time favoring a more structured style): + +\prew +\textbf{proof}~(\textit{induct}~$t$) \\ +\hbox{}\quad \textbf{case}~\textit{Leaf}~\textbf{thus}~\textit{?case}~\textbf{by}~\textit{simp} \\ +\textbf{next} \\ +\hbox{}\quad \textbf{case}~$(\textit{Branch}~t~u)$~\textbf{thus} \textit{?case} +\postw + +Nitpick can't find any counterexample at this point either, but it makes the +following suggestion: + +\prew +\slshape +Hint: To check that the induction hypothesis is general enough, try the following command: +\textbf{nitpick}~[\textit{non\_std} ``${\kern1pt'a}~\textit{bin\_tree}$'', \textit{show\_consts}]. +\postw + +If we follow the hint, we get a ``nonstandard'' counterexample for the step: + +\prew +\slshape Nitpick found a nonstandard counterexample for \textit{card} $'a$ = 4: \\[2\smallskipamount] +\hbox{}\qquad Free variables: \nopagebreak \\ +\hbox{}\qquad\qquad $a = a_4$ \\ +\hbox{}\qquad\qquad $b = a_3$ \\ +\hbox{}\qquad\qquad $t = \xi_3$ \\ +\hbox{}\qquad\qquad $u = \xi_4$ \\ +\hbox{}\qquad {\slshape Constants:} \nopagebreak \\ +\hbox{}\qquad\qquad $\textit{labels} = \undef + (\!\begin{aligned}[t]% + & \xi_3 := \{a_4\},\> \xi_4 := \{a_1, a_3\}, \\[-2pt] %% TYPESETTING + & \textit{Branch}~\xi_3~\xi_3 := \{a_4\}, \\[-2pt] + & \textit{Branch}~\xi_3~\xi_4 := \{a_1, a_3, a_4\})\end{aligned}$ \\ +\hbox{}\qquad\qquad $\lambda x_1.\> \textit{swap}~x_1~a~b = \undef + (\!\begin{aligned}[t]% + & \xi_3 := \xi_3,\> \xi_4 := \xi_3, \\[-2pt] + & \textit{Branch}~\xi_3~\xi_3 := \textit{Branch}~\xi_3~\xi_3, \\[-2pt] + & \textit{Branch}~\xi_4~\xi_3 := \textit{Branch}~\xi_3~\xi_3)\end{aligned}$ \\[2\smallskipamount] +The existence of a nonstandard model suggests that the induction hypothesis is not general enough or perhaps +even wrong. See the ``Inductive Properties'' section of the Nitpick manual for details (``\textit{isabelle doc nitpick}''). +\postw + +Reading the Nitpick manual is a most excellent idea. +But what's going on? The \textit{non\_std} ``${\kern1pt'a}~\textit{bin\_tree}$'' +option told the tool to look for nonstandard models of binary trees, which +means that new ``nonstandard'' trees $\xi_1, \xi_2, \ldots$, are now allowed in +addition to the standard trees generated by the \textit{Leaf} and +\textit{Branch} constructors.% +\footnote{Notice the similarity between allowing nonstandard trees here and +allowing unreachable states in the preceding example (by removing the ``$n \in +\textit{reach\/}$'' assumption). In both cases, we effectively enlarge the +set of objects over which the induction is performed while doing the step +so as to test the induction hypothesis's strength.} +The new trees are so nonstandard that we know nothing about them, except what +the induction hypothesis states and what can be proved about all trees without +relying on induction or case distinction. 
The key observation is,
+%
+\begin{quote}
+\textsl{If the induction
+hypothesis is strong enough, the induction step will hold even for nonstandard
+objects, and Nitpick won't find any nonstandard counterexample.}
+\end{quote}
+%
+But here, Nitpick did find some nonstandard trees $t = \xi_3$
+and $u = \xi_4$ such that $a \in \textit{labels}~t$, $b \notin
+\textit{labels}~t$, $a \notin \textit{labels}~u$, and $b \in \textit{labels}~u$.
+Because neither tree contains both $a$ and $b$, the induction hypothesis tells
+us nothing about the labels of $\textit{swap}~t~a~b$ and $\textit{swap}~u~a~b$,
+and as a result we know nothing about the labels of the tree
+$\textit{swap}~(\textit{Branch}~t~u)~a~b$, which by definition equals
+$\textit{Branch}$ $(\textit{swap}~t~a~b)$ $(\textit{swap}~u~a~b)$, whose
+labels are $\textit{labels}$ $(\textit{swap}~t~a~b) \mathrel{\cup}
+\textit{labels}$ $(\textit{swap}~u~a~b)$.
+
+The solution is to ensure that we always know what the labels of the subtrees
+are in the inductive step, by covering the cases where $a$ and/or~$b$ is not in
+$t$ in the statement of the lemma:
+
+\prew
+\textbf{lemma} ``$\textit{labels}~(\textit{swap}~t~a~b) = {}$ \\
+\phantom{\textbf{lemma} ``}$(\textrm{if}~a \in \textit{labels}~t~\textrm{then}$ \nopagebreak \\
+\phantom{\textbf{lemma} ``(\quad}$\textrm{if}~b \in \textit{labels}~t~\textrm{then}~\textit{labels}~t~\textrm{else}~(\textit{labels}~t - \{a\}) \mathrel{\cup} \{b\}$ \\
+\phantom{\textbf{lemma} ``(}$\textrm{else}$ \\
+\phantom{\textbf{lemma} ``(\quad}$\textrm{if}~b \in \textit{labels}~t~\textrm{then}~(\textit{labels}~t - \{b\}) \mathrel{\cup} \{a\}~\textrm{else}~\textit{labels}~t)$''
+\postw
+
+This time, Nitpick won't find any nonstandard counterexample, and we can perform
+the induction step using \textit{auto}.
+ \section{Case Studies} \label{case-studies} As a didactic device, the previous section focused mostly on toy formulas whose validity can easily be assessed just by looking at the formula. We will now review two somewhat more realistic case studies that are within Nitpick's reach:\ a context-free grammar modeled by mutually inductive sets and a functional implementation of AA trees. The results presented in this section were produced with the following settings: \prew \textbf{nitpick\_params} [\textit{max\_potential}~= 0,\, \textit{max\_threads} = 2] \postw \subsection{A Context-Free Grammar} \label{a-context-free-grammar} Our first case study is taken from section 7.4 in the Isabelle tutorial \cite{isa-tutorial}. The following grammar, originally due to Hopcroft and Ullman, produces all strings with an equal number of $a$'s and $b$'s: \prew \begin{tabular}{@{}r@{$\;\,$}c@{$\;\,$}l@{}} $S$ & $::=$ & $\epsilon \mid bA \mid aB$ \\ $A$ & $::=$ & $aS \mid bAA$ \\ $B$ & $::=$ & $bS \mid aBB$ \end{tabular} \postw The intuition behind the grammar is that $A$ generates all strings with one more $a$ than $b$'s and $B$ generates all strings with one more $b$ than $a$'s. The alphabet consists exclusively of $a$'s and $b$'s: \prew \textbf{datatype} \textit{alphabet}~= $a$ $\mid$ $b$ \postw Strings over the alphabet are represented by \textit{alphabet list}s. Nonterminals in the grammar become sets of strings.
The production rules presented above can be expressed as a mutually inductive definition: \prew \textbf{inductive\_set} $S$ \textbf{and} $A$ \textbf{and} $B$ \textbf{where} \\ \textit{R1}:\kern.4em ``$[] \in S$'' $\,\mid$ \\ \textit{R2}:\kern.4em ``$w \in A\,\Longrightarrow\, b \mathbin{\#} w \in S$'' $\,\mid$ \\ \textit{R3}:\kern.4em ``$w \in B\,\Longrightarrow\, a \mathbin{\#} w \in S$'' $\,\mid$ \\ \textit{R4}:\kern.4em ``$w \in S\,\Longrightarrow\, a \mathbin{\#} w \in A$'' $\,\mid$ \\ \textit{R5}:\kern.4em ``$w \in S\,\Longrightarrow\, b \mathbin{\#} w \in S$'' $\,\mid$ \\ \textit{R6}:\kern.4em ``$\lbrakk v \in B;\> v \in B\rbrakk \,\Longrightarrow\, a \mathbin{\#} v \mathbin{@} w \in B$'' \postw The conversion of the grammar into the inductive definition was done manually by Joe Blow, an underpaid undergraduate student. As a result, some errors might have sneaked in. Debugging faulty specifications is at the heart of Nitpick's \textsl{raison d'\^etre}. A good approach is to state desirable properties of the specification (here, that $S$ is exactly the set of strings over $\{a, b\}$ with as many $a$'s as $b$'s) and check them with Nitpick. If the properties are correctly stated, counterexamples will point to bugs in the specification. For our grammar example, we will proceed in two steps, separating the soundness and the completeness of the set $S$. First, soundness: \prew \textbf{theorem}~\textit{S\_sound}: \\ ``$w \in S \longrightarrow \textit{length}~[x\mathbin{\leftarrow} w.\; x = a] = \textit{length}~[x\mathbin{\leftarrow} w.\; x = b]$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $w = [b]$ \postw It would seem that $[b] \in S$. How could this be? An inspection of the introduction rules reveals that the only rule with a right-hand side of the form $b \mathbin{\#} {\ldots} \in S$ that could have introduced $[b]$ into $S$ is \textit{R5}: \prew ``$w \in S\,\Longrightarrow\, b \mathbin{\#} w \in S$'' \postw On closer inspection, we can see that this rule is wrong. To match the production $B ::= bS$, the second $S$ should be a $B$. We fix the typo and try again: \prew \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $w = [a, a, b]$ \postw Some detective work is necessary to find out what went wrong here. To get $[a, a, b] \in S$, we need $[a, b] \in B$ by \textit{R3}, which in turn can only come from \textit{R6}: \prew ``$\lbrakk v \in B;\> v \in B\rbrakk \,\Longrightarrow\, a \mathbin{\#} v \mathbin{@} w \in B$'' \postw Now, this formula must be wrong: The same assumption occurs twice, and the variable $w$ is unconstrained. Clearly, one of the two occurrences of $v$ in the assumptions should have been a $w$. With the correction made, we don't get any counterexample from Nitpick. Let's move on and check completeness: \prew \textbf{theorem}~\textit{S\_complete}: \\ ``$\textit{length}~[x\mathbin{\leftarrow} w.\; x = a] = \textit{length}~[x\mathbin{\leftarrow} w.\; x = b] \longrightarrow w \in S$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $w = [b, b, a, a]$ \postw Apparently, $[b, b, a, a] \notin S$, even though it has the same numbers of $a$'s and $b$'s. 
But since our inductive definition passed the soundness check, the introduction rules we have are probably correct. Perhaps we simply lack an introduction rule. Comparing the grammar with the inductive definition, our suspicion is confirmed: Joe Blow simply forgot the production $A ::= bAA$, without which the grammar cannot generate two or more $b$'s in a row. So we add the rule \prew ``$\lbrakk v \in A;\> w \in A\rbrakk \,\Longrightarrow\, b \mathbin{\#} v \mathbin{@} w \in A$'' \postw With this last change, we don't get any counterexamples from Nitpick for either soundness or completeness. We can even generalize our result to cover $A$ and $B$ as well: \prew \textbf{theorem} \textit{S\_A\_B\_sound\_and\_complete}: \\ ``$w \in S \longleftrightarrow \textit{length}~[x \mathbin{\leftarrow} w.\; x = a] = \textit{length}~[x \mathbin{\leftarrow} w.\; x = b]$'' \\ ``$w \in A \longleftrightarrow \textit{length}~[x \mathbin{\leftarrow} w.\; x = a] = \textit{length}~[x \mathbin{\leftarrow} w.\; x = b] + 1$'' \\ ``$w \in B \longleftrightarrow \textit{length}~[x \mathbin{\leftarrow} w.\; x = b] = \textit{length}~[x \mathbin{\leftarrow} w.\; x = a] + 1$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found no counterexample. \postw \subsection{AA Trees} \label{aa-trees} AA trees are a kind of balanced trees discovered by Arne Andersson that provide similar performance to red-black trees, but with a simpler implementation \cite{andersson-1993}. They can be used to store sets of elements equipped with a total order $<$. We start by defining the datatype and some basic extractor functions: \prew -\textbf{datatype} $'a$~\textit{tree} = $\Lambda$ $\mid$ $N$ ``\kern1pt$'a\Colon \textit{linorder}$'' \textit{nat} ``\kern1pt$'a$ \textit{tree}'' ``\kern1pt$'a$ \textit{tree}'' \\[2\smallskipamount] +\textbf{datatype} $'a$~\textit{aa\_tree} = \\ +\hbox{}\quad $\Lambda$ $\mid$ $N$ ``\kern1pt$'a\Colon \textit{linorder}$'' \textit{nat} ``\kern1pt$'a$ \textit{aa\_tree}'' ``\kern1pt$'a$ \textit{aa\_tree}'' \\[2\smallskipamount] \textbf{primrec} \textit{data} \textbf{where} \\ ``$\textit{data}~\Lambda = \undef$'' $\,\mid$ \\ ``$\textit{data}~(N~x~\_~\_~\_) = x$'' \\[2\smallskipamount] \textbf{primrec} \textit{dataset} \textbf{where} \\ ``$\textit{dataset}~\Lambda = \{\}$'' $\,\mid$ \\ ``$\textit{dataset}~(N~x~\_~t~u) = \{x\} \cup \textit{dataset}~t \mathrel{\cup} \textit{dataset}~u$'' \\[2\smallskipamount] \textbf{primrec} \textit{level} \textbf{where} \\ ``$\textit{level}~\Lambda = 0$'' $\,\mid$ \\ ``$\textit{level}~(N~\_~k~\_~\_) = k$'' \\[2\smallskipamount] \textbf{primrec} \textit{left} \textbf{where} \\ ``$\textit{left}~\Lambda = \Lambda$'' $\,\mid$ \\ ``$\textit{left}~(N~\_~\_~t~\_) = t$'' \\[2\smallskipamount] \textbf{primrec} \textit{right} \textbf{where} \\ ``$\textit{right}~\Lambda = \Lambda$'' $\,\mid$ \\ ``$\textit{right}~(N~\_~\_~\_~u) = u$'' \postw The wellformedness criterion for AA trees is fairly complex. Wikipedia states it as follows \cite{wikipedia-2009-aa-trees}: \kern.2\parskip %% TYPESETTING \pre Each node has a level field, and the following invariants must remain true for the tree to be valid: \raggedright \kern-.4\parskip %% TYPESETTING \begin{enum} \item[] \begin{enum} \item[1.] The level of a leaf node is one. \item[2.] The level of a left child is strictly less than that of its parent. \item[3.] The level of a right child is less than or equal to that of its parent. \item[4.] The level of a right grandchild is strictly less than that of its grandparent. \item[5.] 
Every node of level greater than one must have two children. \end{enum} \end{enum} \post \kern.4\parskip %% TYPESETTING The \textit{wf} predicate formalizes this description: \prew \textbf{primrec} \textit{wf} \textbf{where} \\ ``$\textit{wf}~\Lambda = \textit{True}$'' $\,\mid$ \\ ``$\textit{wf}~(N~\_~k~t~u) =$ \\ \phantom{``}$(\textrm{if}~t = \Lambda~\textrm{then}$ \\ \phantom{``$(\quad$}$k = 1 \mathrel{\land} (u = \Lambda \mathrel{\lor} (\textit{level}~u = 1 \mathrel{\land} \textit{left}~u = \Lambda \mathrel{\land} \textit{right}~u = \Lambda))$ \\ \phantom{``$($}$\textrm{else}$ \\ \hbox{}\phantom{``$(\quad$}$\textit{wf}~t \mathrel{\land} \textit{wf}~u \mathrel{\land} u \not= \Lambda \mathrel{\land} \textit{level}~t < k \mathrel{\land} \textit{level}~u \le k$ \\ \hbox{}\phantom{``$(\quad$}${\land}\; \textit{level}~(\textit{right}~u) < k)$'' \postw Rebalancing the tree upon insertion and removal of elements is performed by two auxiliary functions called \textit{skew} and \textit{split}, defined below: \prew \textbf{primrec} \textit{skew} \textbf{where} \\ ``$\textit{skew}~\Lambda = \Lambda$'' $\,\mid$ \\ ``$\textit{skew}~(N~x~k~t~u) = {}$ \\ \phantom{``}$(\textrm{if}~t \not= \Lambda \mathrel{\land} k = \textit{level}~t~\textrm{then}$ \\ \phantom{``(\quad}$N~(\textit{data}~t)~k~(\textit{left}~t)~(N~x~k~ (\textit{right}~t)~u)$ \\ \phantom{``(}$\textrm{else}$ \\ \phantom{``(\quad}$N~x~k~t~u)$'' \postw \prew \textbf{primrec} \textit{split} \textbf{where} \\ ``$\textit{split}~\Lambda = \Lambda$'' $\,\mid$ \\ ``$\textit{split}~(N~x~k~t~u) = {}$ \\ \phantom{``}$(\textrm{if}~u \not= \Lambda \mathrel{\land} k = \textit{level}~(\textit{right}~u)~\textrm{then}$ \\ \phantom{``(\quad}$N~(\textit{data}~u)~(\textit{Suc}~k)~ (N~x~k~t~(\textit{left}~u))~(\textit{right}~u)$ \\ \phantom{``(}$\textrm{else}$ \\ \phantom{``(\quad}$N~x~k~t~u)$'' \postw Performing a \textit{skew} or a \textit{split} should have no impact on the set of elements stored in the tree: \prew \textbf{theorem}~\textit{dataset\_skew\_split}:\\ ``$\textit{dataset}~(\textit{skew}~t) = \textit{dataset}~t$'' \\ ``$\textit{dataset}~(\textit{split}~t) = \textit{dataset}~t$'' \\ \textbf{nitpick} \\[2\smallskipamount] {\slshape Nitpick ran out of time after checking 7 of 8 scopes.} \postw Furthermore, applying \textit{skew} or \textit{split} to a well-formed tree should not alter the tree: \prew \textbf{theorem}~\textit{wf\_skew\_split}:\\ ``$\textit{wf}~t\,\Longrightarrow\, \textit{skew}~t = t$'' \\ ``$\textit{wf}~t\,\Longrightarrow\, \textit{split}~t = t$'' \\ \textbf{nitpick} \\[2\smallskipamount] {\slshape Nitpick found no counterexample.} \postw Insertion is implemented recursively. It preserves the sort order: \prew \textbf{primrec}~\textit{insort} \textbf{where} \\ ``$\textit{insort}~\Lambda~x = N~x~1~\Lambda~\Lambda$'' $\,\mid$ \\ ``$\textit{insort}~(N~y~k~t~u)~x =$ \\ \phantom{``}$({*}~(\textit{split} \circ \textit{skew})~{*})~(N~y~k~(\textrm{if}~x < y~\textrm{then}~\textit{insort}~t~x~\textrm{else}~t)$ \\ \phantom{``$({*}~(\textit{split} \circ \textit{skew})~{*})~(N~y~k~$}$(\textrm{if}~x > y~\textrm{then}~\textit{insort}~u~x~\textrm{else}~u))$'' \postw Notice that we deliberately commented out the application of \textit{skew} and \textit{split}. 
Let's see if this causes any problems: \prew \textbf{theorem}~\textit{wf\_insort}:\kern.4em ``$\textit{wf}~t\,\Longrightarrow\, \textit{wf}~(\textit{insort}~t~x)$'' \\ \textbf{nitpick} \\[2\smallskipamount] \slshape Nitpick found a counterexample for \textit{card} $'a$ = 4: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $t = N~a_3~1~\Lambda~\Lambda$ \\ -\hbox{}\qquad\qquad $x = a_4$ \\[2\smallskipamount] +\hbox{}\qquad\qquad $x = a_4$ \postw It's hard to see why this is a counterexample. To improve readability, we will restrict the theorem to \textit{nat}, so that we don't need to look up the value of the $\textit{op}~{<}$ constant to find out which element is smaller than the other. In addition, we will tell Nitpick to display the value of $\textit{insort}~t~x$ using the \textit{eval} option. This gives \prew \textbf{theorem} \textit{wf\_insort\_nat}:\kern.4em ``$\textit{wf}~t\,\Longrightarrow\, \textit{wf}~(\textit{insort}~t~(x\Colon\textit{nat}))$'' \\ \textbf{nitpick} [\textit{eval} = ``$\textit{insort}~t~x$''] \\[2\smallskipamount] \slshape Nitpick found a counterexample: \\[2\smallskipamount] \hbox{}\qquad Free variables: \nopagebreak \\ \hbox{}\qquad\qquad $t = N~1~1~\Lambda~\Lambda$ \\ \hbox{}\qquad\qquad $x = 0$ \\ \hbox{}\qquad Evaluated term: \\ \hbox{}\qquad\qquad $\textit{insort}~t~x = N~1~1~(N~0~1~\Lambda~\Lambda)~\Lambda$ \postw Nitpick's output reveals that the element $0$ was added as a left child of $1$, where both have a level of 1. This violates the second AA tree invariant, which states that a left child's level must be less than its parent's. This shouldn't come as a surprise, considering that we commented out the tree rebalancing code. Reintroducing the code seems to solve the problem: \prew \textbf{theorem}~\textit{wf\_insort}:\kern.4em ``$\textit{wf}~t\,\Longrightarrow\, \textit{wf}~(\textit{insort}~t~x)$'' \\ \textbf{nitpick} \\[2\smallskipamount] {\slshape Nitpick ran out of time after checking 6 of 8 scopes.} \postw Insertion should transform the set of elements represented by the tree in the obvious way: \prew \textbf{theorem} \textit{dataset\_insort}:\kern.4em ``$\textit{dataset}~(\textit{insort}~t~x) = \{x\} \cup \textit{dataset}~t$'' \\ \textbf{nitpick} \\[2\smallskipamount] {\slshape Nitpick ran out of time after checking 5 of 8 scopes.} \postw We could continue like this and sketch a complete theory of AA trees without performing a single proof. Once the definitions and main theorems are in place and have been thoroughly tested using Nitpick, we could start working on the proofs. Developing theories this way usually saves time, because faulty theorems and definitions are discovered much earlier in the process. 
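Incidentally, the repaired version of \textit{insort} alluded to above is obtained simply by removing the comment markers, so that the node built after each recursive call is rebalanced by $\textit{split} \circ \textit{skew}$:

\prew
\textbf{primrec}~\textit{insort} \textbf{where} \\
``$\textit{insort}~\Lambda~x = N~x~1~\Lambda~\Lambda$'' $\,\mid$ \\
``$\textit{insort}~(N~y~k~t~u)~x =$ \\
\phantom{``}$(\textit{split} \circ \textit{skew})~(N~y~k~(\textrm{if}~x < y~\textrm{then}~\textit{insort}~t~x~\textrm{else}~t)$ \\
\phantom{``$(\textit{split} \circ \textit{skew})~(N~y~k~$}$(\textrm{if}~x > y~\textrm{then}~\textit{insort}~u~x~\textrm{else}~u))$''
\postw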
\section{Option Reference} \label{option-reference} \def\flushitem#1{\item[]\noindent\kern-\leftmargin \textbf{#1}} \def\qty#1{$\left<\textit{#1}\right>$} \def\qtybf#1{$\mathbf{\left<\textbf{\textit{#1}}\right>}$} \def\optrue#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{bool}$\bigr]$\quad [\textit{true}]\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]} \def\opfalse#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{bool}$\bigr]$\quad [\textit{false}]\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]} \def\opsmart#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{bool\_or\_smart}$\bigr]$\quad [\textit{smart}]\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]} -\def\ops#1#2{\flushitem{\textit{#1} = \qtybf{#2}} \nopagebreak\\[\parskip]} -\def\opt#1#2#3{\flushitem{\textit{#1} = \qtybf{#2}\quad [\textit{#3}]} \nopagebreak\\[\parskip]} -\def\opu#1#2#3{\flushitem{\textit{#1} \qtybf{#2} = \qtybf{#3}} \nopagebreak\\[\parskip]} -\def\opusmart#1#2#3{\flushitem{\textit{#1} \qtybf{#2} $\bigl[$= \qtybf{bool\_or\_smart}$\bigr]$\hfill (neg.: \textit{#3})}\nopagebreak\\[\parskip]} +\def\opnodefault#1#2{\flushitem{\textit{#1} = \qtybf{#2}} \nopagebreak\\[\parskip]} +\def\opdefault#1#2#3{\flushitem{\textit{#1} = \qtybf{#2}\quad [\textit{#3}]} \nopagebreak\\[\parskip]} +\def\oparg#1#2#3{\flushitem{\textit{#1} \qtybf{#2} = \qtybf{#3}} \nopagebreak\\[\parskip]} +\def\opargbool#1#2#3{\flushitem{\textit{#1} \qtybf{#2} $\bigl[$= \qtybf{bool}$\bigr]$\hfill (neg.: \textit{#3})}\nopagebreak\\[\parskip]} +\def\opargboolorsmart#1#2#3{\flushitem{\textit{#1} \qtybf{#2} $\bigl[$= \qtybf{bool\_or\_smart}$\bigr]$\hfill (neg.: \textit{#3})}\nopagebreak\\[\parskip]} Nitpick's behavior can be influenced by various options, which can be specified in brackets after the \textbf{nitpick} command. Default values can be set using \textbf{nitpick\_\allowbreak params}. For example: \prew \textbf{nitpick\_params} [\textit{verbose}, \,\textit{timeout} = 60$\,s$] \postw The options are categorized as follows:\ mode of operation (\S\ref{mode-of-operation}), scope of search (\S\ref{scope-of-search}), output format (\S\ref{output-format}), automatic counterexample checks (\S\ref{authentication}), optimizations (\S\ref{optimizations}), and timeouts (\S\ref{timeouts}). You can instruct Nitpick to run automatically on newly entered theorems by enabling the ``Auto Nitpick'' option from the ``Isabelle'' menu in Proof General. For automatic runs, \textit{user\_axioms} (\S\ref{mode-of-operation}) and \textit{assms} (\S\ref{mode-of-operation}) are implicitly enabled, \textit{blocking} (\S\ref{mode-of-operation}), \textit{verbose} (\S\ref{output-format}), and \textit{debug} (\S\ref{output-format}) are disabled, \textit{max\_potential} (\S\ref{output-format}) is taken to be 0, and \textit{timeout} (\S\ref{timeouts}) is superseded by the ``Auto Counterexample Time Limit'' in Proof General's ``Isabelle'' menu. Nitpick's output is also more concise. The number of options can be overwhelming at first glance. Do not let that worry you: Nitpick's defaults have been chosen so that it almost always does the right thing, and the most important options have been covered in context in \S\ref{first-steps}. The descriptions below refer to the following syntactic quantities: \begin{enum} \item[$\bullet$] \qtybf{string}: A string. \item[$\bullet$] \qtybf{bool}: \textit{true} or \textit{false}. \item[$\bullet$] \qtybf{bool\_or\_smart}: \textit{true}, \textit{false}, or \textit{smart}. \item[$\bullet$] \qtybf{int}: An integer. Negative integers are prefixed with a hyphen. 
\item[$\bullet$] \qtybf{int\_or\_smart}: An integer or \textit{smart}. \item[$\bullet$] \qtybf{int\_range}: An integer (e.g., 3) or a range of nonnegative integers (e.g., $1$--$4$). The range symbol `--' can be entered as \texttt{-} (hyphen) or \texttt{\char`\-\char`\-} (two hyphens). \item[$\bullet$] \qtybf{int\_seq}: A comma-separated sequence of ranges of integers (e.g.,~1{,}3{,}\allowbreak6--8). \item[$\bullet$] \qtybf{time}: An integer followed by $\textit{min}$ (minutes), $s$ (seconds), or \textit{ms} (milliseconds), or the keyword \textit{none} ($\infty$ years). \item[$\bullet$] \qtybf{const}: The name of a HOL constant. \item[$\bullet$] \qtybf{term}: A HOL term (e.g., ``$f~x$''). \item[$\bullet$] \qtybf{term\_list}: A space-separated list of HOL terms (e.g., ``$f~x$''~``$g~y$''). \item[$\bullet$] \qtybf{type}: A HOL type. \end{enum} Default values are indicated in square brackets. Boolean options have a negated counterpart (e.g., \textit{blocking} vs.\ \textit{no\_blocking}). When setting Boolean options, ``= \textit{true}'' may be omitted. \subsection{Mode of Operation} \label{mode-of-operation} \begin{enum} \optrue{blocking}{non\_blocking} Specifies whether the \textbf{nitpick} command should operate synchronously. The asynchronous (non-blocking) mode lets the user start proving the putative theorem while Nitpick looks for a counterexample, but it can also be more confusing. For technical reasons, automatic runs currently always block. \optrue{falsify}{satisfy} Specifies whether Nitpick should look for falsifying examples (countermodels) or satisfying examples (models). This manual assumes throughout that \textit{falsify} is enabled. \opsmart{user\_axioms}{no\_user\_axioms} Specifies whether the user-defined axioms (specified using \textbf{axiomatization} and \textbf{axioms}) should be considered. If the option is set to \textit{smart}, Nitpick performs an ad hoc axiom selection based on the constants that occur in the formula to falsify. The option is implicitly set to \textit{true} for automatic runs. \textbf{Warning:} If the option is set to \textit{true}, Nitpick might nonetheless ignore some polymorphic axioms. Counterexamples generated under these conditions are tagged as ``likely genuine.'' The \textit{debug} (\S\ref{output-format}) option can be used to find out which axioms were considered. \nopagebreak {\small See also \textit{assms} (\S\ref{mode-of-operation}) and \textit{debug} (\S\ref{output-format}).} \optrue{assms}{no\_assms} Specifies whether the relevant assumptions in structured proofs should be considered. The option is implicitly enabled for automatic runs. \nopagebreak {\small See also \textit{user\_axioms} (\S\ref{mode-of-operation}).} \opfalse{overlord}{no\_overlord} Specifies whether Nitpick should put its temporary files in \texttt{\$ISABELLE\_\allowbreak HOME\_\allowbreak USER}, which is useful for debugging Nitpick but also unsafe if several instances of the tool are run simultaneously. \nopagebreak {\small See also \textit{debug} (\S\ref{output-format}).} \end{enum} \subsection{Scope of Search} \label{scope-of-search} \begin{enum} -\opu{card}{type}{int\_seq} +\oparg{card}{type}{int\_seq} Specifies the sequence of cardinalities to use for a given type. For free types, and often also for \textbf{typedecl}'d types, it usually makes sense to specify cardinalities as a range of the form \textit{$1$--$n$}.
Although function and product types are normally mapped directly to the corresponding Kodkod concepts, setting the cardinality of such types is also allowed and implicitly enables ``boxing'' for them, as explained in the description of the \textit{box}~\qty{type} and \textit{box} (\S\ref{scope-of-search}) options. \nopagebreak {\small See also \textit{mono} (\S\ref{scope-of-search}).} -\opt{card}{int\_seq}{$\mathbf{1}$--$\mathbf{8}$} +\opdefault{card}{int\_seq}{$\mathbf{1}$--$\mathbf{8}$} Specifies the default sequence of cardinalities to use. This can be overridden on a per-type basis using the \textit{card}~\qty{type} option described above. -\opu{max}{const}{int\_seq} +\oparg{max}{const}{int\_seq} Specifies the sequence of maximum multiplicities to use for a given (co)in\-duc\-tive datatype constructor. A constructor's multiplicity is the number of distinct values that it can construct. Nonsensical values (e.g., \textit{max}~[]~$=$~2) are silently repaired. This option is only available for datatypes equipped with several constructors. -\ops{max}{int\_seq} +\opnodefault{max}{int\_seq} Specifies the default sequence of maximum multiplicities to use for (co)in\-duc\-tive datatype constructors. This can be overridden on a per-constructor basis using the \textit{max}~\qty{const} option described above. \opsmart{binary\_ints}{unary\_ints} Specifies whether natural numbers and integers should be encoded using a unary or binary notation. In unary mode, the cardinality fully specifies the subset used to approximate the type. For example: % $$\hbox{\begin{tabular}{@{}rll@{}}% \textit{card nat} = 4 & induces & $\{0,\, 1,\, 2,\, 3\}$ \\ \textit{card int} = 4 & induces & $\{-1,\, 0,\, +1,\, +2\}$ \\ \textit{card int} = 5 & induces & $\{-2,\, -1,\, 0,\, +1,\, +2\}.$% \end{tabular}}$$ % In general: % $$\hbox{\begin{tabular}{@{}rll@{}}% \textit{card nat} = $K$ & induces & $\{0,\, \ldots,\, K - 1\}$ \\ \textit{card int} = $K$ & induces & $\{-\lceil K/2 \rceil + 1,\, \ldots,\, +\lfloor K/2 \rfloor\}.$% \end{tabular}}$$ % In binary mode, the cardinality specifies the number of distinct values that can be constructed. Each of these values is represented by a bit pattern whose length is specified by the \textit{bits} (\S\ref{scope-of-search}) option. By default, Nitpick attempts to choose the more appropriate encoding by inspecting the formula at hand, preferring the binary notation for problems involving multiplicative operators or large constants. \textbf{Warning:} For technical reasons, Nitpick always reverts to unary for problems that refer to the types \textit{rat} or \textit{real} or the constants \textit{Suc}, \textit{gcd}, or \textit{lcm}. {\small See also \textit{bits} (\S\ref{scope-of-search}) and \textit{show\_datatypes} (\S\ref{output-format}).} -\opt{bits}{int\_seq}{$\mathbf{1},\mathbf{2},\mathbf{3},\mathbf{4},\mathbf{6},\mathbf{8},\mathbf{10},\mathbf{12}$} +\opdefault{bits}{int\_seq}{$\mathbf{1},\mathbf{2},\mathbf{3},\mathbf{4},\mathbf{6},\mathbf{8},\mathbf{10},\mathbf{12}$} Specifies the number of bits to use to represent natural numbers and integers in binary, excluding the sign bit. The minimum is 1 and the maximum is 31. {\small See also \textit{binary\_ints} (\S\ref{scope-of-search}).} -\opusmart{wf}{const}{non\_wf} +\opargboolorsmart{wf}{const}{non\_wf} Specifies whether the specified (co)in\-duc\-tively defined predicate is well-founded.
The option can take the following values: \begin{enum} \item[$\bullet$] \textbf{\textit{true}}: Tentatively treat the (co)in\-duc\-tive predicate as if it were well-founded. Since this is generally not sound when the predicate is not well-founded, the counterexamples are tagged as ``likely genuine.'' \item[$\bullet$] \textbf{\textit{false}}: Treat the (co)in\-duc\-tive predicate as if it were not well-founded. The predicate is then unrolled as prescribed by the \textit{star\_linear\_preds}, \textit{iter}~\qty{const}, and \textit{iter} options. \item[$\bullet$] \textbf{\textit{smart}}: Try to prove that the inductive predicate is well-founded using Isabelle's \textit{lexicographic\_order} and -\textit{sizechange} tactics. If this succeeds (or the predicate occurs with an +\textit{size\_change} tactics. If this succeeds (or the predicate occurs with an appropriate polarity in the formula to falsify), use an efficient fixed point equation as specification of the predicate; otherwise, unroll the predicates according to the \textit{iter}~\qty{const} and \textit{iter} options. \end{enum} \nopagebreak {\small See also \textit{iter} (\S\ref{scope-of-search}), \textit{star\_linear\_preds} (\S\ref{optimizations}), and \textit{tac\_timeout} (\S\ref{timeouts}).} \opsmart{wf}{non\_wf} Specifies the default wellfoundedness setting to use. This can be overridden on a per-predicate basis using the \textit{wf}~\qty{const} option above. -\opu{iter}{const}{int\_seq} +\oparg{iter}{const}{int\_seq} Specifies the sequence of iteration counts to use when unrolling a given (co)in\-duc\-tive predicate. By default, unrolling is applied for inductive predicates that occur negatively and coinductive predicates that occur positively in the formula to falsify and that cannot be proved to be well-founded, but this behavior is influenced by the \textit{wf} option. The iteration counts are automatically bounded by the cardinality of the predicate's domain. {\small See also \textit{wf} (\S\ref{scope-of-search}) and \textit{star\_linear\_preds} (\S\ref{optimizations}).} -\opt{iter}{int\_seq}{$\mathbf{1{,}2{,}4{,}8{,}12{,}16{,}24{,}32}$} +\opdefault{iter}{int\_seq}{$\mathbf{1{,}2{,}4{,}8{,}12{,}16{,}24{,}32}$} Specifies the sequence of iteration counts to use when unrolling (co)in\-duc\-tive predicates. This can be overridden on a per-predicate basis using the \textit{iter} \qty{const} option above. -\opt{bisim\_depth}{int\_seq}{$\mathbf{7}$} +\opdefault{bisim\_depth}{int\_seq}{$\mathbf{7}$} Specifies the sequence of iteration counts to use when unrolling the bisimilarity predicate generated by Nitpick for coinductive datatypes. A value of $-1$ means that no predicate is generated, in which case Nitpick performs an after-the-fact check to see if the known coinductive datatype values are bidissimilar. If two values are found to be bisimilar, the counterexample is tagged as ``likely genuine.'' The iteration counts are automatically bounded by the sum of the cardinalities of the coinductive datatypes occurring in the formula to falsify. -\opusmart{box}{type}{dont\_box} +\opargboolorsmart{box}{type}{dont\_box} Specifies whether Nitpick should attempt to wrap (``box'') a given function or product type in an isomorphic datatype internally. Boxing is an effective means to reduce the search space and speed up Nitpick, because the isomorphic datatype is approximated by a subset of the possible function or pair values; like other drastic optimizations, it can also prevent the discovery of counterexamples.
The option can take the following values: \begin{enum} \item[$\bullet$] \textbf{\textit{true}}: Box the specified type whenever practicable. \item[$\bullet$] \textbf{\textit{false}}: Never box the type. \item[$\bullet$] \textbf{\textit{smart}}: Box the type only in contexts where it is likely to help. For example, $n$-tuples where $n > 2$ and arguments to higher-order functions are good candidates for boxing. \end{enum} Setting the \textit{card}~\qty{type} option for a function or product type implicitly enables boxing for that type. \nopagebreak {\small See also \textit{verbose} (\S\ref{output-format}) and \textit{debug} (\S\ref{output-format}).} \opsmart{box}{dont\_box} Specifies the default boxing setting to use. This can be overridden on a per-type basis using the \textit{box}~\qty{type} option described above. -\opusmart{mono}{type}{non\_mono} -Specifies whether the specified type should be considered monotonic when +\opargboolorsmart{mono}{type}{non\_mono} +Specifies whether the given type should be considered monotonic when enumerating scopes. If the option is set to \textit{smart}, Nitpick performs a monotonicity check on the type. Setting this option to \textit{true} can reduce the number of scopes tried, but it also diminishes the theoretical chance of finding a counterexample, as demonstrated in \S\ref{scope-monotonicity}. \nopagebreak {\small See also \textit{card} (\S\ref{scope-of-search}), \textit{merge\_type\_vars} (\S\ref{scope-of-search}), and \textit{verbose} (\S\ref{output-format}).} \opsmart{mono}{non\_mono} Specifies the default monotonicity setting to use. This can be overridden on a per-type basis using the \textit{mono}~\qty{type} option described above. \opfalse{merge\_type\_vars}{dont\_merge\_type\_vars} Specifies whether type variables with the same sort constraints should be merged. Setting this option to \textit{true} can reduce the number of scopes tried and the size of the generated Kodkod formulas, but it also diminishes the theoretical chance of finding a counterexample. {\small See also \textit{mono} (\S\ref{scope-of-search}).} + +\opargbool{std}{type}{non\_std} +Specifies whether the given type should be given standard models. +Nonstandard models are unsound but can help debug inductive arguments, +as explained in \S\ref{inductive-properties}. + +\optrue{std}{non\_std} +Specifies the default standardness to use. This can be overridden on a per-type +basis using the \textit{std}~\qty{type} option described above. \end{enum} \subsection{Output Format} \label{output-format} \begin{enum} \opfalse{verbose}{quiet} Specifies whether the \textbf{nitpick} command should explain what it does. This option is useful to determine which scopes are tried or which SAT solver is used. This option is implicitly disabled for automatic runs. \opfalse{debug}{no\_debug} Specifies whether Nitpick should display additional debugging information beyond what \textit{verbose} already displays. Enabling \textit{debug} also enables \textit{verbose} and \textit{show\_all} behind the scenes. The \textit{debug} option is implicitly disabled for automatic runs. \nopagebreak {\small See also \textit{overlord} (\S\ref{mode-of-operation}) and \textit{batch\_size} (\S\ref{optimizations}).} \optrue{show\_skolems}{hide\_skolems} Specifies whether the values of Skolem constants should be displayed as part of counterexamples. Skolem constants correspond to bound variables in the original formula and usually help us to understand why the counterexample falsifies the formula.
\nopagebreak {\small See also \textit{skolemize} (\S\ref{optimizations}).} \opfalse{show\_datatypes}{hide\_datatypes} Specifies whether the subsets used to approximate (co)in\-duc\-tive datatypes should be displayed as part of counterexamples. Such subsets are sometimes helpful when investigating whether a potential counterexample is genuine or spurious, but their potential for clutter is real. \opfalse{show\_consts}{hide\_consts} Specifies whether the values of constants occurring in the formula (including its axioms) should be displayed along with any counterexample. These values are sometimes helpful when investigating why a counterexample is genuine, but they can clutter the output. \opfalse{show\_all}{dont\_show\_all} Enabling this option effectively enables \textit{show\_skolems}, \textit{show\_datatypes}, and \textit{show\_consts}. -\opt{max\_potential}{int}{$\mathbf{1}$} +\opdefault{max\_potential}{int}{$\mathbf{1}$} Specifies the maximum number of potential counterexamples to display. Setting this option to 0 speeds up the search for a genuine counterexample. This option is implicitly set to 0 for automatic runs. If you set this option to a value greater than 1, you will need an incremental SAT solver: For efficiency, it is recommended to install the JNI version of MiniSat and set \textit{sat\_solver} = \textit{MiniSatJNI}. Also be aware that many of the counterexamples may look identical, unless the \textit{show\_all} (\S\ref{output-format}) option is enabled. \nopagebreak {\small See also \textit{check\_potential} (\S\ref{authentication}) and \textit{sat\_solver} (\S\ref{optimizations}).} -\opt{max\_genuine}{int}{$\mathbf{1}$} +\opdefault{max\_genuine}{int}{$\mathbf{1}$} Specifies the maximum number of genuine counterexamples to display. If you set this option to a value greater than 1, you will need an incremental SAT solver: For efficiency, it is recommended to install the JNI version of MiniSat and set \textit{sat\_solver} = \textit{MiniSatJNI}. Also be aware that many of the counterexamples may look identical, unless the \textit{show\_all} (\S\ref{output-format}) option is enabled. \nopagebreak {\small See also \textit{check\_genuine} (\S\ref{authentication}) and \textit{sat\_solver} (\S\ref{optimizations}).} -\ops{eval}{term\_list} +\opnodefault{eval}{term\_list} Specifies the list of terms whose values should be displayed along with counterexamples. This option suffers from an ``observer effect'': Nitpick might find different counterexamples for different values of this option. -\opu{format}{term}{int\_seq} +\oparg{format}{term}{int\_seq} Specifies how to uncurry the value displayed for a variable or constant. Uncurrying sometimes increases the readability of the output for high-arity functions. For example, given the variable $y \mathbin{\Colon} {'a}\Rightarrow {'b}\Rightarrow {'c}\Rightarrow {'d}\Rightarrow {'e}\Rightarrow {'f}\Rightarrow {'g}$, setting \textit{format}~$y$ = 3 tells Nitpick to group the last three arguments, as if the type had been ${'a}\Rightarrow {'b}\Rightarrow {'c}\Rightarrow {'d}\times {'e}\times {'f}\Rightarrow {'g}$. In general, a list of values $n_1,\ldots,n_k$ tells Nitpick to show the last $n_k$ arguments as an $n_k$-tuple, the previous $n_{k-1}$ arguments as an $n_{k-1}$-tuple, and so on; arguments that are not accounted for are left alone, as if the specification had been $1,\ldots,1,n_1,\ldots,n_k$. 
\nopagebreak {\small See also \textit{uncurry} (\S\ref{optimizations}).} -\opt{format}{int\_seq}{$\mathbf{1}$} +\opdefault{format}{int\_seq}{$\mathbf{1}$} Specifies the default format to use. Irrespective of the default format, the extra arguments to a Skolem constant corresponding to the outer bound variables are kept separated from the remaining arguments, the \textbf{for} arguments of an inductive definition are kept separated from the remaining arguments, and the iteration counter of an unrolled inductive definition is shown alone. The default format can be overridden on a per-variable or per-constant basis using the \textit{format}~\qty{term} option described above. \end{enum} %% MARK: Authentication \subsection{Authentication} \label{authentication} \begin{enum} \opfalse{check\_potential}{trust\_potential} Specifies whether potential counterexamples should be given to Isabelle's \textit{auto} tactic to assess their validity. If a potential counterexample is shown to be genuine, Nitpick displays a message to this effect and terminates. \nopagebreak {\small See also \textit{max\_potential} (\S\ref{output-format}).} \opfalse{check\_genuine}{trust\_genuine} Specifies whether genuine and likely genuine counterexamples should be given to Isabelle's \textit{auto} tactic to assess their validity. If a ``genuine'' counterexample is shown to be spurious, the user is kindly asked to send a bug report to the author at \texttt{blan{\color{white}nospam}\kern-\wd\boxA{}chette@in.tum.de}. \nopagebreak {\small See also \textit{max\_genuine} (\S\ref{output-format}).} -\ops{expect}{string} +\opnodefault{expect}{string} Specifies the expected outcome, which must be one of the following: \begin{enum} \item[$\bullet$] \textbf{\textit{genuine}}: Nitpick found a genuine counterexample. \item[$\bullet$] \textbf{\textit{likely\_genuine}}: Nitpick found a ``likely genuine'' counterexample (i.e., a counterexample that is genuine unless it contradicts a missing axiom or a dangerous option was used inappropriately). \item[$\bullet$] \textbf{\textit{potential}}: Nitpick found a potential counterexample. \item[$\bullet$] \textbf{\textit{none}}: Nitpick found no counterexample. \item[$\bullet$] \textbf{\textit{unknown}}: Nitpick encountered some problem (e.g., Kodkod ran out of memory). \end{enum} Nitpick emits an error if the actual outcome differs from the expected outcome. This option is useful for regression testing. \end{enum} \subsection{Optimizations} \label{optimizations} \def\cpp{C\nobreak\raisebox{.1ex}{+}\nobreak\raisebox{.1ex}{+}} \sloppy \begin{enum} -\opt{sat\_solver}{string}{smart} +\opdefault{sat\_solver}{string}{smart} Specifies which SAT solver to use. SAT solvers implemented in C or \cpp{} tend to be faster than their Java counterparts, but they can be more difficult to install. Also, if you set the \textit{max\_potential} (\S\ref{output-format}) or \textit{max\_genuine} (\S\ref{output-format}) option to a value greater than 1, you will need an incremental SAT solver, such as \textit{MiniSatJNI} (recommended) or \textit{SAT4J}. The supported solvers are listed below: \begin{enum} \item[$\bullet$] \textbf{\textit{MiniSat}}: MiniSat is an efficient solver written in \cpp{}. To use MiniSat, set the environment variable \texttt{MINISAT\_HOME} to the directory that contains the \texttt{minisat} executable. The \cpp{} sources and executables for MiniSat are available at \url{http://minisat.se/MiniSat.html}. Nitpick has been tested with versions 1.14 and 2.0 beta (2007-07-21).
\item[$\bullet$] \textbf{\textit{MiniSatJNI}}: The JNI (Java Native Interface) version of MiniSat is bundled in \texttt{nativesolver.\allowbreak tgz}, which you will find on Kodkod's web site \cite{kodkod-2009}. Unlike the standard version of MiniSat, the JNI version can be used incrementally. %%% No longer true: %%% "It is bundled with Kodkodi and requires no further installation or %%% configuration steps. Alternatively," \item[$\bullet$] \textbf{\textit{PicoSAT}}: PicoSAT is an efficient solver written in C. You can install a standard version of PicoSAT and set the environment variable \texttt{PICOSAT\_HOME} to the directory that contains the \texttt{picosat} executable. The C sources for PicoSAT are available at \url{http://fmv.jku.at/picosat/} and are also bundled with Kodkodi. Nitpick has been tested with version 913. \item[$\bullet$] \textbf{\textit{zChaff}}: zChaff is an efficient solver written in \cpp{}. To use zChaff, set the environment variable \texttt{ZCHAFF\_HOME} to the directory that contains the \texttt{zchaff} executable. The \cpp{} sources and executables for zChaff are available at \url{http://www.princeton.edu/~chaff/zchaff.html}. Nitpick has been tested with versions 2004-05-13, 2004-11-15, and 2007-03-12. \item[$\bullet$] \textbf{\textit{zChaffJNI}}: The JNI version of zChaff is bundled in \texttt{native\-solver.\allowbreak tgz}, which you will find on Kodkod's web site \cite{kodkod-2009}. \item[$\bullet$] \textbf{\textit{RSat}}: RSat is an efficient solver written in \cpp{}. To use RSat, set the environment variable \texttt{RSAT\_HOME} to the directory that contains the \texttt{rsat} executable. The \cpp{} sources for RSat are available at \url{http://reasoning.cs.ucla.edu/rsat/}. Nitpick has been tested with version 2.01. \item[$\bullet$] \textbf{\textit{BerkMin}}: BerkMin561 is an efficient solver written in C. To use BerkMin, set the environment variable \texttt{BERKMIN\_HOME} to the directory that contains the \texttt{BerkMin561} executable. The BerkMin executables are available at \url{http://eigold.tripod.com/BerkMin.html}. \item[$\bullet$] \textbf{\textit{BerkMinAlloy}}: Variant of BerkMin that is included with Alloy 4 and calls itself ``sat56'' in its banner text. To use this version of BerkMin, set the environment variable \texttt{BERKMINALLOY\_HOME} to the directory that contains the \texttt{berkmin} executable. \item[$\bullet$] \textbf{\textit{Jerusat}}: Jerusat 1.3 is an efficient solver written in C. To use Jerusat, set the environment variable \texttt{JERUSAT\_HOME} to the directory that contains the \texttt{Jerusat1.3} executable. The C sources for Jerusat are available at \url{http://www.cs.tau.ac.il/~ale1/Jerusat1.3.tgz}. \item[$\bullet$] \textbf{\textit{SAT4J}}: SAT4J is a reasonably efficient solver written in Java that can be used incrementally. It is bundled with Kodkodi and requires no further installation or configuration steps. Do not attempt to install the official SAT4J packages, because their API is incompatible with Kodkod. \item[$\bullet$] \textbf{\textit{SAT4JLight}}: Variant of SAT4J that is optimized for small problems. It can also be used incrementally. \item[$\bullet$] \textbf{\textit{HaifaSat}}: HaifaSat 1.0 beta is an experimental solver written in \cpp. To use HaifaSat, set the environment variable \texttt{HAIFASAT\_\allowbreak HOME} to the directory that contains the \texttt{HaifaSat} executable. The \cpp{} sources for HaifaSat are available at \url{http://cs.technion.ac.il/~gershman/HaifaSat.htm}. 
\item[$\bullet$] \textbf{\textit{smart}}: If \textit{sat\_solver} is set to \textit{smart}, Nitpick selects the first solver among MiniSat, PicoSAT, zChaff, RSat, BerkMin, BerkMinAlloy, Jerusat, MiniSatJNI, and zChaffJNI that is recognized by Isabelle. If none is found, it falls back on SAT4J, which should always be available. If \textit{verbose} (\S\ref{output-format}) is enabled, Nitpick displays which SAT solver was chosen. \end{enum} \fussy -\opt{batch\_size}{int\_or\_smart}{smart} +\opdefault{batch\_size}{int\_or\_smart}{smart} Specifies the maximum number of Kodkod problems that should be lumped together when invoking Kodkodi. Each problem corresponds to one scope. Lumping problems together ensures that Kodkodi is launched less often, but it makes the verbose output less readable and is sometimes detrimental to performance. If \textit{batch\_size} is set to \textit{smart}, the actual value used is 1 if \textit{debug} (\S\ref{output-format}) is set and 64 otherwise. \optrue{destroy\_constrs}{dont\_destroy\_constrs} Specifies whether formulas involving (co)in\-duc\-tive datatype constructors should be rewritten to use (automatically generated) discriminators and destructors. This optimization can drastically reduce the size of the Boolean formulas given to the SAT solver. \nopagebreak {\small See also \textit{debug} (\S\ref{output-format}).} \optrue{specialize}{dont\_specialize} Specifies whether functions invoked with static arguments should be specialized. This optimization can drastically reduce the search space, especially for higher-order functions. \nopagebreak {\small See also \textit{debug} (\S\ref{output-format}) and \textit{show\_consts} (\S\ref{output-format}).} \optrue{skolemize}{dont\_skolemize} Specifies whether the formula should be skolemized. For performance reasons, (positive) $\forall$-quanti\-fiers that occur in the scope of a higher-order (positive) $\exists$-quanti\-fier are left unchanged. \nopagebreak {\small See also \textit{debug} (\S\ref{output-format}) and \textit{show\_skolems} (\S\ref{output-format}).} \optrue{star\_linear\_preds}{dont\_star\_linear\_preds} Specifies whether Nitpick should use Kodkod's transitive closure operator to encode non-well-founded ``linear inductive predicates,'' i.e., inductive predicates for which the predicate occurs in at most one assumption of each introduction rule. Using the reflexive transitive closure is in principle equivalent to setting \textit{iter} to the cardinality of the predicate's domain, but it is usually more efficient. {\small See also \textit{wf} (\S\ref{scope-of-search}), \textit{debug} (\S\ref{output-format}), and \textit{iter} (\S\ref{scope-of-search}).} \optrue{uncurry}{dont\_uncurry} Specifies whether Nitpick should uncurry functions. Uncurrying has no tangible effect on efficiency by itself, but it creates opportunities for the boxing optimization. \nopagebreak {\small See also \textit{box} (\S\ref{scope-of-search}), \textit{debug} (\S\ref{output-format}), and \textit{format} (\S\ref{output-format}).} \optrue{fast\_descrs}{full\_descrs} Specifies whether Nitpick should optimize the definite and indefinite description operators (THE and SOME). The optimized versions usually help Nitpick generate more counterexamples or at least find them faster, but only the unoptimized versions are complete when all types occurring in the formula are finite.
{\small See also \textit{debug} (\S\ref{output-format}).} \optrue{peephole\_optim}{no\_peephole\_optim} Specifies whether Nitpick should simplify the generated Kodkod formulas using a peephole optimizer. These optimizations can make a significant difference. Unless you are tracking down a bug in Nitpick or distrust the peephole optimizer, you should leave this option enabled. -\opt{sym\_break}{int}{20} +\opdefault{sym\_break}{int}{20} Specifies an upper bound on the number of relations for which Kodkod generates symmetry breaking predicates. According to the Kodkod documentation \cite{kodkod-2009-options}, ``in general, the higher this value, the more symmetries will be broken, and the faster the formula will be solved. But, setting the value too high may have the opposite effect and slow down the solving.'' -\opt{sharing\_depth}{int}{3} +\opdefault{sharing\_depth}{int}{3} Specifies the depth to which Kodkod should check circuits for equivalence during the translation to SAT. The default of 3 is the same as in Alloy. The minimum allowed depth is 1. Increasing the sharing may result in a smaller SAT problem, but can also slow down Kodkod. \opfalse{flatten\_props}{dont\_flatten\_props} Specifies whether Kodkod should try to eliminate intermediate Boolean variables. Although this might sound like a good idea, in practice it can drastically slow down Kodkod. -\opt{max\_threads}{int}{0} +\opdefault{max\_threads}{int}{0} Specifies the maximum number of threads to use in Kodkod. If this option is set to 0, Kodkod will compute an appropriate value based on the number of processor cores available. \nopagebreak {\small See also \textit{batch\_size} (\S\ref{optimizations}) and \textit{timeout} (\S\ref{timeouts}).} \end{enum} \subsection{Timeouts} \label{timeouts} \begin{enum} -\opt{timeout}{time}{$\mathbf{30}$ s} +\opdefault{timeout}{time}{$\mathbf{30}$ s} Specifies the maximum amount of time that the \textbf{nitpick} command should spend looking for a counterexample. Nitpick tries to honor this constraint as well as it can but offers no guarantees. For automatic runs, \textit{timeout} is ignored; instead, Auto Quickcheck and Auto Nitpick share a time slot whose length is specified by the ``Auto Counterexample Time Limit'' option in Proof General. \nopagebreak {\small See also \textit{max\_threads} (\S\ref{optimizations}).} -\opt{tac\_timeout}{time}{$\mathbf{500}$\,ms} +\opdefault{tac\_timeout}{time}{$\mathbf{500}$\,ms} Specifies the maximum amount of time that the \textit{auto} tactic should use when checking a counterexample, and similarly that \textit{lexicographic\_order} -and \textit{sizechange} should use when checking whether a (co)in\-duc\-tive +and \textit{size\_change} should use when checking whether a (co)in\-duc\-tive predicate is well-founded. Nitpick tries to honor this constraint as well as it can but offers no guarantees. \nopagebreak {\small See also \textit{wf} (\S\ref{scope-of-search}), \textit{check\_potential} (\S\ref{authentication}), and \textit{check\_genuine} (\S\ref{authentication}).} \end{enum} \section{Attribute Reference} \label{attribute-reference} Nitpick needs to consider the definitions of all constants occurring in a formula in order to falsify it. For constants introduced using the \textbf{definition} command, the definition is simply the associated \textit{\_def} axiom. 
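For instance, a (hypothetical) constant introduced by

\prew
\textbf{definition}~\textit{sq}~\textbf{where}~``$\textit{sq}~n = n * n$''
\postw

is handled through the generated axiom $\textit{sq\_def}$, which equates $\textit{sq}~n$ with $n * n$.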
In contrast, instead of using the internal representation of functions synthesized by Isabelle's \textbf{primrec}, \textbf{function}, and \textbf{nominal\_primrec} packages, Nitpick relies on the more natural equational specification entered by the user. Behind the scenes, Isabelle's built-in packages and theories rely on the following attributes to affect Nitpick's behavior: \begin{itemize} \flushitem{\textit{nitpick\_def}} \nopagebreak This attribute specifies an alternative definition of a constant. The alternative definition should be logically equivalent to the constant's actual axiomatic definition and should be of the form \qquad $c~{?}x_1~\ldots~{?}x_n \,\equiv\, t$, where ${?}x_1, \ldots, {?}x_n$ are distinct variables and $c$ does not occur in $t$. \flushitem{\textit{nitpick\_simp}} \nopagebreak This attribute specifies the equations that constitute the specification of a constant. For functions defined using the \textbf{primrec}, \textbf{function}, and \textbf{nominal\_\allowbreak primrec} packages, this corresponds to the \textit{simps} rules. The equations must be of the form \qquad $c~t_1~\ldots\ t_n \,=\, u.$ \flushitem{\textit{nitpick\_psimp}} \nopagebreak This attribute specifies the equations that constitute the partial specification of a constant. For functions defined using the \textbf{function} package, this corresponds to the \textit{psimps} rules. The conditional equations must be of the form \qquad $\lbrakk P_1;\> \ldots;\> P_m\rbrakk \,\Longrightarrow\, c\ t_1\ \ldots\ t_n \,=\, u$. \flushitem{\textit{nitpick\_intro}} \nopagebreak This attribute specifies the introduction rules of a (co)in\-duc\-tive predicate. For predicates defined using the \textbf{inductive} or \textbf{coinductive} command, this corresponds to the \textit{intros} rules. The introduction rules must be of the form \qquad $\lbrakk P_1;\> \ldots;\> P_m;\> M~(c\ t_{11}\ \ldots\ t_{1n});\> \ldots;\> M~(c\ t_{k1}\ \ldots\ t_{kn})\rbrakk \,\Longrightarrow\, c\ u_1\ \ldots\ u_n$, where the $P_i$'s are side conditions that do not involve $c$ and $M$ is an optional monotonic operator. The order of the assumptions is irrelevant. \end{itemize} When faced with a constant, Nitpick proceeds as follows: \begin{enum} \item[1.] If the \textit{nitpick\_simp} set associated with the constant is not empty, Nitpick uses these rules as the specification of the constant. \item[2.] Otherwise, if the \textit{nitpick\_psimp} set associated with the constant is not empty, it uses these rules as the specification of the constant. \item[3.] Otherwise, it looks up the definition of the constant: \begin{enum} \item[1.] If the \textit{nitpick\_def} set associated with the constant is not empty, it uses the latest rule added to the set as the definition of the constant; otherwise it uses the actual definition axiom. \item[2.] If the definition is of the form \qquad $c~{?}x_1~\ldots~{?}x_m \,\equiv\, \lambda y_1~\ldots~y_n.\; \textit{lfp}~(\lambda f.\; t)$, then Nitpick assumes that the definition was made using an inductive package and based on the introduction rules marked with \textit{nitpick\_\allowbreak ind\_\allowbreak intros} tries to determine whether the definition is well-founded. \end{enum} \end{enum} As an illustration, consider the inductive definition \prew \textbf{inductive}~\textit{odd}~\textbf{where} \\ ``\textit{odd}~1'' $\,\mid$ \\ ``\textit{odd}~$n\,\Longrightarrow\, \textit{odd}~(\textit{Suc}~(\textit{Suc}~n))$'' \postw Isabelle automatically attaches the \textit{nitpick\_intro} attribute to the above rules. 
Nitpick then uses the \textit{lfp}-based definition in conjunction with these rules. To override this, we can specify an alternative definition as follows: \prew \textbf{lemma} $\mathit{odd\_def}'$ [\textit{nitpick\_def}]: ``$\textit{odd}~n \,\equiv\, n~\textrm{mod}~2 = 1$'' \postw Nitpick then expands all occurrences of $\mathit{odd}~n$ to $n~\textrm{mod}~2 = 1$. Alternatively, we can specify an equational specification of the constant: \prew \textbf{lemma} $\mathit{odd\_simp}'$ [\textit{nitpick\_simp}]: ``$\textit{odd}~n = (n~\textrm{mod}~2 = 1)$'' \postw Such tweaks should be done with great care, because Nitpick will assume that the constant is completely defined by its equational specification. For example, if you make ``$\textit{odd}~(2 * k + 1)$'' a \textit{nitpick\_simp} rule and neglect to provide rules to handle the $2 * k$ case, Nitpick will define $\textit{odd}~n$ arbitrarily for even values of $n$. The \textit{debug} (\S\ref{output-format}) option is extremely useful to understand what is going on when experimenting with \textit{nitpick\_} attributes. \section{Standard ML Interface} \label{standard-ml-interface} Nitpick provides a rich Standard ML interface used mainly for internal purposes and debugging. Among the most interesting functions exported by Nitpick are those that let you invoke the tool programmatically and those that let you register and unregister custom coinductive datatypes. \subsection{Invocation of Nitpick} \label{invocation-of-nitpick} The \textit{Nitpick} structure offers the following functions for invoking your favorite counterexample generator: \prew $\textbf{val}\,~\textit{pick\_nits\_in\_term} : \\ \hbox{}\quad\textit{Proof.state} \rightarrow \textit{params} \rightarrow \textit{bool} \rightarrow \textit{term~list} \rightarrow \textit{term} \\ \hbox{}\quad{\rightarrow}\; \textit{string} * \textit{Proof.state}$ \\ $\textbf{val}\,~\textit{pick\_nits\_in\_subgoal} : \\ \hbox{}\quad\textit{Proof.state} \rightarrow \textit{params} \rightarrow \textit{bool} \rightarrow \textit{int} \rightarrow \textit{string} * \textit{Proof.state}$ \postw The return value is a new proof state paired with an outcome string (``genuine'', ``likely\_genuine'', ``potential'', ``none'', or ``unknown''). The \textit{params} type is a large record that lets you set Nitpick's options. The current default options can be retrieved by calling the following function defined in the \textit{Nitpick\_Isar} structure: \prew $\textbf{val}\,~\textit{default\_params} :\, \textit{theory} \rightarrow (\textit{string} * \textit{string})~\textit{list} \rightarrow \textit{params}$ \postw The second argument lets you override option values before they are parsed and put into a \textit{params} record. Here is an example: \prew $\textbf{val}\,~\textit{params} = \textit{Nitpick\_Isar.default\_params}~\textit{thy}~[(\textrm{``}\textrm{timeout}\textrm{''},\, \textrm{``}\textrm{none}\textrm{''})]$ \\ $\textbf{val}\,~(\textit{outcome},\, \textit{state}') = \textit{Nitpick.pick\_nits\_in\_subgoal}~\begin{aligned}[t] & \textit{state}~\textit{params}~\textit{false} \\[-2pt] & \textit{subgoal}\end{aligned}$ \postw \let\antiq=\textrm \subsection{Registration of Coinductive Datatypes} \label{registration-of-coinductive-datatypes} If you have defined a custom coinductive datatype, you can tell Nitpick about it, so that it can use an efficient Kodkod axiomatization similar to the one it uses for lazy lists. 
The interface for registering and unregistering coinductive datatypes consists of the following pair of functions defined in the \textit{Nitpick} structure: \prew $\textbf{val}\,~\textit{register\_codatatype} :\, \textit{typ} \rightarrow \textit{string} \rightarrow \textit{styp~list} \rightarrow \textit{theory} \rightarrow \textit{theory}$ \\ $\textbf{val}\,~\textit{unregister\_codatatype} :\, \textit{typ} \rightarrow \textit{theory} \rightarrow \textit{theory}$ \postw The type $'a~\textit{llist}$ of lazy lists is already registered; had it not been, you could have told Nitpick about it by adding the following line to your theory file: \prew $\textbf{setup}~\,\{{*}\,~\!\begin{aligned}[t] & \textit{Nitpick.register\_codatatype} \\[-2pt] & \qquad @\{\antiq{typ}~``\kern1pt'a~\textit{llist}\textrm{''}\}~@\{\antiq{const\_name}~ \textit{llist\_case}\} \\[-2pt] %% TYPESETTING & \qquad (\textit{map}~\textit{dest\_Const}~[@\{\antiq{term}~\textit{LNil}\},\, @\{\antiq{term}~\textit{LCons}\}])\,\ {*}\}\end{aligned}$ \postw The \textit{register\_codatatype} function takes a coinductive type, its case function, and the list of its constructors. The case function must take its arguments in the order that the constructors are listed. If no case function with the correct signature is available, simply pass the empty string. On the other hand, if your goal is to cripple Nitpick, add the following line to your theory file and try to check a few conjectures about lazy lists: \prew $\textbf{setup}~\,\{{*}\,~\textit{Nitpick.unregister\_codatatype}~@\{\antiq{typ}~``\kern1pt'a~\textit{llist}\textrm{''}\}\ \,{*}\}$ \postw Inductive datatypes can be registered as coinductive datatypes, given appropriate coinductive constructors. However, doing so precludes the use of the inductive constructors---Nitpick will generate an error if they are needed. \section{Known Bugs and Limitations} \label{known-bugs-and-limitations} Here are the known bugs and limitations in Nitpick at the time of writing: \begin{enum} \item[$\bullet$] Underspecified functions defined using the \textbf{primrec}, \textbf{function}, or \textbf{nominal\_\allowbreak primrec} packages can lead Nitpick to generate spurious counterexamples for theorems that refer to values for which the function is not defined. For example: \prew \textbf{primrec} \textit{prec} \textbf{where} \\ ``$\textit{prec}~(\textit{Suc}~n) = n$'' \\[2\smallskipamount] \textbf{lemma} ``$\textit{prec}~0 = \undef$'' \\ \textbf{nitpick} \\[2\smallskipamount] \quad{\slshape Nitpick found a counterexample for \textit{card nat}~= 2: \nopagebreak \\[2\smallskipamount] \hbox{}\qquad Empty assignment} \nopagebreak\\[2\smallskipamount] -\textbf{by}~(\textit{auto simp}: \textit{prec\_def}) +\textbf{by}~(\textit{auto simp}:~\textit{prec\_def}) \postw Such theorems are considered bad style because they rely on the internal representation of functions synthesized by Isabelle, which is an implementation detail. \item[$\bullet$] Nitpick maintains a global cache of well-foundedness conditions, which can become invalid if you change the definition of an inductive predicate that is registered in the cache. To clear the cache, run Nitpick with the \textit{tac\_timeout} option set to a new value (e.g., 501$\,\textit{ms}$). \item[$\bullet$] Nitpick produces spurious counterexamples when invoked after a \textbf{guess} command in a structured proof. \item[$\bullet$] The \textit{nitpick\_} attributes and the \textit{Nitpick.register\_} functions can cause havoc if used improperly.
\item[$\bullet$] Although this has never been observed, arbitrary theorem morphisms could possibly confuse Nitpick, resulting in spurious counterexamples. \item[$\bullet$] Local definitions are not supported and result in an error. %\item[$\bullet$] All constants and types whose names start with %\textit{Nitpick}{.} are reserved for internal use. \end{enum} \let\em=\sl \bibliography{../manual}{} \bibliographystyle{abbrv} \end{document} diff --git a/src/HOL/Divides.thy b/src/HOL/Divides.thy --- a/src/HOL/Divides.thy +++ b/src/HOL/Divides.thy @@ -1,2499 +1,2500 @@ (* Title: HOL/Divides.thy Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1999 University of Cambridge *) header {* The division operators div and mod *} theory Divides imports Nat_Numeral Nat_Transfer uses "~~/src/Provers/Arith/cancel_div_mod.ML" begin subsection {* Syntactic division operations *} class div = dvd + fixes div :: "'a \ 'a \ 'a" (infixl "div" 70) and mod :: "'a \ 'a \ 'a" (infixl "mod" 70) subsection {* Abstract division in commutative semirings. *} class semiring_div = comm_semiring_1_cancel + no_zero_divisors + div + assumes mod_div_equality: "a div b * b + a mod b = a" and div_by_0 [simp]: "a div 0 = 0" and div_0 [simp]: "0 div a = 0" and div_mult_self1 [simp]: "b \ 0 \ (a + c * b) div b = c + a div b" and div_mult_mult1 [simp]: "c \ 0 \ (c * a) div (c * b) = a div b" begin text {* @{const div} and @{const mod} *} lemma mod_div_equality2: "b * (a div b) + a mod b = a" unfolding mult_commute [of b] by (rule mod_div_equality) lemma mod_div_equality': "a mod b + a div b * b = a" using mod_div_equality [of a b] by (simp only: add_ac) lemma div_mod_equality: "((a div b) * b + a mod b) + c = a + c" by (simp add: mod_div_equality) lemma div_mod_equality2: "(b * (a div b) + a mod b) + c = a + c" by (simp add: mod_div_equality2) lemma mod_by_0 [simp]: "a mod 0 = a" using mod_div_equality [of a zero] by simp lemma mod_0 [simp]: "0 mod a = 0" using mod_div_equality [of zero a] div_0 by simp lemma div_mult_self2 [simp]: assumes "b \ 0" shows "(a + b * c) div b = c + a div b" using assms div_mult_self1 [of b a c] by (simp add: mult_commute) lemma mod_mult_self1 [simp]: "(a + c * b) mod b = a mod b" proof (cases "b = 0") case True then show ?thesis by simp next case False have "a + c * b = (a + c * b) div b * b + (a + c * b) mod b" by (simp add: mod_div_equality) also from False div_mult_self1 [of b a c] have "\ = (c + a div b) * b + (a + c * b) mod b" by (simp add: algebra_simps) finally have "a = a div b * b + (a + c * b) mod b" by (simp add: add_commute [of a] add_assoc left_distrib) then have "a div b * b + (a + c * b) mod b = a div b * b + a mod b" by (simp add: mod_div_equality) then show ?thesis by simp qed lemma mod_mult_self2 [simp]: "(a + b * c) mod b = a mod b" by (simp add: mult_commute [of b]) lemma div_mult_self1_is_id [simp]: "b \ 0 \ b * a div b = a" using div_mult_self2 [of b 0 a] by simp lemma div_mult_self2_is_id [simp]: "b \ 0 \ a * b div b = a" using div_mult_self1 [of b 0 a] by simp lemma mod_mult_self1_is_0 [simp]: "b * a mod b = 0" using mod_mult_self2 [of 0 b a] by simp lemma mod_mult_self2_is_0 [simp]: "a * b mod b = 0" using mod_mult_self1 [of 0 a b] by simp lemma div_by_1 [simp]: "a div 1 = a" using div_mult_self2_is_id [of 1 a] zero_neq_one by simp lemma mod_by_1 [simp]: "a mod 1 = 0" proof - from mod_div_equality [of a one] div_by_1 have "a + a mod 1 = a" by simp then have "a + a mod 1 = a + 0" by simp then show ?thesis by (rule add_left_imp_eq) qed lemma mod_self 
[simp]: "a mod a = 0" using mod_mult_self2_is_0 [of 1] by simp lemma div_self [simp]: "a \ 0 \ a div a = 1" using div_mult_self2_is_id [of _ 1] by simp lemma div_add_self1 [simp]: assumes "b \ 0" shows "(b + a) div b = a div b + 1" using assms div_mult_self1 [of b a 1] by (simp add: add_commute) lemma div_add_self2 [simp]: assumes "b \ 0" shows "(a + b) div b = a div b + 1" using assms div_add_self1 [of b a] by (simp add: add_commute) lemma mod_add_self1 [simp]: "(b + a) mod b = a mod b" using mod_mult_self1 [of a 1 b] by (simp add: add_commute) lemma mod_add_self2 [simp]: "(a + b) mod b = a mod b" using mod_mult_self1 [of a 1 b] by simp lemma mod_div_decomp: fixes a b obtains q r where "q = a div b" and "r = a mod b" and "a = q * b + r" proof - from mod_div_equality have "a = a div b * b + a mod b" by simp moreover have "a div b = a div b" .. moreover have "a mod b = a mod b" .. note that ultimately show thesis by blast qed lemma dvd_eq_mod_eq_0 [code, code_unfold, code_inline del]: "a dvd b \ b mod a = 0" proof assume "b mod a = 0" with mod_div_equality [of b a] have "b div a * a = b" by simp then have "b = a * (b div a)" unfolding mult_commute .. then have "\c. b = a * c" .. then show "a dvd b" unfolding dvd_def . next assume "a dvd b" then have "\c. b = a * c" unfolding dvd_def . then obtain c where "b = a * c" .. then have "b mod a = a * c mod a" by simp then have "b mod a = c * a mod a" by (simp add: mult_commute) then show "b mod a = 0" by simp qed lemma mod_div_trivial [simp]: "a mod b div b = 0" proof (cases "b = 0") assume "b = 0" thus ?thesis by simp next assume "b \ 0" hence "a div b + a mod b div b = (a mod b + a div b * b) div b" by (rule div_mult_self1 [symmetric]) also have "\ = a div b" by (simp only: mod_div_equality') also have "\ = a div b + 0" by simp finally show ?thesis by (rule add_left_imp_eq) qed lemma mod_mod_trivial [simp]: "a mod b mod b = a mod b" proof - have "a mod b mod b = (a mod b + a div b * b) mod b" by (simp only: mod_mult_self1) also have "\ = a mod b" by (simp only: mod_div_equality') finally show ?thesis . qed lemma dvd_imp_mod_0: "a dvd b \ b mod a = 0" by (rule dvd_eq_mod_eq_0[THEN iffD1]) lemma dvd_div_mult_self: "a dvd b \ (b div a) * a = b" by (subst (2) mod_div_equality [of b a, symmetric]) (simp add:dvd_imp_mod_0) lemma dvd_mult_div_cancel: "a dvd b \ a * (b div a) = b" by (drule dvd_div_mult_self) (simp add: mult_commute) lemma dvd_div_mult: "a dvd b \ (b div a) * c = b * c div a" apply (cases "a = 0") apply simp apply (auto simp: dvd_def mult_assoc) done lemma div_dvd_div[simp]: "a dvd b \ a dvd c \ (b div a dvd c div a) = (b dvd c)" apply (cases "a = 0") apply simp apply (unfold dvd_def) apply auto apply(blast intro:mult_assoc[symmetric]) apply(fastsimp simp add: mult_assoc) done lemma dvd_mod_imp_dvd: "[| k dvd m mod n; k dvd n |] ==> k dvd m" apply (subgoal_tac "k dvd (m div n) *n + m mod n") apply (simp add: mod_div_equality) apply (simp only: dvd_add dvd_mult) done text {* Addition respects modular equivalence. *} lemma mod_add_left_eq: "(a + b) mod c = (a mod c + b) mod c" proof - have "(a + b) mod c = (a div c * c + a mod c + b) mod c" by (simp only: mod_div_equality) also have "\ = (a mod c + b + a div c * c) mod c" by (simp only: add_ac) also have "\ = (a mod c + b) mod c" by (rule mod_mult_self1) finally show ?thesis . 
qed lemma mod_add_right_eq: "(a + b) mod c = (a + b mod c) mod c" proof - have "(a + b) mod c = (a + (b div c * c + b mod c)) mod c" by (simp only: mod_div_equality) also have "\ = (a + b mod c + b div c * c) mod c" by (simp only: add_ac) also have "\ = (a + b mod c) mod c" by (rule mod_mult_self1) finally show ?thesis . qed lemma mod_add_eq: "(a + b) mod c = (a mod c + b mod c) mod c" by (rule trans [OF mod_add_left_eq mod_add_right_eq]) lemma mod_add_cong: assumes "a mod c = a' mod c" assumes "b mod c = b' mod c" shows "(a + b) mod c = (a' + b') mod c" proof - have "(a mod c + b mod c) mod c = (a' mod c + b' mod c) mod c" unfolding assms .. thus ?thesis by (simp only: mod_add_eq [symmetric]) qed lemma div_add [simp]: "z dvd x \ z dvd y \ (x + y) div z = x div z + y div z" by (cases "z = 0", simp, unfold dvd_def, auto simp add: algebra_simps) text {* Multiplication respects modular equivalence. *} lemma mod_mult_left_eq: "(a * b) mod c = ((a mod c) * b) mod c" proof - have "(a * b) mod c = ((a div c * c + a mod c) * b) mod c" by (simp only: mod_div_equality) also have "\ = (a mod c * b + a div c * b * c) mod c" by (simp only: algebra_simps) also have "\ = (a mod c * b) mod c" by (rule mod_mult_self1) finally show ?thesis . qed lemma mod_mult_right_eq: "(a * b) mod c = (a * (b mod c)) mod c" proof - have "(a * b) mod c = (a * (b div c * c + b mod c)) mod c" by (simp only: mod_div_equality) also have "\ = (a * (b mod c) + a * (b div c) * c) mod c" by (simp only: algebra_simps) also have "\ = (a * (b mod c)) mod c" by (rule mod_mult_self1) finally show ?thesis . qed lemma mod_mult_eq: "(a * b) mod c = ((a mod c) * (b mod c)) mod c" by (rule trans [OF mod_mult_left_eq mod_mult_right_eq]) lemma mod_mult_cong: assumes "a mod c = a' mod c" assumes "b mod c = b' mod c" shows "(a * b) mod c = (a' * b') mod c" proof - have "(a mod c * (b mod c)) mod c = (a' mod c * (b' mod c)) mod c" unfolding assms .. thus ?thesis by (simp only: mod_mult_eq [symmetric]) qed lemma mod_mod_cancel: assumes "c dvd b" shows "a mod b mod c = a mod c" proof - from `c dvd b` obtain k where "b = c * k" by (rule dvdE) have "a mod b mod c = a mod (c * k) mod c" by (simp only: `b = c * k`) also have "\ = (a mod (c * k) + a div (c * k) * k * c) mod c" by (simp only: mod_mult_self1) also have "\ = (a div (c * k) * (c * k) + a mod (c * k)) mod c" by (simp only: add_ac mult_ac) also have "\ = a mod c" by (simp only: mod_div_equality) finally show ?thesis . qed lemma div_mult_div_if_dvd: "y dvd x \ z dvd w \ (x div y) * (w div z) = (x * w) div (y * z)" apply (cases "y = 0", simp) apply (cases "z = 0", simp) apply (auto elim!: dvdE simp add: algebra_simps) apply (subst mult_assoc [symmetric]) apply (simp add: no_zero_divisors) done lemma div_mult_mult2 [simp]: "c \ 0 \ (a * c) div (b * c) = a div b" by (drule div_mult_mult1) (simp add: mult_commute) lemma div_mult_mult1_if [simp]: "(c * a) div (c * b) = (if c = 0 then 0 else a div b)" by simp_all lemma mod_mult_mult1: "(c * a) mod (c * b) = c * (a mod b)" proof (cases "c = 0") case True then show ?thesis by simp next case False from mod_div_equality have "((c * a) div (c * b)) * (c * b) + (c * a) mod (c * b) = c * a" . 
with False have "c * ((a div b) * b + a mod b) + (c * a) mod (c * b) = c * a + c * (a mod b)" by (simp add: algebra_simps) with mod_div_equality show ?thesis by simp qed lemma mod_mult_mult2: "(a * c) mod (b * c) = (a mod b) * c" using mod_mult_mult1 [of c a b] by (simp add: mult_commute) lemma dvd_mod: "k dvd m \ k dvd n \ k dvd (m mod n)" unfolding dvd_def by (auto simp add: mod_mult_mult1) lemma dvd_mod_iff: "k dvd n \ k dvd (m mod n) \ k dvd m" by (blast intro: dvd_mod_imp_dvd dvd_mod) lemma div_power: "y dvd x \ (x div y) ^ n = x ^ n div y ^ n" apply (induct n) apply simp apply(simp add: div_mult_div_if_dvd dvd_power_same) done end class ring_div = semiring_div + idom begin text {* Negation respects modular equivalence. *} lemma mod_minus_eq: "(- a) mod b = (- (a mod b)) mod b" proof - have "(- a) mod b = (- (a div b * b + a mod b)) mod b" by (simp only: mod_div_equality) also have "\ = (- (a mod b) + - (a div b) * b) mod b" by (simp only: minus_add_distrib minus_mult_left add_ac) also have "\ = (- (a mod b)) mod b" by (rule mod_mult_self1) finally show ?thesis . qed lemma mod_minus_cong: assumes "a mod b = a' mod b" shows "(- a) mod b = (- a') mod b" proof - have "(- (a mod b)) mod b = (- (a' mod b)) mod b" unfolding assms .. thus ?thesis by (simp only: mod_minus_eq [symmetric]) qed text {* Subtraction respects modular equivalence. *} lemma mod_diff_left_eq: "(a - b) mod c = (a mod c - b) mod c" unfolding diff_minus by (intro mod_add_cong mod_minus_cong) simp_all lemma mod_diff_right_eq: "(a - b) mod c = (a - b mod c) mod c" unfolding diff_minus by (intro mod_add_cong mod_minus_cong) simp_all lemma mod_diff_eq: "(a - b) mod c = (a mod c - b mod c) mod c" unfolding diff_minus by (intro mod_add_cong mod_minus_cong) simp_all lemma mod_diff_cong: assumes "a mod c = a' mod c" assumes "b mod c = b' mod c" shows "(a - b) mod c = (a' - b') mod c" unfolding diff_minus using assms by (intro mod_add_cong mod_minus_cong) lemma dvd_neg_div: "y dvd x \ -x div y = - (x div y)" apply (case_tac "y = 0") apply simp apply (auto simp add: dvd_def) apply (subgoal_tac "-(y * k) = y * - k") apply (erule ssubst) apply (erule div_mult_self1_is_id) apply simp done lemma dvd_div_neg: "y dvd x \ x div -y = - (x div y)" apply (case_tac "y = 0") apply simp apply (auto simp add: dvd_def) apply (subgoal_tac "y * k = -y * -k") apply (erule ssubst) apply (rule div_mult_self1_is_id) apply simp apply simp done end subsection {* Division on @{typ nat} *} text {* We define @{const div} and @{const mod} on @{typ nat} by means of a characteristic relation with two input arguments @{term "m\nat"}, @{term "n\nat"} and two output arguments @{term "q\nat"}(uotient) and @{term "r\nat"}(emainder). *} definition divmod_nat_rel :: "nat \ nat \ nat \ nat \ bool" where "divmod_nat_rel m n qr \ m = fst qr * n + snd qr \ (if n = 0 then fst qr = 0 else if n > 0 then 0 \ snd qr \ snd qr < n else n < snd qr \ snd qr \ 0)" text {* @{const divmod_nat_rel} is total: *} lemma divmod_nat_rel_ex: obtains q r where "divmod_nat_rel m n (q, r)" proof (cases "n = 0") case True with that show thesis by (auto simp add: divmod_nat_rel_def) next case False have "\q r. 
m = q * n + r \ r < n" proof (induct m) case 0 with `n \ 0` have "(0\nat) = 0 * n + 0 \ 0 < n" by simp then show ?case by blast next case (Suc m) then obtain q' r' where m: "m = q' * n + r'" and n: "r' < n" by auto then show ?case proof (cases "Suc r' < n") case True from m n have "Suc m = q' * n + Suc r'" by simp with True show ?thesis by blast next case False then have "n \ Suc r'" by auto moreover from n have "Suc r' \ n" by auto ultimately have "n = Suc r'" by auto with m have "Suc m = Suc q' * n + 0" by simp with `n \ 0` show ?thesis by blast qed qed with that show thesis using `n \ 0` by (auto simp add: divmod_nat_rel_def) qed text {* @{const divmod_nat_rel} is injective: *} lemma divmod_nat_rel_unique: assumes "divmod_nat_rel m n qr" and "divmod_nat_rel m n qr'" shows "qr = qr'" proof (cases "n = 0") case True with assms show ?thesis by (cases qr, cases qr') (simp add: divmod_nat_rel_def) next case False have aux: "\q r q' r'. q' * n + r' = q * n + r \ r < n \ q' \ (q\nat)" apply (rule leI) apply (subst less_iff_Suc_add) apply (auto simp add: add_mult_distrib) done from `n \ 0` assms have "fst qr = fst qr'" by (auto simp add: divmod_nat_rel_def intro: order_antisym dest: aux sym) moreover from this assms have "snd qr = snd qr'" by (simp add: divmod_nat_rel_def) ultimately show ?thesis by (cases qr, cases qr') simp qed text {* We instantiate divisibility on the natural numbers by means of @{const divmod_nat_rel}: *} instantiation nat :: semiring_div begin definition divmod_nat :: "nat \ nat \ nat \ nat" where [code del]: "divmod_nat m n = (THE qr. divmod_nat_rel m n qr)" lemma divmod_nat_rel_divmod_nat: "divmod_nat_rel m n (divmod_nat m n)" proof - from divmod_nat_rel_ex obtain qr where rel: "divmod_nat_rel m n qr" . then show ?thesis by (auto simp add: divmod_nat_def intro: theI elim: divmod_nat_rel_unique) qed lemma divmod_nat_eq: assumes "divmod_nat_rel m n qr" shows "divmod_nat m n = qr" using assms by (auto intro: divmod_nat_rel_unique divmod_nat_rel_divmod_nat) definition div_nat where "m div n = fst (divmod_nat m n)" definition mod_nat where "m mod n = snd (divmod_nat m n)" lemma divmod_nat_div_mod: "divmod_nat m n = (m div n, m mod n)" unfolding div_nat_def mod_nat_def by simp lemma div_eq: assumes "divmod_nat_rel m n (q, r)" shows "m div n = q" using assms by (auto dest: divmod_nat_eq simp add: divmod_nat_div_mod) lemma mod_eq: assumes "divmod_nat_rel m n (q, r)" shows "m mod n = r" using assms by (auto dest: divmod_nat_eq simp add: divmod_nat_div_mod) lemma divmod_nat_rel: "divmod_nat_rel m n (m div n, m mod n)" by (simp add: div_nat_def mod_nat_def divmod_nat_rel_divmod_nat) lemma divmod_nat_zero: "divmod_nat m 0 = (0, m)" proof - from divmod_nat_rel [of m 0] show ?thesis unfolding divmod_nat_div_mod divmod_nat_rel_def by simp qed lemma divmod_nat_base: assumes "m < n" shows "divmod_nat m n = (0, m)" proof - from divmod_nat_rel [of m n] show ?thesis unfolding divmod_nat_div_mod divmod_nat_rel_def using assms by (cases "m div n = 0") (auto simp add: gr0_conv_Suc [of "m div n"]) qed lemma divmod_nat_step: assumes "0 < n" and "n \ m" shows "divmod_nat m n = (Suc ((m - n) div n), (m - n) mod n)" proof - from divmod_nat_rel have divmod_nat_m_n: "divmod_nat_rel m n (m div n, m mod n)" . 
with assms have m_div_n: "m div n \ 1" by (cases "m div n") (auto simp add: divmod_nat_rel_def) from assms divmod_nat_m_n have "divmod_nat_rel (m - n) n (m div n - Suc 0, m mod n)" by (cases "m div n") (auto simp add: divmod_nat_rel_def) with divmod_nat_eq have "divmod_nat (m - n) n = (m div n - Suc 0, m mod n)" by simp moreover from divmod_nat_div_mod have "divmod_nat (m - n) n = ((m - n) div n, (m - n) mod n)" . ultimately have "m div n = Suc ((m - n) div n)" and "m mod n = (m - n) mod n" using m_div_n by simp_all then show ?thesis using divmod_nat_div_mod by simp qed text {* The ''recursion'' equations for @{const div} and @{const mod} *} lemma div_less [simp]: fixes m n :: nat assumes "m < n" shows "m div n = 0" using assms divmod_nat_base divmod_nat_div_mod by simp lemma le_div_geq: fixes m n :: nat assumes "0 < n" and "n \ m" shows "m div n = Suc ((m - n) div n)" using assms divmod_nat_step divmod_nat_div_mod by simp lemma mod_less [simp]: fixes m n :: nat assumes "m < n" shows "m mod n = m" using assms divmod_nat_base divmod_nat_div_mod by simp lemma le_mod_geq: fixes m n :: nat assumes "n \ m" shows "m mod n = (m - n) mod n" using assms divmod_nat_step divmod_nat_div_mod by (cases "n = 0") simp_all instance proof - have [simp]: "\n::nat. n div 0 = 0" by (simp add: div_nat_def divmod_nat_zero) have [simp]: "\n::nat. 0 div n = 0" proof - fix n :: nat show "0 div n = 0" by (cases "n = 0") simp_all qed show "OFCLASS(nat, semiring_div_class)" proof fix m n :: nat show "m div n * n + m mod n = m" using divmod_nat_rel [of m n] by (simp add: divmod_nat_rel_def) next fix m n q :: nat assume "n \ 0" then show "(q + m * n) div n = m + q div n" by (induct m) (simp_all add: le_div_geq) next fix m n q :: nat assume "m \ 0" then show "(m * n) div (m * q) = n div q" proof (cases "n \ 0 \ q \ 0") case False then show ?thesis by auto next case True with `m \ 0` have "m > 0" and "n > 0" and "q > 0" by auto then have "\a b. divmod_nat_rel n q (a, b) \ divmod_nat_rel (m * n) (m * q) (a, m * b)" by (auto simp add: divmod_nat_rel_def) (simp_all add: algebra_simps) moreover from divmod_nat_rel have "divmod_nat_rel n q (n div q, n mod q)" . ultimately have "divmod_nat_rel (m * n) (m * q) (n div q, m * (n mod q))" . 
then show ?thesis by (simp add: div_eq) qed qed simp_all qed end lemma divmod_nat_if [code]: "divmod_nat m n = (if n = 0 \ m < n then (0, m) else let (q, r) = divmod_nat (m - n) n in (Suc q, r))" by (simp add: divmod_nat_zero divmod_nat_base divmod_nat_step) (simp add: divmod_nat_div_mod) text {* Simproc for cancelling @{const div} and @{const mod} *} ML {* local structure CancelDivMod = CancelDivModFun(struct val div_name = @{const_name div}; val mod_name = @{const_name mod}; val mk_binop = HOLogic.mk_binop; val mk_sum = Nat_Arith.mk_sum; val dest_sum = Nat_Arith.dest_sum; val div_mod_eqs = map mk_meta_eq [@{thm div_mod_equality}, @{thm div_mod_equality2}]; val trans = trans; val prove_eq_sums = Arith_Data.prove_conv2 all_tac (Arith_Data.simp_all_tac (@{thm monoid_add_class.add_0_left} :: @{thm monoid_add_class.add_0_right} :: @{thms add_ac})) end) in val cancel_div_mod_nat_proc = Simplifier.simproc @{theory} "cancel_div_mod" ["(m::nat) + n"] (K CancelDivMod.proc); val _ = Addsimprocs [cancel_div_mod_nat_proc]; end *} subsubsection {* Quotient *} lemma div_geq: "0 < n \ \ m < n \ m div n = Suc ((m - n) div n)" by (simp add: le_div_geq linorder_not_less) lemma div_if: "0 < n \ m div n = (if m < n then 0 else Suc ((m - n) div n))" by (simp add: div_geq) lemma div_mult_self_is_m [simp]: "0 (m*n) div n = (m::nat)" by simp lemma div_mult_self1_is_m [simp]: "0 (n*m) div n = (m::nat)" by simp subsubsection {* Remainder *} lemma mod_less_divisor [simp]: fixes m n :: nat assumes "n > 0" shows "m mod n < (n::nat)" using assms divmod_nat_rel [of m n] unfolding divmod_nat_rel_def by auto lemma mod_less_eq_dividend [simp]: fixes m n :: nat shows "m mod n \ m" proof (rule add_leD2) from mod_div_equality have "m div n * n + m mod n = m" . then show "m div n * n + m mod n \ m" by auto qed lemma mod_geq: "\ m < (n\nat) \ m mod n = (m - n) mod n" by (simp add: le_mod_geq linorder_not_less) lemma mod_if: "m mod (n\nat) = (if m < n then m else (m - n) mod n)" by (simp add: le_mod_geq) lemma mod_1 [simp]: "m mod Suc 0 = 0" by (induct m) (simp_all add: mod_geq) lemma mod_mult_distrib: "(m mod n) * (k\nat) = (m * k) mod (n * k)" apply (cases "n = 0", simp) apply (cases "k = 0", simp) apply (induct m rule: nat_less_induct) apply (subst mod_if, simp) apply (simp add: mod_geq diff_mult_distrib) done lemma mod_mult_distrib2: "(k::nat) * (m mod n) = (k*m) mod (k*n)" by (simp add: mult_commute [of k] mod_mult_distrib) (* a simple rearrangement of mod_div_equality: *) lemma mult_div_cancel: "(n::nat) * (m div n) = m - (m mod n)" by (cut_tac a = m and b = n in mod_div_equality2, arith) lemma mod_le_divisor[simp]: "0 < n \ m mod n \ (n::nat)" apply (drule mod_less_divisor [where m = m]) apply simp done subsubsection {* Quotient and Remainder *} lemma divmod_nat_rel_mult1_eq: "divmod_nat_rel b c (q, r) \ c > 0 \ divmod_nat_rel (a * b) c (a * q + a * r div c, a * r mod c)" by (auto simp add: split_ifs divmod_nat_rel_def algebra_simps) lemma div_mult1_eq: "(a * b) div c = a * (b div c) + a * (b mod c) div (c::nat)" apply (cases "c = 0", simp) apply (blast intro: divmod_nat_rel [THEN divmod_nat_rel_mult1_eq, THEN div_eq]) done lemma divmod_nat_rel_add1_eq: "divmod_nat_rel a c (aq, ar) \ divmod_nat_rel b c (bq, br) \ c > 0 \ divmod_nat_rel (a + b) c (aq + bq + (ar + br) div c, (ar + br) mod c)" by (auto simp add: split_ifs divmod_nat_rel_def algebra_simps) (*NOT suitable for rewriting: the RHS has an instance of the LHS*) lemma div_add1_eq: "(a+b) div (c::nat) = a div c + b div c + ((a mod c + b mod c) div c)" apply (cases 
"c = 0", simp) apply (blast intro: divmod_nat_rel_add1_eq [THEN div_eq] divmod_nat_rel) done lemma mod_lemma: "[| (0::nat) < c; r < b |] ==> b * (q mod c) + r < b * c" apply (cut_tac m = q and n = c in mod_less_divisor) apply (drule_tac [2] m = "q mod c" in less_imp_Suc_add, auto) apply (erule_tac P = "%x. ?lhs < ?rhs x" in ssubst) apply (simp add: add_mult_distrib2) done lemma divmod_nat_rel_mult2_eq: "divmod_nat_rel a b (q, r) \ 0 < b \ 0 < c \ divmod_nat_rel a (b * c) (q div c, b *(q mod c) + r)" by (auto simp add: mult_ac divmod_nat_rel_def add_mult_distrib2 [symmetric] mod_lemma) lemma div_mult2_eq: "a div (b*c) = (a div b) div (c::nat)" apply (cases "b = 0", simp) apply (cases "c = 0", simp) apply (force simp add: divmod_nat_rel [THEN divmod_nat_rel_mult2_eq, THEN div_eq]) done lemma mod_mult2_eq: "a mod (b*c) = b*(a div b mod c) + a mod (b::nat)" apply (cases "b = 0", simp) apply (cases "c = 0", simp) apply (auto simp add: mult_commute divmod_nat_rel [THEN divmod_nat_rel_mult2_eq, THEN mod_eq]) done subsubsection{*Further Facts about Quotient and Remainder*} lemma div_1 [simp]: "m div Suc 0 = m" by (induct m) (simp_all add: div_geq) (* Monotonicity of div in first argument *) lemma div_le_mono [rule_format (no_asm)]: "\m::nat. m \ n --> (m div k) \ (n div k)" apply (case_tac "k=0", simp) apply (induct "n" rule: nat_less_induct, clarify) apply (case_tac "n= k *) apply (case_tac "m=k *) apply (simp add: div_geq diff_le_mono) done (* Antimonotonicity of div in second argument *) lemma div_le_mono2: "!!m::nat. [| 0n |] ==> (k div n) \ (k div m)" apply (subgoal_tac "0 (k-m) div n") prefer 2 apply (blast intro: div_le_mono diff_le_mono2) apply (rule le_trans, simp) apply (simp) done lemma div_le_dividend [simp]: "m div n \ (m::nat)" apply (case_tac "n=0", simp) apply (subgoal_tac "m div n \ m div 1", simp) apply (rule div_le_mono2) apply (simp_all (no_asm_simp)) done (* Similar for "less than" *) lemma div_less_dividend [rule_format]: "!!n::nat. 1 0 < m --> m div n < m" apply (induct_tac m rule: nat_less_induct) apply (rename_tac "m") apply (case_tac "m Suc(na) *) apply (simp add: linorder_not_less le_Suc_eq mod_geq) apply (auto simp add: Suc_diff_le le_mod_geq) done lemma mod_eq_0_iff: "(m mod d = 0) = (\q::nat. m = d*q)" by (auto simp add: dvd_eq_mod_eq_0 [symmetric] dvd_def) lemmas mod_eq_0D [dest!] = mod_eq_0_iff [THEN iffD1] (*Loses information, namely we also have r \q::nat. m = r + q*d" apply (cut_tac a = m in mod_div_equality) apply (simp only: add_ac) apply (blast intro: sym) done lemma split_div: "P(n div k :: nat) = ((k = 0 \ P 0) \ (k \ 0 \ (!i. 
!j P i)))" (is "?P = ?Q" is "_ = (_ \ (_ \ ?R))") proof assume P: ?P show ?Q proof (cases) assume "k = 0" with P show ?Q by simp next assume not0: "k \ 0" thus ?Q proof (simp, intro allI impI) fix i j assume n: "n = k*i + j" and j: "j < k" show "P i" proof (cases) assume "i = 0" with n j P show "P i" by simp next assume "i \ 0" with not0 n j P show "P i" by(simp add:add_ac) qed qed qed next assume Q: ?Q show ?P proof (cases) assume "k = 0" with Q show ?P by simp next assume not0: "k \ 0" with Q have R: ?R by simp from not0 R[THEN spec,of "n div k",THEN spec, of "n mod k"] show ?P by simp qed qed lemma split_div_lemma: assumes "0 < n" shows "n * q \ m \ m < n * Suc q \ q = ((m\nat) div n)" (is "?lhs \ ?rhs") proof assume ?rhs with mult_div_cancel have nq: "n * q = m - (m mod n)" by simp then have A: "n * q \ m" by simp have "n - (m mod n) > 0" using mod_less_divisor assms by auto then have "m < m + (n - (m mod n))" by simp then have "m < n + (m - (m mod n))" by simp with nq have "m < n + n * q" by simp then have B: "m < n * Suc q" by simp from A B show ?lhs .. next assume P: ?lhs then have "divmod_nat_rel m n (q, m - n * q)" unfolding divmod_nat_rel_def by (auto simp add: mult_ac) with divmod_nat_rel_unique divmod_nat_rel [of m n] have "(q, m - n * q) = (m div n, m mod n)" by auto then show ?rhs by simp qed theorem split_div': "P ((m::nat) div n) = ((n = 0 \ P 0) \ (\q. (n * q \ m \ m < n * (Suc q)) \ P q))" apply (case_tac "0 < n") apply (simp only: add: split_div_lemma) apply simp_all done lemma split_mod: "P(n mod k :: nat) = ((k = 0 \ P n) \ (k \ 0 \ (!i. !j P j)))" (is "?P = ?Q" is "_ = (_ \ (_ \ ?R))") proof assume P: ?P show ?Q proof (cases) assume "k = 0" with P show ?Q by simp next assume not0: "k \ 0" thus ?Q proof (simp, intro allI impI) fix i j assume "n = k*i + j" "j < k" thus "P j" using not0 P by(simp add:add_ac mult_ac) qed qed next assume Q: ?Q show ?P proof (cases) assume "k = 0" with Q show ?P by simp next assume not0: "k \ 0" with Q have R: ?R by simp from not0 R[THEN spec,of "n div k",THEN spec, of "n mod k"] show ?P by simp qed qed theorem mod_div_equality': "(m::nat) mod n = m - (m div n) * n" apply (rule_tac P="%x. m mod n = x - (m div n) * n" in subst [OF mod_div_equality [of _ n]]) apply arith done lemma div_mod_equality': fixes m n :: nat shows "m div n * n = m - m mod n" proof - have "m mod n \ m mod n" .. from div_mod_equality have "m div n * n + m mod n - m mod n = m - m mod n" by simp with diff_add_assoc [OF `m mod n \ m mod n`, of "m div n * n"] have "m div n * n + (m mod n - m mod n) = m - m mod n" by simp then show ?thesis by simp qed subsubsection {*An ``induction'' law for modulus arithmetic.*} lemma mod_induct_0: assumes step: "\i P ((Suc i) mod p)" and base: "P i" and i: "i(P 0)" from i have p: "0k. 0 \ P (p-k)" (is "\k. 
?A k") proof fix k show "?A k" proof (induct k) show "?A 0" by simp -- "by contradiction" next fix n assume ih: "?A n" show "?A (Suc n)" proof (clarsimp) assume y: "P (p - Suc n)" have n: "Suc n < p" proof (rule ccontr) assume "\(Suc n < p)" hence "p - Suc n = 0" by simp with y contra show "False" by simp qed hence n2: "Suc (p - Suc n) = p-n" by arith from p have "p - Suc n < p" by arith with y step have z: "P ((Suc (p - Suc n)) mod p)" by blast show "False" proof (cases "n=0") case True with z n2 contra show ?thesis by simp next case False with p have "p-n < p" by arith with z n2 False ih show ?thesis by simp qed qed qed qed moreover from i obtain k where "0 i+k=p" by (blast dest: less_imp_add_positive) hence "0 i=p-k" by auto moreover note base ultimately show "False" by blast qed lemma mod_induct: assumes step: "\i P ((Suc i) mod p)" and base: "P i" and i: "ij P j" (is "?A j") proof (induct j) from step base i show "?A 0" by (auto elim: mod_induct_0) next fix k assume ih: "?A k" show "?A (Suc k)" proof assume suc: "Suc k < p" hence k: "knat) mod 2 \ m mod 2 = 1" proof - { fix n :: nat have "(n::nat) < 2 \ n = 0 \ n = 1" by (induct n) simp_all } moreover have "m mod 2 < 2" by simp ultimately have "m mod 2 = 0 \ m mod 2 = 1" . then show ?thesis by auto qed text{*These lemmas collapse some needless occurrences of Suc: at least three Sucs, since two and fewer are rewritten back to Suc again! We already have some rules to simplify operands smaller than 3.*} lemma div_Suc_eq_div_add3 [simp]: "m div (Suc (Suc (Suc n))) = m div (3+n)" by (simp add: Suc3_eq_add_3) lemma mod_Suc_eq_mod_add3 [simp]: "m mod (Suc (Suc (Suc n))) = m mod (3+n)" by (simp add: Suc3_eq_add_3) lemma Suc_div_eq_add3_div: "(Suc (Suc (Suc m))) div n = (3+m) div n" by (simp add: Suc3_eq_add_3) lemma Suc_mod_eq_add3_mod: "(Suc (Suc (Suc m))) mod n = (3+m) mod n" by (simp add: Suc3_eq_add_3) lemmas Suc_div_eq_add3_div_number_of = Suc_div_eq_add3_div [of _ "number_of v", standard] declare Suc_div_eq_add3_div_number_of [simp] lemmas Suc_mod_eq_add3_mod_number_of = Suc_mod_eq_add3_mod [of _ "number_of v", standard] declare Suc_mod_eq_add3_mod_number_of [simp] lemma Suc_times_mod_eq: "1 Suc (k * m) mod k = 1" apply (induct "m") apply (simp_all add: mod_Suc) done declare Suc_times_mod_eq [of "number_of w", standard, simp] lemma [simp]: "n div k \ (Suc n) div k" by (simp add: div_le_mono) lemma Suc_n_div_2_gt_zero [simp]: "(0::nat) < n ==> 0 < (n + 1) div 2" by (cases n) simp_all lemma div_2_gt_zero [simp]: "(1::nat) < n ==> 0 < n div 2" using Suc_n_div_2_gt_zero [of "n - 1"] by simp (* Potential use of algebra : Equality modulo n*) lemma mod_mult_self3 [simp]: "(k*n + m) mod n = m mod (n::nat)" by (simp add: mult_ac add_ac) lemma mod_mult_self4 [simp]: "Suc (k*n + m) mod n = Suc m mod n" proof - have "Suc (k * n + m) mod n = (k * n + Suc m) mod n" by simp also have "... = Suc m mod n" by (rule mod_mult_self3) finally show ?thesis . qed lemma mod_Suc_eq_Suc_mod: "Suc m mod n = Suc (m mod n) mod n" apply (subst mod_Suc [of m]) apply (subst mod_Suc [of "m mod n"], simp) done subsection {* Division on @{typ int} *} definition divmod_int_rel :: "int \ int \ int \ int \ bool" where --{*definition of quotient and remainder*} [code]: "divmod_int_rel a b = (\(q, r). a = b * q + r \ (if 0 < b then 0 \ r \ r < b else b < r \ r \ 0))" definition adjust :: "int \ int \ int \ int \ int" where --{*for the division algorithm*} [code]: "adjust b = (\(q, r). 
if 0 \ r - b then (2 * q + 1, r - b) else (2 * q, r))" text{*algorithm for the case @{text "a\0, b>0"}*} function posDivAlg :: "int \ int \ int \ int" where "posDivAlg a b = (if a < b \ b \ 0 then (0, a) else adjust b (posDivAlg a (2 * b)))" by auto termination by (relation "measure (\(a, b). nat (a - b + 1))") (auto simp add: mult_2) text{*algorithm for the case @{text "a<0, b>0"}*} function negDivAlg :: "int \ int \ int \ int" where "negDivAlg a b = (if 0 \a + b \ b \ 0 then (-1, a + b) else adjust b (negDivAlg a (2 * b)))" by auto termination by (relation "measure (\(a, b). nat (- a - b))") (auto simp add: mult_2) text{*algorithm for the general case @{term "b\0"}*} definition negateSnd :: "int \ int \ int \ int" where [code_unfold]: "negateSnd = apsnd uminus" definition divmod_int :: "int \ int \ int \ int" where --{*The full division algorithm considers all possible signs for a, b including the special case @{text "a=0, b<0"} because @{term negDivAlg} requires @{term "a<0"}.*} "divmod_int a b = (if 0 \ a then if 0 \ b then posDivAlg a b else if a = 0 then (0, 0) else negateSnd (negDivAlg (-a) (-b)) else if 0 < b then negDivAlg a b else negateSnd (posDivAlg (-a) (-b)))" instantiation int :: Divides.div begin definition "a div b = fst (divmod_int a b)" definition "a mod b = snd (divmod_int a b)" instance .. end lemma divmod_int_mod_div: "divmod_int p q = (p div q, p mod q)" by (auto simp add: div_int_def mod_int_def) text{* Here is the division algorithm in ML: \begin{verbatim} fun posDivAlg (a,b) = if ar-b then (2*q+1, r-b) else (2*q, r) end fun negDivAlg (a,b) = if 0\a+b then (~1,a+b) else let val (q,r) = negDivAlg(a, 2*b) in if 0\r-b then (2*q+1, r-b) else (2*q, r) end; fun negateSnd (q,r:int) = (q,~r); fun divmod (a,b) = if 0\a then if b>0 then posDivAlg (a,b) else if a=0 then (0,0) else negateSnd (negDivAlg (~a,~b)) else if 0 b*q + r; 0 \ r'; r' < b; r < b |] ==> q' \ (q::int)" apply (subgoal_tac "r' + b * (q'-q) \ r") prefer 2 apply (simp add: right_diff_distrib) apply (subgoal_tac "0 < b * (1 + q - q') ") apply (erule_tac [2] order_le_less_trans) prefer 2 apply (simp add: right_diff_distrib right_distrib) apply (subgoal_tac "b * q' < b * (1 + q) ") prefer 2 apply (simp add: right_diff_distrib right_distrib) apply (simp add: mult_less_cancel_left) done lemma unique_quotient_lemma_neg: "[| b*q' + r' \ b*q + r; r \ 0; b < r; b < r' |] ==> q \ (q'::int)" by (rule_tac b = "-b" and r = "-r'" and r' = "-r" in unique_quotient_lemma, auto) lemma unique_quotient: "[| divmod_int_rel a b (q, r); divmod_int_rel a b (q', r'); b \ 0 |] ==> q = q'" apply (simp add: divmod_int_rel_def linorder_neq_iff split: split_if_asm) apply (blast intro: order_antisym dest: order_eq_refl [THEN unique_quotient_lemma] order_eq_refl [THEN unique_quotient_lemma_neg] sym)+ done lemma unique_remainder: "[| divmod_int_rel a b (q, r); divmod_int_rel a b (q', r'); b \ 0 |] ==> r = r'" apply (subgoal_tac "q = q'") apply (simp add: divmod_int_rel_def) apply (blast intro: unique_quotient) done subsubsection{*Correctness of @{term posDivAlg}, the Algorithm for Non-Negative Dividends*} text{*And positive divisors*} lemma adjust_eq [simp]: "adjust b (q,r) = (let diff = r-b in if 0 \ diff then (2*q + 1, diff) else (2*q, r))" by (simp add: Let_def adjust_def) declare posDivAlg.simps [simp del] text{*use with a simproc to avoid repeatedly proving the premise*} lemma posDivAlg_eqn: "0 < b ==> posDivAlg a b = (if a a" and "0 < b" shows "divmod_int_rel a b (posDivAlg a b)" using prems apply (induct a b rule: posDivAlg.induct) 
apply auto apply (simp add: divmod_int_rel_def) apply (subst posDivAlg_eqn, simp add: right_distrib) apply (case_tac "a < b") apply simp_all apply (erule splitE) apply (auto simp add: right_distrib Let_def mult_ac mult_2_right) done subsubsection{*Correctness of @{term negDivAlg}, the Algorithm for Negative Dividends*} text{*And positive divisors*} declare negDivAlg.simps [simp del] text{*use with a simproc to avoid repeatedly proving the premise*} lemma negDivAlg_eqn: "0 < b ==> negDivAlg a b = (if 0\a+b then (-1,a+b) else adjust b (negDivAlg a (2*b)))" by (rule negDivAlg.simps [THEN trans], simp) (*Correctness of negDivAlg: it computes quotients correctly It doesn't work if a=0 because the 0/b equals 0, not -1*) lemma negDivAlg_correct: assumes "a < 0" and "b > 0" shows "divmod_int_rel a b (negDivAlg a b)" using prems apply (induct a b rule: negDivAlg.induct) apply (auto simp add: linorder_not_le) apply (simp add: divmod_int_rel_def) apply (subst negDivAlg_eqn, assumption) apply (case_tac "a + b < (0\int)") apply simp_all apply (erule splitE) apply (auto simp add: right_distrib Let_def mult_ac mult_2_right) done subsubsection{*Existence Shown by Proving the Division Algorithm to be Correct*} (*the case a=0*) lemma divmod_int_rel_0: "b \ 0 ==> divmod_int_rel 0 b (0, 0)" by (auto simp add: divmod_int_rel_def linorder_neq_iff) lemma posDivAlg_0 [simp]: "posDivAlg 0 b = (0, 0)" by (subst posDivAlg.simps, auto) lemma negDivAlg_minus1 [simp]: "negDivAlg -1 b = (-1, b - 1)" by (subst negDivAlg.simps, auto) lemma negateSnd_eq [simp]: "negateSnd(q,r) = (q,-r)" by (simp add: negateSnd_def) lemma divmod_int_rel_neg: "divmod_int_rel (-a) (-b) qr ==> divmod_int_rel a b (negateSnd qr)" by (auto simp add: split_ifs divmod_int_rel_def) lemma divmod_int_correct: "b \ 0 ==> divmod_int_rel a b (divmod_int a b)" by (force simp add: linorder_neq_iff divmod_int_rel_0 divmod_int_def divmod_int_rel_neg posDivAlg_correct negDivAlg_correct) text{*Arbitrary definitions for division by zero. 
Useful to simplify certain equations.*} lemma DIVISION_BY_ZERO [simp]: "a div (0::int) = 0 & a mod (0::int) = a" by (simp add: div_int_def mod_int_def divmod_int_def posDivAlg.simps) text{*Basic laws about division and remainder*} lemma zmod_zdiv_equality: "(a::int) = b * (a div b) + (a mod b)" apply (case_tac "b = 0", simp) apply (cut_tac a = a and b = b in divmod_int_correct) apply (auto simp add: divmod_int_rel_def div_int_def mod_int_def) done lemma zdiv_zmod_equality: "(b * (a div b) + (a mod b)) + k = (a::int)+k" by(simp add: zmod_zdiv_equality[symmetric]) lemma zdiv_zmod_equality2: "((a div b) * b + (a mod b)) + k = (a::int)+k" by(simp add: mult_commute zmod_zdiv_equality[symmetric]) text {* Tool setup *} ML {* local structure CancelDivMod = CancelDivModFun(struct val div_name = @{const_name div}; val mod_name = @{const_name mod}; val mk_binop = HOLogic.mk_binop; val mk_sum = Arith_Data.mk_sum HOLogic.intT; val dest_sum = Arith_Data.dest_sum; val div_mod_eqs = map mk_meta_eq [@{thm zdiv_zmod_equality}, @{thm zdiv_zmod_equality2}]; val trans = trans; val prove_eq_sums = Arith_Data.prove_conv2 all_tac (Arith_Data.simp_all_tac (@{thm diff_minus} :: @{thms add_0s} @ @{thms add_ac})) end) in val cancel_div_mod_int_proc = Simplifier.simproc @{theory} "cancel_zdiv_zmod" ["(k::int) + l"] (K CancelDivMod.proc); val _ = Addsimprocs [cancel_div_mod_int_proc]; end *} lemma pos_mod_conj : "(0::int) < b ==> 0 \ a mod b & a mod b < b" apply (cut_tac a = a and b = b in divmod_int_correct) apply (auto simp add: divmod_int_rel_def mod_int_def) done lemmas pos_mod_sign [simp] = pos_mod_conj [THEN conjunct1, standard] and pos_mod_bound [simp] = pos_mod_conj [THEN conjunct2, standard] lemma neg_mod_conj : "b < (0::int) ==> a mod b \ 0 & b < a mod b" apply (cut_tac a = a and b = b in divmod_int_correct) apply (auto simp add: divmod_int_rel_def div_int_def mod_int_def) done lemmas neg_mod_sign [simp] = neg_mod_conj [THEN conjunct1, standard] and neg_mod_bound [simp] = neg_mod_conj [THEN conjunct2, standard] subsubsection{*General Properties of div and mod*} lemma divmod_int_rel_div_mod: "b \ 0 ==> divmod_int_rel a b (a div b, a mod b)" apply (cut_tac a = a and b = b in zmod_zdiv_equality) apply (force simp add: divmod_int_rel_def linorder_neq_iff) done lemma divmod_int_rel_div: "[| divmod_int_rel a b (q, r); b \ 0 |] ==> a div b = q" by (simp add: divmod_int_rel_div_mod [THEN unique_quotient]) lemma divmod_int_rel_mod: "[| divmod_int_rel a b (q, r); b \ 0 |] ==> a mod b = r" by (simp add: divmod_int_rel_div_mod [THEN unique_remainder]) lemma div_pos_pos_trivial: "[| (0::int) \ a; a < b |] ==> a div b = 0" apply (rule divmod_int_rel_div) apply (auto simp add: divmod_int_rel_def) done lemma div_neg_neg_trivial: "[| a \ (0::int); b < a |] ==> a div b = 0" apply (rule divmod_int_rel_div) apply (auto simp add: divmod_int_rel_def) done lemma div_pos_neg_trivial: "[| (0::int) < a; a+b \ 0 |] ==> a div b = -1" apply (rule divmod_int_rel_div) apply (auto simp add: divmod_int_rel_def) done (*There is no div_neg_pos_trivial because 0 div b = 0 would supersede it*) lemma mod_pos_pos_trivial: "[| (0::int) \ a; a < b |] ==> a mod b = a" apply (rule_tac q = 0 in divmod_int_rel_mod) apply (auto simp add: divmod_int_rel_def) done lemma mod_neg_neg_trivial: "[| a \ (0::int); b < a |] ==> a mod b = a" apply (rule_tac q = 0 in divmod_int_rel_mod) apply (auto simp add: divmod_int_rel_def) done lemma mod_pos_neg_trivial: "[| (0::int) < a; a+b \ 0 |] ==> a mod b = a+b" apply (rule_tac q = "-1" in divmod_int_rel_mod) apply (auto 
simp add: divmod_int_rel_def) done text{*There is no @{text mod_neg_pos_trivial}.*} (*Simpler laws such as -a div b = -(a div b) FAIL, but see just below*) lemma zdiv_zminus_zminus [simp]: "(-a) div (-b) = a div (b::int)" apply (case_tac "b = 0", simp) apply (simp add: divmod_int_rel_div_mod [THEN divmod_int_rel_neg, simplified, THEN divmod_int_rel_div, THEN sym]) done (*Simpler laws such as -a mod b = -(a mod b) FAIL, but see just below*) lemma zmod_zminus_zminus [simp]: "(-a) mod (-b) = - (a mod (b::int))" apply (case_tac "b = 0", simp) apply (subst divmod_int_rel_div_mod [THEN divmod_int_rel_neg, simplified, THEN divmod_int_rel_mod], auto) done subsubsection{*Laws for div and mod with Unary Minus*} lemma zminus1_lemma: "divmod_int_rel a b (q, r) ==> divmod_int_rel (-a) b (if r=0 then -q else -q - 1, if r=0 then 0 else b-r)" by (force simp add: split_ifs divmod_int_rel_def linorder_neq_iff right_diff_distrib) lemma zdiv_zminus1_eq_if: "b \ (0::int) ==> (-a) div b = (if a mod b = 0 then - (a div b) else - (a div b) - 1)" by (blast intro: divmod_int_rel_div_mod [THEN zminus1_lemma, THEN divmod_int_rel_div]) lemma zmod_zminus1_eq_if: "(-a::int) mod b = (if a mod b = 0 then 0 else b - (a mod b))" apply (case_tac "b = 0", simp) apply (blast intro: divmod_int_rel_div_mod [THEN zminus1_lemma, THEN divmod_int_rel_mod]) done lemma zmod_zminus1_not_zero: fixes k l :: int shows "- k mod l \ 0 \ k mod l \ 0" unfolding zmod_zminus1_eq_if by auto lemma zdiv_zminus2: "a div (-b) = (-a::int) div b" by (cut_tac a = "-a" in zdiv_zminus_zminus, auto) lemma zmod_zminus2: "a mod (-b) = - ((-a::int) mod b)" by (cut_tac a = "-a" and b = b in zmod_zminus_zminus, auto) lemma zdiv_zminus2_eq_if: "b \ (0::int) ==> a div (-b) = (if a mod b = 0 then - (a div b) else - (a div b) - 1)" by (simp add: zdiv_zminus1_eq_if zdiv_zminus2) lemma zmod_zminus2_eq_if: "a mod (-b::int) = (if a mod b = 0 then 0 else (a mod b) - b)" by (simp add: zmod_zminus1_eq_if zmod_zminus2) lemma zmod_zminus2_not_zero: fixes k l :: int shows "k mod - l \ 0 \ k mod l \ 0" unfolding zmod_zminus2_eq_if by auto subsubsection{*Division of a Number by Itself*} lemma self_quotient_aux1: "[| (0::int) < a; a = r + a*q; r < a |] ==> 1 \ q" apply (subgoal_tac "0 < a*q") apply (simp add: zero_less_mult_iff, arith) done lemma self_quotient_aux2: "[| (0::int) < a; a = r + a*q; 0 \ r |] ==> q \ 1" apply (subgoal_tac "0 \ a* (1-q) ") apply (simp add: zero_le_mult_iff) apply (simp add: right_diff_distrib) done lemma self_quotient: "[| divmod_int_rel a a (q, r); a \ (0::int) |] ==> q = 1" apply (simp add: split_ifs divmod_int_rel_def linorder_neq_iff) apply (rule order_antisym, safe, simp_all) apply (rule_tac [3] a = "-a" and r = "-r" in self_quotient_aux1) apply (rule_tac a = "-a" and r = "-r" in self_quotient_aux2) apply (force intro: self_quotient_aux1 self_quotient_aux2 simp add: add_commute)+ done lemma self_remainder: "[| divmod_int_rel a a (q, r); a \ (0::int) |] ==> r = 0" apply (frule self_quotient, assumption) apply (simp add: divmod_int_rel_def) done lemma zdiv_self [simp]: "a \ 0 ==> a div a = (1::int)" by (simp add: divmod_int_rel_div_mod [THEN self_quotient]) (*Here we have 0 mod 0 = 0, also assumed by Knuth (who puts m mod 0 = 0) *) lemma zmod_self [simp]: "a mod a = (0::int)" apply (case_tac "a = 0", simp) apply (simp add: divmod_int_rel_div_mod [THEN self_remainder]) done subsubsection{*Computation of Division and Remainder*} lemma zdiv_zero [simp]: "(0::int) div b = 0" by (simp add: div_int_def divmod_int_def) lemma div_eq_minus1: "(0::int) < 
b ==> -1 div b = -1" by (simp add: div_int_def divmod_int_def) lemma zmod_zero [simp]: "(0::int) mod b = 0" by (simp add: mod_int_def divmod_int_def) lemma zmod_minus1: "(0::int) < b ==> -1 mod b = b - 1" by (simp add: mod_int_def divmod_int_def) text{*a positive, b positive *} lemma div_pos_pos: "[| 0 < a; 0 \ b |] ==> a div b = fst (posDivAlg a b)" by (simp add: div_int_def divmod_int_def) lemma mod_pos_pos: "[| 0 < a; 0 \ b |] ==> a mod b = snd (posDivAlg a b)" by (simp add: mod_int_def divmod_int_def) text{*a negative, b positive *} lemma div_neg_pos: "[| a < 0; 0 < b |] ==> a div b = fst (negDivAlg a b)" by (simp add: div_int_def divmod_int_def) lemma mod_neg_pos: "[| a < 0; 0 < b |] ==> a mod b = snd (negDivAlg a b)" by (simp add: mod_int_def divmod_int_def) text{*a positive, b negative *} lemma div_pos_neg: "[| 0 < a; b < 0 |] ==> a div b = fst (negateSnd (negDivAlg (-a) (-b)))" by (simp add: div_int_def divmod_int_def) lemma mod_pos_neg: "[| 0 < a; b < 0 |] ==> a mod b = snd (negateSnd (negDivAlg (-a) (-b)))" by (simp add: mod_int_def divmod_int_def) text{*a negative, b negative *} lemma div_neg_neg: "[| a < 0; b \ 0 |] ==> a div b = fst (negateSnd (posDivAlg (-a) (-b)))" by (simp add: div_int_def divmod_int_def) lemma mod_neg_neg: "[| a < 0; b \ 0 |] ==> a mod b = snd (negateSnd (posDivAlg (-a) (-b)))" by (simp add: mod_int_def divmod_int_def) text {*Simplify expresions in which div and mod combine numerical constants*} lemma divmod_int_relI: "\a == b * q + r; if 0 < b then 0 \ r \ r < b else b < r \ r \ 0\ \ divmod_int_rel a b (q, r)" unfolding divmod_int_rel_def by simp lemmas divmod_int_rel_div_eq = divmod_int_relI [THEN divmod_int_rel_div, THEN eq_reflection] lemmas divmod_int_rel_mod_eq = divmod_int_relI [THEN divmod_int_rel_mod, THEN eq_reflection] lemmas arithmetic_simps = arith_simps add_special OrderedGroup.add_0_left OrderedGroup.add_0_right mult_zero_left mult_zero_right mult_1_left mult_1_right (* simprocs adapted from HOL/ex/Binary.thy *) ML {* local val mk_number = HOLogic.mk_number HOLogic.intT; fun mk_cert u k l = @{term "plus :: int \ int \ int"} $ (@{term "times :: int \ int \ int"} $ u $ mk_number k) $ mk_number l; fun prove ctxt prop = Goal.prove ctxt [] [] prop (K (ALLGOALS (full_simp_tac (HOL_basic_ss addsimps @{thms arithmetic_simps})))); fun binary_proc proc ss ct = (case Thm.term_of ct of _ $ t $ u => (case try (pairself (`(snd o HOLogic.dest_number))) (t, u) of SOME args => proc (Simplifier.the_context ss) args | NONE => NONE) | _ => NONE); in fun divmod_proc rule = binary_proc (fn ctxt => fn ((m, t), (n, u)) => if n = 0 then NONE else let val (k, l) = Integer.div_mod m n; in SOME (rule OF [prove ctxt (Logic.mk_equals (t, mk_cert u k l))]) end); end *} simproc_setup binary_int_div ("number_of m div number_of n :: int") = {* K (divmod_proc (@{thm divmod_int_rel_div_eq})) *} simproc_setup binary_int_mod ("number_of m mod number_of n :: int") = {* K (divmod_proc (@{thm divmod_int_rel_mod_eq})) *} lemmas posDivAlg_eqn_number_of [simp] = posDivAlg_eqn [of "number_of v" "number_of w", standard] lemmas negDivAlg_eqn_number_of [simp] = negDivAlg_eqn [of "number_of v" "number_of w", standard] text{*Special-case simplification *} lemma zmod_minus1_right [simp]: "a mod (-1::int) = 0" apply (cut_tac a = a and b = "-1" in neg_mod_sign) apply (cut_tac [2] a = a and b = "-1" in neg_mod_bound) apply (auto simp del: neg_mod_sign neg_mod_bound) done lemma zdiv_minus1_right [simp]: "a div (-1::int) = -a" by (cut_tac a = a and b = "-1" in zmod_zdiv_equality, auto) (** The 
last remaining special cases for constant arithmetic: 1 div z and 1 mod z **) lemmas div_pos_pos_1_number_of [simp] = div_pos_pos [OF int_0_less_1, of "number_of w", standard] lemmas div_pos_neg_1_number_of [simp] = div_pos_neg [OF int_0_less_1, of "number_of w", standard] lemmas mod_pos_pos_1_number_of [simp] = mod_pos_pos [OF int_0_less_1, of "number_of w", standard] lemmas mod_pos_neg_1_number_of [simp] = mod_pos_neg [OF int_0_less_1, of "number_of w", standard] lemmas posDivAlg_eqn_1_number_of [simp] = posDivAlg_eqn [of concl: 1 "number_of w", standard] lemmas negDivAlg_eqn_1_number_of [simp] = negDivAlg_eqn [of concl: 1 "number_of w", standard] subsubsection{*Monotonicity in the First Argument (Dividend)*} lemma zdiv_mono1: "[| a \ a'; 0 < (b::int) |] ==> a div b \ a' div b" apply (cut_tac a = a and b = b in zmod_zdiv_equality) apply (cut_tac a = a' and b = b in zmod_zdiv_equality) apply (rule unique_quotient_lemma) apply (erule subst) apply (erule subst, simp_all) done lemma zdiv_mono1_neg: "[| a \ a'; (b::int) < 0 |] ==> a' div b \ a div b" apply (cut_tac a = a and b = b in zmod_zdiv_equality) apply (cut_tac a = a' and b = b in zmod_zdiv_equality) apply (rule unique_quotient_lemma_neg) apply (erule subst) apply (erule subst, simp_all) done subsubsection{*Monotonicity in the Second Argument (Divisor)*} lemma q_pos_lemma: "[| 0 \ b'*q' + r'; r' < b'; 0 < b' |] ==> 0 \ (q'::int)" apply (subgoal_tac "0 < b'* (q' + 1) ") apply (simp add: zero_less_mult_iff) apply (simp add: right_distrib) done lemma zdiv_mono2_lemma: "[| b*q + r = b'*q' + r'; 0 \ b'*q' + r'; r' < b'; 0 \ r; 0 < b'; b' \ b |] ==> q \ (q'::int)" apply (frule q_pos_lemma, assumption+) apply (subgoal_tac "b*q < b* (q' + 1) ") apply (simp add: mult_less_cancel_left) apply (subgoal_tac "b*q = r' - r + b'*q'") prefer 2 apply simp apply (simp (no_asm_simp) add: right_distrib) apply (subst add_commute, rule zadd_zless_mono, arith) apply (rule mult_right_mono, auto) done lemma zdiv_mono2: "[| (0::int) \ a; 0 < b'; b' \ b |] ==> a div b \ a div b'" apply (subgoal_tac "b \ 0") prefer 2 apply arith apply (cut_tac a = a and b = b in zmod_zdiv_equality) apply (cut_tac a = a and b = b' in zmod_zdiv_equality) apply (rule zdiv_mono2_lemma) apply (erule subst) apply (erule subst, simp_all) done lemma q_neg_lemma: "[| b'*q' + r' < 0; 0 \ r'; 0 < b' |] ==> q' \ (0::int)" apply (subgoal_tac "b'*q' < 0") apply (simp add: mult_less_0_iff, arith) done lemma zdiv_mono2_neg_lemma: "[| b*q + r = b'*q' + r'; b'*q' + r' < 0; r < b; 0 \ r'; 0 < b'; b' \ b |] ==> q' \ (q::int)" apply (frule q_neg_lemma, assumption+) apply (subgoal_tac "b*q' < b* (q + 1) ") apply (simp add: mult_less_cancel_left) apply (simp add: right_distrib) apply (subgoal_tac "b*q' \ b'*q'") prefer 2 apply (simp add: mult_right_mono_neg, arith) done lemma zdiv_mono2_neg: "[| a < (0::int); 0 < b'; b' \ b |] ==> a div b' \ a div b" apply (cut_tac a = a and b = b in zmod_zdiv_equality) apply (cut_tac a = a and b = b' in zmod_zdiv_equality) apply (rule zdiv_mono2_neg_lemma) apply (erule subst) apply (erule subst, simp_all) done subsubsection{*More Algebraic Laws for div and mod*} text{*proving (a*b) div c = a * (b div c) + a * (b mod c) *} lemma zmult1_lemma: "[| divmod_int_rel b c (q, r); c \ 0 |] ==> divmod_int_rel (a * b) c (a*q + a*r div c, a*r mod c)" by (auto simp add: split_ifs divmod_int_rel_def linorder_neq_iff right_distrib mult_ac) lemma zdiv_zmult1_eq: "(a*b) div c = a*(b div c) + a*(b mod c) div (c::int)" apply (case_tac "c = 0", simp) apply (blast intro: 
divmod_int_rel_div_mod [THEN zmult1_lemma, THEN divmod_int_rel_div]) done lemma zmod_zmult1_eq: "(a*b) mod c = a*(b mod c) mod (c::int)" apply (case_tac "c = 0", simp) apply (blast intro: divmod_int_rel_div_mod [THEN zmult1_lemma, THEN divmod_int_rel_mod]) done lemma zmod_zdiv_trivial: "(a mod b) div b = (0::int)" apply (case_tac "b = 0", simp) apply (auto simp add: linorder_neq_iff div_pos_pos_trivial div_neg_neg_trivial) done text{*proving (a+b) div c = a div c + b div c + ((a mod c + b mod c) div c) *} lemma zadd1_lemma: "[| divmod_int_rel a c (aq, ar); divmod_int_rel b c (bq, br); c \ 0 |] ==> divmod_int_rel (a+b) c (aq + bq + (ar+br) div c, (ar+br) mod c)" by (force simp add: split_ifs divmod_int_rel_def linorder_neq_iff right_distrib) (*NOT suitable for rewriting: the RHS has an instance of the LHS*) lemma zdiv_zadd1_eq: "(a+b) div (c::int) = a div c + b div c + ((a mod c + b mod c) div c)" apply (case_tac "c = 0", simp) apply (blast intro: zadd1_lemma [OF divmod_int_rel_div_mod divmod_int_rel_div_mod] divmod_int_rel_div) done instance int :: ring_div proof fix a b c :: int assume not0: "b \ 0" show "(a + c * b) div b = c + a div b" unfolding zdiv_zadd1_eq [of a "c * b"] using not0 by (simp add: zmod_zmult1_eq zmod_zdiv_trivial zdiv_zmult1_eq) next fix a b c :: int assume "a \ 0" then show "(a * b) div (a * c) = b div c" proof (cases "b \ 0 \ c \ 0") case False then show ?thesis by auto next case True then have "b \ 0" and "c \ 0" by auto with `a \ 0` have "\q r. divmod_int_rel b c (q, r) \ divmod_int_rel (a * b) (a * c) (q, a * r)" apply (auto simp add: divmod_int_rel_def) apply (auto simp add: algebra_simps) apply (auto simp add: zero_less_mult_iff zero_le_mult_iff mult_le_0_iff mult_commute [of a] mult_less_cancel_right) done moreover with `c \ 0` divmod_int_rel_div_mod have "divmod_int_rel b c (b div c, b mod c)" by auto ultimately have "divmod_int_rel (a * b) (a * c) (b div c, a * (b mod c))" . moreover from `a \ 0` `c \ 0` have "a * c \ 0" by simp ultimately show ?thesis by (rule divmod_int_rel_div) qed qed auto lemma posDivAlg_div_mod: assumes "k \ 0" and "l \ 0" shows "posDivAlg k l = (k div l, k mod l)" proof (cases "l = 0") case True then show ?thesis by (simp add: posDivAlg.simps) next case False with assms posDivAlg_correct have "divmod_int_rel k l (fst (posDivAlg k l), snd (posDivAlg k l))" by simp from divmod_int_rel_div [OF this `l \ 0`] divmod_int_rel_mod [OF this `l \ 0`] show ?thesis by simp qed lemma negDivAlg_div_mod: assumes "k < 0" and "l > 0" shows "negDivAlg k l = (k div l, k mod l)" proof - from assms have "l \ 0" by simp from assms negDivAlg_correct have "divmod_int_rel k l (fst (negDivAlg k l), snd (negDivAlg k l))" by simp from divmod_int_rel_div [OF this `l \ 0`] divmod_int_rel_mod [OF this `l \ 0`] show ?thesis by simp qed lemma zmod_eq_0_iff: "(m mod d = 0) = (EX q::int. m = d*q)" by (simp add: dvd_eq_mod_eq_0 [symmetric] dvd_def) (* REVISIT: should this be generalized to all semiring_div types? *) lemmas zmod_eq_0D [dest!] = zmod_eq_0_iff [THEN iffD1] subsubsection{*Proving @{term "a div (b*c) = (a div b) div c"} *} (*The condition c>0 seems necessary. Consider that 7 div ~6 = ~2 but 7 div 2 div ~3 = 3 div ~3 = ~1. 
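(Recall that ~ denotes unary minus and that div on int rounds toward minus infinity, so 7 div ~6 = ~2 with remainder ~5, whereas 7 div 2 = 3 and 3 div ~3 = ~1 with remainder 0; hence the law fails when c is negative.)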
The subcase (a div b) mod c = 0 seems to cause particular problems.*) text{*first, four lemmas to bound the remainder for the cases b<0 and b>0 *} lemma zmult2_lemma_aux1: "[| (0::int) < c; b < r; r \ 0 |] ==> b*c < b*(q mod c) + r" apply (subgoal_tac "b * (c - q mod c) < r * 1") apply (simp add: algebra_simps) apply (rule order_le_less_trans) apply (erule_tac [2] mult_strict_right_mono) apply (rule mult_left_mono_neg) using add1_zle_eq[of "q mod c"]apply(simp add: algebra_simps pos_mod_bound) apply (simp) apply (simp) done lemma zmult2_lemma_aux2: "[| (0::int) < c; b < r; r \ 0 |] ==> b * (q mod c) + r \ 0" apply (subgoal_tac "b * (q mod c) \ 0") apply arith apply (simp add: mult_le_0_iff) done lemma zmult2_lemma_aux3: "[| (0::int) < c; 0 \ r; r < b |] ==> 0 \ b * (q mod c) + r" apply (subgoal_tac "0 \ b * (q mod c) ") apply arith apply (simp add: zero_le_mult_iff) done lemma zmult2_lemma_aux4: "[| (0::int) < c; 0 \ r; r < b |] ==> b * (q mod c) + r < b * c" apply (subgoal_tac "r * 1 < b * (c - q mod c) ") apply (simp add: right_diff_distrib) apply (rule order_less_le_trans) apply (erule mult_strict_right_mono) apply (rule_tac [2] mult_left_mono) apply simp using add1_zle_eq[of "q mod c"] apply (simp add: algebra_simps pos_mod_bound) apply simp done lemma zmult2_lemma: "[| divmod_int_rel a b (q, r); b \ 0; 0 < c |] ==> divmod_int_rel a (b * c) (q div c, b*(q mod c) + r)" by (auto simp add: mult_ac divmod_int_rel_def linorder_neq_iff zero_less_mult_iff right_distrib [symmetric] zmult2_lemma_aux1 zmult2_lemma_aux2 zmult2_lemma_aux3 zmult2_lemma_aux4) lemma zdiv_zmult2_eq: "(0::int) < c ==> a div (b*c) = (a div b) div c" apply (case_tac "b = 0", simp) apply (force simp add: divmod_int_rel_div_mod [THEN zmult2_lemma, THEN divmod_int_rel_div]) done lemma zmod_zmult2_eq: "(0::int) < c ==> a mod (b*c) = b*(a div b mod c) + a mod b" apply (case_tac "b = 0", simp) apply (force simp add: divmod_int_rel_div_mod [THEN zmult2_lemma, THEN divmod_int_rel_mod]) done subsubsection {*Splitting Rules for div and mod*} text{*The proofs of the two lemmas below are essentially identical*} lemma split_pos_lemma: "0 P(n div k :: int)(n mod k) = (\i j. 0\j & j P i j)" apply (rule iffI, clarify) apply (erule_tac P="P ?x ?y" in rev_mp) apply (subst mod_add_eq) apply (subst zdiv_zadd1_eq) apply (simp add: div_pos_pos_trivial mod_pos_pos_trivial) txt{*converse direction*} apply (drule_tac x = "n div k" in spec) apply (drule_tac x = "n mod k" in spec, simp) done lemma split_neg_lemma: "k<0 ==> P(n div k :: int)(n mod k) = (\i j. k0 & n = k*i + j --> P i j)" apply (rule iffI, clarify) apply (erule_tac P="P ?x ?y" in rev_mp) apply (subst mod_add_eq) apply (subst zdiv_zadd1_eq) apply (simp add: div_neg_neg_trivial mod_neg_neg_trivial) txt{*converse direction*} apply (drule_tac x = "n div k" in spec) apply (drule_tac x = "n mod k" in spec, simp) done lemma split_zdiv: "P(n div k :: int) = ((k = 0 --> P 0) & (0 (\i j. 0\j & j P i)) & (k<0 --> (\i j. k0 & n = k*i + j --> P i)))" apply (case_tac "k=0", simp) apply (simp only: linorder_neq_iff) apply (erule disjE) apply (simp_all add: split_pos_lemma [of concl: "%x y. P x"] split_neg_lemma [of concl: "%x y. P x"]) done lemma split_zmod: "P(n mod k :: int) = ((k = 0 --> P n) & (0 (\i j. 0\j & j P j)) & (k<0 --> (\i j. k0 & n = k*i + j --> P j)))" apply (case_tac "k=0", simp) apply (simp only: linorder_neq_iff) apply (erule disjE) apply (simp_all add: split_pos_lemma [of concl: "%x y. P y"] split_neg_lemma [of concl: "%x y. 
P y"]) done text {* Enable (lin)arith to deal with @{const div} and @{const mod} when these are applied to some constant that is of the form @{term "number_of k"}: *} declare split_zdiv [of _ _ "number_of k", standard, arith_split] declare split_zmod [of _ _ "number_of k", standard, arith_split] subsubsection{*Speeding up the Division Algorithm with Shifting*} text{*computing div by shifting *} lemma pos_zdiv_mult_2: "(0::int) \ a ==> (1 + 2*b) div (2*a) = b div a" proof cases assume "a=0" thus ?thesis by simp next assume "a\0" and le_a: "0\a" hence a_pos: "1 \ a" by arith hence one_less_a2: "1 < 2 * a" by arith hence le_2a: "2 * (1 + b mod a) \ 2 * a" unfolding mult_le_cancel_left by (simp add: add1_zle_eq add_commute [of 1]) with a_pos have "0 \ b mod a" by simp hence le_addm: "0 \ 1 mod (2*a) + 2*(b mod a)" by (simp add: mod_pos_pos_trivial one_less_a2) with le_2a have "(1 mod (2*a) + 2*(b mod a)) div (2*a) = 0" by (simp add: div_pos_pos_trivial le_addm mod_pos_pos_trivial one_less_a2 right_distrib) thus ?thesis by (subst zdiv_zadd1_eq, simp add: mod_mult_mult1 one_less_a2 div_pos_pos_trivial) qed lemma neg_zdiv_mult_2: "a \ (0::int) ==> (1 + 2*b) div (2*a) = (b+1) div a" apply (subgoal_tac " (1 + 2* (-b - 1)) div (2 * (-a)) = (-b - 1) div (-a) ") apply (rule_tac [2] pos_zdiv_mult_2) apply (auto simp add: right_diff_distrib) apply (subgoal_tac " (-1 - (2 * b)) = - (1 + (2 * b))") apply (simp only: zdiv_zminus_zminus diff_minus minus_add_distrib [symmetric]) apply (simp_all add: algebra_simps) apply (simp only: ab_diff_minus minus_add_distrib [symmetric] number_of_Min zdiv_zminus_zminus) done lemma zdiv_number_of_Bit0 [simp]: "number_of (Int.Bit0 v) div number_of (Int.Bit0 w) = number_of v div (number_of w :: int)" by (simp only: number_of_eq numeral_simps) (simp add: mult_2 [symmetric]) lemma zdiv_number_of_Bit1 [simp]: "number_of (Int.Bit1 v) div number_of (Int.Bit0 w) = (if (0::int) \ number_of w then number_of v div (number_of w) else (number_of v + (1::int)) div (number_of w))" apply (simp only: number_of_eq numeral_simps UNIV_I split: split_if) apply (simp add: pos_zdiv_mult_2 neg_zdiv_mult_2 add_ac mult_2 [symmetric]) done subsubsection{*Computing mod by Shifting (proofs resemble those for div)*} lemma pos_zmod_mult_2: fixes a b :: int assumes "0 \ a" shows "(1 + 2 * b) mod (2 * a) = 1 + 2 * (b mod a)" proof (cases "0 < a") case False with assms show ?thesis by simp next case True then have "b mod a < a" by (rule pos_mod_bound) then have "1 + b mod a \ a" by simp then have A: "2 * (1 + b mod a) \ 2 * a" by simp from `0 < a` have "0 \ b mod a" by (rule pos_mod_sign) then have B: "0 \ 1 + 2 * (b mod a)" by simp have "((1\int) mod ((2\int) * a) + (2\int) * b mod ((2\int) * a)) mod ((2\int) * a) = (1\int) + (2\int) * (b mod a)" using `0 < a` and A by (auto simp add: mod_mult_mult1 mod_pos_pos_trivial ring_distribs intro!: mod_pos_pos_trivial B) then show ?thesis by (subst mod_add_eq) qed lemma neg_zmod_mult_2: fixes a b :: int assumes "a \ 0" shows "(1 + 2 * b) mod (2 * a) = 2 * ((b + 1) mod a) - 1" proof - from assms have "0 \ - a" by auto then have "(1 + 2 * (- b - 1)) mod (2 * (- a)) = 1 + 2 * ((- b - 1) mod (- a))" by (rule pos_zmod_mult_2) then show ?thesis by (simp add: zmod_zminus2 algebra_simps) (simp add: diff_minus add_ac) qed lemma zmod_number_of_Bit0 [simp]: "number_of (Int.Bit0 v) mod number_of (Int.Bit0 w) = (2::int) * (number_of v mod number_of w)" apply (simp only: number_of_eq numeral_simps) apply (simp add: mod_mult_mult1 pos_zmod_mult_2 neg_zmod_mult_2 add_ac mult_2 
[symmetric]) done lemma zmod_number_of_Bit1 [simp]: "number_of (Int.Bit1 v) mod number_of (Int.Bit0 w) = (if (0::int) \ number_of w then 2 * (number_of v mod number_of w) + 1 else 2 * ((number_of v + (1::int)) mod number_of w) - 1)" apply (simp only: number_of_eq numeral_simps) apply (simp add: mod_mult_mult1 pos_zmod_mult_2 neg_zmod_mult_2 add_ac mult_2 [symmetric]) done subsubsection{*Quotients of Signs*} lemma div_neg_pos_less0: "[| a < (0::int); 0 < b |] ==> a div b < 0" apply (subgoal_tac "a div b \ -1", force) apply (rule order_trans) apply (rule_tac a' = "-1" in zdiv_mono1) apply (auto simp add: div_eq_minus1) done lemma div_nonneg_neg_le0: "[| (0::int) \ a; b < 0 |] ==> a div b \ 0" by (drule zdiv_mono1_neg, auto) lemma div_nonpos_pos_le0: "[| (a::int) \ 0; b > 0 |] ==> a div b \ 0" by (drule zdiv_mono1, auto) text{* Now for some equivalences of the form @{text"a div b >=< 0 \ \"} conditional upon the sign of @{text a} or @{text b}. There are many more. They should all be simp rules unless that causes too much search. *} lemma pos_imp_zdiv_nonneg_iff: "(0::int) < b ==> (0 \ a div b) = (0 \ a)" apply auto apply (drule_tac [2] zdiv_mono1) apply (auto simp add: linorder_neq_iff) apply (simp (no_asm_use) add: linorder_not_less [symmetric]) apply (blast intro: div_neg_pos_less0) done lemma neg_imp_zdiv_nonneg_iff: "b < (0::int) ==> (0 \ a div b) = (a \ (0::int))" apply (subst zdiv_zminus_zminus [symmetric]) apply (subst pos_imp_zdiv_nonneg_iff, auto) done (*But not (a div b \ 0 iff a\0); consider a=1, b=2 when a div b = 0.*) lemma pos_imp_zdiv_neg_iff: "(0::int) < b ==> (a div b < 0) = (a < 0)" by (simp add: linorder_not_le [symmetric] pos_imp_zdiv_nonneg_iff) (*Again the law fails for \: consider a = -1, b = -2 when a div b = 0*) lemma neg_imp_zdiv_neg_iff: "b < (0::int) ==> (a div b < 0) = (0 < a)" by (simp add: linorder_not_le [symmetric] neg_imp_zdiv_nonneg_iff) lemma nonneg1_imp_zdiv_pos_iff: "(0::int) <= a \ (a div b > 0) = (a >= b & b>0)" apply rule apply rule using div_pos_pos_trivial[of a b]apply arith apply(cases "b=0")apply simp using div_nonneg_neg_le0[of a b]apply arith using int_one_le_iff_zero_less[of "a div b"] zdiv_mono1[of b a b]apply simp done subsubsection {* The Divides Relation *} lemmas zdvd_iff_zmod_eq_0_number_of [simp] = dvd_eq_mod_eq_0 [of "number_of x::int" "number_of y::int", standard] lemma zdvd_zmod: "f dvd m ==> f dvd (n::int) ==> f dvd m mod n" by (rule dvd_mod) (* TODO: remove *) lemma zdvd_zmod_imp_zdvd: "k dvd m mod n ==> k dvd n ==> k dvd (m::int)" by (rule dvd_mod_imp_dvd) (* TODO: remove *) lemma zmult_div_cancel: "(n::int) * (m div n) = m - (m mod n)" using zmod_zdiv_equality[where a="m" and b="n"] by (simp add: algebra_simps) lemma zpower_zmod: "((x::int) mod m)^y mod m = x^y mod m" apply (induct "y", auto) apply (rule zmod_zmult1_eq [THEN trans]) apply (simp (no_asm_simp)) apply (rule mod_mult_eq [symmetric]) done lemma zdiv_int: "int (a div b) = (int a) div (int b)" apply (subst split_div, auto) apply (subst split_zdiv, auto) apply (rule_tac a="int (b * i) + int j" and b="int b" and r="int j" and r'=ja in unique_quotient) apply (auto simp add: divmod_int_rel_def of_nat_mult) done lemma zmod_int: "int (a mod b) = (int a) mod (int b)" apply (subst split_mod, auto) apply (subst split_zmod, auto) apply (rule_tac a="int (b * i) + int j" and b="int b" and q="int i" and q'=ia in unique_remainder) apply (auto simp add: divmod_int_rel_def of_nat_mult) done lemma abs_div: "(y::int) dvd x \ abs (x div y) = abs x div abs y" by (unfold dvd_def, cases "y=0", 
auto simp add: abs_mult) lemma zdvd_mult_div_cancel:"(n::int) dvd m \ n * (m div n) = m" apply (subgoal_tac "m mod n = 0") apply (simp add: zmult_div_cancel) apply (simp only: dvd_eq_mod_eq_0) done text{*Suggested by Matthias Daum*} lemma int_power_div_base: "\0 < m; 0 < k\ \ k ^ m div k = (k::int) ^ (m - Suc 0)" apply (subgoal_tac "k ^ m = k ^ ((m - Suc 0) + Suc 0)") apply (erule ssubst) apply (simp only: power_add) apply simp_all done text {* by Brian Huffman *} lemma zminus_zmod: "- ((x::int) mod m) mod m = - x mod m" by (rule mod_minus_eq [symmetric]) lemma zdiff_zmod_left: "(x mod m - y) mod m = (x - y) mod (m::int)" by (rule mod_diff_left_eq [symmetric]) lemma zdiff_zmod_right: "(x - y mod m) mod m = (x - y) mod (m::int)" by (rule mod_diff_right_eq [symmetric]) lemmas zmod_simps = mod_add_left_eq [symmetric] mod_add_right_eq [symmetric] zmod_zmult1_eq [symmetric] mod_mult_left_eq [symmetric] zpower_zmod zminus_zmod zdiff_zmod_left zdiff_zmod_right text {* Distributive laws for function @{text nat}. *} lemma nat_div_distrib: "0 \ x \ nat (x div y) = nat x div nat y" apply (rule linorder_cases [of y 0]) apply (simp add: div_nonneg_neg_le0) apply simp apply (simp add: nat_eq_iff pos_imp_zdiv_nonneg_iff zdiv_int) done (*Fails if y<0: the LHS collapses to (nat z) but the RHS doesn't*) lemma nat_mod_distrib: "\0 \ x; 0 \ y\ \ nat (x mod y) = nat x mod nat y" apply (case_tac "y = 0", simp) apply (simp add: nat_eq_iff zmod_int) done text {* transfer setup *} lemma transfer_nat_int_functions: "(x::int) >= 0 \ y >= 0 \ (nat x) div (nat y) = nat (x div y)" "(x::int) >= 0 \ y >= 0 \ (nat x) mod (nat y) = nat (x mod y)" by (auto simp add: nat_div_distrib nat_mod_distrib) lemma transfer_nat_int_function_closures: "(x::int) >= 0 \ y >= 0 \ x div y >= 0" "(x::int) >= 0 \ y >= 0 \ x mod y >= 0" apply (cases "y = 0") apply (auto simp add: pos_imp_zdiv_nonneg_iff) apply (cases "y = 0") apply auto done declare TransferMorphism_nat_int [transfer add return: transfer_nat_int_functions transfer_nat_int_function_closures ] lemma transfer_int_nat_functions: "(int x) div (int y) = int (x div y)" "(int x) mod (int y) = int (x mod y)" by (auto simp add: zdiv_int zmod_int) lemma transfer_int_nat_function_closures: "is_nat x \ is_nat y \ is_nat (x div y)" "is_nat x \ is_nat y \ is_nat (x mod y)" by (simp_all only: is_nat_def transfer_nat_int_function_closures) declare TransferMorphism_int_nat [transfer add return: transfer_int_nat_functions transfer_int_nat_function_closures ] text{*Suggested by Matthias Daum*} lemma int_div_less_self: "\0 < x; 1 < k\ \ x div k < (x::int)" apply (subgoal_tac "nat x div nat k < nat x") apply (simp add: nat_div_distrib [symmetric]) apply (rule Divides.div_less_dividend, simp_all) done text {* code generator setup *} context ring_1 begin lemma of_int_num [code]: "of_int k = (if k = 0 then 0 else if k < 0 then - of_int (- k) else let (l, m) = divmod_int k 2; l' = of_int l in if m = 0 then l' + l' else l' + l' + 1)" proof - have aux1: "k mod (2\int) \ (0\int) \ of_int k = of_int (k div 2 * 2 + 1)" proof - have "k mod 2 < 2" by (auto intro: pos_mod_bound) moreover have "0 \ k mod 2" by (auto intro: pos_mod_sign) moreover assume "k mod 2 \ 0" ultimately have "k mod 2 = 1" by arith moreover have "of_int k = of_int (k div 2 * 2 + k mod 2)" by simp ultimately show ?thesis by auto qed have aux2: "\x. of_int 2 * x = x + x" proof - fix x have int2: "(2::int) = 1 + 1" by arith show "of_int 2 * x = x + x" unfolding int2 of_int_add left_distrib by simp qed have aux3: "\x. 
x * of_int 2 = x + x" proof - fix x have int2: "(2::int) = 1 + 1" by arith show "x * of_int 2 = x + x" unfolding int2 of_int_add right_distrib by simp qed from aux1 show ?thesis by (auto simp add: divmod_int_mod_div Let_def aux2 aux3) qed end lemma zmod_eq_dvd_iff: "(x::int) mod n = y mod n \ n dvd x - y" proof assume H: "x mod n = y mod n" hence "x mod n - y mod n = 0" by simp hence "(x mod n - y mod n) mod n = 0" by simp hence "(x - y) mod n = 0" by (simp add: mod_diff_eq[symmetric]) thus "n dvd x - y" by (simp add: dvd_eq_mod_eq_0) next assume H: "n dvd x - y" then obtain k where k: "x-y = n*k" unfolding dvd_def by blast hence "x = n*k + y" by simp hence "x mod n = (n*k + y) mod n" by simp thus "x mod n = y mod n" by (simp add: mod_add_left_eq) qed lemma nat_mod_eq_lemma: assumes xyn: "(x::nat) mod n = y mod n" and xy:"y \ x" shows "\q. x = y + n * q" proof- from xy have th: "int x - int y = int (x - y)" by simp from xyn have "int x mod int n = int y mod int n" by (simp add: zmod_int[symmetric]) hence "int n dvd int x - int y" by (simp only: zmod_eq_dvd_iff[symmetric]) hence "n dvd x - y" by (simp add: th zdvd_int) then show ?thesis using xy unfolding dvd_def apply clarsimp apply (rule_tac x="k" in exI) by arith qed lemma nat_mod_eq_iff: "(x::nat) mod n = y mod n \ (\q1 q2. x + n * q1 = y + n * q2)" (is "?lhs = ?rhs") proof assume H: "x mod n = y mod n" {assume xy: "x \ y" from H have th: "y mod n = x mod n" by simp from nat_mod_eq_lemma[OF th xy] have ?rhs apply clarify apply (rule_tac x="q" in exI) by (rule exI[where x="0"], simp)} moreover {assume xy: "y \ x" from nat_mod_eq_lemma[OF H xy] have ?rhs apply clarify apply (rule_tac x="0" in exI) by (rule_tac x="q" in exI, simp)} ultimately show ?rhs using linear[of x y] by blast next assume ?rhs then obtain q1 q2 where q12: "x + n * q1 = y + n * q2" by blast hence "(x + n * q1) mod n = (y + n * q2) mod n" by simp thus ?lhs by simp qed lemma div_nat_number_of [simp]: "(number_of v :: nat) div number_of v' = (if neg (number_of v :: int) then 0 else nat (number_of v div number_of v'))" unfolding nat_number_of_def number_of_is_id neg_def by (simp add: nat_div_distrib) lemma one_div_nat_number_of [simp]: "Suc 0 div number_of v' = nat (1 div number_of v')" by (simp del: nat_numeral_1_eq_1 add: numeral_1_eq_Suc_0 [symmetric]) lemma mod_nat_number_of [simp]: "(number_of v :: nat) mod number_of v' = (if neg (number_of v :: int) then 0 else if neg (number_of v' :: int) then number_of v else nat (number_of v mod number_of v'))" unfolding nat_number_of_def number_of_is_id neg_def by (simp add: nat_mod_distrib) lemma one_mod_nat_number_of [simp]: "Suc 0 mod number_of v' = (if neg (number_of v' :: int) then Suc 0 else nat (1 mod number_of v'))" by (simp del: nat_numeral_1_eq_1 add: numeral_1_eq_Suc_0 [symmetric]) lemmas dvd_eq_mod_eq_0_number_of = dvd_eq_mod_eq_0 [of "number_of x" "number_of y", standard] declare dvd_eq_mod_eq_0_number_of [simp] subsubsection {* Nitpick *} lemma zmod_zdiv_equality': "(m\int) mod n = m - (m div n) * n" by (rule_tac P="%x. 
m mod n = x - (m div n) * n" in subst [OF mod_div_equality [of _ n]]) arith -lemmas [nitpick_def] = mod_div_equality' [THEN eq_reflection] +lemmas [nitpick_def] = dvd_eq_mod_eq_0 [THEN eq_reflection] + mod_div_equality' [THEN eq_reflection] zmod_zdiv_equality' [THEN eq_reflection] subsubsection {* Code generation *} definition pdivmod :: "int \ int \ int \ int" where "pdivmod k l = (\k\ div \l\, \k\ mod \l\)" lemma pdivmod_posDivAlg [code]: "pdivmod k l = (if l = 0 then (0, \k\) else posDivAlg \k\ \l\)" by (subst posDivAlg_div_mod) (simp_all add: pdivmod_def) lemma divmod_int_pdivmod: "divmod_int k l = (if k = 0 then (0, 0) else if l = 0 then (0, k) else apsnd ((op *) (sgn l)) (if 0 < l \ 0 \ k \ l < 0 \ k < 0 then pdivmod k l else (let (r, s) = pdivmod k l in if s = 0 then (- r, 0) else (- r - 1, \l\ - s))))" proof - have aux: "\q::int. - k = l * q \ k = l * - q" by auto show ?thesis by (simp add: divmod_int_mod_div pdivmod_def) (auto simp add: aux not_less not_le zdiv_zminus1_eq_if zmod_zminus1_eq_if zdiv_zminus2_eq_if zmod_zminus2_eq_if) qed lemma divmod_int_code [code]: "divmod_int k l = (if k = 0 then (0, 0) else if l = 0 then (0, k) else apsnd ((op *) (sgn l)) (if sgn k = sgn l then pdivmod k l else (let (r, s) = pdivmod k l in if s = 0 then (- r, 0) else (- r - 1, \l\ - s))))" proof - have "k \ 0 \ l \ 0 \ 0 < l \ 0 \ k \ l < 0 \ k < 0 \ sgn k = sgn l" by (auto simp add: not_less sgn_if) then show ?thesis by (simp add: divmod_int_pdivmod) qed code_modulename SML Divides Arith code_modulename OCaml Divides Arith code_modulename Haskell Divides Arith end diff --git a/src/HOL/Nitpick.thy b/src/HOL/Nitpick.thy --- a/src/HOL/Nitpick.thy +++ b/src/HOL/Nitpick.thy @@ -1,267 +1,269 @@ (* Title: HOL/Nitpick.thy Author: Jasmin Blanchette, TU Muenchen Copyright 2008, 2009 Nitpick: Yet another counterexample generator for Isabelle/HOL. *) header {* Nitpick: Yet Another Counterexample Generator for Isabelle/HOL *} theory Nitpick imports Map SAT uses ("Tools/Nitpick/kodkod.ML") ("Tools/Nitpick/kodkod_sat.ML") ("Tools/Nitpick/nitpick_util.ML") ("Tools/Nitpick/nitpick_hol.ML") ("Tools/Nitpick/nitpick_mono.ML") ("Tools/Nitpick/nitpick_scope.ML") ("Tools/Nitpick/nitpick_peephole.ML") ("Tools/Nitpick/nitpick_rep.ML") ("Tools/Nitpick/nitpick_nut.ML") ("Tools/Nitpick/nitpick_kodkod.ML") ("Tools/Nitpick/nitpick_model.ML") ("Tools/Nitpick/nitpick.ML") ("Tools/Nitpick/nitpick_isar.ML") ("Tools/Nitpick/nitpick_tests.ML") ("Tools/Nitpick/minipick.ML") begin typedecl bisim_iterator axiomatization unknown :: 'a and is_unknown :: "'a \ bool" and undefined_fast_The :: 'a and undefined_fast_Eps :: 'a and bisim :: "bisim_iterator \ 'a \ 'a \ bool" and bisim_iterator_max :: bisim_iterator and Quot :: "'a \ 'b" and quot_normal :: "'a \ 'a" + and NonStd :: "'a \ 'b" and Tha :: "('a \ bool) \ 'a" datatype ('a, 'b) pair_box = PairBox 'a 'b datatype ('a, 'b) fun_box = FunBox "('a \ 'b)" typedecl unsigned_bit typedecl signed_bit +typedecl \ datatype 'a word = Word "('a set)" text {* Alternative definitions. *} lemma If_def [nitpick_def]: "(if P then Q else R) \ (P \ Q) \ (\ P \ R)" by (rule eq_reflection) (rule if_bool_eq_conj) lemma Ex1_def [nitpick_def]: "Ex1 P \ \x. 
P = {x}" apply (rule eq_reflection) apply (simp add: Ex1_def expand_set_eq) apply (rule iffI) apply (erule exE) apply (erule conjE) apply (rule_tac x = x in exI) apply (rule allI) apply (rename_tac y) apply (erule_tac x = y in allE) by (auto simp: mem_def) lemma rtrancl_def [nitpick_def]: "r\<^sup>* \ (r\<^sup>+)\<^sup>=" by simp lemma rtranclp_def [nitpick_def]: "rtranclp r a b \ (a = b \ tranclp r a b)" by (rule eq_reflection) (auto dest: rtranclpD) lemma tranclp_def [nitpick_def]: "tranclp r a b \ trancl (split r) (a, b)" by (simp add: trancl_def Collect_def mem_def) definition refl' :: "('a \ 'a \ bool) \ bool" where "refl' r \ \x. (x, x) \ r" definition wf' :: "('a \ 'a \ bool) \ bool" where "wf' r \ acyclic r \ (finite r \ unknown)" axiomatization wf_wfrec :: "('a \ 'a \ bool) \ (('a \ 'b) \ 'a \ 'b) \ 'a \ 'b" definition wf_wfrec' :: "('a \ 'a \ bool) \ (('a \ 'b) \ 'a \ 'b) \ 'a \ 'b" where [nitpick_simp]: "wf_wfrec' R F x = F (Recdef.cut (wf_wfrec R F) R x) x" definition wfrec' :: "('a \ 'a \ bool) \ (('a \ 'b) \ 'a \ 'b) \ 'a \ 'b" where "wfrec' R F x \ if wf R then wf_wfrec' R F x else THE y. wfrec_rel R (%f x. F (Recdef.cut f R x) x) x y" definition card' :: "('a \ bool) \ nat" where "card' X \ length (SOME xs. set xs = X \ distinct xs)" definition setsum' :: "('a \ 'b\comm_monoid_add) \ ('a \ bool) \ 'b" where "setsum' f A \ if finite A then listsum (map f (SOME xs. set xs = A \ distinct xs)) else 0" inductive fold_graph' :: "('a \ 'b \ 'b) \ 'b \ ('a \ bool) \ 'b \ bool" where "fold_graph' f z {} z" | "\x \ A; fold_graph' f z (A - {x}) y\ \ fold_graph' f z A (f x y)" text {* The following lemmas are not strictly necessary but they help the \textit{special\_level} optimization. *} lemma The_psimp [nitpick_psimp]: "P = {x} \ The P = x" by (subgoal_tac "{x} = (\y. y = x)") (auto simp: mem_def) lemma Eps_psimp [nitpick_psimp]: "\P x; \ P y; Eps P = y\ \ Eps P = x" apply (case_tac "P (Eps P)") apply auto apply (erule contrapos_np) by (rule someI) lemma unit_case_def [nitpick_def]: "unit_case x u \ x" apply (subgoal_tac "u = ()") apply (simp only: unit.cases) by simp declare unit.cases [nitpick_simp del] lemma nat_case_def [nitpick_def]: "nat_case x f n \ if n = 0 then x else f (n - 1)" apply (rule eq_reflection) by (case_tac n) auto declare nat.cases [nitpick_simp del] lemma list_size_simp [nitpick_simp]: "list_size f xs = (if xs = [] then 0 else Suc (f (hd xs) + list_size f (tl xs)))" "size xs = (if xs = [] then 0 else Suc (size (tl xs)))" by (case_tac xs) auto text {* Auxiliary definitions used to provide an alternative representation for @{text rat} and @{text real}. *} function nat_gcd :: "nat \ nat \ nat" where [simp del]: "nat_gcd x y = (if y = 0 then x else nat_gcd y (x mod y))" by auto termination apply (relation "measure (\(x, y). x + y + (if y > x then 1 else 0))") apply auto apply (metis mod_less_divisor xt1(9)) by (metis mod_mod_trivial mod_self nat_neq_iff xt1(10)) definition nat_lcm :: "nat \ nat \ nat" where "nat_lcm x y = x * y div (nat_gcd x y)" definition int_gcd :: "int \ int \ int" where "int_gcd x y = int (nat_gcd (nat (abs x)) (nat (abs y)))" definition int_lcm :: "int \ int \ int" where "int_lcm x y = int (nat_lcm (nat (abs x)) (nat (abs y)))" definition Frac :: "int \ int \ bool" where "Frac \ \(a, b). 
b > 0 \ int_gcd a b = 1" axiomatization Abs_Frac :: "int \ int \ 'a" and Rep_Frac :: "'a \ int \ int" definition zero_frac :: 'a where "zero_frac \ Abs_Frac (0, 1)" definition one_frac :: 'a where "one_frac \ Abs_Frac (1, 1)" definition num :: "'a \ int" where "num \ fst o Rep_Frac" definition denom :: "'a \ int" where "denom \ snd o Rep_Frac" function norm_frac :: "int \ int \ int \ int" where [simp del]: "norm_frac a b = (if b < 0 then norm_frac (- a) (- b) else if a = 0 \ b = 0 then (0, 1) else let c = int_gcd a b in (a div c, b div c))" by pat_completeness auto termination by (relation "measure (\(_, b). if b < 0 then 1 else 0)") auto definition frac :: "int \ int \ 'a" where "frac a b \ Abs_Frac (norm_frac a b)" definition plus_frac :: "'a \ 'a \ 'a" where [nitpick_simp]: "plus_frac q r = (let d = int_lcm (denom q) (denom r) in frac (num q * (d div denom q) + num r * (d div denom r)) d)" definition times_frac :: "'a \ 'a \ 'a" where [nitpick_simp]: "times_frac q r = frac (num q * num r) (denom q * denom r)" definition uminus_frac :: "'a \ 'a" where "uminus_frac q \ Abs_Frac (- num q, denom q)" definition number_of_frac :: "int \ 'a" where "number_of_frac n \ Abs_Frac (n, 1)" definition inverse_frac :: "'a \ 'a" where "inverse_frac q \ frac (denom q) (num q)" definition less_eq_frac :: "'a \ 'a \ bool" where [nitpick_simp]: "less_eq_frac q r \ num (plus_frac q (uminus_frac r)) \ 0" definition of_frac :: "'a \ 'b\{inverse,ring_1}" where "of_frac q \ of_int (num q) / of_int (denom q)" (* While Nitpick normally avoids to unfold definitions for locales, it unfortunately needs to unfold them when dealing with the following built-in constants. A cleaner approach would be to change "Nitpick_HOL" and "Nitpick_Nut" so that they handle the unexpanded overloaded constants directly, but this is slightly more tricky to implement. 
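The "lemmas [nitpick_def] = ..." declaration that follows therefore registers the overloaded instance equations for div, mod, plus, minus, times, unary minus, zero, one, and the orderings on "nat" and "int", together with "inf", "sup", and "less_eq" on functions, so that Nitpick unfolds them directly.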
*) lemmas [nitpick_def] = div_int_inst.div_int div_int_inst.mod_int div_nat_inst.div_nat div_nat_inst.mod_nat lower_semilattice_fun_inst.inf_fun minus_fun_inst.minus_fun minus_int_inst.minus_int minus_nat_inst.minus_nat one_int_inst.one_int one_nat_inst.one_nat ord_fun_inst.less_eq_fun ord_int_inst.less_eq_int ord_int_inst.less_int ord_nat_inst.less_eq_nat ord_nat_inst.less_nat plus_int_inst.plus_int plus_nat_inst.plus_nat times_int_inst.times_int times_nat_inst.times_nat uminus_int_inst.uminus_int upper_semilattice_fun_inst.sup_fun zero_int_inst.zero_int zero_nat_inst.zero_nat use "Tools/Nitpick/kodkod.ML" use "Tools/Nitpick/kodkod_sat.ML" use "Tools/Nitpick/nitpick_util.ML" use "Tools/Nitpick/nitpick_hol.ML" use "Tools/Nitpick/nitpick_mono.ML" use "Tools/Nitpick/nitpick_scope.ML" use "Tools/Nitpick/nitpick_peephole.ML" use "Tools/Nitpick/nitpick_rep.ML" use "Tools/Nitpick/nitpick_nut.ML" use "Tools/Nitpick/nitpick_kodkod.ML" use "Tools/Nitpick/nitpick_model.ML" use "Tools/Nitpick/nitpick.ML" use "Tools/Nitpick/nitpick_isar.ML" use "Tools/Nitpick/nitpick_tests.ML" use "Tools/Nitpick/minipick.ML" setup {* Nitpick_Isar.setup *} hide (open) const unknown is_unknown undefined_fast_The undefined_fast_Eps bisim - bisim_iterator_max Quot quot_normal Tha PairBox FunBox Word refl' wf' + bisim_iterator_max Quot quot_normal NonStd Tha PairBox FunBox Word refl' wf' wf_wfrec wf_wfrec' wfrec' card' setsum' fold_graph' nat_gcd nat_lcm int_gcd int_lcm Frac Abs_Frac Rep_Frac zero_frac one_frac num denom norm_frac frac plus_frac times_frac uminus_frac number_of_frac inverse_frac less_eq_frac of_frac -hide (open) type bisim_iterator pair_box fun_box unsigned_bit signed_bit word +hide (open) type bisim_iterator pair_box fun_box unsigned_bit signed_bit \ word hide (open) fact If_def Ex1_def rtrancl_def rtranclp_def tranclp_def refl'_def wf'_def wf_wfrec'_def wfrec'_def card'_def setsum'_def fold_graph'_def The_psimp Eps_psimp unit_case_def nat_case_def list_size_simp nat_gcd_def nat_lcm_def int_gcd_def int_lcm_def Frac_def zero_frac_def one_frac_def num_def denom_def norm_frac_def frac_def plus_frac_def times_frac_def uminus_frac_def number_of_frac_def inverse_frac_def less_eq_frac_def of_frac_def end diff --git a/src/HOL/Nitpick_Examples/Manual_Nits.thy b/src/HOL/Nitpick_Examples/Manual_Nits.thy --- a/src/HOL/Nitpick_Examples/Manual_Nits.thy +++ b/src/HOL/Nitpick_Examples/Manual_Nits.thy @@ -1,389 +1,452 @@ (* Title: HOL/Nitpick_Examples/Manual_Nits.thy Author: Jasmin Blanchette, TU Muenchen Copyright 2009 Examples from the Nitpick manual. *) header {* Examples from the Nitpick Manual *} theory Manual_Nits imports Main Coinductive_List RealDef begin chapter {* 3. First Steps *} nitpick_params [sat_solver = MiniSatJNI, max_threads = 1] subsection {* 3.1. Propositional Logic *} lemma "P \ Q" nitpick apply auto nitpick 1 nitpick 2 oops subsection {* 3.2. Type Variables *} lemma "P x \ P (THE y. P y)" nitpick [verbose] oops subsection {* 3.3. Constants *} lemma "P x \ P (THE y. P y)" nitpick [show_consts] nitpick [full_descrs, show_consts] nitpick [dont_specialize, full_descrs, show_consts] oops lemma "\!x. P x \ P (THE y. P y)" nitpick nitpick [card 'a = 1-50] (* sledgehammer *) apply (metis the_equality) done subsection {* 3.4. Skolemization *} lemma "\g. \x. g (f x) = x \ \y. \x. y = f x" nitpick oops lemma "\x. \f. f x = x" nitpick oops lemma "refl r \ sym r" nitpick oops subsection {* 3.5. Natural Numbers and Integers *} lemma "\i \ j; n \ (m\int)\ \ i * n + j * m \ i * m + j * n" nitpick oops lemma "\n. 
Suc n \ n \ P" nitpick [card nat = 100, check_potential] oops lemma "P Suc" nitpick [card = 1-6] oops lemma "P (op +\nat\nat\nat)" nitpick [card nat = 1] nitpick [card nat = 2] oops subsection {* 3.6. Inductive Datatypes *} lemma "hd (xs @ [y, y]) = hd xs" nitpick nitpick [show_consts, show_datatypes] oops lemma "\length xs = 1; length ys = 1\ \ xs = ys" nitpick [show_datatypes] oops subsection {* 3.7. Typedefs, Records, Rationals, and Reals *} typedef three = "{0\nat, 1, 2}" by blast definition A :: three where "A \ Abs_three 0" definition B :: three where "B \ Abs_three 1" definition C :: three where "C \ Abs_three 2" lemma "\P A; P B\ \ P x" nitpick [show_datatypes] oops record point = Xcoord :: int Ycoord :: int lemma "Xcoord (p\point) = Xcoord (q\point)" nitpick [show_datatypes] oops lemma "4 * x + 3 * (y\real) \ 1 / 2" nitpick [show_datatypes] oops subsection {* 3.8. Inductive and Coinductive Predicates *} inductive even where "even 0" | "even n \ even (Suc (Suc n))" lemma "\n. even n \ even (Suc n)" nitpick [card nat = 100, unary_ints, verbose] oops lemma "\n \ 99. even n \ even (Suc n)" nitpick [card nat = 100, unary_ints, verbose] oops inductive even' where "even' (0\nat)" | "even' 2" | "\even' m; even' n\ \ even' (m + n)" lemma "\n \ {0, 2, 4, 6, 8}. \ even' n" nitpick [card nat = 10, unary_ints, verbose, show_consts] (* FIXME: should be genuine *) oops lemma "even' (n - 2) \ even' n" nitpick [card nat = 10, show_consts] oops coinductive nats where "nats (x\nat) \ nats x" lemma "nats = {0, 1, 2, 3, 4}" nitpick [card nat = 10, show_consts] oops inductive odd where "odd 1" | "\odd m; even n\ \ odd (m + n)" lemma "odd n \ odd (n - 2)" nitpick [card nat = 10, show_consts] oops subsection {* 3.9. Coinductive Datatypes *} lemma "xs \ LCons a xs" nitpick oops lemma "\xs = LCons a xs; ys = iterates (\b. a) b\ \ xs = ys" nitpick [verbose] nitpick [bisim_depth = -1, verbose] oops lemma "\xs = LCons a xs; ys = LCons a ys\ \ xs = ys" nitpick [bisim_depth = -1, show_datatypes] nitpick sorry subsection {* 3.10. Boxing *} datatype tm = Var nat | Lam tm | App tm tm primrec lift where "lift (Var j) k = Var (if j < k then j else j + 1)" | "lift (Lam t) k = Lam (lift t (k + 1))" | "lift (App t u) k = App (lift t k) (lift u k)" primrec loose where "loose (Var j) k = (j \ k)" | "loose (Lam t) k = loose t (Suc k)" | "loose (App t u) k = (loose t k \ loose u k)" primrec subst\<^isub>1 where "subst\<^isub>1 \ (Var j) = \ j" | "subst\<^isub>1 \ (Lam t) = Lam (subst\<^isub>1 (\n. case n of 0 \ Var 0 | Suc m \ lift (\ m) 1) t)" | "subst\<^isub>1 \ (App t u) = App (subst\<^isub>1 \ t) (subst\<^isub>1 \ u)" lemma "\ loose t 0 \ subst\<^isub>1 \ t = t" nitpick [verbose] nitpick [eval = "subst\<^isub>1 \ t"] nitpick [dont_box] oops primrec subst\<^isub>2 where "subst\<^isub>2 \ (Var j) = \ j" | "subst\<^isub>2 \ (Lam t) = Lam (subst\<^isub>2 (\n. case n of 0 \ Var 0 | Suc m \ lift (\ m) 0) t)" | "subst\<^isub>2 \ (App t u) = App (subst\<^isub>2 \ t) (subst\<^isub>2 \ u)" lemma "\ loose t 0 \ subst\<^isub>2 \ t = t" nitpick sorry subsection {* 3.11. Scope Monotonicity *} lemma "length xs = length ys \ rev (zip xs ys) = zip xs (rev ys)" nitpick [verbose] nitpick [card = 8, verbose] oops lemma "\g. \x\'b. g (f x) = x \ \y\'a. \x. y = f x" nitpick [mono] nitpick oops +subsection {* 3.12. 
Inductive Properties *} + +inductive_set reach where +"(4\nat) \ reach" | +"n \ reach \ n < 4 \ 3 * n + 1 \ reach" | +"n \ reach \ n + 2 \ reach" + +lemma "n \ reach \ 2 dvd n" +nitpick +apply (induct set: reach) + apply auto + nitpick + apply (thin_tac "n \ reach") + nitpick +oops + +lemma "n \ reach \ 2 dvd n \ n \ 0" +nitpick +apply (induct set: reach) + apply auto + nitpick + apply (thin_tac "n \ reach") + nitpick +oops + +lemma "n \ reach \ 2 dvd n \ n \ 4" +by (induct set: reach) arith+ + +datatype 'a bin_tree = Leaf 'a | Branch "'a bin_tree" "'a bin_tree" + +primrec labels where +"labels (Leaf a) = {a}" | +"labels (Branch t u) = labels t \ labels u" + +primrec swap where +"swap (Leaf c) a b = + (if c = a then Leaf b else if c = b then Leaf a else Leaf c)" | +"swap (Branch t u) a b = Branch (swap t a b) (swap u a b)" + +lemma "\a \ labels t; b \ labels t; a \ b\ \ labels (swap t a b) = labels t" +nitpick +proof (induct t) + case Leaf thus ?case by simp +next + case (Branch t u) thus ?case + nitpick + nitpick [non_std "'a bin_tree", show_consts] +oops + +lemma "labels (swap t a b) = + (if a \ labels t then + if b \ labels t then labels t else (labels t - {a}) \ {b} + else + if b \ labels t then (labels t - {b}) \ {a} else labels t)" +nitpick +proof (induct t) + case Leaf thus ?case by simp +next + case (Branch t u) thus ?case + nitpick [non_std "'a bin_tree", show_consts] + by auto +qed + section {* 4. Case Studies *} nitpick_params [max_potential = 0, max_threads = 2] subsection {* 4.1. A Context-Free Grammar *} datatype alphabet = a | b inductive_set S\<^isub>1 and A\<^isub>1 and B\<^isub>1 where "[] \ S\<^isub>1" | "w \ A\<^isub>1 \ b # w \ S\<^isub>1" | "w \ B\<^isub>1 \ a # w \ S\<^isub>1" | "w \ S\<^isub>1 \ a # w \ A\<^isub>1" | "w \ S\<^isub>1 \ b # w \ S\<^isub>1" | "\v \ B\<^isub>1; v \ B\<^isub>1\ \ a # v @ w \ B\<^isub>1" theorem S\<^isub>1_sound: "w \ S\<^isub>1 \ length [x \ w. x = a] = length [x \ w. x = b]" nitpick oops inductive_set S\<^isub>2 and A\<^isub>2 and B\<^isub>2 where "[] \ S\<^isub>2" | "w \ A\<^isub>2 \ b # w \ S\<^isub>2" | "w \ B\<^isub>2 \ a # w \ S\<^isub>2" | "w \ S\<^isub>2 \ a # w \ A\<^isub>2" | "w \ S\<^isub>2 \ b # w \ B\<^isub>2" | "\v \ B\<^isub>2; v \ B\<^isub>2\ \ a # v @ w \ B\<^isub>2" theorem S\<^isub>2_sound: "w \ S\<^isub>2 \ length [x \ w. x = a] = length [x \ w. x = b]" nitpick oops inductive_set S\<^isub>3 and A\<^isub>3 and B\<^isub>3 where "[] \ S\<^isub>3" | "w \ A\<^isub>3 \ b # w \ S\<^isub>3" | "w \ B\<^isub>3 \ a # w \ S\<^isub>3" | "w \ S\<^isub>3 \ a # w \ A\<^isub>3" | "w \ S\<^isub>3 \ b # w \ B\<^isub>3" | "\v \ B\<^isub>3; w \ B\<^isub>3\ \ a # v @ w \ B\<^isub>3" theorem S\<^isub>3_sound: "w \ S\<^isub>3 \ length [x \ w. x = a] = length [x \ w. x = b]" nitpick sorry theorem S\<^isub>3_complete: "length [x \ w. x = a] = length [x \ w. x = b] \ w \ S\<^isub>3" nitpick oops inductive_set S\<^isub>4 and A\<^isub>4 and B\<^isub>4 where "[] \ S\<^isub>4" | "w \ A\<^isub>4 \ b # w \ S\<^isub>4" | "w \ B\<^isub>4 \ a # w \ S\<^isub>4" | "w \ S\<^isub>4 \ a # w \ A\<^isub>4" | "\v \ A\<^isub>4; w \ A\<^isub>4\ \ b # v @ w \ A\<^isub>4" | "w \ S\<^isub>4 \ b # w \ B\<^isub>4" | "\v \ B\<^isub>4; w \ B\<^isub>4\ \ a # v @ w \ B\<^isub>4" theorem S\<^isub>4_sound: "w \ S\<^isub>4 \ length [x \ w. x = a] = length [x \ w. x = b]" nitpick sorry theorem S\<^isub>4_complete: "length [x \ w. x = a] = length [x \ w. 
x = b] \ w \ S\<^isub>4" nitpick sorry theorem S\<^isub>4_A\<^isub>4_B\<^isub>4_sound_and_complete: "w \ S\<^isub>4 \ length [x \ w. x = a] = length [x \ w. x = b]" "w \ A\<^isub>4 \ length [x \ w. x = a] = length [x \ w. x = b] + 1" "w \ B\<^isub>4 \ length [x \ w. x = b] = length [x \ w. x = a] + 1" nitpick sorry subsection {* 4.2. AA Trees *} -datatype 'a tree = \ | N "'a\linorder" nat "'a tree" "'a tree" +datatype 'a aa_tree = \ | N "'a\linorder" nat "'a aa_tree" "'a aa_tree" primrec data where "data \ = undefined" | "data (N x _ _ _) = x" primrec dataset where "dataset \ = {}" | "dataset (N x _ t u) = {x} \ dataset t \ dataset u" primrec level where "level \ = 0" | "level (N _ k _ _) = k" primrec left where "left \ = \" | "left (N _ _ t\<^isub>1 _) = t\<^isub>1" primrec right where "right \ = \" | "right (N _ _ _ t\<^isub>2) = t\<^isub>2" fun wf where "wf \ = True" | "wf (N _ k t u) = (if t = \ then k = 1 \ (u = \ \ (level u = 1 \ left u = \ \ right u = \)) else wf t \ wf u \ u \ \ \ level t < k \ level u \ k \ level (right u) < k)" fun skew where "skew \ = \" | "skew (N x k t u) = (if t \ \ \ k = level t then N (data t) k (left t) (N x k (right t) u) else N x k t u)" fun split where "split \ = \" | "split (N x k t u) = (if u \ \ \ k = level (right u) then N (data u) (Suc k) (N x k t (left u)) (right u) else N x k t u)" theorem dataset_skew_split: "dataset (skew t) = dataset t" "dataset (split t) = dataset t" nitpick sorry theorem wf_skew_split: "wf t \ skew t = t" "wf t \ split t = t" nitpick sorry primrec insort\<^isub>1 where "insort\<^isub>1 \ x = N x 1 \ \" | "insort\<^isub>1 (N y k t u) x = (* (split \ skew) *) (N y k (if x < y then insort\<^isub>1 t x else t) (if x > y then insort\<^isub>1 u x else u))" theorem wf_insort\<^isub>1: "wf t \ wf (insort\<^isub>1 t x)" nitpick oops theorem wf_insort\<^isub>1_nat: "wf t \ wf (insort\<^isub>1 t (x\nat))" nitpick [eval = "insort\<^isub>1 t x"] oops primrec insort\<^isub>2 where "insort\<^isub>2 \ x = N x 1 \ \" | "insort\<^isub>2 (N y k t u) x = (split \ skew) (N y k (if x < y then insort\<^isub>2 t x else t) (if x > y then insort\<^isub>2 u x else u))" theorem wf_insort\<^isub>2: "wf t \ wf (insort\<^isub>2 t x)" nitpick sorry theorem dataset_insort\<^isub>2: "dataset (insort\<^isub>2 t x) = {x} \ dataset t" nitpick sorry end diff --git a/src/HOL/Nitpick_Examples/Mono_Nits.thy b/src/HOL/Nitpick_Examples/Mono_Nits.thy --- a/src/HOL/Nitpick_Examples/Mono_Nits.thy +++ b/src/HOL/Nitpick_Examples/Mono_Nits.thy @@ -1,99 +1,100 @@ (* Title: HOL/Nitpick_Examples/Mono_Nits.thy Author: Jasmin Blanchette, TU Muenchen Copyright 2009 Examples featuring Nitpick's monotonicity check. 
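Each ML snippet below runs the monotonicity (or constancy) analysis on a term or proposition and compares the verdict with the expected one: "const"/"nonconst" and "mono"/"nonmono" raise FAIL whenever the analysis disagrees.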
*) header {* Examples Featuring Nitpick's Monotonicity Check *} theory Mono_Nits imports Main begin ML {* exception FAIL val defs = Nitpick_HOL.all_axioms_of @{theory} |> #1 val def_table = Nitpick_HOL.const_def_table @{context} defs val ext_ctxt : Nitpick_HOL.extended_context = {thy = @{theory}, ctxt = @{context}, max_bisim_depth = ~1, boxes = [], - user_axioms = NONE, debug = false, wfs = [], binary_ints = SOME false, - destroy_constrs = false, specialize = false, skolemize = false, - star_linear_preds = false, uncurry = false, fast_descrs = false, - tac_timeout = NONE, evals = [], case_names = [], def_table = def_table, - nondef_table = Symtab.empty, user_nondefs = [], + stds = [(NONE, true)], wfs = [], user_axioms = NONE, debug = false, + binary_ints = SOME false, destroy_constrs = false, specialize = false, + skolemize = false, star_linear_preds = false, uncurry = false, + fast_descrs = false, tac_timeout = NONE, evals = [], case_names = [], + def_table = def_table, nondef_table = Symtab.empty, user_nondefs = [], simp_table = Unsynchronized.ref Symtab.empty, psimp_table = Symtab.empty, intro_table = Symtab.empty, ground_thm_table = Inttab.empty, ersatz_table = [], skolems = Unsynchronized.ref [], special_funs = Unsynchronized.ref [], unrolled_preds = Unsynchronized.ref [], wf_cache = Unsynchronized.ref [], constr_cache = Unsynchronized.ref []} (* term -> bool *) -val is_mono = Nitpick_Mono.formulas_monotonic ext_ctxt @{typ 'a} [] [] +val is_mono = Nitpick_Mono.formulas_monotonic ext_ctxt @{typ 'a} + Nitpick_Mono.Plus [] [] fun is_const t = let val T = fastype_of t in is_mono (Logic.mk_implies (Logic.mk_equals (Free ("dummyP", T), t), @{const False})) end fun mono t = is_mono t orelse raise FAIL fun nonmono t = not (is_mono t) orelse raise FAIL fun const t = is_const t orelse raise FAIL fun nonconst t = not (is_const t) orelse raise FAIL *} ML {* const @{term "A::('a\'b)"} *} ML {* const @{term "(A::'a set) = A"} *} ML {* const @{term "(A::'a set set) = A"} *} ML {* const @{term "(\x::'a set. x a)"} *} ML {* const @{term "{{a}} = C"} *} ML {* const @{term "{f::'a\nat} = {g::'a\nat}"} *} ML {* const @{term "A \ B"} *} ML {* const @{term "P (a::'a)"} *} ML {* const @{term "\a::'a. b (c (d::'a)) (e::'a) (f::'a)"} *} ML {* const @{term "\A::'a set. A a"} *} ML {* const @{term "\A::'a set. P A"} *} ML {* const @{term "P \ Q"} *} ML {* const @{term "A \ B = C"} *} ML {* const @{term "(if P then (A::'a set) else B) = C"} *} ML {* const @{term "let A = C in A \ B"} *} ML {* const @{term "THE x::'b. P x"} *} ML {* const @{term "{}::'a set"} *} ML {* const @{term "(\x::'a. True)"} *} ML {* const @{term "Let a A"} *} ML {* const @{term "A (a::'a)"} *} ML {* const @{term "insert a A = B"} *} ML {* const @{term "- (A::'a set)"} *} ML {* const @{term "finite A"} *} ML {* const @{term "\ finite A"} *} ML {* const @{term "finite (A::'a set set)"} *} ML {* const @{term "\a::'a. A a \ \ B a"} *} ML {* const @{term "A < (B::'a set)"} *} ML {* const @{term "A \ (B::'a set)"} *} ML {* const @{term "[a::'a]"} *} ML {* const @{term "[a::'a set]"} *} ML {* const @{term "[A \ (B::'a set)]"} *} ML {* const @{term "[A \ (B::'a set)] = [C]"} *} ML {* const @{term "\P. P a"} *} ML {* nonconst @{term "{%x. True}"} *} ML {* nonconst @{term "{(%x. x = a)} = C"} *} ML {* nonconst @{term "\P (a::'a). P a"} *} ML {* nonconst @{term "\a::'a. P a"} *} ML {* nonconst @{term "(\a::'a. \ A a) = B"} *} ML {* nonconst @{term "THE x. P x"} *} ML {* nonconst @{term "SOME x. P x"} *} ML {* mono @{prop "Q (\x::'a set. 
P x)"} *} ML {* mono @{prop "P (a::'a)"} *} ML {* mono @{prop "{a} = {b}"} *} ML {* mono @{prop "P (a::'a) \ P \ P = P"} *} ML {* mono @{prop "\F::'a set set. P"} *} ML {* mono @{prop "\ (\F f g (h::'a set). F f \ F g \ \ f a \ g a \ F h)"} *} ML {* mono @{prop "\ Q (\x::'a set. P x)"} *} ML {* mono @{prop "\ (\x. P x)"} *} ML {* nonmono @{prop "\x. P x"} *} ML {* nonmono @{prop "\F f g (h::'a set). F f \ F g \ \ f a \ g a \ F h"} *} ML {* nonmono @{prop "myall P = (P = (\x. True))"} *} end diff --git a/src/HOL/Tools/Nitpick/HISTORY b/src/HOL/Tools/Nitpick/HISTORY --- a/src/HOL/Tools/Nitpick/HISTORY +++ b/src/HOL/Tools/Nitpick/HISTORY @@ -1,171 +1,173 @@ Version 2010 * Added and implemented "binary_ints" and "bits" options - * Fixed soundness bug in "destroy_constrs" optimization + * Added "std" option and implemented support for nonstandard models + * Fixed soundness bugs related to "destroy_constrs" optimization and record + getters Version 2009-1 * Moved into Isabelle/HOL "Main" * Renamed "nitpick_const_def" to "nitpick_def", "nitpick_const_simp" to "nitpick_simp", "nitpick_const_psimp" to "nitpick_psimp", and "nitpick_ind_intro" to "nitpick_intro" * Replaced "special_depth" and "skolemize_depth" options by "specialize" and "skolemize" * Renamed "coalesce_type_vars" to "merge_type_vars" * Optimized Kodkod encoding of datatypes whose constructors don't appear in the formula to falsify * Added support for codatatype view of datatypes * Fixed soundness bug in the "uncurry" optimization * Fixed soundness bugs related to sets, sets of sets, (co)inductive predicates, typedefs, "finite", "rel_comp", and negative literals * Fixed monotonicity check * Fixed error when processing definitions * Fixed error in "star_linear_preds" optimization * Fixed error in Kodkod encoding of "The" and "Eps" * Fixed error in display of uncurried constants * Speeded up scope enumeration Version 1.2.2 (16 Oct 2009) * Added and implemented "star_linear_preds" option * Added and implemented "format" option * Added and implemented "coalesce_type_vars" option * Added and implemented "max_genuine" option * Fixed soundness issues related to "set", "distinct", "image", "Sigma", "-1::nat", subset, constructors, sort axioms, and partially applied interpreted constants * Fixed error in "show_consts" for boxed specialized constants * Fixed errors in Kodkod encoding of "The", "Eps", and "ind" * Fixed display of Skolem constants * Fixed error in "check_potential" and "check_genuine" * Added boxing support for higher-order constructor parameters * Changed notation used for coinductive datatypes * Optimized Kodkod encoding of "If", "card", and "setsum" * Improved the monotonicity check Version 1.2.1 (25 Sep 2009) * Added explicit support for coinductive datatypes * Added and implemented "box" option * Added and implemented "fast_descrs" option * Added and implemented "uncurry" option * Renamed and generalized "sync_cards" and "inductive_mood" to "mono" and "wf" * Fixed soundness issue related to nullary constructors * Fixed soundness issue related to higher-order quantifiers * Fixed soundness issue related to the "destroy_constrs" optimization * Fixed soundness issues related to the "special_depth" optimization * Added support for PicoSAT and incorporated it with the Nitpick package * Added automatic detection of installed SAT solvers based on naming convention * Optimized handling of quantifiers by moving them inward whenever possible * Optimized and improved precision of "wf" and "wfrec" * Improved handling of definitions made 
in locales * Fixed checked scope count in message shown upon interruption and timeout * Added minimalistic Nitpick-like tool called Minipick Version 1.2.0 (27 Jul 2009) * Optimized Kodkod encoding of datatypes and records * Optimized coinductive definitions * Fixed soundness issues related to pairs of functions * Fixed soundness issue in the peephole optimizer * Improved precision of non-well-founded predicates occurring positively in the formula to falsify * Added and implemented "destroy_constrs" option * Changed semantics of "inductive_mood" option to ensure soundness * Fixed semantics of "lockstep" option (broken in 1.1.1) and renamed it "sync_cards" * Improved precision of "trancl" and "rtrancl" * Optimized Kodkod encoding of "tranclp" and "rtranclp" * Made detection of inconsistent scope specifications more robust * Fixed a few Kodkod generation bugs Version 1.1.1 (24 Jun 2009) * Added "show_all" option * Fixed soundness issues related to type classes * Improved precision of some set constructs * Fiddled with the automatic monotonicity check * Fixed performance issues related to scope enumeration * Fixed a few Kodkod generation bugs Version 1.1.0 (16 Jun 2009) * Redesigned handling of datatypes to provide an interface baded on "card" and "max", obsoleting "mult" * Redesigned handling of typedefs, "rat", and "real" * Made "lockstep" option a three-state option and added an automatic monotonicity check * Made "batch_size" a (n + 1)-state option whose default depends on whether "debug" is enabled * Made "debug" automatically enable "verbose" * Added "destroy_equals" option * Added "no_assms" option * Fixed bug in computation of ground type * Fixed performance issue related to datatype acyclicity constraint generation * Fixed performance issue related to axiom selection * Improved precision of some well-founded inductive predicates * Added more checks to guard against very large cardinalities * Improved hit rate of potential counterexamples * Fixed several soundness issues * Optimized the Kodkod encoding to benefit more from symmetry breaking * Optimized relational composition, cartesian product, and converse * Added support for HaifaSat Version 1.0.3 (17 Mar 2009) * Added "HOL-Nominal-Nitpick" as a target in addition to "HOL-Nitpick" * Added "overlord" option to assist debugging * Increased default value of "special_depth" option * Fixed a bug that effectively disabled the "nitpick_const_def" attribute * Ensured that no scopes are skipped when multithreading is enabled * Fixed soundness issue in handling of "The", "Eps", and partial functions defined using Isabelle's function package * Fixed soundness issue in handling of non-definitional axioms * Fixed soundness issue in handling of "Abs_" and "Rep_" functions for "unit", "nat", "int", and "*" * Fixed a few Kodkod generation bugs * Optimized "div", "mod", and "dvd" on "nat" and "int" Version 1.0.2 (12 Mar 2009) * Added support for non-definitional axioms * Improved Isar integration * Added support for multiplicities of 0 * Added "max_threads" option and support for multithreaded Kodkodi * Added "blocking" option to control whether Nitpick should be run synchronously or asynchronously * Merged "auto_timeout" and "wellfounded_timeout" into "tac_timeout" * Added "auto" option to run Nitpick automatically (like Auto Quickcheck) * Introduced "auto_timeout" to specify Auto Nitpick's time limit * Renamed the possible values for the "expect" option * Renamed the "peephole" option to "peephole_optim" * Added negative versions of all Boolean 
options and made "= true" optional * Altered order of automatic SAT solver selection Version 1.0.1 (6 Mar 2009) * Eliminated the need to import "Nitpick" to use "nitpick" * Added "debug" option * Replaced "specialize_funs" with the more general "special_depth" option * Renamed "watch" option to "eval" * Improved parsing of "card", "mult", and "iter" options * Fixed a soundness bug in the "specialize_funs" optimization * Increased the scope of the "specialize_funs" optimization * Fixed a soundness bug in the treatment of "<" and "<=" on type "int" * Fixed a soundness bug in the "subterm property" optimization for types of cardinality 1 * Improved the axiom selection for overloaded constants, which led to an infinite loop for "Nominal.perm" * Added support for multiple instantiations of non-well-founded inductive predicates, which previously raised an exception * Fixed a Kodkod generation bug * Altered order of scope enumeration and automatic SAT solver selection * Optimized "Eps", "nat_case", and "list_case" * Improved output formatting * Added checks to prevent infinite loops in axiom selector and constant unfolding Version 1.0.0 (27 Feb 2009) * First release diff --git a/src/HOL/Tools/Nitpick/kodkod.ML b/src/HOL/Tools/Nitpick/kodkod.ML --- a/src/HOL/Tools/Nitpick/kodkod.ML +++ b/src/HOL/Tools/Nitpick/kodkod.ML @@ -1,1099 +1,1099 @@ (* Title: HOL/Tools/Nitpick/kodkod.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 ML interface on top of Kodkod. *) signature KODKOD = sig type n_ary_index = int * int type setting = string * string datatype tuple = Tuple of int list | TupleIndex of n_ary_index | TupleReg of n_ary_index datatype tuple_set = TupleUnion of tuple_set * tuple_set | TupleDifference of tuple_set * tuple_set | TupleIntersect of tuple_set * tuple_set | TupleProduct of tuple_set * tuple_set | TupleProject of tuple_set * int | TupleSet of tuple list | TupleRange of tuple * tuple | TupleArea of tuple * tuple | TupleAtomSeq of int * int | TupleSetReg of n_ary_index datatype tuple_assign = AssignTuple of n_ary_index * tuple | AssignTupleSet of n_ary_index * tuple_set type bound = (n_ary_index * string) list * tuple_set list type int_bound = int option * tuple_set list datatype formula = All of decl list * formula | Exist of decl list * formula | FormulaLet of expr_assign list * formula | FormulaIf of formula * formula * formula | Or of formula * formula | Iff of formula * formula | Implies of formula * formula | And of formula * formula | Not of formula | Acyclic of n_ary_index | Function of n_ary_index * rel_expr * rel_expr | Functional of n_ary_index * rel_expr * rel_expr | TotalOrdering of n_ary_index * n_ary_index * n_ary_index * n_ary_index | Subset of rel_expr * rel_expr | RelEq of rel_expr * rel_expr | IntEq of int_expr * int_expr | LT of int_expr * int_expr | LE of int_expr * int_expr | No of rel_expr | Lone of rel_expr | One of rel_expr | Some of rel_expr | False | True | FormulaReg of int and rel_expr = RelLet of expr_assign list * rel_expr | RelIf of formula * rel_expr * rel_expr | Union of rel_expr * rel_expr | Difference of rel_expr * rel_expr | Override of rel_expr * rel_expr | Intersect of rel_expr * rel_expr | Product of rel_expr * rel_expr | IfNo of rel_expr * rel_expr | Project of rel_expr * int_expr list | Join of rel_expr * rel_expr | Closure of rel_expr | ReflexiveClosure of rel_expr | Transpose of rel_expr | Comprehension of decl list * formula | Bits of int_expr | Int of int_expr | Iden | Ints | None | Univ | Atom of int 
| AtomSeq of int * int | Rel of n_ary_index | Var of n_ary_index | RelReg of n_ary_index and int_expr = Sum of decl list * int_expr | IntLet of expr_assign list * int_expr | IntIf of formula * int_expr * int_expr | SHL of int_expr * int_expr | SHA of int_expr * int_expr | SHR of int_expr * int_expr | Add of int_expr * int_expr | Sub of int_expr * int_expr | Mult of int_expr * int_expr | Div of int_expr * int_expr | Mod of int_expr * int_expr | Cardinality of rel_expr | SetSum of rel_expr | BitOr of int_expr * int_expr | BitXor of int_expr * int_expr | BitAnd of int_expr * int_expr | BitNot of int_expr | Neg of int_expr | Absolute of int_expr | Signum of int_expr | Num of int | IntReg of int and decl = DeclNo of n_ary_index * rel_expr | DeclLone of n_ary_index * rel_expr | DeclOne of n_ary_index * rel_expr | DeclSome of n_ary_index * rel_expr | DeclSet of n_ary_index * rel_expr and expr_assign = AssignFormulaReg of int * formula | AssignRelReg of n_ary_index * rel_expr | AssignIntReg of int * int_expr type 'a fold_expr_funcs = { formula_func: formula -> 'a -> 'a, rel_expr_func: rel_expr -> 'a -> 'a, int_expr_func: int_expr -> 'a -> 'a } val fold_formula : 'a fold_expr_funcs -> formula -> 'a -> 'a val fold_rel_expr : 'a fold_expr_funcs -> rel_expr -> 'a -> 'a val fold_int_expr : 'a fold_expr_funcs -> int_expr -> 'a -> 'a val fold_decl : 'a fold_expr_funcs -> decl -> 'a -> 'a val fold_expr_assign : 'a fold_expr_funcs -> expr_assign -> 'a -> 'a type 'a fold_tuple_funcs = { tuple_func: tuple -> 'a -> 'a, tuple_set_func: tuple_set -> 'a -> 'a } val fold_tuple : 'a fold_tuple_funcs -> tuple -> 'a -> 'a val fold_tuple_set : 'a fold_tuple_funcs -> tuple_set -> 'a -> 'a val fold_tuple_assign : 'a fold_tuple_funcs -> tuple_assign -> 'a -> 'a val fold_bound : 'a fold_expr_funcs -> 'a fold_tuple_funcs -> bound -> 'a -> 'a val fold_int_bound : 'a fold_tuple_funcs -> int_bound -> 'a -> 'a type problem = { comment: string, settings: setting list, univ_card: int, tuple_assigns: tuple_assign list, bounds: bound list, int_bounds: int_bound list, expr_assigns: expr_assign list, formula: formula} type raw_bound = n_ary_index * int list list datatype outcome = NotInstalled | Normal of (int * raw_bound list) list * int list | TimedOut of int list | Interrupted of int list option | Error of string * int list exception SYNTAX of string * string val max_arity : int -> int val arity_of_rel_expr : rel_expr -> int val problems_equivalent : problem -> problem -> bool val solve_any_problem : bool -> Time.time option -> int -> int -> problem list -> outcome end; structure Kodkod : KODKOD = struct type n_ary_index = int * int type setting = string * string datatype tuple = Tuple of int list | TupleIndex of n_ary_index | TupleReg of n_ary_index datatype tuple_set = TupleUnion of tuple_set * tuple_set | TupleDifference of tuple_set * tuple_set | TupleIntersect of tuple_set * tuple_set | TupleProduct of tuple_set * tuple_set | TupleProject of tuple_set * int | TupleSet of tuple list | TupleRange of tuple * tuple | TupleArea of tuple * tuple | TupleAtomSeq of int * int | TupleSetReg of n_ary_index datatype tuple_assign = AssignTuple of n_ary_index * tuple | AssignTupleSet of n_ary_index * tuple_set type bound = (n_ary_index * string) list * tuple_set list type int_bound = int option * tuple_set list datatype formula = All of decl list * formula | Exist of decl list * formula | FormulaLet of expr_assign list * formula | FormulaIf of formula * formula * formula | Or of formula * formula | Iff of formula * formula | Implies of 
formula * formula | And of formula * formula | Not of formula | Acyclic of n_ary_index | Function of n_ary_index * rel_expr * rel_expr | Functional of n_ary_index * rel_expr * rel_expr | TotalOrdering of n_ary_index * n_ary_index * n_ary_index * n_ary_index | Subset of rel_expr * rel_expr | RelEq of rel_expr * rel_expr | IntEq of int_expr * int_expr | LT of int_expr * int_expr | LE of int_expr * int_expr | No of rel_expr | Lone of rel_expr | One of rel_expr | Some of rel_expr | False | True | FormulaReg of int and rel_expr = RelLet of expr_assign list * rel_expr | RelIf of formula * rel_expr * rel_expr | Union of rel_expr * rel_expr | Difference of rel_expr * rel_expr | Override of rel_expr * rel_expr | Intersect of rel_expr * rel_expr | Product of rel_expr * rel_expr | IfNo of rel_expr * rel_expr | Project of rel_expr * int_expr list | Join of rel_expr * rel_expr | Closure of rel_expr | ReflexiveClosure of rel_expr | Transpose of rel_expr | Comprehension of decl list * formula | Bits of int_expr | Int of int_expr | Iden | Ints | None | Univ | Atom of int | AtomSeq of int * int | Rel of n_ary_index | Var of n_ary_index | RelReg of n_ary_index and int_expr = Sum of decl list * int_expr | IntLet of expr_assign list * int_expr | IntIf of formula * int_expr * int_expr | SHL of int_expr * int_expr | SHA of int_expr * int_expr | SHR of int_expr * int_expr | Add of int_expr * int_expr | Sub of int_expr * int_expr | Mult of int_expr * int_expr | Div of int_expr * int_expr | Mod of int_expr * int_expr | Cardinality of rel_expr | SetSum of rel_expr | BitOr of int_expr * int_expr | BitXor of int_expr * int_expr | BitAnd of int_expr * int_expr | BitNot of int_expr | Neg of int_expr | Absolute of int_expr | Signum of int_expr | Num of int | IntReg of int and decl = DeclNo of n_ary_index * rel_expr | DeclLone of n_ary_index * rel_expr | DeclOne of n_ary_index * rel_expr | DeclSome of n_ary_index * rel_expr | DeclSet of n_ary_index * rel_expr and expr_assign = AssignFormulaReg of int * formula | AssignRelReg of n_ary_index * rel_expr | AssignIntReg of int * int_expr type problem = { comment: string, settings: setting list, univ_card: int, tuple_assigns: tuple_assign list, bounds: bound list, int_bounds: int_bound list, expr_assigns: expr_assign list, formula: formula} type raw_bound = n_ary_index * int list list datatype outcome = NotInstalled | Normal of (int * raw_bound list) list * int list | TimedOut of int list | Interrupted of int list option | Error of string * int list exception SYNTAX of string * string type 'a fold_expr_funcs = { formula_func: formula -> 'a -> 'a, rel_expr_func: rel_expr -> 'a -> 'a, int_expr_func: int_expr -> 'a -> 'a } (* 'a fold_expr_funcs -> formula -> 'a -> 'a *) fun fold_formula (F : 'a fold_expr_funcs) formula = case formula of All (ds, f) => fold (fold_decl F) ds #> fold_formula F f | Exist (ds, f) => fold (fold_decl F) ds #> fold_formula F f | FormulaLet (bs, f) => fold (fold_expr_assign F) bs #> fold_formula F f | FormulaIf (f, f1, f2) => fold_formula F f #> fold_formula F f1 #> fold_formula F f2 | Or (f1, f2) => fold_formula F f1 #> fold_formula F f2 | Iff (f1, f2) => fold_formula F f1 #> fold_formula F f2 | Implies (f1, f2) => fold_formula F f1 #> fold_formula F f2 | And (f1, f2) => fold_formula F f1 #> fold_formula F f2 | Not f => fold_formula F f | Acyclic x => fold_rel_expr F (Rel x) | Function (x, r1, r2) => fold_rel_expr F (Rel x) #> fold_rel_expr F r1 #> fold_rel_expr F r2 | Functional (x, r1, r2) => fold_rel_expr F (Rel x) #> fold_rel_expr F r1 #> 
fold_rel_expr F r2 | TotalOrdering (x1, x2, x3, x4) => fold_rel_expr F (Rel x1) #> fold_rel_expr F (Rel x2) #> fold_rel_expr F (Rel x3) #> fold_rel_expr F (Rel x4) | Subset (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | RelEq (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | IntEq (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | LT (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | LE (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | No r => fold_rel_expr F r | Lone r => fold_rel_expr F r | One r => fold_rel_expr F r | Some r => fold_rel_expr F r | False => #formula_func F formula | True => #formula_func F formula | FormulaReg _ => #formula_func F formula (* 'a fold_expr_funcs -> rel_expr -> 'a -> 'a *) and fold_rel_expr F rel_expr = case rel_expr of RelLet (bs, r) => fold (fold_expr_assign F) bs #> fold_rel_expr F r | RelIf (f, r1, r2) => fold_formula F f #> fold_rel_expr F r1 #> fold_rel_expr F r2 | Union (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | Difference (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | Override (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | Intersect (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | Product (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | IfNo (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | Project (r1, is) => fold_rel_expr F r1 #> fold (fold_int_expr F) is | Join (r1, r2) => fold_rel_expr F r1 #> fold_rel_expr F r2 | Closure r => fold_rel_expr F r | ReflexiveClosure r => fold_rel_expr F r | Transpose r => fold_rel_expr F r | Comprehension (ds, f) => fold (fold_decl F) ds #> fold_formula F f | Bits i => fold_int_expr F i | Int i => fold_int_expr F i | Iden => #rel_expr_func F rel_expr | Ints => #rel_expr_func F rel_expr | None => #rel_expr_func F rel_expr | Univ => #rel_expr_func F rel_expr | Atom _ => #rel_expr_func F rel_expr | AtomSeq _ => #rel_expr_func F rel_expr | Rel _ => #rel_expr_func F rel_expr | Var _ => #rel_expr_func F rel_expr | RelReg _ => #rel_expr_func F rel_expr (* 'a fold_expr_funcs -> int_expr -> 'a -> 'a *) and fold_int_expr F int_expr = case int_expr of Sum (ds, i) => fold (fold_decl F) ds #> fold_int_expr F i | IntLet (bs, i) => fold (fold_expr_assign F) bs #> fold_int_expr F i | IntIf (f, i1, i2) => fold_formula F f #> fold_int_expr F i1 #> fold_int_expr F i2 | SHL (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | SHA (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | SHR (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | Add (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | Sub (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | Mult (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | Div (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | Mod (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | Cardinality r => fold_rel_expr F r | SetSum r => fold_rel_expr F r | BitOr (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | BitXor (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | BitAnd (i1, i2) => fold_int_expr F i1 #> fold_int_expr F i2 | BitNot i => fold_int_expr F i | Neg i => fold_int_expr F i | Absolute i => fold_int_expr F i | Signum i => fold_int_expr F i | Num _ => #int_expr_func F int_expr | IntReg _ => #int_expr_func F int_expr (* 'a fold_expr_funcs -> decl -> 'a -> 'a *) and fold_decl F decl = case decl of DeclNo (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r | DeclLone (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r | DeclOne (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r | DeclSome (x, 
r) => fold_rel_expr F (Var x) #> fold_rel_expr F r | DeclSet (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r (* 'a fold_expr_funcs -> expr_assign -> 'a -> 'a *) and fold_expr_assign F assign = case assign of AssignFormulaReg (x, f) => fold_formula F (FormulaReg x) #> fold_formula F f | AssignRelReg (x, r) => fold_rel_expr F (RelReg x) #> fold_rel_expr F r | AssignIntReg (x, i) => fold_int_expr F (IntReg x) #> fold_int_expr F i type 'a fold_tuple_funcs = { tuple_func: tuple -> 'a -> 'a, tuple_set_func: tuple_set -> 'a -> 'a } (* 'a fold_tuple_funcs -> tuple -> 'a -> 'a *) fun fold_tuple (F : 'a fold_tuple_funcs) = #tuple_func F (* 'a fold_tuple_funcs -> tuple_set -> 'a -> 'a *) fun fold_tuple_set F tuple_set = case tuple_set of TupleUnion (ts1, ts2) => fold_tuple_set F ts1 #> fold_tuple_set F ts2 | TupleDifference (ts1, ts2) => fold_tuple_set F ts1 #> fold_tuple_set F ts2 | TupleIntersect (ts1, ts2) => fold_tuple_set F ts1 #> fold_tuple_set F ts2 | TupleProduct (ts1, ts2) => fold_tuple_set F ts1 #> fold_tuple_set F ts2 | TupleProject (ts, _) => fold_tuple_set F ts | TupleSet ts => fold (fold_tuple F) ts | TupleRange (t1, t2) => fold_tuple F t1 #> fold_tuple F t2 | TupleArea (t1, t2) => fold_tuple F t1 #> fold_tuple F t2 | TupleAtomSeq _ => #tuple_set_func F tuple_set | TupleSetReg _ => #tuple_set_func F tuple_set (* 'a fold_tuple_funcs -> tuple_assign -> 'a -> 'a *) fun fold_tuple_assign F assign = case assign of AssignTuple (x, t) => fold_tuple F (TupleReg x) #> fold_tuple F t | AssignTupleSet (x, ts) => fold_tuple_set F (TupleSetReg x) #> fold_tuple_set F ts (* 'a fold_expr_funcs -> 'a fold_tuple_funcs -> bound -> 'a -> 'a *) fun fold_bound expr_F tuple_F (zs, tss) = fold (fold_rel_expr expr_F) (map (Rel o fst) zs) #> fold (fold_tuple_set tuple_F) tss (* 'a fold_tuple_funcs -> int_bound -> 'a -> 'a *) fun fold_int_bound F (_, tss) = fold (fold_tuple_set F) tss (* int -> int *) fun max_arity univ_card = floor (Math.ln 2147483647.0 / Math.ln (Real.fromInt univ_card)) (* rel_expr -> int *) fun arity_of_rel_expr (RelLet (_, r)) = arity_of_rel_expr r | arity_of_rel_expr (RelIf (_, r1, _)) = arity_of_rel_expr r1 | arity_of_rel_expr (Union (r1, _)) = arity_of_rel_expr r1 | arity_of_rel_expr (Difference (r1, _)) = arity_of_rel_expr r1 | arity_of_rel_expr (Override (r1, _)) = arity_of_rel_expr r1 | arity_of_rel_expr (Intersect (r1, _)) = arity_of_rel_expr r1 | arity_of_rel_expr (Product (r1, r2)) = sum_arities_of_rel_exprs r1 r2 | arity_of_rel_expr (IfNo (r1, _)) = arity_of_rel_expr r1 | arity_of_rel_expr (Project (r, is)) = length is | arity_of_rel_expr (Join (r1, r2)) = sum_arities_of_rel_exprs r1 r2 - 2 | arity_of_rel_expr (Closure _) = 2 | arity_of_rel_expr (ReflexiveClosure _) = 2 | arity_of_rel_expr (Transpose _) = 2 | arity_of_rel_expr (Comprehension (ds, _)) = fold (curry op + o arity_of_decl) ds 0 | arity_of_rel_expr (Bits _) = 1 | arity_of_rel_expr (Int _) = 1 | arity_of_rel_expr Iden = 2 | arity_of_rel_expr Ints = 1 | arity_of_rel_expr None = 1 | arity_of_rel_expr Univ = 1 | arity_of_rel_expr (Atom _) = 1 | arity_of_rel_expr (AtomSeq _) = 1 | arity_of_rel_expr (Rel (n, _)) = n | arity_of_rel_expr (Var (n, _)) = n | arity_of_rel_expr (RelReg (n, _)) = n (* rel_expr -> rel_expr -> int *) and sum_arities_of_rel_exprs r1 r2 = arity_of_rel_expr r1 + arity_of_rel_expr r2 (* decl -> int *) and arity_of_decl (DeclNo ((n, _), _)) = n | arity_of_decl (DeclLone ((n, _), _)) = n | arity_of_decl (DeclOne ((n, _), _)) = n | arity_of_decl (DeclSome ((n, _), _)) = n | arity_of_decl (DeclSet ((n, 
_), _)) = n (* string -> bool *) val is_relevant_setting = not o member (op =) ["solver", "delay"] (* problem -> problem -> bool *) fun problems_equivalent (p1 : problem) (p2 : problem) = #univ_card p1 = #univ_card p2 andalso #formula p1 = #formula p2 andalso #bounds p1 = #bounds p2 andalso #expr_assigns p1 = #expr_assigns p2 andalso #tuple_assigns p1 = #tuple_assigns p2 andalso #int_bounds p1 = #int_bounds p2 andalso filter (is_relevant_setting o fst) (#settings p1) = filter (is_relevant_setting o fst) (#settings p2) (* int -> string *) fun base_name j = if j < 0 then string_of_int (~j - 1) ^ "'" else string_of_int j (* n_ary_index -> string -> string -> string -> string *) fun n_ary_name (1, j) prefix _ _ = prefix ^ base_name j | n_ary_name (2, j) _ prefix _ = prefix ^ base_name j | n_ary_name (n, j) _ _ prefix = prefix ^ string_of_int n ^ "_" ^ base_name j (* int -> string *) fun atom_name j = "A" ^ base_name j fun atom_seq_name (k, 0) = "u" ^ base_name k | atom_seq_name (k, j0) = "u" ^ base_name k ^ "@" ^ base_name j0 fun formula_reg_name j = "$f" ^ base_name j fun rel_reg_name j = "$e" ^ base_name j fun int_reg_name j = "$i" ^ base_name j (* n_ary_index -> string *) fun tuple_name x = n_ary_name x "A" "P" "T" fun rel_name x = n_ary_name x "s" "r" "m" fun var_name x = n_ary_name x "S" "R" "M" fun tuple_reg_name x = n_ary_name x "$A" "$P" "$T" fun tuple_set_reg_name x = n_ary_name x "$a" "$p" "$t" (* string -> string *) fun inline_comment "" = "" | inline_comment comment = " /* " ^ translate_string (fn "\n" => " " | "*" => "* " | s => s) comment ^ " */" fun block_comment "" = "" | block_comment comment = prefix_lines "// " comment ^ "\n" (* (n_ary_index * string) -> string *) fun commented_rel_name (x, s) = rel_name x ^ inline_comment s (* tuple -> string *) fun string_for_tuple (Tuple js) = "[" ^ commas (map atom_name js) ^ "]" | string_for_tuple (TupleIndex x) = tuple_name x | string_for_tuple (TupleReg x) = tuple_reg_name x val no_prec = 100 val prec_TupleUnion = 1 val prec_TupleIntersect = 2 val prec_TupleProduct = 3 val prec_TupleProject = 4 (* tuple_set -> int *) fun precedence_ts (TupleUnion _) = prec_TupleUnion | precedence_ts (TupleDifference _) = prec_TupleUnion | precedence_ts (TupleIntersect _) = prec_TupleIntersect | precedence_ts (TupleProduct _) = prec_TupleProduct | precedence_ts (TupleProject _) = prec_TupleProject | precedence_ts _ = no_prec (* tuple_set -> string *) fun string_for_tuple_set tuple_set = let (* tuple_set -> int -> string *) fun sub tuple_set outer_prec = let val prec = precedence_ts tuple_set val need_parens = (prec < outer_prec) in (if need_parens then "(" else "") ^ (case tuple_set of TupleUnion (ts1, ts2) => sub ts1 prec ^ " + " ^ sub ts2 (prec + 1) | TupleDifference (ts1, ts2) => sub ts1 prec ^ " - " ^ sub ts1 (prec + 1) | TupleIntersect (ts1, ts2) => sub ts1 prec ^ " & " ^ sub ts1 prec | TupleProduct (ts1, ts2) => sub ts1 prec ^ "->" ^ sub ts2 prec | TupleProject (ts, c) => sub ts prec ^ "[" ^ string_of_int c ^ "]" | TupleSet ts => "{" ^ commas (map string_for_tuple ts) ^ "}" | TupleRange (t1, t2) => "{" ^ string_for_tuple t1 ^ (if t1 = t2 then "" else " .. 
" ^ string_for_tuple t2) ^ "}" | TupleArea (t1, t2) => "{" ^ string_for_tuple t1 ^ " # " ^ string_for_tuple t2 ^ "}" | TupleAtomSeq x => atom_seq_name x | TupleSetReg x => tuple_set_reg_name x) ^ (if need_parens then ")" else "") end in sub tuple_set 0 end (* tuple_assign -> string *) fun string_for_tuple_assign (AssignTuple (x, t)) = tuple_reg_name x ^ " := " ^ string_for_tuple t ^ "\n" | string_for_tuple_assign (AssignTupleSet (x, ts)) = tuple_set_reg_name x ^ " := " ^ string_for_tuple_set ts ^ "\n" (* bound -> string *) fun string_for_bound (zs, tss) = "bounds " ^ commas (map commented_rel_name zs) ^ ": " ^ (if length tss = 1 then "" else "[") ^ commas (map string_for_tuple_set tss) ^ (if length tss = 1 then "" else "]") ^ "\n" (* int_bound -> string *) fun int_string_for_bound (opt_n, tss) = (case opt_n of SOME n => signed_string_of_int n ^ ": " | NONE => "") ^ "[" ^ commas (map string_for_tuple_set tss) ^ "]" val prec_All = 1 val prec_Or = 2 val prec_Iff = 3 val prec_Implies = 4 val prec_And = 5 val prec_Not = 6 val prec_Eq = 7 val prec_Some = 8 val prec_SHL = 9 val prec_Add = 10 val prec_Mult = 11 val prec_Override = 12 val prec_Intersect = 13 val prec_Product = 14 val prec_IfNo = 15 val prec_Project = 17 val prec_Join = 18 val prec_BitNot = 19 (* formula -> int *) fun precedence_f (All _) = prec_All | precedence_f (Exist _) = prec_All | precedence_f (FormulaLet _) = prec_All | precedence_f (FormulaIf _) = prec_All | precedence_f (Or _) = prec_Or | precedence_f (Iff _) = prec_Iff | precedence_f (Implies _) = prec_Implies | precedence_f (And _) = prec_And | precedence_f (Not _) = prec_Not | precedence_f (Acyclic _) = no_prec | precedence_f (Function _) = no_prec | precedence_f (Functional _) = no_prec | precedence_f (TotalOrdering _) = no_prec | precedence_f (Subset _) = prec_Eq | precedence_f (RelEq _) = prec_Eq | precedence_f (IntEq _) = prec_Eq | precedence_f (LT _) = prec_Eq | precedence_f (LE _) = prec_Eq | precedence_f (No _) = prec_Some | precedence_f (Lone _) = prec_Some | precedence_f (One _) = prec_Some | precedence_f (Some _) = prec_Some | precedence_f False = no_prec | precedence_f True = no_prec | precedence_f (FormulaReg _) = no_prec (* rel_expr -> int *) and precedence_r (RelLet _) = prec_All | precedence_r (RelIf _) = prec_All | precedence_r (Union _) = prec_Add | precedence_r (Difference _) = prec_Add | precedence_r (Override _) = prec_Override | precedence_r (Intersect _) = prec_Intersect | precedence_r (Product _) = prec_Product | precedence_r (IfNo _) = prec_IfNo | precedence_r (Project _) = prec_Project | precedence_r (Join _) = prec_Join | precedence_r (Closure _) = prec_BitNot | precedence_r (ReflexiveClosure _) = prec_BitNot | precedence_r (Transpose _) = prec_BitNot | precedence_r (Comprehension _) = no_prec | precedence_r (Bits _) = no_prec | precedence_r (Int _) = no_prec | precedence_r Iden = no_prec | precedence_r Ints = no_prec | precedence_r None = no_prec | precedence_r Univ = no_prec | precedence_r (Atom _) = no_prec | precedence_r (AtomSeq _) = no_prec | precedence_r (Rel _) = no_prec | precedence_r (Var _) = no_prec | precedence_r (RelReg _) = no_prec (* int_expr -> int *) and precedence_i (Sum _) = prec_All | precedence_i (IntLet _) = prec_All | precedence_i (IntIf _) = prec_All | precedence_i (SHL _) = prec_SHL | precedence_i (SHA _) = prec_SHL | precedence_i (SHR _) = prec_SHL | precedence_i (Add _) = prec_Add | precedence_i (Sub _) = prec_Add | precedence_i (Mult _) = prec_Mult | precedence_i (Div _) = prec_Mult | precedence_i (Mod _) = prec_Mult 
| precedence_i (Cardinality _) = no_prec | precedence_i (SetSum _) = no_prec | precedence_i (BitOr _) = prec_Intersect | precedence_i (BitXor _) = prec_Intersect | precedence_i (BitAnd _) = prec_Intersect | precedence_i (BitNot _) = prec_BitNot | precedence_i (Neg _) = prec_BitNot | precedence_i (Absolute _) = prec_BitNot | precedence_i (Signum _) = prec_BitNot | precedence_i (Num _) = no_prec | precedence_i (IntReg _) = no_prec (* (string -> unit) -> problem list -> unit *) fun write_problem_file out problems = let (* formula -> unit *) fun out_outmost_f (And (f1, f2)) = (out_outmost_f f1; out "\n && "; out_outmost_f f2) | out_outmost_f f = out_f f prec_And (* formula -> int -> unit *) and out_f formula outer_prec = let val prec = precedence_f formula val need_parens = (prec < outer_prec) in (if need_parens then out "(" else ()); (case formula of All (ds, f) => (out "all ["; out_decls ds; out "] | "; out_f f prec) | Exist (ds, f) => (out "some ["; out_decls ds; out "] | "; out_f f prec) | FormulaLet (bs, f) => (out "let ["; out_assigns bs; out "] | "; out_f f prec) | FormulaIf (f, f1, f2) => (out "if "; out_f f prec; out " then "; out_f f1 prec; out " else "; out_f f2 prec) | Or (f1, f2) => (out_f f1 prec; out " || "; out_f f2 prec) | Iff (f1, f2) => (out_f f1 prec; out " <=> "; out_f f2 prec) | Implies (f1, f2) => (out_f f1 (prec + 1); out " => "; out_f f2 prec) | And (f1, f2) => (out_f f1 prec; out " && "; out_f f2 prec) | Not f => (out "! "; out_f f prec) | Acyclic x => out ("ACYCLIC(" ^ rel_name x ^ ")") | Function (x, r1, r2) => (out ("FUNCTION(" ^ rel_name x ^ ", "); out_r r1 0; out " -> one "; out_r r2 0; out ")") | Functional (x, r1, r2) => (out ("FUNCTION(" ^ rel_name x ^ ", "); out_r r1 0; out " -> lone "; out_r r2 0; out ")") | TotalOrdering (x1, x2, x3, x4) => out ("TOTAL_ORDERING(" ^ rel_name x1 ^ ", " ^ rel_name x2 ^ ", " ^ rel_name x3 ^ ", " ^ rel_name x4 ^ ")") | Subset (r1, r2) => (out_r r1 prec; out " in "; out_r r2 prec) | RelEq (r1, r2) => (out_r r1 prec; out " = "; out_r r2 prec) | IntEq (i1, i2) => (out_i i1 prec; out " = "; out_i i2 prec) | LT (i1, i2) => (out_i i1 prec; out " < "; out_i i2 prec) | LE (i1, i2) => (out_i i1 prec; out " <= "; out_i i2 prec) | No r => (out "no "; out_r r prec) | Lone r => (out "lone "; out_r r prec) | One r => (out "one "; out_r r prec) | Some r => (out "some "; out_r r prec) | False => out "false" | True => out "true" | FormulaReg j => out (formula_reg_name j)); (if need_parens then out ")" else ()) end (* rel_expr -> int -> unit *) and out_r rel_expr outer_prec = let val prec = precedence_r rel_expr val need_parens = (prec < outer_prec) in (if need_parens then out "(" else ()); (case rel_expr of RelLet (bs, r) => (out "let ["; out_assigns bs; out "] | "; out_r r prec) | RelIf (f, r1, r2) => (out "if "; out_f f prec; out " then "; out_r r1 prec; out " else "; out_r r2 prec) | Union (r1, r2) => (out_r r1 prec; out " + "; out_r r2 (prec + 1)) | Difference (r1, r2) => (out_r r1 prec; out " - "; out_r r2 (prec + 1)) | Override (r1, r2) => (out_r r1 prec; out " ++ "; out_r r2 prec) | Intersect (r1, r2) => (out_r r1 prec; out " & "; out_r r2 prec) | Product (r1, r2) => (out_r r1 prec; out "->"; out_r r2 prec) | IfNo (r1, r2) => (out_r r1 prec; out "\\"; out_r r2 prec) | Project (r1, is) => (out_r r1 prec; out "["; out_columns is; out "]") | Join (r1, r2) => (out_r r1 prec; out "."; out_r r2 (prec + 1)) | Closure r => (out "^"; out_r r prec) | ReflexiveClosure r => (out "*"; out_r r prec) | Transpose r => (out "~"; out_r r prec) | 
Comprehension (ds, f) => (out "{["; out_decls ds; out "] | "; out_f f 0; out "}") | Bits i => (out "Bits["; out_i i 0; out "]") | Int i => (out "Int["; out_i i 0; out "]") | Iden => out "iden" | Ints => out "ints" | None => out "none" | Univ => out "univ" | Atom j => out (atom_name j) | AtomSeq x => out (atom_seq_name x) | Rel x => out (rel_name x) | Var x => out (var_name x) | RelReg (_, j) => out (rel_reg_name j)); (if need_parens then out ")" else ()) end (* int_expr -> int -> unit *) and out_i int_expr outer_prec = let val prec = precedence_i int_expr val need_parens = (prec < outer_prec) in (if need_parens then out "(" else ()); (case int_expr of Sum (ds, i) => (out "sum ["; out_decls ds; out "] | "; out_i i prec) | IntLet (bs, i) => (out "let ["; out_assigns bs; out "] | "; out_i i prec) | IntIf (f, i1, i2) => (out "if "; out_f f prec; out " then "; out_i i1 prec; out " else "; out_i i2 prec) | SHL (i1, i2) => (out_i i1 prec; out " << "; out_i i2 (prec + 1)) | SHA (i1, i2) => (out_i i1 prec; out " >> "; out_i i2 (prec + 1)) | SHR (i1, i2) => (out_i i1 prec; out " >>> "; out_i i2 (prec + 1)) | Add (i1, i2) => (out_i i1 prec; out " + "; out_i i2 (prec + 1)) | Sub (i1, i2) => (out_i i1 prec; out " - "; out_i i2 (prec + 1)) | Mult (i1, i2) => (out_i i1 prec; out " * "; out_i i2 (prec + 1)) | Div (i1, i2) => (out_i i1 prec; out " / "; out_i i2 (prec + 1)) | Mod (i1, i2) => (out_i i1 prec; out " % "; out_i i2 (prec + 1)) | Cardinality r => (out "#("; out_r r 0; out ")") | SetSum r => (out "sum("; out_r r 0; out ")") | BitOr (i1, i2) => (out_i i1 prec; out " | "; out_i i2 prec) | BitXor (i1, i2) => (out_i i1 prec; out " ^ "; out_i i2 prec) | BitAnd (i1, i2) => (out_i i1 prec; out " & "; out_i i2 prec) | BitNot i => (out "~"; out_i i prec) | Neg i => (out "-"; out_i i prec) | Absolute i => (out "abs "; out_i i prec) | Signum i => (out "sgn "; out_i i prec) | Num k => out (signed_string_of_int k) | IntReg j => out (int_reg_name j)); (if need_parens then out ")" else ()) end (* decl list -> unit *) and out_decls [] = () | out_decls [d] = out_decl d | out_decls (d :: ds) = (out_decl d; out ", "; out_decls ds) (* decl -> unit *) and out_decl (DeclNo (x, r)) = (out (var_name x); out " : no "; out_r r 0) | out_decl (DeclLone (x, r)) = (out (var_name x); out " : lone "; out_r r 0) | out_decl (DeclOne (x, r)) = (out (var_name x); out " : one "; out_r r 0) | out_decl (DeclSome (x, r)) = (out (var_name x); out " : some "; out_r r 0) | out_decl (DeclSet (x, r)) = (out (var_name x); out " : set "; out_r r 0) (* assign_expr list -> unit *) and out_assigns [] = () | out_assigns [b] = out_assign b | out_assigns (b :: bs) = (out_assign b; out ", "; out_assigns bs) (* assign_expr -> unit *) and out_assign (AssignFormulaReg (j, f)) = (out (formula_reg_name j); out " := "; out_f f 0) | out_assign (AssignRelReg ((_, j), r)) = (out (rel_reg_name j); out " := "; out_r r 0) | out_assign (AssignIntReg (j, i)) = (out (int_reg_name j); out " := "; out_i i 0) (* int_expr list -> unit *) and out_columns [] = () | out_columns [i] = out_i i 0 | out_columns (i :: is) = (out_i i 0; out ", "; out_columns is) (* problem -> unit *) and out_problem {comment, settings, univ_card, tuple_assigns, bounds, int_bounds, expr_assigns, formula} = (out ("\n" ^ block_comment comment ^ implode (map (fn (key, value) => key ^ ": " ^ value ^ "\n") settings) ^ "univ: " ^ atom_seq_name (univ_card, 0) ^ "\n" ^ implode (map string_for_tuple_assign tuple_assigns) ^ implode (map string_for_bound bounds) ^ (if int_bounds = [] then "" else 
"int_bounds: " ^ commas (map int_string_for_bound int_bounds) ^ "\n")); map (fn b => (out_assign b; out ";")) expr_assigns; out "solve "; out_outmost_f formula; out ";\n") in out ("// This file was generated by Isabelle (probably Nitpick)\n" ^ "// " ^ Date.fmt "%Y-%m-%d %H:%M:%S" (Date.fromTimeLocal (Time.now ())) ^ "\n"); map out_problem problems end (* string -> bool *) fun is_ident_char s = Symbol.is_ascii_letter s orelse Symbol.is_ascii_digit s orelse s = "_" orelse s = "'" orelse s = "$" (* string list -> string list *) fun strip_blanks [] = [] | strip_blanks (" " :: ss) = strip_blanks ss | strip_blanks [s1, " "] = [s1] | strip_blanks (s1 :: " " :: s2 :: ss) = if is_ident_char s1 andalso is_ident_char s2 then s1 :: " " :: strip_blanks (s2 :: ss) else strip_blanks (s1 :: s2 :: ss) | strip_blanks (s :: ss) = s :: strip_blanks ss (* (string list -> 'a * string list) -> string list -> 'a list * string list *) fun scan_non_empty_list scan = scan ::: Scan.repeat ($$ "," |-- scan) fun scan_list scan = scan_non_empty_list scan || Scan.succeed [] (* string list -> int * string list *) val scan_nat = Scan.repeat1 (Scan.one Symbol.is_ascii_digit) >> (the o Int.fromString o space_implode "") (* string list -> (int * int) * string list *) val scan_rel_name = $$ "s" |-- scan_nat >> pair 1 || $$ "r" |-- scan_nat >> pair 2 || ($$ "m" |-- scan_nat --| $$ "_") -- scan_nat (* string list -> int * string list *) val scan_atom = $$ "A" |-- scan_nat (* string list -> int list * string list *) val scan_tuple = $$ "[" |-- scan_list scan_atom --| $$ "]" (* string list -> int list list * string list *) val scan_tuple_set = $$ "[" |-- scan_list scan_tuple --| $$ "]" (* string list -> ((int * int) * int list list) * string list *) val scan_assignment = (scan_rel_name --| $$ "=") -- scan_tuple_set (* string list -> ((int * int) * int list list) list * string list *) val scan_instance = Scan.this_string "relations:" |-- $$ "{" |-- scan_list scan_assignment --| $$ "}" (* string -> raw_bound list *) fun parse_instance inst = Scan.finite Symbol.stopper (Scan.error (!! 
(fn _ => raise SYNTAX ("Kodkod.parse_instance", "ill-formed Kodkodi output")) scan_instance)) (strip_blanks (explode inst)) |> fst val problem_marker = "*** PROBLEM " val outcome_marker = "---OUTCOME---\n" val instance_marker = "---INSTANCE---\n" (* string -> substring -> string *) fun read_section_body marker = Substring.string o fst o Substring.position "\n\n" o Substring.triml (size marker) (* substring -> raw_bound list *) fun read_next_instance s = let val s = Substring.position instance_marker s |> snd in if Substring.isEmpty s then raise SYNTAX ("Kodkod.read_next_instance", "expected \"INSTANCE\" marker") else read_section_body instance_marker s |> parse_instance end (* int -> substring * (int * raw_bound list) list * int list -> substring * (int * raw_bound list) list * int list *) fun read_next_outcomes j (s, ps, js) = let val (s1, s2) = Substring.position outcome_marker s in if Substring.isEmpty s2 orelse not (Substring.isEmpty (Substring.position problem_marker s1 |> snd)) then (s, ps, js) else let val outcome = read_section_body outcome_marker s2 val s = Substring.triml (size outcome_marker) s2 in if String.isSuffix "UNSATISFIABLE" outcome then read_next_outcomes j (s, ps, j :: js) else if String.isSuffix "SATISFIABLE" outcome then read_next_outcomes j (s, (j, read_next_instance s2) :: ps, js) else raise SYNTAX ("Kodkod.read_next_outcomes", "unknown outcome " ^ quote outcome) end end (* substring * (int * raw_bound list) list * int list -> (int * raw_bound list) list * int list *) fun read_next_problems (s, ps, js) = let val s = Substring.position problem_marker s |> snd in if Substring.isEmpty s then (ps, js) else let val s = Substring.triml (size problem_marker) s val j_plus_1 = s |> Substring.takel (not_equal #" ") |> Substring.string |> Int.fromString |> the val j = j_plus_1 - 1 in read_next_problems (read_next_outcomes j (s, ps, js)) end end handle Option.Option => raise SYNTAX ("Kodkod.read_next_problems", "expected number after \"PROBLEM\"") (* Path.T -> (int * raw_bound list) list * int list *) fun read_output_file path = read_next_problems (Substring.full (File.read path), [], []) |>> rev ||> rev (* The fudge term below is to account for Kodkodi's slow start-up time, which is partly due to the JVM and partly due to the ML "system" function. *) val fudge_ms = 250 (* bool -> Time.time option -> int -> int -> problem list -> outcome *) fun solve_any_problem overlord deadline max_threads max_solutions problems = let val j = find_index (curry (op =) True o #formula) problems val indexed_problems = if j >= 0 then [(j, nth problems j)] else filter (not_equal False o #formula o snd) (0 upto length problems - 1 ~~ problems) val triv_js = filter_out (AList.defined (op =) indexed_problems) (0 upto length problems - 1) (* int -> int *) val reindex = fst o nth indexed_problems in if null indexed_problems then Normal ([], triv_js) else let val (serial_str, tmp_path) = if overlord then ("", Path.append (Path.variable "ISABELLE_HOME_USER") o Path.base) else (serial_string (), File.tmp_path) (* string -> string -> Path.T *) fun path_for base suf = tmp_path (Path.explode (base ^ serial_str ^ "." 
^ suf)) val in_path = path_for "isabelle" "kki" val in_buf = Unsynchronized.ref Buffer.empty (* string -> unit *) fun out s = Unsynchronized.change in_buf (Buffer.add s) val out_path = path_for "kodkodi" "out" val err_path = path_for "kodkodi" "err" val _ = write_problem_file out (map snd indexed_problems) val _ = File.write_buffer in_path (!in_buf) (* unit -> unit *) fun remove_temporary_files () = if overlord then () else List.app File.rm [in_path, out_path, err_path] in let val ms = case deadline of NONE => ~1 | SOME time => Int.max (0, Time.toMilliseconds (Time.- (time, Time.now ())) - fudge_ms) val outcome = let val code = system ("env CLASSPATH=\"$KODKODI_CLASSPATH:$CLASSPATH\" \ \JAVA_LIBRARY_PATH=\"$KODKODI_JAVA_LIBRARY_PATH:\ \$JAVA_LIBRARY_PATH\" \ \LD_LIBRARY_PATH=\"$KODKODI_JAVA_LIBRARY_PATH:\ \$LD_LIBRARY_PATH\" \ \\"$KODKODI\"/bin/kodkodi" ^ (if ms >= 0 then " -max-msecs " ^ string_of_int ms else "") ^ (if max_solutions > 1 then " -solve-all" else "") ^ " -max-solutions " ^ string_of_int max_solutions ^ (if max_threads > 0 then " -max-threads " ^ string_of_int max_threads else "") ^ " < " ^ Path.implode in_path ^ " > " ^ Path.implode out_path ^ " 2> " ^ Path.implode err_path) val (ps, nontriv_js) = read_output_file out_path |>> map (apfst reindex) ||> map reindex val js = triv_js @ nontriv_js val first_error = File.fold_lines (fn line => fn "" => line | s => s) err_path "" in if null ps then if code = 2 then TimedOut js else if first_error |> Substring.full |> Substring.position "NoClassDefFoundError" |> snd |> Substring.isEmpty |> not then NotInstalled else if first_error <> "" then Error (first_error |> perhaps (try (unsuffix ".")) |> perhaps (try (unprefix "Error: ")), js) else if code <> 0 then Error ("Unknown error", js) else Normal ([], js) else Normal (ps, js) end in remove_temporary_files (); outcome end handle Exn.Interrupt => let val nontriv_js = map reindex (snd (read_output_file out_path)) in (remove_temporary_files (); Interrupted (SOME (triv_js @ nontriv_js))) handle Exn.Interrupt => (remove_temporary_files (); Interrupted NONE) end end end end; diff --git a/src/HOL/Tools/Nitpick/kodkod_sat.ML b/src/HOL/Tools/Nitpick/kodkod_sat.ML --- a/src/HOL/Tools/Nitpick/kodkod_sat.ML +++ b/src/HOL/Tools/Nitpick/kodkod_sat.ML @@ -1,125 +1,125 @@ (* Title: HOL/Tools/Nitpick/kodkod_sat.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2009 + Copyright 2009, 2010 Kodkod SAT solver integration. 
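Each solver is declared in static_list below. Internal entries name the Java or JNI back-ends bundled with Kodkodi and say whether they can run incrementally; External and ExternalV2 entries describe command-line solvers that are located through an environment variable (e.g., MINISAT_HOME) and run on a generated CNF file, with ExternalV2 additionally listing the markers used to parse the solver's output (satisfiable marker, solution-line prefix, unsatisfiable marker). Only solvers whose home variable or JNI library path is set are reported by configured_sat_solvers.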
*) signature KODKOD_SAT = sig val configured_sat_solvers : bool -> string list val smart_sat_solver_name : bool -> string val sat_solver_spec : string -> string * string list end; structure Kodkod_SAT : KODKOD_SAT = struct datatype sink = ToStdout | ToFile datatype availability = Java | JNI datatype mode = Batch | Incremental datatype sat_solver_info = Internal of availability * mode * string list | External of sink * string * string * string list | ExternalV2 of sink * string * string * string list * string * string * string val berkmin_exec = getenv "BERKMIN_EXE" (* (string * sat_solver_info) list *) val static_list = [("MiniSat", ExternalV2 (ToFile, "MINISAT_HOME", "minisat", [], "SAT", "", "UNSAT")), ("PicoSAT", External (ToStdout, "PICOSAT_HOME", "picosat", [])), ("zChaff", ExternalV2 (ToStdout, "ZCHAFF_HOME", "zchaff", [], "Instance Satisfiable", "", "Instance Unsatisfiable")), ("RSat", ExternalV2 (ToStdout, "RSAT_HOME", "rsat", ["-s"], "s SATISFIABLE", "v ", "s UNSATISFIABLE")), ("BerkMin", ExternalV2 (ToStdout, "BERKMIN_HOME", if berkmin_exec = "" then "BerkMin561" else berkmin_exec, [], "Satisfiable !!", "solution =", "UNSATISFIABLE !!")), ("BerkMinAlloy", External (ToStdout, "BERKMINALLOY_HOME", "berkmin", [])), ("Jerusat", External (ToStdout, "JERUSAT_HOME", "Jerusat1.3", [])), ("MiniSatJNI", Internal (JNI, Incremental, ["MiniSat"])), ("zChaffJNI", Internal (JNI, Batch, ["zChaff"])), ("SAT4J", Internal (Java, Incremental, ["DefaultSAT4J"])), ("SAT4JLight", Internal (Java, Incremental, ["LightSAT4J"])), ("HaifaSat", ExternalV2 (ToStdout, "HAIFASAT_HOME", "HaifaSat", ["-p", "1"], "s SATISFIABLE", "v ", "s UNSATISFIABLE"))] val created_temp_dir = Unsynchronized.ref false (* string -> sink -> string -> string -> string list -> string list -> (string * (unit -> string list)) option *) fun dynamic_entry_for_external name dev home exec args markers = case getenv home of "" => NONE | dir => SOME (name, fn () => let val temp_dir = getenv "ISABELLE_TMP" val _ = if !created_temp_dir then () else (created_temp_dir := true; File.mkdir (Path.explode temp_dir)) val temp = temp_dir ^ "/" ^ name ^ serial_string () val out_file = temp ^ ".out" in [if null markers then "External" else "ExternalV2", dir ^ "/" ^ exec, temp ^ ".cnf", if dev = ToFile then out_file else ""] @ markers @ (if dev = ToFile then [out_file] else []) @ args end) (* bool -> string * sat_solver_info -> (string * (unit -> string list)) option *) fun dynamic_entry_for_info incremental (name, Internal (Java, mode, ss)) = if incremental andalso mode = Batch then NONE else SOME (name, K ss) | dynamic_entry_for_info incremental (name, Internal (JNI, mode, ss)) = if incremental andalso mode = Batch then NONE else let val lib_paths = getenv "KODKODI_JAVA_LIBRARY_PATH" |> space_explode ":" in if exists (fn path => File.exists (Path.explode (path ^ "/"))) lib_paths then SOME (name, K ss) else NONE end | dynamic_entry_for_info false (name, External (dev, home, exec, args)) = dynamic_entry_for_external name dev home exec args [] | dynamic_entry_for_info false (name, ExternalV2 (dev, home, exec, args, m1, m2, m3)) = dynamic_entry_for_external name dev home exec args [m1, m2, m3] | dynamic_entry_for_info true _ = NONE (* bool -> (string * (unit -> string list)) list *) fun dynamic_list incremental = map_filter (dynamic_entry_for_info incremental) static_list (* bool -> string list *) val configured_sat_solvers = map fst o dynamic_list (* bool -> string *) val smart_sat_solver_name = dynamic_list #> hd #> fst (* (string * 'a) list -> string 
*) fun enum_solvers xs = commas (map (quote o fst) xs |> distinct (op =)) (* string -> string * string list *) fun sat_solver_spec name = let val dynamic_list = dynamic_list false in (name, the (AList.lookup (op =) dynamic_list name) ()) handle Option.Option => error (if AList.defined (op =) static_list name then "The SAT solver " ^ quote name ^ " is not configured. The \ \following solvers are configured:\n" ^ enum_solvers dynamic_list ^ "." else "Unknown SAT solver " ^ quote name ^ ". The following \ \solvers are supported:\n" ^ enum_solvers static_list ^ ".") end end; diff --git a/src/HOL/Tools/Nitpick/minipick.ML b/src/HOL/Tools/Nitpick/minipick.ML --- a/src/HOL/Tools/Nitpick/minipick.ML +++ b/src/HOL/Tools/Nitpick/minipick.ML @@ -1,327 +1,327 @@ (* Title: HOL/Tools/Nitpick/minipick.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2009 + Copyright 2009, 2010 Finite model generation for HOL formulas using Kodkod, minimalistic version. *) signature MINIPICK = sig datatype rep = SRep | RRep type styp = Nitpick_Util.styp val vars_for_bound_var : (typ -> int) -> rep -> typ list -> int -> Kodkod.rel_expr list val rel_expr_for_bound_var : (typ -> int) -> rep -> typ list -> int -> Kodkod.rel_expr val decls_for : rep -> (typ -> int) -> typ list -> typ -> Kodkod.decl list val false_atom : Kodkod.rel_expr val true_atom : Kodkod.rel_expr val formula_from_atom : Kodkod.rel_expr -> Kodkod.formula val atom_from_formula : Kodkod.formula -> Kodkod.rel_expr val pick_nits_in_term : Proof.context -> (typ -> int) -> term -> string end; structure Minipick : MINIPICK = struct open Kodkod open Nitpick_Util open Nitpick_HOL open Nitpick_Peephole open Nitpick_Kodkod datatype rep = SRep | RRep (* Proof.context -> typ -> unit *) fun check_type ctxt (Type ("fun", Ts)) = List.app (check_type ctxt) Ts | check_type ctxt (Type ("*", Ts)) = List.app (check_type ctxt) Ts | check_type _ @{typ bool} = () | check_type _ (TFree (_, @{sort "{}"})) = () | check_type _ (TFree (_, @{sort HOL.type})) = () | check_type ctxt T = raise NOT_SUPPORTED ("type " ^ quote (Syntax.string_of_typ ctxt T)) (* rep -> (typ -> int) -> typ -> int list *) fun atom_schema_of SRep card (Type ("fun", [T1, T2])) = replicate_list (card T1) (atom_schema_of SRep card T2) | atom_schema_of RRep card (Type ("fun", [T1, @{typ bool}])) = atom_schema_of SRep card T1 | atom_schema_of RRep card (Type ("fun", [T1, T2])) = atom_schema_of SRep card T1 @ atom_schema_of RRep card T2 | atom_schema_of _ card (Type ("*", Ts)) = maps (atom_schema_of SRep card) Ts | atom_schema_of _ card T = [card T] (* rep -> (typ -> int) -> typ -> int *) val arity_of = length ooo atom_schema_of (* (typ -> int) -> typ list -> int -> int *) fun index_for_bound_var _ [_] 0 = 0 | index_for_bound_var card (_ :: Ts) 0 = index_for_bound_var card Ts 0 + arity_of SRep card (hd Ts) | index_for_bound_var card Ts n = index_for_bound_var card (tl Ts) (n - 1) (* (typ -> int) -> rep -> typ list -> int -> rel_expr list *) fun vars_for_bound_var card R Ts j = map (curry Var 1) (index_seq (index_for_bound_var card Ts j) (arity_of R card (nth Ts j))) (* (typ -> int) -> rep -> typ list -> int -> rel_expr *) val rel_expr_for_bound_var = foldl1 Product oooo vars_for_bound_var (* rep -> (typ -> int) -> typ list -> typ -> decl list *) fun decls_for R card Ts T = map2 (curry DeclOne o pair 1) (index_seq (index_for_bound_var card (T :: Ts) 0) (arity_of R card (nth (T :: Ts) 0))) (map (AtomSeq o rpair 0) (atom_schema_of R card T)) (* int list -> rel_expr *) val atom_product = foldl1 Product o map Atom val 
false_atom = Atom 0 val true_atom = Atom 1 (* rel_expr -> formula *) fun formula_from_atom r = RelEq (r, true_atom) (* formula -> rel_expr *) fun atom_from_formula f = RelIf (f, true_atom, false_atom) (* Proof.context -> (typ -> int) -> styp list -> term -> formula *) fun kodkod_formula_for_term ctxt card frees = let (* typ -> rel_expr -> rel_expr *) fun R_rep_from_S_rep (T as Type ("fun", [T1, @{typ bool}])) r = let val jss = atom_schema_of SRep card T1 |> map (rpair 0) |> all_combinations in map2 (fn i => fn js => RelIf (formula_from_atom (Project (r, [Num i])), atom_product js, empty_n_ary_rel (length js))) (index_seq 0 (length jss)) jss |> foldl1 Union end | R_rep_from_S_rep (Type ("fun", [T1, T2])) r = let val jss = atom_schema_of SRep card T1 |> map (rpair 0) |> all_combinations val arity2 = arity_of SRep card T2 in map2 (fn i => fn js => Product (atom_product js, Project (r, num_seq (i * arity2) arity2) |> R_rep_from_S_rep T2)) (index_seq 0 (length jss)) jss |> foldl1 Union end | R_rep_from_S_rep _ r = r (* typ list -> typ -> rel_expr -> rel_expr *) fun S_rep_from_R_rep Ts (T as Type ("fun", _)) r = Comprehension (decls_for SRep card Ts T, RelEq (R_rep_from_S_rep T (rel_expr_for_bound_var card SRep (T :: Ts) 0), r)) | S_rep_from_R_rep _ _ r = r (* typ list -> term -> formula *) fun to_F Ts t = (case t of @{const Not} $ t1 => Not (to_F Ts t1) | @{const False} => False | @{const True} => True | Const (@{const_name All}, _) $ Abs (s, T, t') => All (decls_for SRep card Ts T, to_F (T :: Ts) t') | (t0 as Const (@{const_name All}, _)) $ t1 => to_F Ts (t0 $ eta_expand Ts t1 1) | Const (@{const_name Ex}, _) $ Abs (s, T, t') => Exist (decls_for SRep card Ts T, to_F (T :: Ts) t') | (t0 as Const (@{const_name Ex}, _)) $ t1 => to_F Ts (t0 $ eta_expand Ts t1 1) | Const (@{const_name "op ="}, _) $ t1 $ t2 => RelEq (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name ord_class.less_eq}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => Subset (to_R_rep Ts t1, to_R_rep Ts t2) | @{const "op &"} $ t1 $ t2 => And (to_F Ts t1, to_F Ts t2) | @{const "op |"} $ t1 $ t2 => Or (to_F Ts t1, to_F Ts t2) | @{const "op -->"} $ t1 $ t2 => Implies (to_F Ts t1, to_F Ts t2) | t1 $ t2 => Subset (to_S_rep Ts t2, to_R_rep Ts t1) | Free _ => raise SAME () | Term.Var _ => raise SAME () | Bound _ => raise SAME () | Const (s, _) => raise NOT_SUPPORTED ("constant " ^ quote s) | _ => raise TERM ("Minipick.kodkod_formula_for_term.to_F", [t])) handle SAME () => formula_from_atom (to_R_rep Ts t) (* typ list -> term -> rel_expr *) and to_S_rep Ts t = case t of Const (@{const_name Pair}, _) $ t1 $ t2 => Product (to_S_rep Ts t1, to_S_rep Ts t2) | Const (@{const_name Pair}, _) $ _ => to_S_rep Ts (eta_expand Ts t 1) | Const (@{const_name Pair}, _) => to_S_rep Ts (eta_expand Ts t 2) | Const (@{const_name fst}, _) $ t1 => let val fst_arity = arity_of SRep card (fastype_of1 (Ts, t)) in Project (to_S_rep Ts t1, num_seq 0 fst_arity) end | Const (@{const_name fst}, _) => to_S_rep Ts (eta_expand Ts t 1) | Const (@{const_name snd}, _) $ t1 => let val pair_arity = arity_of SRep card (fastype_of1 (Ts, t1)) val snd_arity = arity_of SRep card (fastype_of1 (Ts, t)) val fst_arity = pair_arity - snd_arity in Project (to_S_rep Ts t1, num_seq fst_arity snd_arity) end | Const (@{const_name snd}, _) => to_S_rep Ts (eta_expand Ts t 1) | Bound j => rel_expr_for_bound_var card SRep Ts j | _ => S_rep_from_R_rep Ts (fastype_of1 (Ts, t)) (to_R_rep Ts t) (* term -> rel_expr *) and to_R_rep Ts t = (case t of @{const Not} => to_R_rep Ts 
(eta_expand Ts t 1) | Const (@{const_name All}, _) => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name Ex}, _) => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name "op ="}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name "op ="}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name ord_class.less_eq}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name ord_class.less_eq}, _) => to_R_rep Ts (eta_expand Ts t 2) | @{const "op &"} $ _ => to_R_rep Ts (eta_expand Ts t 1) | @{const "op &"} => to_R_rep Ts (eta_expand Ts t 2) | @{const "op |"} $ _ => to_R_rep Ts (eta_expand Ts t 1) | @{const "op |"} => to_R_rep Ts (eta_expand Ts t 2) | @{const "op -->"} $ _ => to_R_rep Ts (eta_expand Ts t 1) | @{const "op -->"} => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name bot_class.bot}, T as Type ("fun", [_, @{typ bool}])) => empty_n_ary_rel (arity_of RRep card T) | Const (@{const_name insert}, _) $ t1 $ t2 => Union (to_S_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name insert}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name insert}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name trancl}, _) $ t1 => if arity_of RRep card (fastype_of1 (Ts, t1)) = 2 then Closure (to_R_rep Ts t1) else raise NOT_SUPPORTED "transitive closure for function or pair type" | Const (@{const_name trancl}, _) => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name lower_semilattice_class.inf}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => Intersect (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name lower_semilattice_class.inf}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name lower_semilattice_class.inf}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name upper_semilattice_class.sup}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => Union (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name upper_semilattice_class.sup}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name upper_semilattice_class.sup}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name minus_class.minus}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => Difference (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name minus_class.minus}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name minus_class.minus}, Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name Pair}, _) $ _ $ _ => raise SAME () | Const (@{const_name Pair}, _) $ _ => raise SAME () | Const (@{const_name Pair}, _) => raise SAME () | Const (@{const_name fst}, _) $ _ => raise SAME () | Const (@{const_name fst}, _) => raise SAME () | Const (@{const_name snd}, _) $ _ => raise SAME () | Const (@{const_name snd}, _) => raise SAME () | Const (_, @{typ bool}) => atom_from_formula (to_F Ts t) | Free (x as (_, T)) => Rel (arity_of RRep card T, find_index (curry (op =) x) frees) | Term.Var _ => raise NOT_SUPPORTED "schematic variables" | Bound j => raise SAME () | Abs (_, T, t') => (case fastype_of1 (T :: Ts, t') of @{typ bool} => Comprehension (decls_for SRep card Ts T, to_F (T :: Ts) t') | T' => Comprehension (decls_for SRep card Ts T @ decls_for RRep card (T :: Ts) T', Subset (rel_expr_for_bound_var card RRep (T' :: T :: Ts) 0, to_R_rep (T :: Ts) t'))) | t1 $ t2 => (case fastype_of1 (Ts, t) of @{typ bool} => atom_from_formula (to_F Ts t) | T => let val T2 = fastype_of1 (Ts, t2) 
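(* Function application: if the argument's S-representation spans a single column, a relational join with the function's relation yields the result directly; otherwise, pad the argument with the result type's atom sequences, intersect with the function's relation, and project out the result columns. *)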
in case arity_of SRep card T2 of 1 => Join (to_S_rep Ts t2, to_R_rep Ts t1) | n => let val arity2 = arity_of SRep card T2 val res_arity = arity_of RRep card T in Project (Intersect (Product (to_S_rep Ts t2, atom_schema_of RRep card T |> map (AtomSeq o rpair 0) |> foldl1 Product), to_R_rep Ts t1), num_seq arity2 res_arity) end end) | _ => raise NOT_SUPPORTED ("term " ^ quote (Syntax.string_of_term ctxt t))) handle SAME () => R_rep_from_S_rep (fastype_of1 (Ts, t)) (to_S_rep Ts t) in to_F [] end (* (typ -> int) -> int -> styp -> bound *) fun bound_for_free card i (s, T) = let val js = atom_schema_of RRep card T in ([((length js, i), s)], [TupleSet [], atom_schema_of RRep card T |> map (rpair 0) |> tuple_set_from_atom_schema]) end (* (typ -> int) -> typ list -> typ -> rel_expr -> formula *) fun declarative_axiom_for_rel_expr card Ts (Type ("fun", [T1, T2])) r = if body_type T2 = bool_T then True else All (decls_for SRep card Ts T1, declarative_axiom_for_rel_expr card (T1 :: Ts) T2 (List.foldl Join r (vars_for_bound_var card SRep (T1 :: Ts) 0))) | declarative_axiom_for_rel_expr _ _ _ r = One r (* (typ -> int) -> bool -> int -> styp -> formula *) fun declarative_axiom_for_free card i (_, T) = declarative_axiom_for_rel_expr card [] T (Rel (arity_of RRep card T, i)) (* Proof.context -> (typ -> int) -> term -> string *) fun pick_nits_in_term ctxt raw_card t = let val thy = ProofContext.theory_of ctxt val {overlord, ...} = Nitpick_Isar.default_params thy [] (* typ -> int *) fun card (Type ("fun", [T1, T2])) = reasonable_power (card T2) (card T1) | card (Type ("*", [T1, T2])) = card T1 * card T2 | card @{typ bool} = 2 | card T = Int.max (1, raw_card T) val neg_t = @{const Not} $ ObjectLogic.atomize_term thy t val _ = fold_types (K o check_type ctxt) neg_t () val frees = Term.add_frees neg_t [] val bounds = map2 (bound_for_free card) (index_seq 0 (length frees)) frees val declarative_axioms = map2 (declarative_axiom_for_free card) (index_seq 0 (length frees)) frees val formula = kodkod_formula_for_term ctxt card frees neg_t |> fold_rev (curry And) declarative_axioms val univ_card = univ_card 0 0 0 bounds formula val problem = {comment = "", settings = [], univ_card = univ_card, tuple_assigns = [], bounds = bounds, int_bounds = [], expr_assigns = [], formula = formula} in case solve_any_problem overlord NONE 0 1 [problem] of NotInstalled => "unknown" | Normal ([], _) => "none" | Normal _ => "genuine" | TimedOut _ => "unknown" | Interrupted _ => "unknown" | Error (s, _) => error ("Kodkod error: " ^ s) end handle NOT_SUPPORTED details => (warning ("Unsupported case: " ^ details ^ "."); "unknown") end; diff --git a/src/HOL/Tools/Nitpick/nitpick.ML b/src/HOL/Tools/Nitpick/nitpick.ML --- a/src/HOL/Tools/Nitpick/nitpick.ML +++ b/src/HOL/Tools/Nitpick/nitpick.ML @@ -1,905 +1,966 @@ (* Title: HOL/Tools/Nitpick/nitpick.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Finite model generation for HOL formulas using Kodkod. 
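The entry points pick_nits_in_term and pick_nits_in_subgoal preprocess the goal, select scopes (cardinalities for the relevant types), translate each scope into a Kodkod problem, and interpret any model found as a potential or genuine counterexample.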
*) signature NITPICK = sig type styp = Nitpick_Util.styp type params = { cards_assigns: (typ option * int list) list, maxes_assigns: (styp option * int list) list, iters_assigns: (styp option * int list) list, bitss: int list, bisim_depths: int list, boxes: (typ option * bool option) list, monos: (typ option * bool option) list, + stds: (typ option * bool) list, wfs: (styp option * bool option) list, sat_solver: string, blocking: bool, falsify: bool, debug: bool, verbose: bool, overlord: bool, user_axioms: bool option, assms: bool, merge_type_vars: bool, binary_ints: bool option, destroy_constrs: bool, specialize: bool, skolemize: bool, star_linear_preds: bool, uncurry: bool, fast_descrs: bool, peephole_optim: bool, timeout: Time.time option, tac_timeout: Time.time option, sym_break: int, sharing_depth: int, flatten_props: bool, max_threads: int, show_skolems: bool, show_datatypes: bool, show_consts: bool, evals: term list, formats: (term option * int list) list, max_potential: int, max_genuine: int, check_potential: bool, check_genuine: bool, batch_size: int, expect: string} val register_frac_type : string -> (string * string) list -> theory -> theory val unregister_frac_type : string -> theory -> theory val register_codatatype : typ -> string -> styp list -> theory -> theory val unregister_codatatype : typ -> theory -> theory val pick_nits_in_term : - Proof.state -> params -> bool -> term list -> term -> string * Proof.state + Proof.state -> params -> bool -> int -> int -> int -> term list -> term + -> string * Proof.state val pick_nits_in_subgoal : - Proof.state -> params -> bool -> int -> string * Proof.state + Proof.state -> params -> bool -> int -> int -> string * Proof.state end; structure Nitpick : NITPICK = struct open Nitpick_Util open Nitpick_HOL open Nitpick_Mono open Nitpick_Scope open Nitpick_Peephole open Nitpick_Rep open Nitpick_Nut open Nitpick_Kodkod open Nitpick_Model structure KK = Kodkod type params = { cards_assigns: (typ option * int list) list, maxes_assigns: (styp option * int list) list, iters_assigns: (styp option * int list) list, bitss: int list, bisim_depths: int list, boxes: (typ option * bool option) list, monos: (typ option * bool option) list, + stds: (typ option * bool) list, wfs: (styp option * bool option) list, sat_solver: string, blocking: bool, falsify: bool, debug: bool, verbose: bool, overlord: bool, user_axioms: bool option, assms: bool, merge_type_vars: bool, binary_ints: bool option, destroy_constrs: bool, specialize: bool, skolemize: bool, star_linear_preds: bool, uncurry: bool, fast_descrs: bool, peephole_optim: bool, timeout: Time.time option, tac_timeout: Time.time option, sym_break: int, sharing_depth: int, flatten_props: bool, max_threads: int, show_skolems: bool, show_datatypes: bool, show_consts: bool, evals: term list, formats: (term option * int list) list, max_potential: int, max_genuine: int, check_potential: bool, check_genuine: bool, batch_size: int, expect: string} type problem_extension = { free_names: nut list, sel_names: nut list, nonsel_names: nut list, rel_table: nut NameTable.table, liberal: bool, scope: scope, core: KK.formula, defs: KK.formula list} type rich_problem = KK.problem * problem_extension (* Proof.context -> string -> term list -> Pretty.T list *) fun pretties_for_formulas _ _ [] = [] | pretties_for_formulas ctxt s ts = [Pretty.str (s ^ plural_s_for_list ts ^ ":"), Pretty.indent indent_size (Pretty.chunks (map2 (fn j => fn t => Pretty.block [t |> shorten_names_in_term |> Syntax.pretty_term ctxt, Pretty.str (if j 
= 1 then "." else ";")]) (length ts downto 1) ts))] (* unit -> string *) fun install_kodkodi_message () = "Nitpick requires the external Java program Kodkodi. To install it, download \ \the package from Isabelle's web page and add the \"kodkodi-x.y.z\" \ \directory's full path to \"" ^ Path.implode (Path.expand (Path.appends (Path.variable "ISABELLE_HOME_USER" :: map Path.basic ["etc", "components"]))) ^ "\"." val max_liberal_delay_ms = 200 val max_liberal_delay_percent = 2 (* Time.time option -> int *) fun liberal_delay_for_timeout NONE = max_liberal_delay_ms | liberal_delay_for_timeout (SOME timeout) = Int.max (0, Int.min (max_liberal_delay_ms, Time.toMilliseconds timeout * max_liberal_delay_percent div 100)) (* Time.time option -> bool *) fun passed_deadline NONE = false | passed_deadline (SOME time) = Time.compare (Time.now (), time) <> LESS (* ('a * bool option) list -> bool *) fun none_true assigns = forall (not_equal (SOME true) o snd) assigns val syntactic_sorts = @{sort "{default,zero,one,plus,minus,uminus,times,inverse,abs,sgn,ord,eq}"} @ @{sort number} (* typ -> bool *) fun has_tfree_syntactic_sort (TFree (_, S as _ :: _)) = subset (op =) (S, syntactic_sorts) | has_tfree_syntactic_sort _ = false (* term -> bool *) val has_syntactic_sorts = exists_type (exists_subtype has_tfree_syntactic_sort) (* (unit -> string) -> Pretty.T *) fun plazy f = Pretty.blk (0, pstrs (f ())) -(* Time.time -> Proof.state -> params -> bool -> term -> string * Proof.state *) -fun pick_them_nits_in_term deadline state (params : params) auto orig_assm_ts - orig_t = +(* Time.time -> Proof.state -> params -> bool -> int -> int -> int -> term + -> string * Proof.state *) +fun pick_them_nits_in_term deadline state (params : params) auto i n step + orig_assm_ts orig_t = let val timer = Timer.startRealTimer () val thy = Proof.theory_of state val ctxt = Proof.context_of state +(* FIXME: reintroduce code before new release val nitpick_thy = ThyInfo.get_theory "Nitpick" val _ = Theory.subthy (nitpick_thy, thy) orelse error "You must import the theory \"Nitpick\" to use Nitpick" +*) val {cards_assigns, maxes_assigns, iters_assigns, bitss, bisim_depths, - boxes, monos, wfs, sat_solver, blocking, falsify, debug, verbose, + boxes, monos, stds, wfs, sat_solver, blocking, falsify, debug, verbose, overlord, user_axioms, assms, merge_type_vars, binary_ints, destroy_constrs, specialize, skolemize, star_linear_preds, uncurry, fast_descrs, peephole_optim, tac_timeout, sym_break, sharing_depth, flatten_props, max_threads, show_skolems, show_datatypes, show_consts, evals, formats, max_potential, max_genuine, check_potential, check_genuine, batch_size, ...} = params val state_ref = Unsynchronized.ref state (* Pretty.T -> unit *) val pprint = if auto then Unsynchronized.change state_ref o Proof.goal_message o K o Pretty.chunks o cons (Pretty.str "") o single o Pretty.mark Markup.hilite else priority o Pretty.string_of (* (unit -> Pretty.T) -> unit *) fun pprint_m f = () |> not auto ? pprint o f fun pprint_v f = () |> verbose ? pprint o f fun pprint_d f = () |> debug ? 
pprint o f (* string -> unit *) val print = pprint o curry Pretty.blk 0 o pstrs (* (unit -> string) -> unit *) val print_m = pprint_m o K o plazy val print_v = pprint_v o K o plazy val print_d = pprint_d o K o plazy (* unit -> unit *) fun check_deadline () = if debug andalso passed_deadline deadline then raise TimeLimit.TimeOut else () (* unit -> 'a *) fun do_interrupted () = if passed_deadline deadline then raise TimeLimit.TimeOut else raise Interrupt - val _ = print_m (K "Nitpicking...") + val _ = + if step = 0 then + print_m (fn () => "Nitpicking formula...") + else + pprint_m (fn () => Pretty.chunks ( + pretties_for_formulas ctxt ("Nitpicking " ^ + (if i <> 1 orelse n <> 1 then + "subgoal " ^ string_of_int i ^ " of " ^ string_of_int n + else + "goal")) [orig_t])) val neg_t = if falsify then Logic.mk_implies (orig_t, @{prop False}) else orig_t val assms_t = if assms orelse auto then Logic.mk_conjunction_list (neg_t :: orig_assm_ts) else neg_t val (assms_t, evals) = assms_t :: evals |> merge_type_vars ? merge_type_vars_in_terms |> pairf hd tl val original_max_potential = max_potential val original_max_genuine = max_genuine (* val _ = priority ("*** " ^ Syntax.string_of_term ctxt orig_t) val _ = List.app (fn t => priority ("*** " ^ Syntax.string_of_term ctxt t)) orig_assm_ts *) val max_bisim_depth = fold Integer.max bisim_depths ~1 val case_names = case_const_names thy val (defs, built_in_nondefs, user_nondefs) = all_axioms_of thy val def_table = const_def_table ctxt defs val nondef_table = const_nondef_table (built_in_nondefs @ user_nondefs) val simp_table = Unsynchronized.ref (const_simp_table ctxt) val psimp_table = const_psimp_table ctxt val intro_table = inductive_intro_table ctxt def_table val ground_thm_table = ground_theorem_table thy val ersatz_table = ersatz_table thy val (ext_ctxt as {wf_cache, ...}) = {thy = thy, ctxt = ctxt, max_bisim_depth = max_bisim_depth, boxes = boxes, - user_axioms = user_axioms, debug = debug, wfs = wfs, + stds = stds, wfs = wfs, user_axioms = user_axioms, debug = debug, binary_ints = binary_ints, destroy_constrs = destroy_constrs, specialize = specialize, skolemize = skolemize, star_linear_preds = star_linear_preds, uncurry = uncurry, fast_descrs = fast_descrs, tac_timeout = tac_timeout, evals = evals, case_names = case_names, def_table = def_table, nondef_table = nondef_table, user_nondefs = user_nondefs, simp_table = simp_table, psimp_table = psimp_table, intro_table = intro_table, ground_thm_table = ground_thm_table, ersatz_table = ersatz_table, skolems = Unsynchronized.ref [], special_funs = Unsynchronized.ref [], unrolled_preds = Unsynchronized.ref [], wf_cache = Unsynchronized.ref [], constr_cache = Unsynchronized.ref []} val frees = Term.add_frees assms_t [] val _ = null (Term.add_tvars assms_t []) orelse raise NOT_SUPPORTED "schematic type variables" val (((def_ts, nondef_ts), (got_all_mono_user_axioms, no_poly_user_axioms)), core_t) = preprocess_term ext_ctxt assms_t val got_all_user_axioms = got_all_mono_user_axioms andalso no_poly_user_axioms (* styp * (bool * bool) -> unit *) fun print_wf (x, (gfp, wf)) = pprint (Pretty.blk (0, pstrs ("The " ^ (if gfp then "co" else "") ^ "inductive predicate \"") @ Syntax.pretty_term ctxt (Const x) :: pstrs (if wf then "\" was proved well-founded. Nitpick can compute it \ \efficiently." else "\" could not be proved well-founded. 
Nitpick might need to \ \unroll it."))) val _ = if verbose then List.app print_wf (!wf_cache) else () val _ = pprint_d (fn () => Pretty.chunks (pretties_for_formulas ctxt "Preprocessed formula" [core_t] @ pretties_for_formulas ctxt "Relevant definitional axiom" def_ts @ pretties_for_formulas ctxt "Relevant nondefinitional axiom" nondef_ts)) val _ = List.app (ignore o Term.type_of) (core_t :: def_ts @ nondef_ts) handle TYPE (_, Ts, ts) => raise TYPE ("Nitpick.pick_them_nits_in_term", Ts, ts) val core_u = nut_from_term ext_ctxt Eq core_t val def_us = map (nut_from_term ext_ctxt DefEq) def_ts val nondef_us = map (nut_from_term ext_ctxt Eq) nondef_ts val (free_names, const_names) = fold add_free_and_const_names (core_u :: def_us @ nondef_us) ([], []) val (sel_names, nonsel_names) = List.partition (is_sel o nickname_of) const_names - val would_be_genuine = got_all_user_axioms andalso none_true wfs + val genuine_means_genuine = got_all_user_axioms andalso none_true wfs + val standard = forall snd stds (* val _ = List.app (priority o string_for_nut ctxt) (core_u :: def_us @ nondef_us) *) val unique_scope = forall (curry (op =) 1 o length o snd) cards_assigns (* typ -> bool *) fun is_type_always_monotonic T = (is_datatype thy T andalso not (is_quot_type thy T) andalso (not (is_pure_typedef thy T) orelse is_univ_typedef thy T)) orelse - is_number_type thy T orelse is_bit_type T + is_number_type thy T orelse is_bit_type T orelse T = @{typ \} fun is_type_monotonic T = unique_scope orelse case triple_lookup (type_match thy) monos T of SOME (SOME b) => b | _ => is_type_always_monotonic T orelse - formulas_monotonic ext_ctxt T def_ts nondef_ts core_t + formulas_monotonic ext_ctxt T Plus def_ts nondef_ts core_t fun is_deep_datatype T = is_datatype thy T andalso (is_word_type T orelse exists (curry (op =) T o domain_type o type_of) sel_names) - val Ts = ground_types_in_terms ext_ctxt (core_t :: def_ts @ nondef_ts) - |> sort TermOrd.typ_ord + val all_Ts = ground_types_in_terms ext_ctxt (core_t :: def_ts @ nondef_ts) + |> sort TermOrd.typ_ord val _ = if verbose andalso binary_ints = SOME true andalso - exists (member (op =) [nat_T, int_T]) Ts then + exists (member (op =) [nat_T, int_T]) all_Ts then print_v (K "The option \"binary_ints\" will be ignored because \ \of the presence of rationals, reals, \"Suc\", \ \\"gcd\", or \"lcm\" in the problem.") else () - val (mono_Ts, nonmono_Ts) = List.partition is_type_monotonic Ts + val (mono_Ts, nonmono_Ts) = List.partition is_type_monotonic all_Ts val _ = if verbose andalso not unique_scope then case filter_out is_type_always_monotonic mono_Ts of [] => () | interesting_mono_Ts => print_v (fn () => let val ss = map (quote o string_for_type ctxt) interesting_mono_Ts in "The type" ^ plural_s_for_list ss ^ " " ^ space_implode " " (serial_commas "and" ss) ^ " " ^ (if none_true monos then "passed the monotonicity test" else (if length ss = 1 then "is" else "are") ^ " considered monotonic") ^ ". Nitpick might be able to skip some scopes." end) else () - val shallow_dataTs = filter_out is_deep_datatype Ts + val deep_dataTs = filter is_deep_datatype all_Ts + (* FIXME: Implement proper detection of induction datatypes. 
*) + (* string * (Rule_Cases.T * bool) -> bool *) + fun is_inductive_case (_, (Rule_Cases.Case {fixes, assumes, ...}, _)) = + false +(* + not (null fixes) andalso exists (String.isSuffix ".hyps" o fst) assumes +*) + (* unit -> typ list *) + val induct_dataTs = + if exists is_inductive_case (ProofContext.cases_of ctxt) then + filter (is_datatype thy) all_Ts + else + [] + val _ = if standard andalso not (null induct_dataTs) then + pprint_m (fn () => Pretty.blk (0, + pstrs "Hint: To check that the induction hypothesis is \ + \general enough, try the following command: " @ + [Pretty.mark Markup.sendback (Pretty.blk (0, + pstrs ("nitpick [" ^ + commas (map (prefix "non_std " o maybe_quote + o unyxml o string_for_type ctxt) + induct_dataTs) ^ + ", show_consts]")))] @ pstrs ".")) + else + () (* val _ = priority "Monotonic types:" val _ = List.app (priority o string_for_type ctxt) mono_Ts val _ = priority "Nonmonotonic types:" val _ = List.app (priority o string_for_type ctxt) nonmono_Ts *) val need_incremental = Int.max (max_potential, max_genuine) >= 2 val effective_sat_solver = if sat_solver <> "smart" then if need_incremental andalso not (member (op =) (Kodkod_SAT.configured_sat_solvers true) sat_solver) then (print_m (K ("An incremental SAT solver is required: \"SAT4J\" will \ \be used instead of " ^ quote sat_solver ^ ".")); "SAT4J") else sat_solver else Kodkod_SAT.smart_sat_solver_name need_incremental val _ = if sat_solver = "smart" then print_v (fn () => "Using SAT solver " ^ quote effective_sat_solver ^ ". The following" ^ (if need_incremental then " incremental " else " ") ^ "solvers are configured: " ^ commas (map quote (Kodkod_SAT.configured_sat_solvers need_incremental)) ^ ".") else () val too_big_scopes = Unsynchronized.ref [] (* bool -> scope -> rich_problem option *) fun problem_for_scope liberal (scope as {card_assigns, bits, bisim_depth, datatypes, ofs, ...}) = let val _ = not (exists (fn other => scope_less_eq other scope) (!too_big_scopes)) orelse raise TOO_LARGE ("Nitpick.pick_them_nits_in_term.\ \problem_for_scope", "too large scope") (* val _ = priority "Offsets:" val _ = List.app (fn (T, j0) => priority (string_for_type ctxt T ^ " = " ^ string_of_int j0)) (Typtab.dest ofs) *) - val all_exact = forall (is_exact_type datatypes) Ts + val all_exact = forall (is_exact_type datatypes) all_Ts (* nut list -> rep NameTable.table -> nut list * rep NameTable.table *) val repify_consts = choose_reps_for_consts scope all_exact val main_j0 = offset_of_type ofs bool_T val (nat_card, nat_j0) = spec_of_type scope nat_T val (int_card, int_j0) = spec_of_type scope int_T val _ = (nat_j0 = main_j0 andalso int_j0 = main_j0) orelse raise BAD ("Nitpick.pick_them_nits_in_term.problem_for_scope", "bad offsets") val kk = kodkod_constrs peephole_optim nat_card int_card main_j0 val (free_names, rep_table) = choose_reps_for_free_vars scope free_names NameTable.empty val (sel_names, rep_table) = choose_reps_for_all_sels scope rep_table val (nonsel_names, rep_table) = repify_consts nonsel_names rep_table val min_highest_arity = NameTable.fold (curry Int.max o arity_of_rep o snd) rep_table 1 val min_univ_card = NameTable.fold (curry Int.max o min_univ_card_of_rep o snd) rep_table (univ_card nat_card int_card main_j0 [] KK.True) val _ = check_arity min_univ_card min_highest_arity val core_u = choose_reps_in_nut scope liberal rep_table false core_u val def_us = map (choose_reps_in_nut scope liberal rep_table true) def_us val nondef_us = map (choose_reps_in_nut scope liberal rep_table false) nondef_us (* val 
_ = List.app (priority o string_for_nut ctxt) (free_names @ sel_names @ nonsel_names @ core_u :: def_us @ nondef_us) *) val (free_rels, pool, rel_table) = rename_free_vars free_names initial_pool NameTable.empty val (sel_rels, pool, rel_table) = rename_free_vars sel_names pool rel_table val (other_rels, pool, rel_table) = rename_free_vars nonsel_names pool rel_table val core_u = rename_vars_in_nut pool rel_table core_u val def_us = map (rename_vars_in_nut pool rel_table) def_us val nondef_us = map (rename_vars_in_nut pool rel_table) nondef_us (* nut -> KK.formula *) val to_f = kodkod_formula_from_nut bits ofs liberal kk val core_f = to_f core_u val def_fs = map to_f def_us val nondef_fs = map to_f nondef_us val formula = fold (fold s_and) [def_fs, nondef_fs] core_f val comment = (if liberal then "liberal" else "conservative") ^ "\n" ^ PrintMode.setmp [] multiline_string_for_scope scope val kodkod_sat_solver = Kodkod_SAT.sat_solver_spec effective_sat_solver |> snd val bit_width = if bits = 0 then 16 else bits + 1 val delay = if liberal then Option.map (fn time => Time.- (time, Time.now ())) deadline |> liberal_delay_for_timeout else 0 val settings = [("solver", commas (map quote kodkod_sat_solver)), ("skolem_depth", "-1"), ("bit_width", string_of_int bit_width), ("symmetry_breaking", signed_string_of_int sym_break), ("sharing", signed_string_of_int sharing_depth), ("flatten", Bool.toString flatten_props), ("delay", signed_string_of_int delay)] val plain_rels = free_rels @ other_rels val plain_bounds = map (bound_for_plain_rel ctxt debug) plain_rels val plain_axioms = map (declarative_axiom_for_plain_rel kk) plain_rels val sel_bounds = map (bound_for_sel_rel ctxt debug datatypes) sel_rels val dtype_axioms = declarative_axioms_for_datatypes ext_ctxt bits ofs kk rel_table datatypes val declarative_axioms = plain_axioms @ dtype_axioms val univ_card = univ_card nat_card int_card main_j0 (plain_bounds @ sel_bounds) formula val built_in_bounds = bounds_for_built_in_rels_in_formula debug univ_card nat_card int_card main_j0 formula val bounds = built_in_bounds @ plain_bounds @ sel_bounds |> not debug ? merge_bounds val highest_arity = fold Integer.max (map (fst o fst) (maps fst bounds)) 0 val formula = fold_rev s_and declarative_axioms formula val _ = if bits = 0 then () else check_bits bits formula val _ = if formula = KK.False then () else check_arity univ_card highest_arity in SOME ({comment = comment, settings = settings, univ_card = univ_card, tuple_assigns = [], bounds = bounds, int_bounds = if bits = 0 then sequential_int_bounds univ_card else pow_of_two_int_bounds bits main_j0 univ_card, expr_assigns = [], formula = formula}, {free_names = free_names, sel_names = sel_names, nonsel_names = nonsel_names, rel_table = rel_table, liberal = liberal, scope = scope, core = core_f, defs = nondef_fs @ def_fs @ declarative_axioms}) end handle TOO_LARGE (loc, msg) => if loc = "Nitpick_Kodkod.check_arity" andalso not (Typtab.is_empty ofs) then problem_for_scope liberal {ext_ctxt = ext_ctxt, card_assigns = card_assigns, bits = bits, bisim_depth = bisim_depth, datatypes = datatypes, ofs = Typtab.empty} else if loc = "Nitpick.pick_them_nits_in_term.\ \problem_for_scope" then NONE else (Unsynchronized.change too_big_scopes (cons scope); print_v (fn () => ("Limit reached: " ^ msg ^ ". Skipping " ^ (if liberal then "potential" else "genuine") ^ " component of scope.")); NONE) | TOO_SMALL (loc, msg) => (print_v (fn () => ("Limit reached: " ^ msg ^ ". 
Skipping " ^ (if liberal then "potential" else "genuine") ^ " component of scope.")); NONE) (* int -> (''a * int list list) list -> ''a -> KK.tuple_set *) fun tuple_set_for_rel univ_card = KK.TupleSet o map (kk_tuple debug univ_card) o the oo AList.lookup (op =) - val word_model = if falsify then "counterexample" else "model" + val das_wort_model = + (if falsify then "counterexample" else "model") + |> not standard ? prefix "nonstandard " val scopes = Unsynchronized.ref [] val generated_scopes = Unsynchronized.ref [] val generated_problems = Unsynchronized.ref [] val checked_problems = Unsynchronized.ref (SOME []) val met_potential = Unsynchronized.ref 0 (* rich_problem list -> int list -> unit *) fun update_checked_problems problems = List.app (Unsynchronized.change checked_problems o Option.map o cons o nth problems) (* bool -> KK.raw_bound list -> problem_extension -> bool option *) fun print_and_check_model genuine bounds ({free_names, sel_names, nonsel_names, rel_table, scope, ...} : problem_extension) = let val (reconstructed_model, codatatypes_ok) = reconstruct_hol_model {show_skolems = show_skolems, show_datatypes = show_datatypes, show_consts = show_consts} scope formats frees free_names sel_names nonsel_names rel_table bounds - val would_be_genuine = would_be_genuine andalso codatatypes_ok + val genuine_means_genuine = genuine_means_genuine andalso codatatypes_ok in pprint (Pretty.chunks [Pretty.blk (0, (pstrs ("Nitpick found a" ^ (if not genuine then " potential " - else if would_be_genuine then " " - else " likely genuine ") ^ word_model) @ + else if genuine_means_genuine then " " + else " likely genuine ") ^ das_wort_model) @ (case pretties_for_scope scope verbose of [] => [] | pretties => pstrs " for " @ pretties) @ [Pretty.str ":\n"])), Pretty.indent indent_size reconstructed_model]); if genuine then - (if check_genuine then + (if check_genuine andalso standard then (case prove_hol_model scope tac_timeout free_names sel_names rel_table bounds assms_t of SOME true => print ("Confirmation by \"auto\": The above " ^ - word_model ^ " is really genuine.") + das_wort_model ^ " is really genuine.") | SOME false => - if would_be_genuine then - error ("A supposedly genuine " ^ word_model ^ " was shown to\ - \be spurious by \"auto\".\nThis should never happen.\n\ - \Please send a bug report to blanchet\ + if genuine_means_genuine then + error ("A supposedly genuine " ^ das_wort_model ^ " was \ + \shown to be spurious by \"auto\".\nThis should never \ + \happen.\nPlease send a bug report to blanchet\ \te@in.tum.de.") else - print ("Refutation by \"auto\": The above " ^ word_model ^ + print ("Refutation by \"auto\": The above " ^ das_wort_model ^ " is spurious.") | NONE => print "No confirmation by \"auto\".") else (); + if not standard andalso not (null induct_dataTs) then + print "The existence of a nonstandard model suggests that the \ + \induction hypothesis is not general enough or perhaps even \ + \wrong. See the \"Inductive Properties\" section of the \ + \Nitpick manual for details (\"isabelle doc nitpick\")." + else + (); if has_syntactic_sorts orig_t then print "Hint: Maybe you forgot a type constraint?" else (); - if not would_be_genuine then + if not genuine_means_genuine then if no_poly_user_axioms then let val options = [] |> not got_all_mono_user_axioms ? cons ("user_axioms", "\"true\"") |> not (none_true wfs) ? cons ("wf", "\"smart\" or \"false\"") |> not codatatypes_ok ? 
cons ("bisim_depth", "a nonnegative value") val ss = map (fn (name, value) => quote name ^ " set to " ^ value) options in print ("Try again with " ^ space_implode " " (serial_commas "and" ss) ^ - " to confirm that the " ^ word_model ^ " is genuine.") + " to confirm that the " ^ das_wort_model ^ + " is genuine.") end else print ("Nitpick is unable to guarantee the authenticity of \ - \the " ^ word_model ^ " in the presence of polymorphic \ - \axioms.") + \the " ^ das_wort_model ^ " in the presence of \ + \polymorphic axioms.") else (); NONE) else if not genuine then (Unsynchronized.inc met_potential; if check_potential then let val status = prove_hol_model scope tac_timeout free_names sel_names rel_table bounds assms_t in (case status of SOME true => print ("Confirmation by \"auto\": The above " ^ - word_model ^ " is genuine.") + das_wort_model ^ " is genuine.") | SOME false => print ("Refutation by \"auto\": The above " ^ - word_model ^ " is spurious.") + das_wort_model ^ " is spurious.") | NONE => print "No confirmation by \"auto\"."); status end else NONE) else NONE end (* int -> int -> int -> bool -> rich_problem list -> int * int * int *) fun solve_any_problem max_potential max_genuine donno first_time problems = let val max_potential = Int.max (0, max_potential) val max_genuine = Int.max (0, max_genuine) (* bool -> int * KK.raw_bound list -> bool option *) fun print_and_check genuine (j, bounds) = print_and_check_model genuine bounds (snd (nth problems j)) val max_solutions = max_potential + max_genuine |> not need_incremental ? curry Int.min 1 in if max_solutions <= 0 then (0, 0, donno) else case KK.solve_any_problem overlord deadline max_threads max_solutions (map fst problems) of KK.NotInstalled => (print_m install_kodkodi_message; (max_potential, max_genuine, donno + 1)) | KK.Normal ([], unsat_js) => (update_checked_problems problems unsat_js; (max_potential, max_genuine, donno)) | KK.Normal (sat_ps, unsat_js) => let val (lib_ps, con_ps) = List.partition (#liberal o snd o nth problems o fst) sat_ps in update_checked_problems problems (unsat_js @ map fst lib_ps); if null con_ps then let val num_genuine = take max_potential lib_ps |> map (print_and_check false) |> filter (curry (op =) (SOME true)) |> length val max_genuine = max_genuine - num_genuine val max_potential = max_potential - (length lib_ps - num_genuine) in if max_genuine <= 0 then (0, 0, donno) else let (* "co_js" is the list of conservative problems whose liberal pendants couldn't be satisfied and hence that most probably can't be satisfied themselves. *) val co_js = map (fn j => j - 1) unsat_js |> filter (fn j => j >= 0 andalso scopes_equivalent (#scope (snd (nth problems j))) (#scope (snd (nth problems (j + 1))))) val bye_js = sort_distinct int_ord (map fst sat_ps @ unsat_js @ co_js) val problems = problems |> filter_out_indices bye_js |> max_potential <= 0 ? 
filter_out (#liberal o snd) in solve_any_problem max_potential max_genuine donno false problems end end else let val _ = take max_genuine con_ps |> List.app (ignore o print_and_check true) val max_genuine = max_genuine - length con_ps in if max_genuine <= 0 orelse not first_time then (0, max_genuine, donno) else let val bye_js = sort_distinct int_ord (map fst sat_ps @ unsat_js) val problems = problems |> filter_out_indices bye_js |> filter_out (#liberal o snd) in solve_any_problem 0 max_genuine donno false problems end end end | KK.TimedOut unsat_js => (update_checked_problems problems unsat_js; raise TimeLimit.TimeOut) | KK.Interrupted NONE => (checked_problems := NONE; do_interrupted ()) | KK.Interrupted (SOME unsat_js) => (update_checked_problems problems unsat_js; do_interrupted ()) | KK.Error (s, unsat_js) => (update_checked_problems problems unsat_js; print_v (K ("Kodkod error: " ^ s ^ ".")); (max_potential, max_genuine, donno + 1)) end (* int -> int -> scope list -> int * int * int -> int * int * int *) fun run_batch j n scopes (max_potential, max_genuine, donno) = let val _ = if null scopes then print_m (K "The scope specification is inconsistent.") else if verbose then pprint (Pretty.chunks [Pretty.blk (0, pstrs ((if n > 1 then "Batch " ^ string_of_int (j + 1) ^ " of " ^ signed_string_of_int n ^ ": " else "") ^ "Trying " ^ string_of_int (length scopes) ^ " scope" ^ plural_s_for_list scopes ^ ":")), Pretty.indent indent_size (Pretty.chunks (map2 (fn j => fn scope => Pretty.block ( (case pretties_for_scope scope true of [] => [Pretty.str "Empty"] | pretties => pretties) @ [Pretty.str (if j = 1 then "." else ";")])) (length scopes downto 1) scopes))]) else () (* scope * bool -> rich_problem list * bool -> rich_problem list * bool *) fun add_problem_for_scope (scope as {datatypes, ...}, liberal) (problems, donno) = (check_deadline (); case problem_for_scope liberal scope of SOME problem => (problems |> (null problems orelse not (KK.problems_equivalent (fst problem) (fst (hd problems)))) ? cons problem, donno) | NONE => (problems, donno + 1)) val (problems, donno) = fold add_problem_for_scope (map_product pair scopes ((if max_genuine > 0 then [false] else []) @ (if max_potential > 0 then [true] else []))) ([], donno) val _ = Unsynchronized.change generated_problems (append problems) val _ = Unsynchronized.change generated_scopes (append scopes) in solve_any_problem max_potential max_genuine donno true (rev problems) end (* rich_problem list -> scope -> int *) fun scope_count (problems : rich_problem list) scope = length (filter (scopes_equivalent scope o #scope o snd) problems) (* string -> string *) fun excipit did_so_and_so = let (* rich_problem list -> rich_problem list *) val do_filter = if !met_potential = max_potential then filter_out (#liberal o snd) else I val total = length (!scopes) val unsat = fold (fn scope => case scope_count (do_filter (!generated_problems)) scope of 0 => I | n => scope_count (do_filter (these (!checked_problems))) scope = n ? Integer.add 1) (!generated_scopes) 0 in "Nitpick " ^ did_so_and_so ^ (if is_some (!checked_problems) andalso total > 0 then " after checking " ^ string_of_int (Int.min (total - 1, unsat)) ^ " of " ^ string_of_int total ^ " scope" ^ plural_s total else "") ^ "." 
end (* int -> int -> scope list -> int * int * int -> KK.outcome *) fun run_batches _ _ [] (max_potential, max_genuine, donno) = if donno > 0 andalso max_genuine > 0 then (print_m (fn () => excipit "ran out of some resource"); "unknown") else if max_genuine = original_max_genuine then if max_potential = original_max_potential then - (print_m (K ("Nitpick found no " ^ word_model ^ ".")); "none") + (print_m (fn () => + "Nitpick found no " ^ das_wort_model ^ "." ^ + (if not standard andalso not (null induct_dataTs) then + " This suggests that the induction hypothesis might be \ + \general enough to prove this subgoal." + else + "")); "none") else - (print_m (K ("Nitpick could not find " ^ - (if max_genuine = 1 then "a better " ^ word_model ^ "." - else "any better " ^ word_model ^ "s."))); - "potential") + (print_m (fn () => + "Nitpick could not find a" ^ + (if max_genuine = 1 then " better " ^ das_wort_model ^ "." + else "ny better " ^ das_wort_model ^ "s.")); "potential") else - if would_be_genuine then "genuine" else "likely_genuine" + if genuine_means_genuine then "genuine" else "likely_genuine" | run_batches j n (batch :: batches) z = let val (z as (_, max_genuine, _)) = run_batch j n batch z in run_batches (j + 1) n (if max_genuine > 0 then batches else []) z end val (skipped, the_scopes) = all_scopes ext_ctxt sym_break cards_assigns maxes_assigns iters_assigns - bitss bisim_depths mono_Ts nonmono_Ts shallow_dataTs + bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs val _ = if skipped > 0 then print_m (fn () => "Too many scopes. Skipping " ^ string_of_int skipped ^ " scope" ^ plural_s skipped ^ ". (Consider using \"mono\" or \ \\"merge_type_vars\" to prevent this.)") else () val _ = scopes := the_scopes val batches = batch_list batch_size (!scopes) val outcome_code = (run_batches 0 (length batches) batches (max_potential, max_genuine, 0) handle Exn.Interrupt => do_interrupted ()) handle TimeLimit.TimeOut => (print_m (fn () => excipit "ran out of time"); if !met_potential > 0 then "potential" else "unknown") | Exn.Interrupt => if auto orelse debug then raise Interrupt else error (excipit "was interrupted") val _ = print_v (fn () => "Total time: " ^ signed_string_of_int (Time.toMilliseconds (Timer.checkRealTimer timer)) ^ " ms.") in (outcome_code, !state_ref) end handle Exn.Interrupt => if auto orelse #debug params then raise Interrupt else if passed_deadline deadline then (priority "Nitpick ran out of time."; ("unknown", state)) else error "Nitpick was interrupted." 
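(* Illustrative sketch (not part of the original changeset): how the
   "deadline" machinery above is meant to be driven. The caller converts the
   user's relative "timeout" into an absolute deadline -- exactly as
   "pick_nits_in_term" does below via "Option.map (curry Time.+ (Time.now ()))"
   -- and long-running loops poll "passed_deadline" (defined earlier in this
   file) so that they can abort with "TimeLimit.TimeOut". The function name
   "poll_deadline_example" and its dummy workload are hypothetical and exist
   only for illustration. *)
(* Time.time option -> int *)
fun poll_deadline_example (timeout : Time.time option) =
  let
    (* absolute point in time after which work must stop, if any *)
    val deadline = Option.map (curry Time.+ (Time.now ())) timeout
    (* toy workload that checks the deadline on every iteration *)
    fun work 0 acc = acc
      | work n acc =
        if passed_deadline deadline then raise TimeLimit.TimeOut
        else work (n - 1) (acc + n)
  in work 1000 0 end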
-(* Proof.state -> params -> bool -> term -> string * Proof.state *) -fun pick_nits_in_term state (params as {debug, timeout, expect, ...}) auto - orig_assm_ts orig_t = +(* Proof.state -> params -> bool -> int -> int -> int -> term + -> string * Proof.state *) +fun pick_nits_in_term state (params as {debug, timeout, expect, ...}) auto i n + step orig_assm_ts orig_t = if getenv "KODKODI" = "" then (if auto then () else warning (Pretty.string_of (plazy install_kodkodi_message)); ("unknown", state)) else let val deadline = Option.map (curry Time.+ (Time.now ())) timeout val outcome as (outcome_code, _) = time_limit (if debug then NONE else timeout) - (pick_them_nits_in_term deadline state params auto orig_assm_ts) - orig_t + (pick_them_nits_in_term deadline state params auto i n step + orig_assm_ts) orig_t in if expect = "" orelse outcome_code = expect then outcome else error ("Unexpected outcome: " ^ quote outcome_code ^ ".") end -(* Proof.state -> params -> thm -> int -> string * Proof.state *) -fun pick_nits_in_subgoal state params auto i = +(* Proof.state -> params -> bool -> int -> int -> string * Proof.state *) +fun pick_nits_in_subgoal state params auto i step = let val ctxt = Proof.context_of state val t = state |> Proof.raw_goal |> #goal |> prop_of in - if Logic.count_prems t = 0 then - (priority "No subgoal!"; ("none", state)) - else + case Logic.count_prems t of + 0 => (priority "No subgoal!"; ("none", state)) + | n => let val assms = map term_of (Assumption.all_assms_of ctxt) val (t, frees) = Logic.goal_params t i - in pick_nits_in_term state params auto assms (subst_bounds (frees, t)) end + in + pick_nits_in_term state params auto i n step assms + (subst_bounds (frees, t)) + end end end; diff --git a/src/HOL/Tools/Nitpick/nitpick_hol.ML b/src/HOL/Tools/Nitpick/nitpick_hol.ML --- a/src/HOL/Tools/Nitpick/nitpick_hol.ML +++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML @@ -1,3506 +1,3527 @@ (* Title: HOL/Tools/Nitpick/nitpick_hol.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Auxiliary HOL-related functions used by Nitpick. 
*) signature NITPICK_HOL = sig type styp = Nitpick_Util.styp type const_table = term list Symtab.table type special_fun = (styp * int list * term list) * styp type unrolled = styp * styp type wf_cache = (styp * (bool * bool)) list type extended_context = { thy: theory, ctxt: Proof.context, max_bisim_depth: int, boxes: (typ option * bool option) list, + stds: (typ option * bool) list, wfs: (styp option * bool option) list, user_axioms: bool option, debug: bool, binary_ints: bool option, destroy_constrs: bool, specialize: bool, skolemize: bool, star_linear_preds: bool, uncurry: bool, fast_descrs: bool, tac_timeout: Time.time option, evals: term list, case_names: (string * int) list, def_table: const_table, nondef_table: const_table, user_nondefs: term list, simp_table: const_table Unsynchronized.ref, psimp_table: const_table, intro_table: const_table, ground_thm_table: term list Inttab.table, ersatz_table: (string * string) list, skolems: (string * string list) list Unsynchronized.ref, special_funs: special_fun list Unsynchronized.ref, unrolled_preds: unrolled list Unsynchronized.ref, wf_cache: wf_cache Unsynchronized.ref, constr_cache: (typ * styp list) list Unsynchronized.ref} val name_sep : string val numeral_prefix : string val skolem_prefix : string val eval_prefix : string val original_name : string -> string val unbit_and_unbox_type : typ -> typ val string_for_type : Proof.context -> typ -> string val prefix_name : string -> string -> string val shortest_name : string -> string val short_name : string -> string val shorten_names_in_term : term -> term val type_match : theory -> typ * typ -> bool val const_match : theory -> styp * styp -> bool val term_match : theory -> term * term -> bool val is_TFree : typ -> bool val is_higher_order_type : typ -> bool val is_fun_type : typ -> bool val is_set_type : typ -> bool val is_pair_type : typ -> bool val is_lfp_iterator_type : typ -> bool val is_gfp_iterator_type : typ -> bool val is_fp_iterator_type : typ -> bool val is_boolean_type : typ -> bool val is_integer_type : typ -> bool val is_bit_type : typ -> bool val is_word_type : typ -> bool val is_record_type : typ -> bool val is_number_type : theory -> typ -> bool val const_for_iterator_type : typ -> styp val nth_range_type : int -> typ -> typ val num_factors_in_type : typ -> int val num_binder_types : typ -> int val curried_binder_types : typ -> typ list val mk_flat_tuple : typ -> term list -> term val dest_n_tuple : int -> term -> term list val instantiate_type : theory -> typ -> typ -> typ -> typ val is_real_datatype : theory -> string -> bool val is_quot_type : theory -> typ -> bool val is_codatatype : theory -> typ -> bool val is_pure_typedef : theory -> typ -> bool val is_univ_typedef : theory -> typ -> bool val is_datatype : theory -> typ -> bool val is_record_constr : styp -> bool val is_record_get : theory -> styp -> bool val is_record_update : theory -> styp -> bool val is_abs_fun : theory -> styp -> bool val is_rep_fun : theory -> styp -> bool val is_quot_abs_fun : Proof.context -> styp -> bool val is_quot_rep_fun : Proof.context -> styp -> bool val is_constr : theory -> styp -> bool val is_stale_constr : theory -> styp -> bool val is_sel : string -> bool val is_sel_like_and_no_discr : string -> bool val discr_for_constr : styp -> styp val num_sels_for_constr_type : typ -> int val nth_sel_name_for_constr_name : string -> int -> string val nth_sel_for_constr : styp -> int -> styp val boxed_nth_sel_for_constr : extended_context -> styp -> int -> styp val sel_no_from_name : string -> 
int val eta_expand : typ list -> term -> int -> term val extensionalize : term -> term val distinctness_formula : typ -> term list -> term val register_frac_type : string -> (string * string) list -> theory -> theory val unregister_frac_type : string -> theory -> theory val register_codatatype : typ -> string -> styp list -> theory -> theory val unregister_codatatype : typ -> theory -> theory val datatype_constrs : extended_context -> typ -> styp list val boxed_datatype_constrs : extended_context -> typ -> styp list val num_datatype_constrs : extended_context -> typ -> int val constr_name_for_sel_like : string -> string val boxed_constr_for_sel : extended_context -> styp -> styp val card_of_type : (typ * int) list -> typ -> int val bounded_card_of_type : int -> int -> (typ * int) list -> typ -> int val bounded_exact_card_of_type : extended_context -> int -> int -> (typ * int) list -> typ -> int val is_finite_type : extended_context -> typ -> bool val all_axioms_of : theory -> term list * term list * term list val arity_of_built_in_const : bool -> styp -> int option val is_built_in_const : bool -> styp -> bool val case_const_names : theory -> (string * int) list val const_def_table : Proof.context -> term list -> const_table val const_nondef_table : term list -> const_table val const_simp_table : Proof.context -> const_table val const_psimp_table : Proof.context -> const_table val inductive_intro_table : Proof.context -> const_table -> const_table val ground_theorem_table : theory -> term list Inttab.table val ersatz_table : theory -> (string * string) list val def_of_const : theory -> const_table -> styp -> term option val is_inductive_pred : extended_context -> styp -> bool val is_constr_pattern_lhs : theory -> term -> bool val is_constr_pattern_formula : theory -> term -> bool val merge_type_vars_in_terms : term list -> term list val ground_types_in_type : extended_context -> typ -> typ list val ground_types_in_terms : extended_context -> term list -> typ list val format_type : int list -> int list -> typ -> typ val format_term_type : theory -> const_table -> (term option * int list) list -> term -> typ val user_friendly_const : extended_context -> string * string -> (term option * int list) list -> styp -> term * typ val assign_operator_for_const : styp -> string val preprocess_term : extended_context -> term -> ((term list * term list) * (bool * bool)) * term end; structure Nitpick_HOL : NITPICK_HOL = struct open Nitpick_Util type const_table = term list Symtab.table type special_fun = (styp * int list * term list) * styp type unrolled = styp * styp type wf_cache = (styp * (bool * bool)) list type extended_context = { thy: theory, ctxt: Proof.context, max_bisim_depth: int, boxes: (typ option * bool option) list, + stds: (typ option * bool) list, wfs: (styp option * bool option) list, user_axioms: bool option, debug: bool, binary_ints: bool option, destroy_constrs: bool, specialize: bool, skolemize: bool, star_linear_preds: bool, uncurry: bool, fast_descrs: bool, tac_timeout: Time.time option, evals: term list, case_names: (string * int) list, def_table: const_table, nondef_table: const_table, user_nondefs: term list, simp_table: const_table Unsynchronized.ref, psimp_table: const_table, intro_table: const_table, ground_thm_table: term list Inttab.table, ersatz_table: (string * string) list, skolems: (string * string list) list Unsynchronized.ref, special_funs: special_fun list Unsynchronized.ref, unrolled_preds: unrolled list Unsynchronized.ref, wf_cache: wf_cache Unsynchronized.ref, 
constr_cache: (typ * styp list) list Unsynchronized.ref} structure Data = Theory_Data( type T = {frac_types: (string * (string * string) list) list, codatatypes: (string * (string * styp list)) list} val empty = {frac_types = [], codatatypes = []} val extend = I fun merge ({frac_types = fs1, codatatypes = cs1}, {frac_types = fs2, codatatypes = cs2}) : T = {frac_types = AList.merge (op =) (K true) (fs1, fs2), codatatypes = AList.merge (op =) (K true) (cs1, cs2)}) (* term * term -> term *) fun s_conj (t1, @{const True}) = t1 | s_conj (@{const True}, t2) = t2 | s_conj (t1, t2) = if t1 = @{const False} orelse t2 = @{const False} then @{const False} else HOLogic.mk_conj (t1, t2) fun s_disj (t1, @{const False}) = t1 | s_disj (@{const False}, t2) = t2 | s_disj (t1, t2) = if t1 = @{const True} orelse t2 = @{const True} then @{const True} else HOLogic.mk_disj (t1, t2) (* term -> term -> term *) fun mk_exists v t = HOLogic.exists_const (fastype_of v) $ lambda v (incr_boundvars 1 t) (* term -> term -> term list *) fun strip_connective conn_t (t as (t0 $ t1 $ t2)) = if t0 = conn_t then strip_connective t0 t2 @ strip_connective t0 t1 else [t] | strip_connective _ t = [t] (* term -> term list * term *) fun strip_any_connective (t as (t0 $ t1 $ t2)) = if t0 = @{const "op &"} orelse t0 = @{const "op |"} then (strip_connective t0 t, t0) else ([t], @{const Not}) | strip_any_connective t = ([t], @{const Not}) (* term -> term list *) val conjuncts = strip_connective @{const "op &"} val disjuncts = strip_connective @{const "op |"} val name_sep = "$" val numeral_prefix = nitpick_prefix ^ "num" ^ name_sep val sel_prefix = nitpick_prefix ^ "sel" val discr_prefix = nitpick_prefix ^ "is" ^ name_sep val set_prefix = nitpick_prefix ^ "set" ^ name_sep val lfp_iterator_prefix = nitpick_prefix ^ "lfpit" ^ name_sep val gfp_iterator_prefix = nitpick_prefix ^ "gfpit" ^ name_sep val nwf_prefix = nitpick_prefix ^ "nwf" ^ name_sep val unrolled_prefix = nitpick_prefix ^ "unroll" ^ name_sep val base_prefix = nitpick_prefix ^ "base" ^ name_sep val step_prefix = nitpick_prefix ^ "step" ^ name_sep val ubfp_prefix = nitpick_prefix ^ "ubfp" ^ name_sep val lbfp_prefix = nitpick_prefix ^ "lbfp" ^ name_sep val skolem_prefix = nitpick_prefix ^ "sk" val special_prefix = nitpick_prefix ^ "sp" val uncurry_prefix = nitpick_prefix ^ "unc" val eval_prefix = nitpick_prefix ^ "eval" val bound_var_prefix = "b" val cong_var_prefix = "c" val iter_var_prefix = "i" val val_var_prefix = nitpick_prefix ^ "v" val arg_var_prefix = "x" (* int -> string *) fun sel_prefix_for j = sel_prefix ^ string_of_int j ^ name_sep fun special_prefix_for j = special_prefix ^ string_of_int j ^ name_sep (* int -> int -> string *) fun skolem_prefix_for k j = skolem_prefix ^ string_of_int k ^ "@" ^ string_of_int j ^ name_sep fun uncurry_prefix_for k j = uncurry_prefix ^ string_of_int k ^ "@" ^ string_of_int j ^ name_sep (* string -> string * string *) val strip_first_name_sep = Substring.full #> Substring.position name_sep ##> Substring.triml 1 #> pairself Substring.string (* string -> string *) fun original_name s = if String.isPrefix nitpick_prefix s then case strip_first_name_sep s of (s1, "") => s1 | (_, s2) => original_name s2 else s val after_name_sep = snd o strip_first_name_sep (* When you add constants to these lists, make sure to handle them in "Nitpick_Nut.nut_from_term", and perhaps in "Nitpick_Mono.consider_term" as well. 
*) val built_in_consts = [(@{const_name all}, 1), (@{const_name "=="}, 2), (@{const_name "==>"}, 2), (@{const_name Pure.conjunction}, 2), (@{const_name Trueprop}, 1), (@{const_name Not}, 1), (@{const_name False}, 0), (@{const_name True}, 0), (@{const_name All}, 1), (@{const_name Ex}, 1), (@{const_name "op ="}, 2), (@{const_name "op &"}, 2), (@{const_name "op |"}, 2), (@{const_name "op -->"}, 2), (@{const_name If}, 3), (@{const_name Let}, 2), (@{const_name Unity}, 0), (@{const_name Pair}, 2), (@{const_name fst}, 1), (@{const_name snd}, 1), (@{const_name Id}, 0), (@{const_name insert}, 2), (@{const_name converse}, 1), (@{const_name trancl}, 1), (@{const_name rel_comp}, 2), (@{const_name image}, 2), (@{const_name Suc}, 0), (@{const_name finite}, 1), (@{const_name nat}, 0), (@{const_name zero_nat_inst.zero_nat}, 0), (@{const_name one_nat_inst.one_nat}, 0), (@{const_name plus_nat_inst.plus_nat}, 0), (@{const_name minus_nat_inst.minus_nat}, 0), (@{const_name times_nat_inst.times_nat}, 0), (@{const_name div_nat_inst.div_nat}, 0), (@{const_name ord_nat_inst.less_nat}, 2), (@{const_name ord_nat_inst.less_eq_nat}, 2), (@{const_name nat_gcd}, 0), (@{const_name nat_lcm}, 0), (@{const_name zero_int_inst.zero_int}, 0), (@{const_name one_int_inst.one_int}, 0), (@{const_name plus_int_inst.plus_int}, 0), (@{const_name minus_int_inst.minus_int}, 0), (@{const_name times_int_inst.times_int}, 0), (@{const_name div_int_inst.div_int}, 0), (@{const_name uminus_int_inst.uminus_int}, 0), (@{const_name ord_int_inst.less_int}, 2), (@{const_name ord_int_inst.less_eq_int}, 2), (@{const_name unknown}, 0), (@{const_name is_unknown}, 1), (@{const_name Tha}, 1), (@{const_name Frac}, 0), (@{const_name norm_frac}, 0)] val built_in_descr_consts = [(@{const_name The}, 1), (@{const_name Eps}, 1)] val built_in_typed_consts = [((@{const_name of_nat}, nat_T --> int_T), 0), ((@{const_name of_nat}, @{typ "unsigned_bit word => signed_bit word"}), 0)] val built_in_set_consts = [(@{const_name lower_semilattice_fun_inst.inf_fun}, 2), (@{const_name upper_semilattice_fun_inst.sup_fun}, 2), (@{const_name minus_fun_inst.minus_fun}, 2), (@{const_name ord_fun_inst.less_eq_fun}, 2)] (* typ -> typ *) fun unbit_type @{typ "unsigned_bit word"} = nat_T | unbit_type @{typ "signed_bit word"} = int_T | unbit_type @{typ bisim_iterator} = nat_T | unbit_type (Type (s, Ts as _ :: _)) = Type (s, map unbit_type Ts) | unbit_type T = T fun unbit_and_unbox_type (Type (@{type_name fun_box}, Ts)) = unbit_and_unbox_type (Type ("fun", Ts)) | unbit_and_unbox_type (Type (@{type_name pair_box}, Ts)) = Type ("*", map unbit_and_unbox_type Ts) | unbit_and_unbox_type @{typ "unsigned_bit word"} = nat_T | unbit_and_unbox_type @{typ "signed_bit word"} = int_T | unbit_and_unbox_type @{typ bisim_iterator} = nat_T | unbit_and_unbox_type (Type (s, Ts as _ :: _)) = Type (s, map unbit_and_unbox_type Ts) | unbit_and_unbox_type T = T (* Proof.context -> typ -> string *) fun string_for_type ctxt = Syntax.string_of_typ ctxt o unbit_and_unbox_type (* string -> string -> string *) val prefix_name = Long_Name.qualify o Long_Name.base_name (* string -> string *) fun shortest_name s = List.last (space_explode "." s) handle List.Empty => "" (* string -> term -> term *) val prefix_abs_vars = Term.map_abs_vars o prefix_name (* term -> term *) val shorten_abs_vars = Term.map_abs_vars shortest_name (* string -> string *) fun short_name s = case space_explode name_sep s of [_] => s |> String.isPrefix nitpick_prefix s ? 
unprefix nitpick_prefix | ss => map shortest_name ss |> space_implode "_" (* typ -> typ *) fun shorten_names_in_type (Type (s, Ts)) = Type (short_name s, map shorten_names_in_type Ts) | shorten_names_in_type T = T (* term -> term *) val shorten_names_in_term = map_aterms (fn Const (s, T) => Const (short_name s, T) | t => t) #> map_types shorten_names_in_type (* theory -> typ * typ -> bool *) fun type_match thy (T1, T2) = (Sign.typ_match thy (T2, T1) Vartab.empty; true) handle Type.TYPE_MATCH => false (* theory -> styp * styp -> bool *) fun const_match thy ((s1, T1), (s2, T2)) = s1 = s2 andalso type_match thy (T1, T2) (* theory -> term * term -> bool *) fun term_match thy (Const x1, Const x2) = const_match thy (x1, x2) | term_match thy (Free (s1, T1), Free (s2, T2)) = const_match thy ((shortest_name s1, T1), (shortest_name s2, T2)) | term_match thy (t1, t2) = t1 aconv t2 (* typ -> bool *) fun is_TFree (TFree _) = true | is_TFree _ = false fun is_higher_order_type (Type ("fun", _)) = true | is_higher_order_type (Type (_, Ts)) = exists is_higher_order_type Ts | is_higher_order_type _ = false fun is_fun_type (Type ("fun", _)) = true | is_fun_type _ = false fun is_set_type (Type ("fun", [_, @{typ bool}])) = true | is_set_type _ = false fun is_pair_type (Type ("*", _)) = true | is_pair_type _ = false fun is_lfp_iterator_type (Type (s, _)) = String.isPrefix lfp_iterator_prefix s | is_lfp_iterator_type _ = false fun is_gfp_iterator_type (Type (s, _)) = String.isPrefix gfp_iterator_prefix s | is_gfp_iterator_type _ = false val is_fp_iterator_type = is_lfp_iterator_type orf is_gfp_iterator_type fun is_boolean_type T = (T = prop_T orelse T = bool_T) val is_integer_type = member (op =) [nat_T, int_T, @{typ bisim_iterator}] orf is_fp_iterator_type fun is_bit_type T = (T = @{typ unsigned_bit} orelse T = @{typ signed_bit}) fun is_word_type (Type (@{type_name word}, _)) = true | is_word_type _ = false val is_record_type = not o null o Record.dest_recTs (* theory -> typ -> bool *) fun is_frac_type thy (Type (s, [])) = not (null (these (AList.lookup (op =) (#frac_types (Data.get thy)) s))) | is_frac_type _ _ = false fun is_number_type thy = is_integer_type orf is_frac_type thy (* bool -> styp -> typ *) fun iterator_type_for_const gfp (s, T) = Type ((if gfp then gfp_iterator_prefix else lfp_iterator_prefix) ^ s, binder_types T) (* typ -> styp *) fun const_for_iterator_type (Type (s, Ts)) = (after_name_sep s, Ts ---> bool_T) | const_for_iterator_type T = raise TYPE ("Nitpick_HOL.const_for_iterator_type", [T], []) (* int -> typ -> typ * typ *) fun strip_n_binders 0 T = ([], T) | strip_n_binders n (Type ("fun", [T1, T2])) = strip_n_binders (n - 1) T2 |>> cons T1 | strip_n_binders n (Type (@{type_name fun_box}, Ts)) = strip_n_binders n (Type ("fun", Ts)) | strip_n_binders _ T = raise TYPE ("Nitpick_HOL.strip_n_binders", [T], []) (* typ -> typ *) val nth_range_type = snd oo strip_n_binders (* typ -> int *) fun num_factors_in_type (Type ("*", [T1, T2])) = fold (Integer.add o num_factors_in_type) [T1, T2] 0 | num_factors_in_type _ = 1 fun num_binder_types (Type ("fun", [_, T2])) = 1 + num_binder_types T2 | num_binder_types _ = 0 (* typ -> typ list *) val curried_binder_types = maps HOLogic.flatten_tupleT o binder_types fun maybe_curried_binder_types T = (if is_pair_type (body_type T) then binder_types else curried_binder_types) T (* typ -> term list -> term *) fun mk_flat_tuple _ [t] = t | mk_flat_tuple (Type ("*", [T1, T2])) (t :: ts) = HOLogic.pair_const T1 T2 $ t $ (mk_flat_tuple T2 ts) | mk_flat_tuple T ts = 
raise TYPE ("Nitpick_HOL.mk_flat_tuple", [T], ts) (* int -> term -> term list *) fun dest_n_tuple 1 t = [t] | dest_n_tuple n t = HOLogic.dest_prod t ||> dest_n_tuple (n - 1) |> op :: (* int -> typ -> typ list *) fun dest_n_tuple_type 1 T = [T] | dest_n_tuple_type n (Type (_, [T1, T2])) = T1 :: dest_n_tuple_type (n - 1) T2 | dest_n_tuple_type _ T = raise TYPE ("Nitpick_HOL.dest_n_tuple_type", [T], []) (* FIXME: Use antiquotation for "code_numeral" below or detect "rep_datatype", e.g., by adding a field to "Datatype_Aux.info". *) (* string -> bool *) val is_basic_datatype = member (op =) [@{type_name "*"}, @{type_name bool}, @{type_name unit}, @{type_name nat}, @{type_name int}, "Code_Numeral.code_numeral"] (* theory -> typ -> typ -> typ -> typ *) fun instantiate_type thy T1 T1' T2 = Same.commit (Envir.subst_type_same (Sign.typ_match thy (Logic.varifyT T1, T1') Vartab.empty)) (Logic.varifyT T2) handle Type.TYPE_MATCH => raise TYPE ("Nitpick_HOL.instantiate_type", [T1, T1'], []) (* theory -> typ -> typ -> styp *) fun repair_constr_type thy body_T' T = instantiate_type thy (body_type T) body_T' T (* string -> (string * string) list -> theory -> theory *) fun register_frac_type frac_s ersaetze thy = let val {frac_types, codatatypes} = Data.get thy val frac_types = AList.update (op =) (frac_s, ersaetze) frac_types in Data.put {frac_types = frac_types, codatatypes = codatatypes} thy end (* string -> theory -> theory *) fun unregister_frac_type frac_s = register_frac_type frac_s [] (* typ -> string -> styp list -> theory -> theory *) fun register_codatatype co_T case_name constr_xs thy = let val {frac_types, codatatypes} = Data.get thy val constr_xs = map (apsnd (repair_constr_type thy co_T)) constr_xs val (co_s, co_Ts) = dest_Type co_T val _ = if forall is_TFree co_Ts andalso not (has_duplicates (op =) co_Ts) andalso co_s <> "fun" andalso not (is_basic_datatype co_s) then () else raise TYPE ("Nitpick_HOL.register_codatatype", [co_T], []) val codatatypes = AList.update (op =) (co_s, (case_name, constr_xs)) codatatypes in Data.put {frac_types = frac_types, codatatypes = codatatypes} thy end (* typ -> theory -> theory *) fun unregister_codatatype co_T = register_codatatype co_T "" [] type typedef_info = {rep_type: typ, abs_type: typ, Rep_name: string, Abs_name: string, set_def: thm option, prop_of_Rep: thm, set_name: string, Abs_inverse: thm option, Rep_inverse: thm option} (* theory -> string -> typedef_info *) fun typedef_info thy s = if is_frac_type thy (Type (s, [])) then SOME {abs_type = Type (s, []), rep_type = @{typ "int * int"}, Abs_name = @{const_name Abs_Frac}, Rep_name = @{const_name Rep_Frac}, set_def = NONE, prop_of_Rep = @{prop "Rep_Frac x \ Frac"} |> Logic.varify, set_name = @{const_name Frac}, Abs_inverse = NONE, Rep_inverse = NONE} else case Typedef.get_info thy s of SOME {abs_type, rep_type, Abs_name, Rep_name, set_def, Rep, Abs_inverse, Rep_inverse, ...} => SOME {abs_type = abs_type, rep_type = rep_type, Abs_name = Abs_name, Rep_name = Rep_name, set_def = set_def, prop_of_Rep = prop_of Rep, set_name = set_prefix ^ s, Abs_inverse = SOME Abs_inverse, Rep_inverse = SOME Rep_inverse} | NONE => NONE (* theory -> string -> bool *) val is_typedef = is_some oo typedef_info val is_real_datatype = is_some oo Datatype.get_info (* theory -> typ -> bool *) -fun is_quot_type _ (Type ("IntEx.my_int", _)) = true (* ### *) +fun is_quot_type _ (Type ("IntEx.my_int", _)) = true (* FIXME *) | is_quot_type _ _ = false fun is_codatatype thy (T as Type (s, _)) = not (null (AList.lookup (op =) 
(#codatatypes (Data.get thy)) s |> Option.map snd |> these)) | is_codatatype _ _ = false fun is_pure_typedef thy (T as Type (s, _)) = is_typedef thy s andalso not (is_real_datatype thy s orelse is_quot_type thy T orelse is_codatatype thy T orelse is_record_type T orelse is_integer_type T) | is_pure_typedef _ _ = false fun is_univ_typedef thy (Type (s, _)) = (case typedef_info thy s of SOME {set_def, prop_of_Rep, ...} => (case set_def of SOME thm => try (fst o dest_Const o snd o Logic.dest_equals o prop_of) thm | NONE => try (fst o dest_Const o snd o HOLogic.dest_mem o HOLogic.dest_Trueprop) prop_of_Rep) = SOME @{const_name top} | NONE => false) | is_univ_typedef _ _ = false fun is_datatype thy (T as Type (s, _)) = (is_typedef thy s orelse is_codatatype thy T orelse T = @{typ ind} orelse is_quot_type thy T) andalso not (is_basic_datatype s) | is_datatype _ _ = false (* theory -> typ -> (string * typ) list * (string * typ) *) fun all_record_fields thy T = let val (recs, more) = Record.get_extT_fields thy T in recs @ more :: all_record_fields thy (snd more) end handle TYPE _ => [] (* styp -> bool *) fun is_record_constr (x as (s, T)) = String.isSuffix Record.extN s andalso let val dataT = body_type T in is_record_type dataT andalso s = unsuffix Record.ext_typeN (fst (dest_Type dataT)) ^ Record.extN end (* theory -> typ -> int *) val num_record_fields = Integer.add 1 o length o fst oo Record.get_extT_fields (* theory -> string -> typ -> int *) fun no_of_record_field thy s T1 = find_index (curry (op =) s o fst) (Record.get_extT_fields thy T1 ||> single |> op @) (* theory -> styp -> bool *) fun is_record_get thy (s, Type ("fun", [T1, _])) = exists (curry (op =) s o fst) (all_record_fields thy T1) | is_record_get _ _ = false fun is_record_update thy (s, T) = String.isSuffix Record.updateN s andalso exists (curry (op =) (unsuffix Record.updateN s) o fst) (all_record_fields thy (body_type T)) handle TYPE _ => false fun is_abs_fun thy (s, Type ("fun", [_, Type (s', _)])) = (case typedef_info thy s' of SOME {Abs_name, ...} => s = Abs_name | NONE => false) | is_abs_fun _ _ = false fun is_rep_fun thy (s, Type ("fun", [Type (s', _), _])) = (case typedef_info thy s' of SOME {Rep_name, ...} => s = Rep_name | NONE => false) | is_rep_fun _ _ = false (* Proof.context -> styp -> bool *) -fun is_quot_abs_fun _ ("IntEx.abs_my_int", _) = true (* ### *) +fun is_quot_abs_fun _ ("IntEx.abs_my_int", _) = true (* FIXME *) | is_quot_abs_fun _ _ = false -fun is_quot_rep_fun _ ("IntEx.rep_my_int", _) = true (* ### *) +fun is_quot_rep_fun _ ("IntEx.rep_my_int", _) = true (* FIXME *) | is_quot_rep_fun _ _ = false (* theory -> styp -> styp *) fun mate_of_rep_fun thy (x as (_, Type ("fun", [T1 as Type (s', _), T2]))) = (case typedef_info thy s' of SOME {Abs_name, ...} => (Abs_name, Type ("fun", [T2, T1])) | NONE => raise TERM ("Nitpick_HOL.mate_of_rep_fun", [Const x])) | mate_of_rep_fun _ x = raise TERM ("Nitpick_HOL.mate_of_rep_fun", [Const x]) (* theory -> typ -> typ *) fun rep_type_for_quot_type _ (Type ("IntEx.my_int", [])) = @{typ "nat * nat"} | rep_type_for_quot_type _ T = raise TYPE ("Nitpick_HOL.rep_type_for_quot_type", [T], []) (* theory -> typ -> term *) fun equiv_relation_for_quot_type _ (Type ("IntEx.my_int", [])) = Const ("IntEx.intrel", @{typ "(nat * nat) => (nat * nat) => bool"}) | equiv_relation_for_quot_type _ T = raise TYPE ("Nitpick_HOL.equiv_relation_for_quot_type", [T], []) (* theory -> styp -> bool *) fun is_coconstr thy (s, T) = let val {codatatypes, ...} = Data.get thy val co_T = body_type T val co_s 
= dest_Type co_T |> fst in exists (fn (s', T') => s = s' andalso repair_constr_type thy co_T T' = T) (AList.lookup (op =) codatatypes co_s |> Option.map snd |> these) end handle TYPE ("dest_Type", _, _) => false fun is_constr_like thy (s, T) = - s = @{const_name FunBox} orelse s = @{const_name PairBox} orelse + member (op =) [@{const_name FunBox}, @{const_name PairBox}, + @{const_name Quot}, @{const_name Zero_Rep}, + @{const_name Suc_Rep}] s orelse let val (x as (s, T)) = (s, unbit_and_unbox_type T) in Refute.is_IDT_constructor thy x orelse is_record_constr x orelse (is_abs_fun thy x andalso is_pure_typedef thy (range_type T)) orelse - s = @{const_name Quot} orelse - s = @{const_name Zero_Rep} orelse s = @{const_name Suc_Rep} orelse x = (@{const_name zero_nat_inst.zero_nat}, nat_T) orelse is_coconstr thy x end fun is_stale_constr thy (x as (_, T)) = is_codatatype thy (body_type T) andalso is_constr_like thy x andalso not (is_coconstr thy x) fun is_constr thy (x as (_, T)) = is_constr_like thy x andalso not (is_basic_datatype (fst (dest_Type (unbit_type (body_type T))))) andalso not (is_stale_constr thy x) (* string -> bool *) val is_sel = String.isPrefix discr_prefix orf String.isPrefix sel_prefix val is_sel_like_and_no_discr = String.isPrefix sel_prefix orf (member (op =) [@{const_name fst}, @{const_name snd}]) datatype boxability = InConstr | InSel | InExpr | InPair | InFunLHS | InFunRHS1 | InFunRHS2 (* boxability -> boxability *) fun in_fun_lhs_for InConstr = InSel | in_fun_lhs_for _ = InFunLHS fun in_fun_rhs_for InConstr = InConstr | in_fun_rhs_for InSel = InSel | in_fun_rhs_for InFunRHS1 = InFunRHS2 | in_fun_rhs_for _ = InFunRHS1 (* extended_context -> boxability -> typ -> bool *) fun is_boxing_worth_it (ext_ctxt : extended_context) boxy T = case T of Type ("fun", _) => (boxy = InPair orelse boxy = InFunLHS) andalso not (is_boolean_type (body_type T)) | Type ("*", Ts) => boxy = InPair orelse boxy = InFunRHS1 orelse boxy = InFunRHS2 orelse ((boxy = InExpr orelse boxy = InFunLHS) andalso exists (is_boxing_worth_it ext_ctxt InPair) (map (box_type ext_ctxt InPair) Ts)) | _ => false (* extended_context -> boxability -> string * typ list -> string *) and should_box_type (ext_ctxt as {thy, boxes, ...}) boxy (z as (s, Ts)) = case triple_lookup (type_match thy) boxes (Type z) of SOME (SOME box_me) => box_me | _ => is_boxing_worth_it ext_ctxt boxy (Type z) (* extended_context -> boxability -> typ -> typ *) and box_type ext_ctxt boxy T = case T of Type (z as ("fun", [T1, T2])) => if boxy <> InConstr andalso boxy <> InSel andalso should_box_type ext_ctxt boxy z then Type (@{type_name fun_box}, [box_type ext_ctxt InFunLHS T1, box_type ext_ctxt InFunRHS1 T2]) else box_type ext_ctxt (in_fun_lhs_for boxy) T1 --> box_type ext_ctxt (in_fun_rhs_for boxy) T2 | Type (z as ("*", Ts)) => - if should_box_type ext_ctxt boxy z then + if boxy <> InConstr andalso boxy <> InSel + andalso should_box_type ext_ctxt boxy z then Type (@{type_name pair_box}, map (box_type ext_ctxt InSel) Ts) else Type ("*", map (box_type ext_ctxt (if boxy = InConstr orelse boxy = InSel then boxy else InPair)) Ts) | _ => T (* styp -> styp *) fun discr_for_constr (s, T) = (discr_prefix ^ s, body_type T --> bool_T) (* typ -> int *) fun num_sels_for_constr_type T = length (maybe_curried_binder_types T) (* string -> int -> string *) fun nth_sel_name_for_constr_name s n = if s = @{const_name Pair} then if n = 0 then @{const_name fst} else @{const_name snd} else sel_prefix_for n ^ s (* styp -> int -> styp *) fun nth_sel_for_constr x ~1 = 
discr_for_constr x | nth_sel_for_constr (s, T) n = (nth_sel_name_for_constr_name s n, body_type T --> nth (maybe_curried_binder_types T) n) (* extended_context -> styp -> int -> styp *) fun boxed_nth_sel_for_constr ext_ctxt = apsnd (box_type ext_ctxt InSel) oo nth_sel_for_constr (* string -> int *) fun sel_no_from_name s = if String.isPrefix discr_prefix s then ~1 else if String.isPrefix sel_prefix s then s |> unprefix sel_prefix |> Int.fromString |> the else if s = @{const_name snd} then 1 else 0 (* typ list -> term -> int -> term *) fun eta_expand _ t 0 = t | eta_expand Ts (Abs (s, T, t')) n = Abs (s, T, eta_expand (T :: Ts) t' (n - 1)) | eta_expand Ts t n = fold_rev (curry3 Abs ("x\<^isub>\" ^ nat_subscript n)) (List.take (binder_types (fastype_of1 (Ts, t)), n)) (list_comb (incr_boundvars n t, map Bound (n - 1 downto 0))) (* term -> term *) fun extensionalize t = case t of (t0 as @{const Trueprop}) $ t1 => t0 $ extensionalize t1 | Const (@{const_name "op ="}, _) $ t1 $ Abs (s, T, t2) => let val v = Var ((s, maxidx_of_term t + 1), T) in extensionalize (HOLogic.mk_eq (t1 $ v, subst_bound (v, t2))) end | _ => t (* typ -> term list -> term *) fun distinctness_formula T = all_distinct_unordered_pairs_of #> map (fn (t1, t2) => @{const Not} $ (HOLogic.eq_const T $ t1 $ t2)) #> List.foldr (s_conj o swap) @{const True} (* typ -> term *) fun zero_const T = Const (@{const_name zero_nat_inst.zero_nat}, T) fun suc_const T = Const (@{const_name Suc}, T --> T) -(* theory -> typ -> styp list *) -fun uncached_datatype_constrs thy (T as Type (s, Ts)) = +(* extended_context -> typ -> styp list *) +fun uncached_datatype_constrs ({thy, stds, ...} : extended_context) + (T as Type (s, Ts)) = (case AList.lookup (op =) (#codatatypes (Data.get thy)) s of - SOME (_, xs' as (_ :: _)) => - map (apsnd (repair_constr_type thy T)) xs' + SOME (_, xs' as (_ :: _)) => map (apsnd (repair_constr_type thy T)) xs' | _ => if is_datatype thy T then case Datatype.get_info thy s of SOME {index, descr, ...} => let val (_, dtyps, constrs) = AList.lookup (op =) descr index |> the in map (fn (s', Us) => (s', map (Refute.typ_of_dtyp descr (dtyps ~~ Ts)) Us ---> T)) constrs + |> (triple_lookup (type_match thy) stds T |> the |> not) ? 
+ cons (@{const_name NonStd}, @{typ \} --> T) end | NONE => if is_record_type T then let val s' = unsuffix Record.ext_typeN s ^ Record.extN val T' = (Record.get_extT_fields thy T |> apsnd single |> uncurry append |> map snd) ---> T in [(s', T')] end else if is_quot_type thy T then [(@{const_name Quot}, rep_type_for_quot_type thy T --> T)] else case typedef_info thy s of SOME {abs_type, rep_type, Abs_name, ...} => [(Abs_name, instantiate_type thy abs_type T rep_type --> T)] | NONE => if T = @{typ ind} then [dest_Const @{const Zero_Rep}, dest_Const @{const Suc_Rep}] else [] else []) | uncached_datatype_constrs _ _ = [] (* extended_context -> typ -> styp list *) -fun datatype_constrs (ext_ctxt as {thy, constr_cache, ...} : extended_context) - T = +fun datatype_constrs (ext_ctxt as {constr_cache, ...}) T = case AList.lookup (op =) (!constr_cache) T of SOME xs => xs | NONE => - let val xs = uncached_datatype_constrs thy T in + let val xs = uncached_datatype_constrs ext_ctxt T in (Unsynchronized.change constr_cache (cons (T, xs)); xs) end fun boxed_datatype_constrs ext_ctxt = map (apsnd (box_type ext_ctxt InConstr)) o datatype_constrs ext_ctxt (* extended_context -> typ -> int *) val num_datatype_constrs = length oo datatype_constrs (* string -> string *) fun constr_name_for_sel_like @{const_name fst} = @{const_name Pair} | constr_name_for_sel_like @{const_name snd} = @{const_name Pair} | constr_name_for_sel_like s' = original_name s' (* extended_context -> styp -> styp *) fun boxed_constr_for_sel ext_ctxt (s', T') = let val s = constr_name_for_sel_like s' in AList.lookup (op =) (boxed_datatype_constrs ext_ctxt (domain_type T')) s |> the |> pair s end + (* extended_context -> styp -> term *) fun discr_term_for_constr ext_ctxt (x as (s, T)) = let val dataT = body_type T in if s = @{const_name Suc} then Abs (Name.uu, dataT, @{const Not} $ HOLogic.mk_eq (zero_const dataT, Bound 0)) else if num_datatype_constrs ext_ctxt dataT >= 2 then Const (discr_for_constr x) else Abs (Name.uu, dataT, @{const True}) end - (* extended_context -> styp -> term -> term *) fun discriminate_value (ext_ctxt as {thy, ...}) (x as (_, T)) t = case strip_comb t of (Const x', args) => if x = x' then @{const True} else if is_constr_like thy x' then @{const False} else betapply (discr_term_for_constr ext_ctxt x, t) | _ => betapply (discr_term_for_constr ext_ctxt x, t) (* styp -> term -> term *) fun nth_arg_sel_term_for_constr (x as (s, T)) n = let val (arg_Ts, dataT) = strip_type T in if dataT = nat_T then @{term "%n::nat. 
minus_nat_inst.minus_nat n one_nat_inst.one_nat"} else if is_pair_type dataT then Const (nth_sel_for_constr x n) else let (* int -> typ -> int * term *) fun aux m (Type ("*", [T1, T2])) = let val (m, t1) = aux m T1 val (m, t2) = aux m T2 in (m, HOLogic.mk_prod (t1, t2)) end | aux m T = (m + 1, Const (nth_sel_name_for_constr_name s m, dataT --> T) $ Bound 0) val m = fold (Integer.add o num_factors_in_type) (List.take (arg_Ts, n)) 0 in Abs ("x", dataT, aux m (nth arg_Ts n) |> snd) end end (* theory -> styp -> term -> int -> typ -> term *) fun select_nth_constr_arg thy x t n res_T = case strip_comb t of (Const x', args) => if x = x' then nth args n else if is_constr_like thy x' then Const (@{const_name unknown}, res_T) else betapply (nth_arg_sel_term_for_constr x n, t) | _ => betapply (nth_arg_sel_term_for_constr x n, t) (* theory -> styp -> term list -> term *) fun construct_value _ x [] = Const x | construct_value thy (x as (s, _)) args = let val args = map Envir.eta_contract args in case hd args of Const (x' as (s', _)) $ t => if is_sel_like_and_no_discr s' andalso constr_name_for_sel_like s' = s andalso forall (fn (n, t') => select_nth_constr_arg thy x t n dummyT = t') (index_seq 0 (length args) ~~ args) then t else list_comb (Const x, args) | _ => list_comb (Const x, args) end (* extended_context -> typ -> term -> term *) fun constr_expand (ext_ctxt as {thy, ...}) T t = (case head_of t of Const x => if is_constr_like thy x then t else raise SAME () | _ => raise SAME ()) handle SAME () => let val x' as (_, T') = if is_pair_type T then let val (T1, T2) = HOLogic.dest_prodT T in (@{const_name Pair}, T1 --> T2 --> T) end else - datatype_constrs ext_ctxt T |> the_single + datatype_constrs ext_ctxt T |> hd val arg_Ts = binder_types T' in list_comb (Const x', map2 (select_nth_constr_arg thy x' t) (index_seq 0 (length arg_Ts)) arg_Ts) end (* (typ * int) list -> typ -> int *) fun card_of_type assigns (Type ("fun", [T1, T2])) = reasonable_power (card_of_type assigns T2) (card_of_type assigns T1) | card_of_type assigns (Type ("*", [T1, T2])) = card_of_type assigns T1 * card_of_type assigns T2 | card_of_type _ (Type (@{type_name itself}, _)) = 1 | card_of_type _ @{typ prop} = 2 | card_of_type _ @{typ bool} = 2 | card_of_type _ @{typ unit} = 1 | card_of_type assigns T = case AList.lookup (op =) assigns T of SOME k => k | NONE => if T = @{typ bisim_iterator} then 0 else raise TYPE ("Nitpick_HOL.card_of_type", [T], []) (* int -> (typ * int) list -> typ -> int *) fun bounded_card_of_type max default_card assigns (Type ("fun", [T1, T2])) = let val k1 = bounded_card_of_type max default_card assigns T1 val k2 = bounded_card_of_type max default_card assigns T2 in if k1 = max orelse k2 = max then max else Int.min (max, reasonable_power k2 k1) end | bounded_card_of_type max default_card assigns (Type ("*", [T1, T2])) = let val k1 = bounded_card_of_type max default_card assigns T1 val k2 = bounded_card_of_type max default_card assigns T2 in if k1 = max orelse k2 = max then max else Int.min (max, k1 * k2) end | bounded_card_of_type max default_card assigns T = Int.min (max, if default_card = ~1 then card_of_type assigns T else card_of_type assigns T handle TYPE ("Nitpick_HOL.card_of_type", _, _) => default_card) (* extended_context -> int -> (typ * int) list -> typ -> int *) fun bounded_exact_card_of_type ext_ctxt max default_card assigns T = let (* typ list -> typ -> int *) fun aux avoid T = (if member (op =) avoid T then 0 else case T of Type ("fun", [T1, T2]) => let val k1 = aux avoid T1 val k2 = aux avoid 
T2 in if k1 = 0 orelse k2 = 0 then 0 else if k1 >= max orelse k2 >= max then max else Int.min (max, reasonable_power k2 k1) end | Type ("*", [T1, T2]) => let val k1 = aux avoid T1 val k2 = aux avoid T2 in if k1 = 0 orelse k2 = 0 then 0 else if k1 >= max orelse k2 >= max then max else Int.min (max, k1 * k2) end | Type (@{type_name itself}, _) => 1 | @{typ prop} => 2 | @{typ bool} => 2 | @{typ unit} => 1 | Type _ => (case datatype_constrs ext_ctxt T of [] => if is_integer_type T orelse is_bit_type T then 0 else raise SAME () | constrs => let val constr_cards = datatype_constrs ext_ctxt T |> map (Integer.prod o map (aux (T :: avoid)) o binder_types o snd) in if exists (curry (op =) 0) constr_cards then 0 else Integer.sum constr_cards end) | _ => raise SAME ()) handle SAME () => AList.lookup (op =) assigns T |> the_default default_card in Int.min (max, aux [] T) end (* extended_context -> typ -> bool *) fun is_finite_type ext_ctxt = not_equal 0 o bounded_exact_card_of_type ext_ctxt 1 2 [] (* term -> bool *) fun is_ground_term (t1 $ t2) = is_ground_term t1 andalso is_ground_term t2 | is_ground_term (Const _) = true | is_ground_term _ = false (* term -> word -> word *) fun hashw_term (t1 $ t2) = Polyhash.hashw (hashw_term t1, hashw_term t2) | hashw_term (Const (s, _)) = Polyhash.hashw_string (s, 0w0) | hashw_term _ = 0w0 (* term -> int *) val hash_term = Word.toInt o hashw_term (* term list -> (indexname * typ) list *) fun special_bounds ts = fold Term.add_vars ts [] |> sort (TermOrd.fast_indexname_ord o pairself fst) (* indexname * typ -> term -> term *) fun abs_var ((s, j), T) body = Abs (s, T, abstract_over (Var ((s, j), T), body)) (* theory -> string -> bool *) fun is_funky_typedef_name thy s = member (op =) [@{type_name unit}, @{type_name "*"}, @{type_name "+"}, @{type_name int}] s orelse is_frac_type thy (Type (s, [])) (* theory -> term -> bool *) fun is_funky_typedef thy (Type (s, _)) = is_funky_typedef_name thy s | is_funky_typedef _ _ = false (* term -> bool *) fun is_arity_type_axiom (Const (@{const_name HOL.type_class}, _) $ Const (@{const_name TYPE}, _)) = true | is_arity_type_axiom _ = false (* theory -> bool -> term -> bool *) fun is_typedef_axiom thy boring (@{const "==>"} $ _ $ t2) = is_typedef_axiom thy boring t2 | is_typedef_axiom thy boring (@{const Trueprop} $ (Const (@{const_name Typedef.type_definition}, _) $ Const (_, Type ("fun", [Type (s, _), _])) $ Const _ $ _)) = boring <> is_funky_typedef_name thy s andalso is_typedef thy s | is_typedef_axiom _ _ _ = false (* Distinguishes between (1) constant definition axioms, (2) type arity and typedef axioms, and (3) other axioms, and returns the pair ((1), (3)). Typedef axioms are uninteresting to Nitpick, because it can retrieve them using "typedef_info". *) (* theory -> (string * term) list -> string list -> term list * term list *) fun partition_axioms_by_definitionality thy axioms def_names = let val axioms = sort (fast_string_ord o pairself fst) axioms val defs = OrdList.inter (fast_string_ord o apsnd fst) def_names axioms val nondefs = OrdList.subtract (fast_string_ord o apsnd fst) def_names axioms |> filter_out ((is_arity_type_axiom orf is_typedef_axiom thy true) o snd) in pairself (map snd) (defs, nondefs) end (* Ideally we would check against "Complex_Main", not "Refute", but any theory will do as long as it contains all the "axioms" and "axiomatization" commands. 
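Concretely, "is_built_in_theory" below merely tests "Theory.subthy (thy, @{theory Refute})", so "Refute" and all of its ancestor theories are classified as built-in; their axioms then land in the built-in partitions computed by "all_axioms_of" rather than among the user axioms.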
*) (* theory -> bool *) fun is_built_in_theory thy = Theory.subthy (thy, @{theory Refute}) (* term -> bool *) val is_plain_definition = let (* term -> bool *) fun do_lhs t1 = case strip_comb t1 of (Const _, args) => forall is_Var args andalso not (has_duplicates (op =) args) | _ => false fun do_eq (Const (@{const_name "=="}, _) $ t1 $ _) = do_lhs t1 | do_eq (@{const Trueprop} $ (Const (@{const_name "op ="}, _) $ t1 $ _)) = do_lhs t1 | do_eq _ = false in do_eq end (* theory -> term list * term list * term list *) fun all_axioms_of thy = let (* theory list -> term list *) val axioms_of_thys = maps Thm.axioms_of #> map (apsnd prop_of) val specs = Defs.all_specifications_of (Theory.defs_of thy) val def_names = specs |> maps snd |> map_filter #def |> OrdList.make fast_string_ord val thys = thy :: Theory.ancestors_of thy val (built_in_thys, user_thys) = List.partition is_built_in_theory thys val built_in_axioms = axioms_of_thys built_in_thys val user_axioms = axioms_of_thys user_thys val (built_in_defs, built_in_nondefs) = partition_axioms_by_definitionality thy built_in_axioms def_names ||> filter (is_typedef_axiom thy false) val (user_defs, user_nondefs) = partition_axioms_by_definitionality thy user_axioms def_names val (built_in_nondefs, user_nondefs) = List.partition (is_typedef_axiom thy false) user_nondefs |>> append built_in_nondefs val defs = (thy |> PureThy.all_thms_of |> filter (curry (op =) Thm.definitionK o Thm.get_kind o snd) |> map (prop_of o snd) |> filter is_plain_definition) @ user_defs @ built_in_defs in (defs, built_in_nondefs, user_nondefs) end (* bool -> styp -> int option *) fun arity_of_built_in_const fast_descrs (s, T) = if s = @{const_name If} then if nth_range_type 3 T = @{typ bool} then NONE else SOME 3 else case AList.lookup (op =) (built_in_consts |> fast_descrs ? append built_in_descr_consts) s of SOME n => SOME n | NONE => case AList.lookup (op =) built_in_typed_consts (s, T) of SOME n => SOME n | NONE => if is_fun_type T andalso is_set_type (domain_type T) then AList.lookup (op =) built_in_set_consts s else NONE (* bool -> styp -> bool *) val is_built_in_const = is_some oo arity_of_built_in_const (* This function is designed to work for both real definition axioms and simplification rules (equational specifications). *) (* term -> term *) fun term_under_def t = case t of @{const "==>"} $ _ $ t2 => term_under_def t2 | Const (@{const_name "=="}, _) $ t1 $ _ => term_under_def t1 | @{const Trueprop} $ t1 => term_under_def t1 | Const (@{const_name "op ="}, _) $ t1 $ _ => term_under_def t1 | Abs (_, _, t') => term_under_def t' | t1 $ _ => term_under_def t1 | _ => t (* Here we crucially rely on "Refute.specialize_type" performing a preorder traversal of the term, without which the wrong occurrence of a constant could be matched in the face of overloading. 
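As a hypothetical illustration (not taken from the sources): in a simplification rule for an overloaded constant "c" whose right-hand side mentions "c" again at a different type instance, matching the right-hand-side occurrence first would specialize the rule at the wrong type, and the "term_under_def" filter in "def_props_for_const" below would then discard the rule.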
*) (* theory -> bool -> const_table -> styp -> term list *) fun def_props_for_const thy fast_descrs table (x as (s, _)) = if is_built_in_const fast_descrs x then [] else these (Symtab.lookup table s) |> map_filter (try (Refute.specialize_type thy x)) |> filter (curry (op =) (Const x) o term_under_def) (* theory -> term -> term option *) fun normalized_rhs_of thy t = let (* term option -> term option *) fun aux (v as Var _) (SOME t) = SOME (lambda v t) | aux (c as Const (@{const_name TYPE}, T)) (SOME t) = SOME (lambda c t) | aux _ _ = NONE val (lhs, rhs) = case t of Const (@{const_name "=="}, _) $ t1 $ t2 => (t1, t2) | @{const Trueprop} $ (Const (@{const_name "op ="}, _) $ t1 $ t2) => (t1, t2) | _ => raise TERM ("Nitpick_HOL.normalized_rhs_of", [t]) val args = strip_comb lhs |> snd in fold_rev aux args (SOME rhs) end (* theory -> const_table -> styp -> term option *) fun def_of_const thy table (x as (s, _)) = if is_built_in_const false x orelse original_name s <> s then NONE else x |> def_props_for_const thy false table |> List.last |> normalized_rhs_of thy |> Option.map (prefix_abs_vars s) handle List.Empty => NONE datatype fixpoint_kind = Lfp | Gfp | NoFp (* term -> fixpoint_kind *) fun fixpoint_kind_of_rhs (Abs (_, _, t)) = fixpoint_kind_of_rhs t | fixpoint_kind_of_rhs (Const (@{const_name lfp}, _) $ Abs _) = Lfp | fixpoint_kind_of_rhs (Const (@{const_name gfp}, _) $ Abs _) = Gfp | fixpoint_kind_of_rhs _ = NoFp (* theory -> const_table -> term -> bool *) fun is_mutually_inductive_pred_def thy table t = let (* term -> bool *) fun is_good_arg (Bound _) = true | is_good_arg (Const (s, _)) = s = @{const_name True} orelse s = @{const_name False} orelse s = @{const_name undefined} | is_good_arg _ = false in case t |> strip_abs_body |> strip_comb of (Const x, ts as (_ :: _)) => (case def_of_const thy table x of SOME t' => fixpoint_kind_of_rhs t' <> NoFp andalso forall is_good_arg ts | NONE => false) | _ => false end (* theory -> const_table -> term -> term *) fun unfold_mutually_inductive_preds thy table = map_aterms (fn t as Const x => (case def_of_const thy table x of SOME t' => let val t' = Envir.eta_contract t' in if is_mutually_inductive_pred_def thy table t' then t' else t end | NONE => t) | t => t) (* term -> string * term *) fun pair_for_prop t = case term_under_def t of Const (s, _) => (s, t) | Free _ => raise NOT_SUPPORTED "local definitions" | t' => raise TERM ("Nitpick_HOL.pair_for_prop", [t, t']) (* (Proof.context -> term list) -> Proof.context -> const_table *) fun table_for get ctxt = get ctxt |> map pair_for_prop |> AList.group (op =) |> Symtab.make (* theory -> (string * int) list *) fun case_const_names thy = Symtab.fold (fn (dtype_s, {index, descr, case_name, ...}) => if is_basic_datatype dtype_s then I else cons (case_name, AList.lookup (op =) descr index |> the |> #3 |> length)) (Datatype.get_all thy) [] @ map (apsnd length o snd) (#codatatypes (Data.get thy)) (* Proof.context -> term list -> const_table *) fun const_def_table ctxt ts = table_for (map prop_of o Nitpick_Defs.get) ctxt |> fold (fn (s, t) => Symtab.map_default (s, []) (cons t)) (map pair_for_prop ts) (* term list -> const_table *) fun const_nondef_table ts = fold (fn t => append (map (fn s => (s, t)) (Term.add_const_names t []))) ts [] |> AList.group (op =) |> Symtab.make (* Proof.context -> const_table *) val const_simp_table = table_for (map prop_of o Nitpick_Simps.get) val const_psimp_table = table_for (map prop_of o Nitpick_Psimps.get) (* Proof.context -> const_table -> const_table *) fun 
inductive_intro_table ctxt def_table = table_for (map (unfold_mutually_inductive_preds (ProofContext.theory_of ctxt) def_table o prop_of) o Nitpick_Intros.get) ctxt (* theory -> term list Inttab.table *) fun ground_theorem_table thy = fold ((fn @{const Trueprop} $ t1 => is_ground_term t1 ? Inttab.map_default (hash_term t1, []) (cons t1) | _ => I) o prop_of o snd) (PureThy.all_thms_of thy) Inttab.empty val basic_ersatz_table = [(@{const_name prod_case}, @{const_name split}), (@{const_name card}, @{const_name card'}), (@{const_name setsum}, @{const_name setsum'}), (@{const_name fold_graph}, @{const_name fold_graph'}), (@{const_name wf}, @{const_name wf'}), (@{const_name wf_wfrec}, @{const_name wf_wfrec'}), (@{const_name wfrec}, @{const_name wfrec'})] (* theory -> (string * string) list *) fun ersatz_table thy = fold (append o snd) (#frac_types (Data.get thy)) basic_ersatz_table (* const_table Unsynchronized.ref -> string -> term list -> unit *) fun add_simps simp_table s eqs = Unsynchronized.change simp_table (Symtab.update (s, eqs @ these (Symtab.lookup (!simp_table) s))) (* Similar to "Refute.specialize_type" but returns all matches rather than only the first (preorder) match. *) (* theory -> styp -> term -> term list *) fun multi_specialize_type thy slack (x as (s, T)) t = let (* term -> (typ * term) list -> (typ * term) list *) fun aux (Const (s', T')) ys = if s = s' then ys |> (if AList.defined (op =) ys T' then I else cons (T', Refute.monomorphic_term (Sign.typ_match thy (T', T) Vartab.empty) t) handle Type.TYPE_MATCH => I | Refute.REFUTE _ => if slack then I else raise NOT_SUPPORTED ("too much polymorphism in \ \axiom involving " ^ quote s)) else ys | aux _ ys = ys in map snd (fold_aterms aux t []) end (* theory -> bool -> const_table -> styp -> term list *) fun nondef_props_for_const thy slack table (x as (s, _)) = these (Symtab.lookup table s) |> maps (multi_specialize_type thy slack x) (* theory -> styp -> term list *) fun inverse_axioms_for_rep_fun thy (x as (_, T)) = let val abs_T = domain_type T in typedef_info thy (fst (dest_Type abs_T)) |> the |> pairf #Abs_inverse #Rep_inverse |> pairself (Refute.specialize_type thy x o prop_of o the) ||> single |> op :: end (* theory -> styp list -> term list *) fun optimized_typedef_axioms thy (abs_z as (abs_s, abs_Ts)) = let val abs_T = Type abs_z in if is_univ_typedef thy abs_T then [] else case typedef_info thy abs_s of SOME {abs_type, rep_type, Abs_name, Rep_name, prop_of_Rep, set_name, ...} => let val rep_T = instantiate_type thy abs_type abs_T rep_type val rep_t = Const (Rep_name, abs_T --> rep_T) val set_t = Const (set_name, rep_T --> bool_T) val set_t' = prop_of_Rep |> HOLogic.dest_Trueprop |> Refute.specialize_type thy (dest_Const rep_t) |> HOLogic.dest_mem |> snd in [HOLogic.all_const abs_T $ Abs (Name.uu, abs_T, set_t $ (rep_t $ Bound 0))] |> set_t <> set_t' ? 
cons (HOLogic.mk_eq (set_t, set_t')) |> map HOLogic.mk_Trueprop end | NONE => [] end fun optimized_quot_type_axioms thy abs_z = let val abs_T = Type abs_z val rep_T = rep_type_for_quot_type thy abs_T val equiv_rel = equiv_relation_for_quot_type thy abs_T val a_var = Var (("a", 0), abs_T) val x_var = Var (("x", 0), rep_T) val y_var = Var (("y", 0), rep_T) val x = (@{const_name Quot}, rep_T --> abs_T) val sel_a_t = select_nth_constr_arg thy x a_var 0 rep_T val normal_t = Const (@{const_name quot_normal}, rep_T --> rep_T) val normal_x = normal_t $ x_var val normal_y = normal_t $ y_var val is_unknown_t = Const (@{const_name is_unknown}, rep_T --> bool_T) in - [Logic.mk_equals (sel_a_t, normal_t $ sel_a_t), + [Logic.mk_equals (normal_t $ sel_a_t, sel_a_t), Logic.list_implies - ([@{const Not} $ (HOLogic.mk_eq (x_var, y_var)), - @{const Not} $ (is_unknown_t $ normal_x), + ([@{const Not} $ (is_unknown_t $ normal_x), @{const Not} $ (is_unknown_t $ normal_y), equiv_rel $ x_var $ y_var] |> map HOLogic.mk_Trueprop, Logic.mk_equals (normal_x, normal_y)), @{const "==>"} $ (HOLogic.mk_Trueprop (@{const Not} $ (is_unknown_t $ normal_x))) $ (HOLogic.mk_Trueprop (equiv_rel $ x_var $ normal_x))] end (* theory -> int * styp -> term *) fun constr_case_body thy (j, (x as (_, T))) = let val arg_Ts = binder_types T in list_comb (Bound j, map2 (select_nth_constr_arg thy x (Bound 0)) (index_seq 0 (length arg_Ts)) arg_Ts) end (* extended_context -> typ -> int * styp -> term -> term *) fun add_constr_case (ext_ctxt as {thy, ...}) res_T (j, x) res_t = Const (@{const_name If}, bool_T --> res_T --> res_T --> res_T) $ discriminate_value ext_ctxt x (Bound 0) $ constr_case_body thy (j, x) $ res_t (* extended_context -> typ -> typ -> term *) fun optimized_case_def (ext_ctxt as {thy, ...}) dataT res_T = let val xs = datatype_constrs ext_ctxt dataT - val func_Ts = map ((fn T => binder_types T ---> res_T) o snd) xs - val (xs', x) = split_last xs + val xs' = filter_out (fn (s, _) => s = @{const_name NonStd}) xs + val func_Ts = map ((fn T => binder_types T ---> res_T) o snd) xs' in - constr_case_body thy (1, x) - |> fold_rev (add_constr_case ext_ctxt res_T) (length xs downto 2 ~~ xs') + (if length xs = length xs' then + let + val (xs'', x) = split_last xs' + in + constr_case_body thy (1, x) + |> fold_rev (add_constr_case ext_ctxt res_T) + (length xs' downto 2 ~~ xs'') + end + else + Const (@{const_name undefined}, dataT --> res_T) $ Bound 0 + |> fold_rev (add_constr_case ext_ctxt res_T) + (length xs' downto 1 ~~ xs')) |> fold_rev (curry absdummy) (func_Ts @ [dataT]) end (* extended_context -> string -> typ -> typ -> term -> term *) fun optimized_record_get (ext_ctxt as {thy, ...}) s rec_T res_T t = - let val constr_x = the_single (datatype_constrs ext_ctxt rec_T) in + let val constr_x = hd (datatype_constrs ext_ctxt rec_T) in case no_of_record_field thy s rec_T of ~1 => (case rec_T of Type (_, Ts as _ :: _) => let val rec_T' = List.last Ts val j = num_record_fields thy rec_T - 1 in select_nth_constr_arg thy constr_x t j res_T |> optimized_record_get ext_ctxt s rec_T' res_T end | _ => raise TYPE ("Nitpick_HOL.optimized_record_get", [rec_T], [])) | j => select_nth_constr_arg thy constr_x t j res_T end (* extended_context -> string -> typ -> term -> term -> term *) fun optimized_record_update (ext_ctxt as {thy, ...}) s rec_T fun_t rec_t = let - val constr_x as (_, constr_T) = the_single (datatype_constrs ext_ctxt rec_T) + val constr_x as (_, constr_T) = hd (datatype_constrs ext_ctxt rec_T) val Ts = binder_types constr_T val n = 
length Ts val special_j = no_of_record_field thy s rec_T val ts = map2 (fn j => fn T => let val t = select_nth_constr_arg thy constr_x rec_t j T in if j = special_j then betapply (fun_t, t) else if j = n - 1 andalso special_j = ~1 then optimized_record_update ext_ctxt s (rec_T |> dest_Type |> snd |> List.last) fun_t t else t end) (index_seq 0 n) Ts in list_comb (Const constr_x, ts) end (* Constants "c" whose definition is of the form "c == c'", where "c'" is also a constant, are said to be trivial. For those, we ignore the simplification rules and use the definition instead, to ensure that built-in symbols like "ord_nat_inst.less_eq_nat" are picked up correctly. *) (* theory -> const_table -> styp -> bool *) fun has_trivial_definition thy table x = case def_of_const thy table x of SOME (Const _) => true | _ => false (* theory -> const_table -> string * typ -> fixpoint_kind *) fun fixpoint_kind_of_const thy table x = if is_built_in_const false x then NoFp else fixpoint_kind_of_rhs (the (def_of_const thy table x)) handle Option.Option => NoFp (* extended_context -> styp -> bool *) fun is_real_inductive_pred ({thy, fast_descrs, def_table, intro_table, ...} : extended_context) x = not (null (def_props_for_const thy fast_descrs intro_table x)) andalso fixpoint_kind_of_const thy def_table x <> NoFp fun is_real_equational_fun ({thy, fast_descrs, simp_table, psimp_table, ...} : extended_context) x = exists (fn table => not (null (def_props_for_const thy fast_descrs table x))) [!simp_table, psimp_table] fun is_inductive_pred ext_ctxt = is_real_inductive_pred ext_ctxt andf (not o is_real_equational_fun ext_ctxt) fun is_equational_fun (ext_ctxt as {thy, def_table, ...}) = (is_real_equational_fun ext_ctxt orf is_real_inductive_pred ext_ctxt orf (String.isPrefix ubfp_prefix orf String.isPrefix lbfp_prefix) o fst) andf (not o has_trivial_definition thy def_table) (* term * term -> term *) fun s_betapply (Const (@{const_name If}, _) $ @{const True} $ t, _) = t | s_betapply (Const (@{const_name If}, _) $ @{const False} $ _, t) = t | s_betapply p = betapply p (* term * term list -> term *) val s_betapplys = Library.foldl s_betapply (* term -> term *) fun lhs_of_equation t = case t of Const (@{const_name all}, _) $ Abs (_, _, t1) => lhs_of_equation t1 | Const (@{const_name "=="}, _) $ t1 $ _ => SOME t1 | @{const "==>"} $ _ $ t2 => lhs_of_equation t2 | @{const Trueprop} $ t1 => lhs_of_equation t1 | Const (@{const_name All}, _) $ Abs (_, _, t1) => lhs_of_equation t1 | Const (@{const_name "op ="}, _) $ t1 $ _ => SOME t1 | @{const "op -->"} $ _ $ t2 => lhs_of_equation t2 | _ => NONE (* theory -> term -> bool *) fun is_constr_pattern _ (Bound _) = true | is_constr_pattern _ (Var _) = true | is_constr_pattern thy t = case strip_comb t of (Const (x as (s, _)), args) => is_constr_like thy x andalso forall (is_constr_pattern thy) args | _ => false fun is_constr_pattern_lhs thy t = forall (is_constr_pattern thy) (snd (strip_comb t)) fun is_constr_pattern_formula thy t = case lhs_of_equation t of SOME t' => is_constr_pattern_lhs thy t' | NONE => false val unfold_max_depth = 255 val axioms_max_depth = 255 (* extended_context -> term -> term *) fun unfold_defs_in_term (ext_ctxt as {thy, destroy_constrs, fast_descrs, case_names, def_table, ground_thm_table, ersatz_table, ...}) = let (* int -> typ list -> term -> term *) fun do_term depth Ts t = case t of (t0 as Const (@{const_name Int.number_class.number_of}, Type ("fun", [_, ran_T]))) $ t1 => ((if is_number_type thy ran_T then let val j = t1 |> HOLogic.dest_numeral |> 
ran_T = nat_T ? Integer.max 0 val s = numeral_prefix ^ signed_string_of_int j in if is_integer_type ran_T then Const (s, ran_T) else do_term depth Ts (Const (@{const_name of_int}, int_T --> ran_T) $ Const (s, int_T)) end handle TERM _ => raise SAME () else raise SAME ()) handle SAME () => betapply (do_term depth Ts t0, do_term depth Ts t1)) | Const (@{const_name refl_on}, T) $ Const (@{const_name top}, _) $ t2 => do_const depth Ts t (@{const_name refl'}, range_type T) [t2] | (t0 as Const (x as (@{const_name Sigma}, T))) $ t1 $ (t2 as Abs (_, _, t2')) => betapplys (t0 |> loose_bvar1 (t2', 0) ? do_term depth Ts, map (do_term depth Ts) [t1, t2]) | Const (x as (@{const_name distinct}, Type ("fun", [Type (@{type_name list}, [T']), _]))) $ (t1 as _ $ _) => (t1 |> HOLogic.dest_list |> distinctness_formula T' handle TERM _ => do_const depth Ts t x [t1]) | (t0 as Const (x as (@{const_name If}, _))) $ t1 $ t2 $ t3 => if is_ground_term t1 andalso exists (Pattern.matches thy o rpair t1) (Inttab.lookup_list ground_thm_table (hash_term t1)) then do_term depth Ts t2 else do_const depth Ts t x [t1, t2, t3] | Const x $ t1 $ t2 $ t3 => do_const depth Ts t x [t1, t2, t3] | Const x $ t1 $ t2 => do_const depth Ts t x [t1, t2] | Const x $ t1 => do_const depth Ts t x [t1] | Const x => do_const depth Ts t x [] | t1 $ t2 => betapply (do_term depth Ts t1, do_term depth Ts t2) | Free _ => t | Var _ => t | Bound _ => t | Abs (s, T, body) => Abs (s, T, do_term depth (T :: Ts) body) (* int -> typ list -> styp -> term list -> int -> typ -> term * term list *) and select_nth_constr_arg_with_args _ _ (x as (_, T)) [] n res_T = (Abs (Name.uu, body_type T, select_nth_constr_arg thy x (Bound 0) n res_T), []) | select_nth_constr_arg_with_args depth Ts x (t :: ts) n res_T = (select_nth_constr_arg thy x (do_term depth Ts t) n res_T, ts) (* int -> typ list -> term -> styp -> term list -> term *) and do_const depth Ts t (x as (s, T)) ts = case AList.lookup (op =) ersatz_table s of SOME s' => do_const (depth + 1) Ts (list_comb (Const (s', T), ts)) (s', T) ts | NONE => let val (const, ts) = if is_built_in_const fast_descrs x then (Const x, ts) else case AList.lookup (op =) case_names s of SOME n => let val (dataT, res_T) = nth_range_type n T |> pairf domain_type range_type in (optimized_case_def ext_ctxt dataT res_T |> do_term (depth + 1) Ts, ts) end | _ => if is_constr thy x then (Const x, ts) else if is_stale_constr thy x then raise NOT_SUPPORTED ("(non-co)constructors of codatatypes \ \(\"" ^ s ^ "\")") else if is_quot_abs_fun thy x then let val rep_T = domain_type T val abs_T = range_type T in (Abs (Name.uu, rep_T, Const (@{const_name Quot}, rep_T --> abs_T) $ (Const (@{const_name quot_normal}, rep_T --> rep_T) $ Bound 0)), ts) end else if is_quot_rep_fun thy x then let val abs_T = domain_type T val rep_T = range_type T val x' = (@{const_name Quot}, rep_T --> abs_T) in select_nth_constr_arg_with_args depth Ts x' ts 0 rep_T end else if is_record_get thy x then case length ts of 0 => (do_term depth Ts (eta_expand Ts t 1), []) | _ => (optimized_record_get ext_ctxt s (domain_type T) - (range_type T) (hd ts), tl ts) + (range_type T) (do_term depth Ts (hd ts)), tl ts) else if is_record_update thy x then case length ts of 2 => (optimized_record_update ext_ctxt (unsuffix Record.updateN s) (nth_range_type 2 T) (do_term depth Ts (hd ts)) (do_term depth Ts (nth ts 1)), []) | n => (do_term depth Ts (eta_expand Ts t (2 - n)), []) else if is_rep_fun thy x then let val x' = mate_of_rep_fun thy x in if is_constr thy x' then 
select_nth_constr_arg_with_args depth Ts x' ts 0 (range_type T) else (Const x, ts) end else if is_equational_fun ext_ctxt x then (Const x, ts) else case def_of_const thy def_table x of SOME def => if depth > unfold_max_depth then raise TOO_LARGE ("Nitpick_HOL.unfold_defs_in_term", "too many nested definitions (" ^ string_of_int depth ^ ") while expanding " ^ quote s) else if s = @{const_name wfrec'} then (do_term (depth + 1) Ts (betapplys (def, ts)), []) else (do_term (depth + 1) Ts def, ts) | NONE => (Const x, ts) in s_betapplys (const, map (do_term depth Ts) ts) |> Envir.beta_norm end in do_term 0 [] end (* extended_context -> typ -> term list *) fun codatatype_bisim_axioms (ext_ctxt as {thy, ...}) T = let val xs = datatype_constrs ext_ctxt T val set_T = T --> bool_T val iter_T = @{typ bisim_iterator} val bisim_const = Const (@{const_name bisim}, iter_T --> T --> T --> bool_T) val bisim_max = @{const bisim_iterator_max} val n_var = Var (("n", 0), iter_T) val n_var_minus_1 = Const (@{const_name Tha}, (iter_T --> bool_T) --> iter_T) $ Abs ("m", iter_T, HOLogic.eq_const iter_T $ (suc_const iter_T $ Bound 0) $ n_var) val x_var = Var (("x", 0), T) val y_var = Var (("y", 0), T) (* styp -> int -> typ -> term *) fun nth_sub_bisim x n nth_T = (if is_codatatype thy nth_T then bisim_const $ n_var_minus_1 else HOLogic.eq_const nth_T) $ select_nth_constr_arg thy x x_var n nth_T $ select_nth_constr_arg thy x y_var n nth_T (* styp -> term *) fun case_func (x as (_, T)) = let val arg_Ts = binder_types T val core_t = discriminate_value ext_ctxt x y_var :: map2 (nth_sub_bisim x) (index_seq 0 (length arg_Ts)) arg_Ts |> foldr1 s_conj in List.foldr absdummy core_t arg_Ts end in [HOLogic.eq_const bool_T $ (bisim_const $ n_var $ x_var $ y_var) $ (@{term "op |"} $ (HOLogic.eq_const iter_T $ n_var $ zero_const iter_T) $ (betapplys (optimized_case_def ext_ctxt T bool_T, map case_func xs @ [x_var]))), HOLogic.eq_const set_T $ (bisim_const $ bisim_max $ x_var) $ (Const (@{const_name insert}, T --> set_T --> set_T) $ x_var $ Const (@{const_name bot_fun_inst.bot_fun}, set_T))] |> map HOLogic.mk_Trueprop end exception NO_TRIPLE of unit (* theory -> styp -> term -> term list * term list * term *) fun triple_for_intro_rule thy x t = let val prems = Logic.strip_imp_prems t |> map (ObjectLogic.atomize_term thy) val concl = Logic.strip_imp_concl t |> ObjectLogic.atomize_term thy val (main, side) = List.partition (exists_Const (curry (op =) x)) prems (* term -> bool *) val is_good_head = curry (op =) (Const x) o head_of in if forall is_good_head main then (side, main, concl) else raise NO_TRIPLE () end (* term -> term *) val tuple_for_args = HOLogic.mk_tuple o snd o strip_comb (* indexname * typ -> term list -> term -> term -> term *) fun wf_constraint_for rel side concl main = let val core = HOLogic.mk_mem (HOLogic.mk_prod (tuple_for_args main, tuple_for_args concl), Var rel) val t = List.foldl HOLogic.mk_imp core side val vars = filter (not_equal rel) (Term.add_vars t []) in Library.foldl (fn (t', ((x, j), T)) => HOLogic.all_const T $ Abs (x, T, abstract_over (Var ((x, j), T), t'))) (t, vars) end (* indexname * typ -> term list * term list * term -> term *) fun wf_constraint_for_triple rel (side, main, concl) = map (wf_constraint_for rel side concl) main |> foldr1 s_conj (* Proof.context -> Time.time option -> thm -> (Proof.context -> tactic -> tactic) -> bool *) fun terminates_by ctxt timeout goal tac = can (SINGLE (Classical.safe_tac (claset_of ctxt)) #> the #> SINGLE (DETERM_TIMEOUT timeout (tac ctxt (auto_tac 
(clasimpset_of ctxt)))) #> the #> Goal.finish ctxt) goal val max_cached_wfs = 100 val cached_timeout = Unsynchronized.ref (SOME Time.zeroTime) val cached_wf_props : (term * bool) list Unsynchronized.ref = Unsynchronized.ref [] val termination_tacs = [Lexicographic_Order.lex_order_tac true, ScnpReconstruct.sizechange_tac] (* extended_context -> const_table -> styp -> bool *) fun uncached_is_well_founded_inductive_pred ({thy, ctxt, debug, fast_descrs, tac_timeout, intro_table, ...} : extended_context) (x as (_, T)) = case def_props_for_const thy fast_descrs intro_table x of [] => raise TERM ("Nitpick_HOL.uncached_is_well_founded_inductive", [Const x]) | intro_ts => (case map (triple_for_intro_rule thy x) intro_ts |> filter_out (null o #2) of [] => true | triples => let val binders_T = HOLogic.mk_tupleT (binder_types T) val rel_T = HOLogic.mk_prodT (binders_T, binders_T) --> bool_T val j = fold Integer.max (map maxidx_of_term intro_ts) 0 + 1 val rel = (("R", j), rel_T) val prop = Const (@{const_name wf}, rel_T --> bool_T) $ Var rel :: map (wf_constraint_for_triple rel) triples |> foldr1 s_conj |> HOLogic.mk_Trueprop val _ = if debug then priority ("Wellfoundedness goal: " ^ Syntax.string_of_term ctxt prop ^ ".") else () in if tac_timeout = (!cached_timeout) andalso length (!cached_wf_props) < max_cached_wfs then () else (cached_wf_props := []; cached_timeout := tac_timeout); case AList.lookup (op =) (!cached_wf_props) prop of SOME wf => wf | NONE => let val goal = prop |> cterm_of thy |> Goal.init val wf = exists (terminates_by ctxt tac_timeout goal) termination_tacs in Unsynchronized.change cached_wf_props (cons (prop, wf)); wf end end) handle List.Empty => false | NO_TRIPLE () => false (* The type constraint below is a workaround for a Poly/ML bug. *) (* extended_context -> styp -> bool *) fun is_well_founded_inductive_pred (ext_ctxt as {thy, wfs, def_table, wf_cache, ...} : extended_context) (x as (s, _)) = case triple_lookup (const_match thy) wfs x of SOME (SOME b) => b | _ => s = @{const_name Nats} orelse s = @{const_name fold_graph'} orelse case AList.lookup (op =) (!wf_cache) x of SOME (_, wf) => wf | NONE => let val gfp = (fixpoint_kind_of_const thy def_table x = Gfp) val wf = uncached_is_well_founded_inductive_pred ext_ctxt x in Unsynchronized.change wf_cache (cons (x, (gfp, wf))); wf end (* typ list -> typ -> typ -> term -> term *) fun ap_curry [_] _ _ t = t | ap_curry arg_Ts tuple_T body_T t = let val n = length arg_Ts in list_abs (map (pair "c") arg_Ts, incr_boundvars n t $ mk_flat_tuple tuple_T (map Bound (n - 1 downto 0))) end (* int -> term -> int *) fun num_occs_of_bound_in_term j (t1 $ t2) = op + (pairself (num_occs_of_bound_in_term j) (t1, t2)) | num_occs_of_bound_in_term j (Abs (s, T, t')) = num_occs_of_bound_in_term (j + 1) t' | num_occs_of_bound_in_term j (Bound j') = if j' = j then 1 else 0 | num_occs_of_bound_in_term _ _ = 0 (* term -> bool *) val is_linear_inductive_pred_def = let (* int -> term -> bool *) fun do_disjunct j (Const (@{const_name Ex}, _) $ Abs (_, _, t2)) = do_disjunct (j + 1) t2 | do_disjunct j t = case num_occs_of_bound_in_term j t of 0 => true | 1 => exists (curry (op =) (Bound j) o head_of) (conjuncts t) | _ => false (* term -> bool *) fun do_lfp_def (Const (@{const_name lfp}, _) $ t2) = let val (xs, body) = strip_abs t2 in case length xs of 1 => false | n => forall (do_disjunct (n - 1)) (disjuncts body) end | do_lfp_def _ = false in do_lfp_def o strip_abs_body end (* int -> int list list *) fun n_ptuple_paths 0 = [] | n_ptuple_paths 1 = [] | 
n_ptuple_paths n = [] :: map (cons 2) (n_ptuple_paths (n - 1)) (* int -> typ -> typ -> term -> term *) val ap_n_split = HOLogic.mk_psplits o n_ptuple_paths (* term -> term * term *) val linear_pred_base_and_step_rhss = let (* term -> term *) fun aux (Const (@{const_name lfp}, _) $ t2) = let val (xs, body) = strip_abs t2 val arg_Ts = map snd (tl xs) val tuple_T = HOLogic.mk_tupleT arg_Ts val j = length arg_Ts (* int -> term -> term *) fun repair_rec j (Const (@{const_name Ex}, T1) $ Abs (s2, T2, t2')) = Const (@{const_name Ex}, T1) $ Abs (s2, T2, repair_rec (j + 1) t2') | repair_rec j (@{const "op &"} $ t1 $ t2) = @{const "op &"} $ repair_rec j t1 $ repair_rec j t2 | repair_rec j t = let val (head, args) = strip_comb t in if head = Bound j then HOLogic.eq_const tuple_T $ Bound j $ mk_flat_tuple tuple_T args else t end val (nonrecs, recs) = List.partition (curry (op =) 0 o num_occs_of_bound_in_term j) (disjuncts body) val base_body = nonrecs |> List.foldl s_disj @{const False} val step_body = recs |> map (repair_rec j) |> List.foldl s_disj @{const False} in (list_abs (tl xs, incr_bv (~1, j, base_body)) |> ap_n_split (length arg_Ts) tuple_T bool_T, Abs ("y", tuple_T, list_abs (tl xs, step_body) |> ap_n_split (length arg_Ts) tuple_T bool_T)) end | aux t = raise TERM ("Nitpick_HOL.linear_pred_base_and_step_rhss.aux", [t]) in aux end (* extended_context -> styp -> term -> term *) fun starred_linear_pred_const (ext_ctxt as {simp_table, ...}) (x as (s, T)) def = let val j = maxidx_of_term def + 1 val (outer, fp_app) = strip_abs def val outer_bounds = map Bound (length outer - 1 downto 0) val outer_vars = map (fn (s, T) => Var ((s, j), T)) outer val fp_app = subst_bounds (rev outer_vars, fp_app) val (outer_Ts, rest_T) = strip_n_binders (length outer) T val tuple_arg_Ts = strip_type rest_T |> fst val tuple_T = HOLogic.mk_tupleT tuple_arg_Ts val set_T = tuple_T --> bool_T val curried_T = tuple_T --> set_T val uncurried_T = Type ("*", [tuple_T, tuple_T]) --> bool_T val (base_rhs, step_rhs) = linear_pred_base_and_step_rhss fp_app val base_x as (base_s, _) = (base_prefix ^ s, outer_Ts ---> set_T) val base_eq = HOLogic.mk_eq (list_comb (Const base_x, outer_vars), base_rhs) |> HOLogic.mk_Trueprop val _ = add_simps simp_table base_s [base_eq] val step_x as (step_s, _) = (step_prefix ^ s, outer_Ts ---> curried_T) val step_eq = HOLogic.mk_eq (list_comb (Const step_x, outer_vars), step_rhs) |> HOLogic.mk_Trueprop val _ = add_simps simp_table step_s [step_eq] in list_abs (outer, Const (@{const_name Image}, uncurried_T --> set_T --> set_T) $ (Const (@{const_name rtrancl}, uncurried_T --> uncurried_T) $ (Const (@{const_name split}, curried_T --> uncurried_T) $ list_comb (Const step_x, outer_bounds))) $ list_comb (Const base_x, outer_bounds) |> ap_curry tuple_arg_Ts tuple_T bool_T) |> unfold_defs_in_term ext_ctxt end (* extended_context -> bool -> styp -> term *) fun unrolled_inductive_pred_const (ext_ctxt as {thy, star_linear_preds, def_table, simp_table, ...}) gfp (x as (s, T)) = let val iter_T = iterator_type_for_const gfp x val x' as (s', _) = (unrolled_prefix ^ s, iter_T --> T) val unrolled_const = Const x' $ zero_const iter_T val def = the (def_of_const thy def_table x) in if is_equational_fun ext_ctxt x' then unrolled_const (* already done *) else if not gfp andalso is_linear_inductive_pred_def def andalso star_linear_preds then starred_linear_pred_const ext_ctxt x def else let val j = maxidx_of_term def + 1 val (outer, fp_app) = strip_abs def val outer_bounds = map Bound (length outer - 1 downto 0) val 
cur = Var ((iter_var_prefix, j + 1), iter_T) val next = suc_const iter_T $ cur val rhs = case fp_app of Const _ $ t => betapply (t, list_comb (Const x', next :: outer_bounds)) | _ => raise TERM ("Nitpick_HOL.unrolled_inductive_pred_\ \const", [fp_app]) val (inner, naked_rhs) = strip_abs rhs val all = outer @ inner val bounds = map Bound (length all - 1 downto 0) val vars = map (fn (s, T) => Var ((s, j), T)) all val eq = HOLogic.mk_eq (list_comb (Const x', cur :: bounds), naked_rhs) |> HOLogic.mk_Trueprop |> curry subst_bounds (rev vars) val _ = add_simps simp_table s' [eq] in unrolled_const end end (* extended_context -> styp -> term *) fun raw_inductive_pred_axiom ({thy, def_table, ...} : extended_context) x = let val def = the (def_of_const thy def_table x) val (outer, fp_app) = strip_abs def val outer_bounds = map Bound (length outer - 1 downto 0) val rhs = case fp_app of Const _ $ t => betapply (t, list_comb (Const x, outer_bounds)) | _ => raise TERM ("Nitpick_HOL.raw_inductive_pred_axiom", [fp_app]) val (inner, naked_rhs) = strip_abs rhs val all = outer @ inner val bounds = map Bound (length all - 1 downto 0) val j = maxidx_of_term def + 1 val vars = map (fn (s, T) => Var ((s, j), T)) all in HOLogic.mk_eq (list_comb (Const x, bounds), naked_rhs) |> HOLogic.mk_Trueprop |> curry subst_bounds (rev vars) end fun inductive_pred_axiom ext_ctxt (x as (s, T)) = if String.isPrefix ubfp_prefix s orelse String.isPrefix lbfp_prefix s then let val x' = (after_name_sep s, T) in raw_inductive_pred_axiom ext_ctxt x' |> subst_atomic [(Const x', Const x)] end else raw_inductive_pred_axiom ext_ctxt x (* extended_context -> styp -> term list *) fun raw_equational_fun_axioms (ext_ctxt as {thy, fast_descrs, simp_table, psimp_table, ...}) (x as (s, _)) = case def_props_for_const thy fast_descrs (!simp_table) x of [] => (case def_props_for_const thy fast_descrs psimp_table x of [] => [inductive_pred_axiom ext_ctxt x] | psimps => psimps) | simps => simps val equational_fun_axioms = map extensionalize oo raw_equational_fun_axioms (* term list -> term list *) fun merge_type_vars_in_terms ts = let (* typ -> (sort * string) list -> (sort * string) list *) fun add_type (TFree (s, S)) table = (case AList.lookup (op =) table S of SOME s' => if string_ord (s', s) = LESS then AList.update (op =) (S, s') table else table | NONE => (S, s) :: table) | add_type _ table = table val table = fold (fold_types (fold_atyps add_type)) ts [] (* typ -> typ *) fun coalesce (TFree (s, S)) = TFree (AList.lookup (op =) table S |> the, S) | coalesce T = T in map (map_types (map_atyps coalesce)) ts end (* extended_context -> typ -> typ list -> typ list *) fun add_ground_types ext_ctxt T accum = case T of Type ("fun", Ts) => fold (add_ground_types ext_ctxt) Ts accum | Type ("*", Ts) => fold (add_ground_types ext_ctxt) Ts accum | Type (@{type_name itself}, [T1]) => add_ground_types ext_ctxt T1 accum | Type (_, Ts) => if member (op =) (@{typ prop} :: @{typ bool} :: @{typ unit} :: accum) T then accum else T :: accum |> fold (add_ground_types ext_ctxt) (case boxed_datatype_constrs ext_ctxt T of [] => Ts | xs => map snd xs) | _ => insert (op =) T accum (* extended_context -> typ -> typ list *) fun ground_types_in_type ext_ctxt T = add_ground_types ext_ctxt T [] (* extended_context -> term list -> typ list *) fun ground_types_in_terms ext_ctxt ts = fold (fold_types (add_ground_types ext_ctxt)) ts [] (* typ list -> int -> term -> bool *) fun has_heavy_bounds_or_vars Ts level t = let (* typ list -> bool *) fun aux [] = false | aux [T] = 
is_fun_type T orelse is_pair_type T | aux _ = true in aux (map snd (Term.add_vars t []) @ map (nth Ts) (loose_bnos t)) end (* typ list -> int -> int -> int -> term -> term *) fun fresh_value_var Ts k n j t = Var ((val_var_prefix ^ nat_subscript (n - j), k), fastype_of1 (Ts, t)) (* theory -> typ list -> bool -> int -> int -> term -> term list -> term list -> term * term list *) fun pull_out_constr_comb thy Ts relax k level t args seen = let val t_comb = list_comb (t, args) in case t of Const x => if not relax andalso is_constr thy x andalso not (is_fun_type (fastype_of1 (Ts, t_comb))) andalso has_heavy_bounds_or_vars Ts level t_comb andalso not (loose_bvar (t_comb, level)) then let val (j, seen) = case find_index (curry (op =) t_comb) seen of ~1 => (0, t_comb :: seen) | j => (j, seen) in (fresh_value_var Ts k (length seen) j t_comb, seen) end else (t_comb, seen) | _ => (t_comb, seen) end (* (term -> term) -> typ list -> int -> term list -> term list *) fun equations_for_pulled_out_constrs mk_eq Ts k seen = let val n = length seen in map2 (fn j => fn t => mk_eq (fresh_value_var Ts k n j t, t)) (index_seq 0 n) seen end (* theory -> bool -> term -> term *) fun pull_out_universal_constrs thy def t = let val k = maxidx_of_term t + 1 (* typ list -> bool -> term -> term list -> term list -> term * term list *) fun do_term Ts def t args seen = case t of (t0 as Const (@{const_name "=="}, _)) $ t1 $ t2 => do_eq_or_imp Ts true def t0 t1 t2 seen | (t0 as @{const "==>"}) $ t1 $ t2 => - do_eq_or_imp Ts false def t0 t1 t2 seen + if def then (t, []) else do_eq_or_imp Ts false def t0 t1 t2 seen | (t0 as Const (@{const_name "op ="}, _)) $ t1 $ t2 => do_eq_or_imp Ts true def t0 t1 t2 seen | (t0 as @{const "op -->"}) $ t1 $ t2 => do_eq_or_imp Ts false def t0 t1 t2 seen | Abs (s, T, t') => let val (t', seen) = do_term (T :: Ts) def t' [] seen in (list_comb (Abs (s, T, t'), args), seen) end | t1 $ t2 => let val (t2, seen) = do_term Ts def t2 [] seen in do_term Ts def t1 (t2 :: args) seen end | _ => pull_out_constr_comb thy Ts def k 0 t args seen (* typ list -> bool -> bool -> term -> term -> term -> term list -> term * term list *) and do_eq_or_imp Ts eq def t0 t1 t2 seen = let val (t2, seen) = if eq andalso def then (t2, seen) else do_term Ts false t2 [] seen val (t1, seen) = do_term Ts false t1 [] seen in (t0 $ t1 $ t2, seen) end val (concl, seen) = do_term [] def t [] [] in Logic.list_implies (equations_for_pulled_out_constrs Logic.mk_equals [] k seen, concl) end (* extended_context -> bool -> term -> term *) fun destroy_pulled_out_constrs (ext_ctxt as {thy, ...}) axiom t = let (* styp -> int *) val num_occs_of_var = fold_aterms (fn Var z => (fn f => fn z' => f z' |> z = z' ? 
Integer.add 1) | _ => I) t (K 0) (* bool -> term -> term *) fun aux careful ((t0 as Const (@{const_name "=="}, _)) $ t1 $ t2) = aux_eq careful true t0 t1 t2 | aux careful ((t0 as @{const "==>"}) $ t1 $ t2) = t0 $ aux false t1 $ aux careful t2 | aux careful ((t0 as Const (@{const_name "op ="}, _)) $ t1 $ t2) = aux_eq careful true t0 t1 t2 | aux careful ((t0 as @{const "op -->"}) $ t1 $ t2) = t0 $ aux false t1 $ aux careful t2 | aux careful (Abs (s, T, t')) = Abs (s, T, aux careful t') | aux careful (t1 $ t2) = aux careful t1 $ aux careful t2 | aux _ t = t (* bool -> bool -> term -> term -> term -> term *) and aux_eq careful pass1 t0 t1 t2 = - (if careful then - raise SAME () - else if axiom andalso is_Var t2 andalso - num_occs_of_var (dest_Var t2) = 1 then - @{const True} - else case strip_comb t2 of - (Const (x as (s, T)), args) => - let val arg_Ts = binder_types T in - if length arg_Ts = length args andalso - (is_constr thy x orelse s = @{const_name Pair} orelse - x = (@{const_name Suc}, nat_T --> nat_T)) andalso - (not careful orelse not (is_Var t1) orelse - String.isPrefix val_var_prefix (fst (fst (dest_Var t1)))) then - discriminate_value ext_ctxt x t1 :: - map3 (sel_eq x t1) (index_seq 0 (length args)) arg_Ts args - |> foldr1 s_conj - |> body_type (type_of t0) = prop_T ? HOLogic.mk_Trueprop - else - raise SAME () - end - | _ => raise SAME ()) + ((if careful then + raise SAME () + else if axiom andalso is_Var t2 andalso + num_occs_of_var (dest_Var t2) = 1 then + @{const True} + else case strip_comb t2 of + (* The first case is not as general as it could be. *) + (Const (@{const_name PairBox}, _), + [Const (@{const_name fst}, _) $ Var z1, + Const (@{const_name snd}, _) $ Var z2]) => + if z1 = z2 andalso num_occs_of_var z1 = 2 then @{const True} + else raise SAME () + | (Const (x as (s, T)), args) => + let val arg_Ts = binder_types T in + if length arg_Ts = length args andalso + (is_constr thy x orelse s = @{const_name Pair} orelse + x = (@{const_name Suc}, nat_T --> nat_T)) andalso + (not careful orelse not (is_Var t1) orelse + String.isPrefix val_var_prefix (fst (fst (dest_Var t1)))) then + discriminate_value ext_ctxt x t1 :: + map3 (sel_eq x t1) (index_seq 0 (length args)) arg_Ts args + |> foldr1 s_conj + else + raise SAME () + end + | _ => raise SAME ()) + |> body_type (type_of t0) = prop_T ? 
HOLogic.mk_Trueprop) handle SAME () => if pass1 then aux_eq careful false t0 t2 t1 else t0 $ aux false t2 $ aux false t1 (* styp -> term -> int -> typ -> term -> term *) and sel_eq x t n nth_T nth_t = HOLogic.eq_const nth_T $ nth_t $ select_nth_constr_arg thy x t n nth_T |> aux false in aux axiom t end (* theory -> term -> term *) fun simplify_constrs_and_sels thy t = let (* term -> int -> term *) fun is_nth_sel_on t' n (Const (s, _) $ t) = (t = t' andalso is_sel_like_and_no_discr s andalso sel_no_from_name s = n) | is_nth_sel_on _ _ _ = false (* term -> term list -> term *) fun do_term (Const (@{const_name Rep_Frac}, _) $ (Const (@{const_name Abs_Frac}, _) $ t1)) [] = do_term t1 [] | do_term (Const (@{const_name Abs_Frac}, _) $ (Const (@{const_name Rep_Frac}, _) $ t1)) [] = do_term t1 [] | do_term (t1 $ t2) args = do_term t1 (do_term t2 [] :: args) | do_term (t as Const (x as (s, T))) (args as _ :: _) = ((if is_constr_like thy x then if length args = num_binder_types T then case hd args of Const (x' as (_, T')) $ t' => if domain_type T' = body_type T andalso forall (uncurry (is_nth_sel_on t')) (index_seq 0 (length args) ~~ args) then t' else raise SAME () | _ => raise SAME () else raise SAME () else if is_sel_like_and_no_discr s then case strip_comb (hd args) of (Const (x' as (s', T')), ts') => if is_constr_like thy x' andalso constr_name_for_sel_like s = s' andalso not (exists is_pair_type (binder_types T')) then list_comb (nth ts' (sel_no_from_name s), tl args) else raise SAME () | _ => raise SAME () else raise SAME ()) handle SAME () => betapplys (t, args)) | do_term (Abs (s, T, t')) args = betapplys (Abs (s, T, do_term t' []), args) | do_term t args = betapplys (t, args) in do_term t [] end (* term -> term *) fun curry_assms (@{const "==>"} $ (@{const Trueprop} $ (@{const "op &"} $ t1 $ t2)) $ t3) = curry_assms (Logic.list_implies ([t1, t2] |> map HOLogic.mk_Trueprop, t3)) | curry_assms (@{const "==>"} $ t1 $ t2) = @{const "==>"} $ curry_assms t1 $ curry_assms t2 | curry_assms t = t (* term -> term *) val destroy_universal_equalities = let (* term list -> (indexname * typ) list -> term -> term *) fun aux prems zs t = case t of @{const "==>"} $ t1 $ t2 => aux_implies prems zs t1 t2 | _ => Logic.list_implies (rev prems, t) (* term list -> (indexname * typ) list -> term -> term -> term *) and aux_implies prems zs t1 t2 = case t1 of Const (@{const_name "=="}, _) $ Var z $ t' => aux_eq prems zs z t' t1 t2 | @{const Trueprop} $ (Const (@{const_name "op ="}, _) $ Var z $ t') => aux_eq prems zs z t' t1 t2 | @{const Trueprop} $ (Const (@{const_name "op ="}, _) $ t' $ Var z) => aux_eq prems zs z t' t1 t2 | _ => aux (t1 :: prems) (Term.add_vars t1 zs) t2 (* term list -> (indexname * typ) list -> indexname * typ -> term -> term -> term -> term *) and aux_eq prems zs z t' t1 t2 = if not (member (op =) zs z) andalso not (exists_subterm (curry (op =) (Var z)) t') then aux prems zs (subst_free [(Var z, t')] t2) else aux (t1 :: prems) (Term.add_vars t1 zs) t2 in aux [] [] end (* theory -> term -> term *) fun pull_out_existential_constrs thy t = let val k = maxidx_of_term t + 1 (* typ list -> int -> term -> term list -> term list -> term * term list *) fun aux Ts num_exists t args seen = case t of (t0 as Const (@{const_name Ex}, _)) $ Abs (s1, T1, t1) => let val (t1, seen') = aux (T1 :: Ts) (num_exists + 1) t1 [] [] val n = length seen' (* unit -> term list *) fun vars () = map2 (fresh_value_var Ts k n) (index_seq 0 n) seen' in (equations_for_pulled_out_constrs HOLogic.mk_eq Ts k seen' |> List.foldl 
s_conj t1 |> fold mk_exists (vars ()) |> curry3 Abs s1 T1 |> curry (op $) t0, seen) end | t1 $ t2 => let val (t2, seen) = aux Ts num_exists t2 [] seen in aux Ts num_exists t1 (t2 :: args) seen end | Abs (s, T, t') => let val (t', seen) = aux (T :: Ts) 0 t' [] (map (incr_boundvars 1) seen) in (list_comb (Abs (s, T, t'), args), map (incr_boundvars ~1) seen) end | _ => if num_exists > 0 then pull_out_constr_comb thy Ts false k num_exists t args seen else (list_comb (t, args), seen) in aux [] 0 t [] [] |> fst end (* theory -> int -> term list -> term list -> (term * term list) option *) fun find_bound_assign _ _ _ [] = NONE | find_bound_assign thy j seen (t :: ts) = let (* bool -> term -> term -> (term * term list) option *) fun aux pass1 t1 t2 = (if loose_bvar1 (t2, j) then if pass1 then aux false t2 t1 else raise SAME () else case t1 of Bound j' => if j' = j then SOME (t2, ts @ seen) else raise SAME () | Const (s, Type ("fun", [T1, T2])) $ Bound j' => if j' = j andalso s = sel_prefix_for 0 ^ @{const_name FunBox} then SOME (construct_value thy (@{const_name FunBox}, T2 --> T1) [t2], ts @ seen) else raise SAME () | _ => raise SAME ()) handle SAME () => find_bound_assign thy j (t :: seen) ts in case t of Const (@{const_name "op ="}, _) $ t1 $ t2 => aux true t1 t2 | _ => find_bound_assign thy j (t :: seen) ts end (* int -> term -> term -> term *) fun subst_one_bound j arg t = let fun aux (Bound i, lev) = if i < lev then raise SAME () else if i = lev then incr_boundvars (lev - j) arg else Bound (i - 1) | aux (Abs (a, T, body), lev) = Abs (a, T, aux (body, lev + 1)) | aux (f $ t, lev) = (aux (f, lev) $ (aux (t, lev) handle SAME () => t) handle SAME () => f $ aux (t, lev)) | aux _ = raise SAME () in aux (t, j) handle SAME () => t end (* theory -> term -> term *) fun destroy_existential_equalities thy = let (* string list -> typ list -> term list -> term *) fun kill [] [] ts = foldr1 s_conj ts | kill (s :: ss) (T :: Ts) ts = (case find_bound_assign thy (length ss) [] ts of SOME (_, []) => @{const True} | SOME (arg_t, ts) => kill ss Ts (map (subst_one_bound (length ss) (incr_bv (~1, length ss + 1, arg_t))) ts) | NONE => Const (@{const_name Ex}, (T --> bool_T) --> bool_T) $ Abs (s, T, kill ss Ts ts)) | kill _ _ _ = raise UnequalLengths (* string list -> typ list -> term -> term *) fun gather ss Ts ((t0 as Const (@{const_name Ex}, _)) $ Abs (s1, T1, t1)) = gather (ss @ [s1]) (Ts @ [T1]) t1 | gather [] [] (Abs (s, T, t1)) = Abs (s, T, gather [] [] t1) | gather [] [] (t1 $ t2) = gather [] [] t1 $ gather [] [] t2 | gather [] [] t = t | gather ss Ts t = kill ss Ts (conjuncts (gather [] [] t)) in gather [] [] end (* term -> term *) fun distribute_quantifiers t = case t of (t0 as Const (@{const_name All}, T0)) $ Abs (s, T1, t1) => (case t1 of (t10 as @{const "op &"}) $ t11 $ t12 => t10 $ distribute_quantifiers (t0 $ Abs (s, T1, t11)) $ distribute_quantifiers (t0 $ Abs (s, T1, t12)) | (t10 as @{const Not}) $ t11 => t10 $ distribute_quantifiers (Const (@{const_name Ex}, T0) $ Abs (s, T1, t11)) | t1 => if not (loose_bvar1 (t1, 0)) then distribute_quantifiers (incr_boundvars ~1 t1) else t0 $ Abs (s, T1, distribute_quantifiers t1)) | (t0 as Const (@{const_name Ex}, T0)) $ Abs (s, T1, t1) => (case distribute_quantifiers t1 of (t10 as @{const "op |"}) $ t11 $ t12 => t10 $ distribute_quantifiers (t0 $ Abs (s, T1, t11)) $ distribute_quantifiers (t0 $ Abs (s, T1, t12)) | (t10 as @{const "op -->"}) $ t11 $ t12 => t10 $ distribute_quantifiers (Const (@{const_name All}, T0) $ Abs (s, T1, t11)) $ distribute_quantifiers 
(t0 $ Abs (s, T1, t12)) | (t10 as @{const Not}) $ t11 => t10 $ distribute_quantifiers (Const (@{const_name All}, T0) $ Abs (s, T1, t11)) | t1 => if not (loose_bvar1 (t1, 0)) then distribute_quantifiers (incr_boundvars ~1 t1) else t0 $ Abs (s, T1, distribute_quantifiers t1)) | t1 $ t2 => distribute_quantifiers t1 $ distribute_quantifiers t2 | Abs (s, T, t') => Abs (s, T, distribute_quantifiers t') | _ => t (* int -> int -> (int -> int) -> term -> term *) fun renumber_bounds j n f t = case t of t1 $ t2 => renumber_bounds j n f t1 $ renumber_bounds j n f t2 | Abs (s, T, t') => Abs (s, T, renumber_bounds (j + 1) n f t') | Bound j' => Bound (if j' >= j andalso j' < j + n then f (j' - j) + j else j') | _ => t val quantifier_cluster_max_size = 8 (* theory -> term -> term *) fun push_quantifiers_inward thy = let (* string -> string list -> typ list -> term -> term *) fun aux quant_s ss Ts t = (case t of (t0 as Const (s0, _)) $ Abs (s1, T1, t1 as _ $ _) => if s0 = quant_s andalso length Ts < quantifier_cluster_max_size then aux s0 (s1 :: ss) (T1 :: Ts) t1 else if quant_s = "" andalso (s0 = @{const_name All} orelse s0 = @{const_name Ex}) then aux s0 [s1] [T1] t1 else raise SAME () | _ => raise SAME ()) handle SAME () => case t of t1 $ t2 => if quant_s = "" then aux "" [] [] t1 $ aux "" [] [] t2 else let val typical_card = 4 (* ('a -> ''b list) -> 'a list -> ''b list *) fun big_union proj ps = fold (fold (insert (op =)) o proj) ps [] val (ts, connective) = strip_any_connective t val T_costs = map (bounded_card_of_type 65536 typical_card []) Ts val t_costs = map size_of_term ts val num_Ts = length Ts (* int -> int *) val flip = curry (op -) (num_Ts - 1) val t_boundss = map (map flip o loose_bnos) ts (* (int list * int) list -> int list -> int *) fun cost boundss_cum_costs [] = map snd boundss_cum_costs |> Integer.sum | cost boundss_cum_costs (j :: js) = let val (yeas, nays) = List.partition (fn (bounds, _) => member (op =) bounds j) boundss_cum_costs val yeas_bounds = big_union fst yeas val yeas_cost = Integer.sum (map snd yeas) * nth T_costs j in cost ((yeas_bounds, yeas_cost) :: nays) js end val js = all_permutations (index_seq 0 num_Ts) |> map (`(cost (t_boundss ~~ t_costs))) |> sort (int_ord o pairself fst) |> hd |> snd val back_js = map (fn j => find_index (curry (op =) j) js) (index_seq 0 num_Ts) val ts = map (renumber_bounds 0 num_Ts (nth back_js o flip)) ts (* (term * int list) list -> term *) fun mk_connection [] = raise ARG ("Nitpick_HOL.push_quantifiers_inward.aux.\ \mk_connection", "") | mk_connection ts_cum_bounds = ts_cum_bounds |> map fst |> foldr1 (fn (t1, t2) => connective $ t1 $ t2) (* (term * int list) list -> int list -> term *) fun build ts_cum_bounds [] = ts_cum_bounds |> mk_connection | build ts_cum_bounds (j :: js) = let val (yeas, nays) = List.partition (fn (_, bounds) => member (op =) bounds j) ts_cum_bounds ||> map (apfst (incr_boundvars ~1)) in if null yeas then build nays js else let val T = nth Ts (flip j) in build ((Const (quant_s, (T --> bool_T) --> bool_T) $ Abs (nth ss (flip j), T, mk_connection yeas), big_union snd yeas) :: nays) js end end in build (ts ~~ t_boundss) js end | Abs (s, T, t') => Abs (s, T, aux "" [] [] t') | _ => t in aux "" [] [] end (* polarity -> string -> bool *) fun is_positive_existential polar quant_s = (polar = Pos andalso quant_s = @{const_name Ex}) orelse (polar = Neg andalso quant_s <> @{const_name Ex}) (* extended_context -> int -> term -> term *) fun skolemize_term_and_more (ext_ctxt as {thy, def_table, skolems, ...}) skolem_depth = let (* 
int list -> int list *) val incrs = map (Integer.add 1) (* string list -> typ list -> int list -> int -> polarity -> term -> term *) fun aux ss Ts js depth polar t = let (* string -> typ -> string -> typ -> term -> term *) fun do_quantifier quant_s quant_T abs_s abs_T t = if not (loose_bvar1 (t, 0)) then aux ss Ts js depth polar (incr_boundvars ~1 t) else if depth <= skolem_depth andalso is_positive_existential polar quant_s then let val j = length (!skolems) + 1 val sko_s = skolem_prefix_for (length js) j ^ abs_s val _ = Unsynchronized.change skolems (cons (sko_s, ss)) val sko_t = list_comb (Const (sko_s, rev Ts ---> abs_T), map Bound (rev js)) val abs_t = Abs (abs_s, abs_T, aux ss Ts (incrs js) depth polar t) in if null js then betapply (abs_t, sko_t) else Const (@{const_name Let}, abs_T --> quant_T) $ sko_t $ abs_t end else Const (quant_s, quant_T) $ Abs (abs_s, abs_T, if is_higher_order_type abs_T then t else aux (abs_s :: ss) (abs_T :: Ts) (0 :: incrs js) (depth + 1) polar t) in case t of Const (s0 as @{const_name all}, T0) $ Abs (s1, T1, t1) => do_quantifier s0 T0 s1 T1 t1 | @{const "==>"} $ t1 $ t2 => @{const "==>"} $ aux ss Ts js depth (flip_polarity polar) t1 $ aux ss Ts js depth polar t2 | @{const Pure.conjunction} $ t1 $ t2 => @{const Pure.conjunction} $ aux ss Ts js depth polar t1 $ aux ss Ts js depth polar t2 | @{const Trueprop} $ t1 => @{const Trueprop} $ aux ss Ts js depth polar t1 | @{const Not} $ t1 => @{const Not} $ aux ss Ts js depth (flip_polarity polar) t1 | Const (s0 as @{const_name All}, T0) $ Abs (s1, T1, t1) => do_quantifier s0 T0 s1 T1 t1 | Const (s0 as @{const_name Ex}, T0) $ Abs (s1, T1, t1) => do_quantifier s0 T0 s1 T1 t1 | @{const "op &"} $ t1 $ t2 => @{const "op &"} $ aux ss Ts js depth polar t1 $ aux ss Ts js depth polar t2 | @{const "op |"} $ t1 $ t2 => @{const "op |"} $ aux ss Ts js depth polar t1 $ aux ss Ts js depth polar t2 | @{const "op -->"} $ t1 $ t2 => @{const "op -->"} $ aux ss Ts js depth (flip_polarity polar) t1 $ aux ss Ts js depth polar t2 | (t0 as Const (@{const_name Let}, T0)) $ t1 $ t2 => t0 $ t1 $ aux ss Ts js depth polar t2 | Const (x as (s, T)) => if is_inductive_pred ext_ctxt x andalso not (is_well_founded_inductive_pred ext_ctxt x) then let val gfp = (fixpoint_kind_of_const thy def_table x = Gfp) val (pref, connective, set_oper) = if gfp then (lbfp_prefix, @{const "op |"}, @{const_name upper_semilattice_fun_inst.sup_fun}) else (ubfp_prefix, @{const "op &"}, @{const_name lower_semilattice_fun_inst.inf_fun}) (* unit -> term *) fun pos () = unrolled_inductive_pred_const ext_ctxt gfp x |> aux ss Ts js depth polar fun neg () = Const (pref ^ s, T) in (case polar |> gfp ? 
flip_polarity of Pos => pos () | Neg => neg () | Neut => if is_fun_type T then let val ((trunk_arg_Ts, rump_arg_T), body_T) = T |> strip_type |>> split_last val set_T = rump_arg_T --> body_T (* (unit -> term) -> term *) fun app f = list_comb (f (), map Bound (length trunk_arg_Ts - 1 downto 0)) in List.foldr absdummy (Const (set_oper, set_T --> set_T --> set_T) $ app pos $ app neg) trunk_arg_Ts end else connective $ pos () $ neg ()) end else Const x | t1 $ t2 => betapply (aux ss Ts [] (skolem_depth + 1) polar t1, aux ss Ts [] depth Neut t2) | Abs (s, T, t1) => Abs (s, T, aux ss Ts (incrs js) depth polar t1) | _ => t end in aux [] [] [] 0 Pos end (* extended_context -> styp -> (int * term option) list *) fun static_args_in_term ({ersatz_table, ...} : extended_context) x t = let (* term -> term list -> term list -> term list list *) fun fun_calls (Abs (_, _, t)) _ = fun_calls t [] | fun_calls (t1 $ t2) args = fun_calls t2 [] #> fun_calls t1 (t2 :: args) | fun_calls t args = (case t of Const (x' as (s', T')) => x = x' orelse (case AList.lookup (op =) ersatz_table s' of SOME s'' => x = (s'', T') | NONE => false) | _ => false) ? cons args (* term list list -> term list list -> term list -> term list list *) fun call_sets [] [] vs = [vs] | call_sets [] uss vs = vs :: call_sets uss [] [] | call_sets ([] :: _) _ _ = [] | call_sets ((t :: ts) :: tss) uss vs = OrdList.insert TermOrd.term_ord t vs |> call_sets tss (ts :: uss) val sets = call_sets (fun_calls t [] []) [] [] val indexed_sets = sets ~~ (index_seq 0 (length sets)) in fold_rev (fn (set, j) => case set of [Var _] => AList.lookup (op =) indexed_sets set = SOME j ? cons (j, NONE) | [t as Const _] => cons (j, SOME t) | [t as Free _] => cons (j, SOME t) | _ => I) indexed_sets [] end (* extended_context -> styp -> term list -> (int * term option) list *) fun static_args_in_terms ext_ctxt x = map (static_args_in_term ext_ctxt x) #> fold1 (OrdList.inter (prod_ord int_ord (option_ord TermOrd.term_ord))) (* term -> term list *) fun params_in_equation (@{const "==>"} $ _ $ t2) = params_in_equation t2 | params_in_equation (@{const Trueprop} $ t1) = params_in_equation t1 | params_in_equation (Const (@{const_name "op ="}, _) $ t1 $ _) = snd (strip_comb t1) | params_in_equation _ = [] (* styp -> styp -> int list -> term list -> term list -> term -> term *) fun specialize_fun_axiom x x' fixed_js fixed_args extra_args t = let val k = fold Integer.max (map maxidx_of_term (fixed_args @ extra_args)) 0 + 1 val t = map_aterms (fn Var ((s, i), T) => Var ((s, k + i), T) | t' => t') t val fixed_params = filter_indices fixed_js (params_in_equation t) (* term list -> term -> term *) fun aux args (Abs (s, T, t)) = list_comb (Abs (s, T, aux [] t), args) | aux args (t1 $ t2) = aux (aux [] t2 :: args) t1 | aux args t = if t = Const x then list_comb (Const x', extra_args @ filter_out_indices fixed_js args) else let val j = find_index (curry (op =) t) fixed_params in list_comb (if j >= 0 then nth fixed_args j else t, args) end in aux [] t end (* typ list -> term -> bool *) fun is_eligible_arg Ts t = let val bad_Ts = map snd (Term.add_vars t []) @ map (nth Ts) (loose_bnos t) in null bad_Ts orelse (is_higher_order_type (fastype_of1 (Ts, t)) andalso forall (not o is_higher_order_type) bad_Ts) end (* (int * term option) list -> (int * term) list -> int list *) fun overlapping_indices [] _ = [] | overlapping_indices _ [] = [] | overlapping_indices (ps1 as (j1, t1) :: ps1') (ps2 as (j2, t2) :: ps2') = if j1 < j2 then overlapping_indices ps1' ps2 else if j1 > j2 then 
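(* Informal summary of the specialization machinery in this section: an
   argument position of a constant counts as static if all calls in the
   constant's own defining equations agree on it (the same variable or
   constant everywhere). When a call site fixes such a position to an
   eligible term, a specialized copy of the constant with that argument built
   in can be introduced, e.g., as a rough sketch with a hypothetical "map'":
       map f xs   ~~>   map' xs        (for one particular, fixed f)
   which usually shrinks the resulting Kodkod problem. *)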
overlapping_indices ps1 ps2' else overlapping_indices ps1' ps2' |> the_default t2 t1 = t2 ? cons j1 val special_depth = 20 (* extended_context -> int -> term -> term *) fun specialize_consts_in_term (ext_ctxt as {thy, specialize, simp_table, special_funs, ...}) depth t = if not specialize orelse depth > special_depth then t else let val blacklist = if depth = 0 then [] else case term_under_def t of Const x => [x] | _ => [] (* term list -> typ list -> term -> term *) fun aux args Ts (Const (x as (s, T))) = ((if not (member (op =) blacklist x) andalso not (null args) andalso not (String.isPrefix special_prefix s) andalso is_equational_fun ext_ctxt x then let val eligible_args = filter (is_eligible_arg Ts o snd) (index_seq 0 (length args) ~~ args) val _ = not (null eligible_args) orelse raise SAME () val old_axs = equational_fun_axioms ext_ctxt x |> map (destroy_existential_equalities thy) val static_params = static_args_in_terms ext_ctxt x old_axs val fixed_js = overlapping_indices static_params eligible_args val _ = not (null fixed_js) orelse raise SAME () val fixed_args = filter_indices fixed_js args val vars = fold Term.add_vars fixed_args [] |> sort (TermOrd.fast_indexname_ord o pairself fst) val bound_js = fold (fn t => fn js => add_loose_bnos (t, 0, js)) fixed_args [] |> sort int_ord val live_args = filter_out_indices fixed_js args val extra_args = map Var vars @ map Bound bound_js @ live_args val extra_Ts = map snd vars @ filter_indices bound_js Ts val k = maxidx_of_term t + 1 (* int -> term *) fun var_for_bound_no j = Var ((bound_var_prefix ^ nat_subscript (find_index (curry (op =) j) bound_js + 1), k), nth Ts j) val fixed_args_in_axiom = map (curry subst_bounds (map var_for_bound_no (index_seq 0 (length Ts)))) fixed_args in case AList.lookup (op =) (!special_funs) (x, fixed_js, fixed_args_in_axiom) of SOME x' => list_comb (Const x', extra_args) | NONE => let val extra_args_in_axiom = map Var vars @ map var_for_bound_no bound_js val x' as (s', _) = (special_prefix_for (length (!special_funs) + 1) ^ s, extra_Ts @ filter_out_indices fixed_js (binder_types T) ---> body_type T) val new_axs = map (specialize_fun_axiom x x' fixed_js fixed_args_in_axiom extra_args_in_axiom) old_axs val _ = Unsynchronized.change special_funs (cons ((x, fixed_js, fixed_args_in_axiom), x')) val _ = add_simps simp_table s' new_axs in list_comb (Const x', extra_args) end end else raise SAME ()) handle SAME () => list_comb (Const x, args)) | aux args Ts (Abs (s, T, t)) = list_comb (Abs (s, T, aux [] (T :: Ts) t), args) | aux args Ts (t1 $ t2) = aux (aux [] Ts t2 :: args) Ts t1 | aux args _ t = list_comb (t, args) in aux [] [] t end (* theory -> term -> int Termtab.tab -> int Termtab.tab *) fun add_to_uncurry_table thy t = let (* term -> term list -> int Termtab.tab -> int Termtab.tab *) fun aux (t1 $ t2) args table = let val table = aux t2 [] table in aux t1 (t2 :: args) table end | aux (Abs (_, _, t')) _ table = aux t' [] table | aux (t as Const (x as (s, _))) args table = if is_built_in_const true x orelse is_constr_like thy x orelse is_sel s orelse s = @{const_name Sigma} then table else Termtab.map_default (t, 65536) (curry Int.min (length args)) table | aux _ _ table = table in aux t [] end (* int Termtab.tab term -> term *) fun uncurry_term table t = let (* term -> term list -> term *) fun aux (t1 $ t2) args = aux t1 (aux t2 [] :: args) | aux (Abs (s, T, t')) args = betapplys (Abs (s, T, aux t' []), args) | aux (t as Const (s, T)) args = (case Termtab.lookup table t of SOME n => if n >= 2 then let val 
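(* What the uncurrying pass does, informally: if a constant is always applied
   to at least n >= 2 arguments (as recorded in the table built above), the
   arguments are packed into a flat tuple, turning a function that returns
   functions into one that admits a more direct Kodkod encoding. A rough
   example, for a hypothetical g :: 'a => 'b => 'c => bool that is always
   fully applied:
       g x y z   ~~>   g' (x, y, z)
   Leading "bool" and iterator arguments, if any, are kept outside the tuple. *)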
(arg_Ts, rest_T) = strip_n_binders n T val j = if hd arg_Ts = @{typ bisim_iterator} orelse is_fp_iterator_type (hd arg_Ts) then 1 else case find_index (not_equal bool_T) arg_Ts of ~1 => n | j => j val ((before_args, tuple_args), after_args) = args |> chop n |>> chop j val ((before_arg_Ts, tuple_arg_Ts), rest_T) = T |> strip_n_binders n |>> chop j val tuple_T = HOLogic.mk_tupleT tuple_arg_Ts in if n - j < 2 then betapplys (t, args) else betapplys (Const (uncurry_prefix_for (n - j) j ^ s, before_arg_Ts ---> tuple_T --> rest_T), before_args @ [mk_flat_tuple tuple_T tuple_args] @ after_args) end else betapplys (t, args) | NONE => betapplys (t, args)) | aux t args = betapplys (t, args) in aux t [] end (* (term -> term) -> int -> term -> term *) fun coerce_bound_no f j t = case t of t1 $ t2 => coerce_bound_no f j t1 $ coerce_bound_no f j t2 | Abs (s, T, t') => Abs (s, T, coerce_bound_no f (j + 1) t') | Bound j' => if j' = j then f t else t | _ => t (* extended_context -> bool -> term -> term *) fun box_fun_and_pair_in_term (ext_ctxt as {thy, fast_descrs, ...}) def orig_t = let (* typ -> typ *) fun box_relational_operator_type (Type ("fun", Ts)) = Type ("fun", map box_relational_operator_type Ts) | box_relational_operator_type (Type ("*", Ts)) = Type ("*", map (box_type ext_ctxt InPair) Ts) | box_relational_operator_type T = T (* typ -> typ -> term -> term *) fun coerce_bound_0_in_term new_T old_T = old_T <> new_T ? coerce_bound_no (coerce_term [new_T] old_T new_T) 0 (* typ list -> typ -> term -> term *) and coerce_term Ts new_T old_T t = if old_T = new_T then t else case (new_T, old_T) of (Type (new_s, new_Ts as [new_T1, new_T2]), Type ("fun", [old_T1, old_T2])) => (case eta_expand Ts t 1 of Abs (s, _, t') => Abs (s, new_T1, t' |> coerce_bound_0_in_term new_T1 old_T1 |> coerce_term (new_T1 :: Ts) new_T2 old_T2) |> Envir.eta_contract |> new_s <> "fun" ? construct_value thy (@{const_name FunBox}, Type ("fun", new_Ts) --> new_T) o single | t' => raise TERM ("Nitpick_HOL.box_fun_and_pair_in_term.\ \coerce_term", [t'])) | (Type (new_s, new_Ts as [new_T1, new_T2]), Type (old_s, old_Ts as [old_T1, old_T2])) => if old_s = @{type_name fun_box} orelse old_s = @{type_name pair_box} orelse old_s = "*" then case constr_expand ext_ctxt old_T t of Const (@{const_name FunBox}, _) $ t1 => if new_s = "fun" then coerce_term Ts new_T (Type ("fun", old_Ts)) t1 else construct_value thy (@{const_name FunBox}, Type ("fun", new_Ts) --> new_T) [coerce_term Ts (Type ("fun", new_Ts)) (Type ("fun", old_Ts)) t1] | Const _ $ t1 $ t2 => construct_value thy (if new_s = "*" then @{const_name Pair} else @{const_name PairBox}, new_Ts ---> new_T) [coerce_term Ts new_T1 old_T1 t1, coerce_term Ts new_T2 old_T2 t2] | t' => raise TERM ("Nitpick_HOL.box_fun_and_pair_in_term.\ \coerce_term", [t']) else raise TYPE ("coerce_term", [new_T, old_T], [t]) | _ => raise TYPE ("coerce_term", [new_T, old_T], [t]) (* indexname * typ -> typ * term -> typ option list -> typ option list *) fun add_boxed_types_for_var (z as (_, T)) (T', t') = case t' of Var z' => z' = z ? insert (op =) T' | Const (@{const_name Pair}, _) $ t1 $ t2 => (case T' of Type (_, [T1, T2]) => fold (add_boxed_types_for_var z) [(T1, t1), (T2, t2)] | _ => raise TYPE ("Nitpick_HOL.box_fun_and_pair_in_term.\ \add_boxed_types_for_var", [T'], [])) | _ => exists_subterm (curry (op =) (Var z)) t' ? 
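(* Background note on boxing, stated roughly: in positions where a plain
   function or pair would be expensive to represent (constructor arguments,
   selectors, certain quantified variables), the type is wrapped in the
   isomorphic "fun_box" or "pair_box" datatype so that its values can hide
   behind single atoms. coerce_term above inserts FunBox/PairBox constructors
   and the matching selectors wherever a boxed and an unboxed variant of the
   same type meet. *)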
insert (op =) T (* typ list -> typ list -> term -> indexname * typ -> typ *) fun box_var_in_def new_Ts old_Ts t (z as (_, T)) = case t of @{const Trueprop} $ t1 => box_var_in_def new_Ts old_Ts t1 z | Const (s0, _) $ t1 $ _ => if s0 = @{const_name "=="} orelse s0 = @{const_name "op ="} then let val (t', args) = strip_comb t1 val T' = fastype_of1 (new_Ts, do_term new_Ts old_Ts Neut t') in case fold (add_boxed_types_for_var z) (fst (strip_n_binders (length args) T') ~~ args) [] of [T''] => T'' | _ => T end else T | _ => T (* typ list -> typ list -> polarity -> string -> typ -> string -> typ -> term -> term *) and do_quantifier new_Ts old_Ts polar quant_s quant_T abs_s abs_T t = let val abs_T' = if polar = Neut orelse is_positive_existential polar quant_s then box_type ext_ctxt InFunLHS abs_T else abs_T val body_T = body_type quant_T in Const (quant_s, (abs_T' --> body_T) --> body_T) $ Abs (abs_s, abs_T', t |> do_term (abs_T' :: new_Ts) (abs_T :: old_Ts) polar) end (* typ list -> typ list -> string -> typ -> term -> term -> term *) and do_equals new_Ts old_Ts s0 T0 t1 t2 = let val (t1, t2) = pairself (do_term new_Ts old_Ts Neut) (t1, t2) val (T1, T2) = pairself (curry fastype_of1 new_Ts) (t1, t2) val T = [T1, T2] |> sort TermOrd.typ_ord |> List.last in list_comb (Const (s0, T --> T --> body_type T0), map2 (coerce_term new_Ts T) [T1, T2] [t1, t2]) end (* string -> typ -> term *) and do_description_operator s T = let val T1 = box_type ext_ctxt InFunLHS (range_type T) in Const (s, (T1 --> bool_T) --> T1) end (* typ list -> typ list -> polarity -> term -> term *) and do_term new_Ts old_Ts polar t = case t of Const (s0 as @{const_name all}, T0) $ Abs (s1, T1, t1) => do_quantifier new_Ts old_Ts polar s0 T0 s1 T1 t1 | Const (s0 as @{const_name "=="}, T0) $ t1 $ t2 => do_equals new_Ts old_Ts s0 T0 t1 t2 | @{const "==>"} $ t1 $ t2 => @{const "==>"} $ do_term new_Ts old_Ts (flip_polarity polar) t1 $ do_term new_Ts old_Ts polar t2 | @{const Pure.conjunction} $ t1 $ t2 => @{const Pure.conjunction} $ do_term new_Ts old_Ts polar t1 $ do_term new_Ts old_Ts polar t2 | @{const Trueprop} $ t1 => @{const Trueprop} $ do_term new_Ts old_Ts polar t1 | @{const Not} $ t1 => @{const Not} $ do_term new_Ts old_Ts (flip_polarity polar) t1 | Const (s0 as @{const_name All}, T0) $ Abs (s1, T1, t1) => do_quantifier new_Ts old_Ts polar s0 T0 s1 T1 t1 | Const (s0 as @{const_name Ex}, T0) $ Abs (s1, T1, t1) => do_quantifier new_Ts old_Ts polar s0 T0 s1 T1 t1 | Const (s0 as @{const_name "op ="}, T0) $ t1 $ t2 => do_equals new_Ts old_Ts s0 T0 t1 t2 | @{const "op &"} $ t1 $ t2 => @{const "op &"} $ do_term new_Ts old_Ts polar t1 $ do_term new_Ts old_Ts polar t2 | @{const "op |"} $ t1 $ t2 => @{const "op |"} $ do_term new_Ts old_Ts polar t1 $ do_term new_Ts old_Ts polar t2 | @{const "op -->"} $ t1 $ t2 => @{const "op -->"} $ do_term new_Ts old_Ts (flip_polarity polar) t1 $ do_term new_Ts old_Ts polar t2 | Const (s as @{const_name The}, T) => do_description_operator s T | Const (s as @{const_name Eps}, T) => do_description_operator s T | Const (@{const_name quot_normal}, Type ("fun", [_, T2])) => - let val T' = box_type ext_ctxt InFunRHS1 T2 in + let val T' = box_type ext_ctxt InSel T2 in Const (@{const_name quot_normal}, T' --> T') end | Const (s as @{const_name Tha}, T) => do_description_operator s T | Const (x as (s, T)) => Const (s, if s = @{const_name converse} orelse s = @{const_name trancl} then box_relational_operator_type T else if is_built_in_const fast_descrs x orelse s = @{const_name Sigma} then T else if is_constr_like 
thy x then box_type ext_ctxt InConstr T - else if is_sel s orelse is_rep_fun thy x then + else if is_sel s + orelse is_rep_fun thy x then box_type ext_ctxt InSel T else box_type ext_ctxt InExpr T) | t1 $ Abs (s, T, t2') => let val t1 = do_term new_Ts old_Ts Neut t1 val T1 = fastype_of1 (new_Ts, t1) val (s1, Ts1) = dest_Type T1 val T' = hd (snd (dest_Type (hd Ts1))) val t2 = Abs (s, T', do_term (T' :: new_Ts) (T :: old_Ts) Neut t2') val T2 = fastype_of1 (new_Ts, t2) val t2 = coerce_term new_Ts (hd Ts1) T2 t2 in betapply (if s1 = "fun" then t1 else select_nth_constr_arg thy (@{const_name FunBox}, Type ("fun", Ts1) --> T1) t1 0 (Type ("fun", Ts1)), t2) end | t1 $ t2 => let val t1 = do_term new_Ts old_Ts Neut t1 val T1 = fastype_of1 (new_Ts, t1) val (s1, Ts1) = dest_Type T1 val t2 = do_term new_Ts old_Ts Neut t2 val T2 = fastype_of1 (new_Ts, t2) val t2 = coerce_term new_Ts (hd Ts1) T2 t2 in betapply (if s1 = "fun" then t1 else select_nth_constr_arg thy (@{const_name FunBox}, Type ("fun", Ts1) --> T1) t1 0 (Type ("fun", Ts1)), t2) end | Free (s, T) => Free (s, box_type ext_ctxt InExpr T) | Var (z as (x, T)) => Var (x, if def then box_var_in_def new_Ts old_Ts orig_t z else box_type ext_ctxt InExpr T) | Bound _ => t | Abs (s, T, t') => Abs (s, T, do_term (T :: new_Ts) (T :: old_Ts) Neut t') in do_term [] [] Pos orig_t end (* int -> term -> term *) fun eval_axiom_for_term j t = Logic.mk_equals (Const (eval_prefix ^ string_of_int j, fastype_of t), t) (* extended_context -> styp -> bool *) fun is_equational_fun_surely_complete ext_ctxt x = case raw_equational_fun_axioms ext_ctxt x of [@{const Trueprop} $ (Const (@{const_name "op ="}, _) $ t1 $ _)] => strip_comb t1 |> snd |> forall is_Var | _ => false type special = int list * term list * styp (* styp -> special -> special -> term *) fun special_congruence_axiom (s, T) (js1, ts1, x1) (js2, ts2, x2) = let val (bounds1, bounds2) = pairself (map Var o special_bounds) (ts1, ts2) val Ts = binder_types T val max_j = fold (fold Integer.max) [js1, js2] ~1 val (eqs, (args1, args2)) = fold (fn j => case pairself (fn ps => AList.lookup (op =) ps j) (js1 ~~ ts1, js2 ~~ ts2) of (SOME t1, SOME t2) => apfst (cons (t1, t2)) | (SOME t1, NONE) => apsnd (apsnd (cons t1)) | (NONE, SOME t2) => apsnd (apfst (cons t2)) | (NONE, NONE) => let val v = Var ((cong_var_prefix ^ nat_subscript j, 0), nth Ts j) in apsnd (pairself (cons v)) end) (max_j downto 0) ([], ([], [])) in Logic.list_implies (eqs |> filter_out (op =) |> distinct (op =) |> map Logic.mk_equals, Logic.mk_equals (list_comb (Const x1, bounds1 @ args1), list_comb (Const x2, bounds2 @ args2))) |> Refute.close_form (* TODO: needed? *) end (* extended_context -> styp list -> term list *) fun special_congruence_axioms (ext_ctxt as {special_funs, ...}) xs = let val groups = !special_funs |> map (fn ((x, js, ts), x') => (x, (js, ts, x'))) |> AList.group (op =) |> filter_out (is_equational_fun_surely_complete ext_ctxt o fst) |> map (fn (x, zs) => (x, zs |> member (op =) xs x ? 
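(* Informal reading of the two passes below: when a constant has been
   specialized in several different ways, congruence axioms are emitted that
   force the variants to agree whenever their fixed arguments coincide, since
   otherwise the copies could be interpreted inconsistently in the model. The
   first pass links each variant to its most specific generalization; the
   second pass adds pairwise axioms among the variants that had none. *)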
cons ([], [], x))) (* special -> int *) fun generality (js, _, _) = ~(length js) (* special -> special -> bool *) fun is_more_specific (j1, t1, x1) (j2, t2, x2) = x1 <> x2 andalso OrdList.subset (prod_ord int_ord TermOrd.term_ord) (j2 ~~ t2, j1 ~~ t1) (* styp -> special list -> special list -> special list -> term list -> term list *) fun do_pass_1 _ [] [_] [_] = I | do_pass_1 x skipped _ [] = do_pass_2 x skipped | do_pass_1 x skipped all (z :: zs) = case filter (is_more_specific z) all |> sort (int_ord o pairself generality) of [] => do_pass_1 x (z :: skipped) all zs | (z' :: _) => cons (special_congruence_axiom x z z') #> do_pass_1 x skipped all zs (* styp -> special list -> term list -> term list *) and do_pass_2 _ [] = I | do_pass_2 x (z :: zs) = fold (cons o special_congruence_axiom x z) zs #> do_pass_2 x zs in fold (fn (x, zs) => do_pass_1 x [] zs zs) groups [] end (* term -> bool *) val is_trivial_equation = the_default false o try (op aconv o Logic.dest_equals) (* 'a Symtab.table -> 'a list *) fun all_table_entries table = Symtab.fold (append o snd) table [] (* const_table -> string -> const_table *) fun extra_table table s = Symtab.make [(s, all_table_entries table)] (* extended_context -> term -> (term list * term list) * (bool * bool) *) fun axioms_for_term (ext_ctxt as {thy, max_bisim_depth, user_axioms, fast_descrs, evals, def_table, nondef_table, user_nondefs, ...}) t = let type accumulator = styp list * (term list * term list) (* (term list * term list -> term list) -> ((term list -> term list) -> term list * term list -> term list * term list) -> int -> term -> accumulator -> accumulator *) fun add_axiom get app depth t (accum as (xs, axs)) = let val t = t |> unfold_defs_in_term ext_ctxt |> skolemize_term_and_more ext_ctxt ~1 in if is_trivial_equation t then accum else let val t' = t |> specialize_consts_in_term ext_ctxt depth in if exists (member (op aconv) (get axs)) [t, t'] then accum else add_axioms_for_term (depth + 1) t' (xs, app (cons t') axs) end end (* int -> term -> accumulator -> accumulator *) and add_def_axiom depth = add_axiom fst apfst depth and add_nondef_axiom depth = add_axiom snd apsnd depth and add_maybe_def_axiom depth t = (if head_of t <> @{const "==>"} then add_def_axiom else add_nondef_axiom) depth t and add_eq_axiom depth t = (if is_constr_pattern_formula thy t then add_def_axiom else add_nondef_axiom) depth t (* int -> term -> accumulator -> accumulator *) and add_axioms_for_term depth t (accum as (xs, axs)) = case t of t1 $ t2 => accum |> fold (add_axioms_for_term depth) [t1, t2] | Const (x as (s, T)) => (if member (op =) xs x orelse is_built_in_const fast_descrs x then accum else let val accum as (xs, _) = (x :: xs, axs) in if depth > axioms_max_depth then raise TOO_LARGE ("Nitpick_HOL.axioms_for_term.\ \add_axioms_for_term", "too many nested axioms (" ^ string_of_int depth ^ ")") else if Refute.is_const_of_class thy x then let val class = Logic.class_of_const s val of_class = Logic.mk_of_class (TVar (("'a", 0), [class]), class) val ax1 = try (Refute.specialize_type thy x) of_class val ax2 = Option.map (Refute.specialize_type thy x o snd) (Refute.get_classdef thy class) in fold (add_maybe_def_axiom depth) (map_filter I [ax1, ax2]) accum end else if is_constr thy x then accum else if is_equational_fun ext_ctxt x then fold (add_eq_axiom depth) (equational_fun_axioms ext_ctxt x) accum else if is_abs_fun thy x then if is_quot_type thy (range_type T) then raise NOT_SUPPORTED "\"Abs_\" function of quotient type" else accum |> fold (add_nondef_axiom 
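(* Coarse overview of axioms_for_term: starting from the goal, the constants
   occurring in it are chased and the relevant definitional and
   nondefinitional axioms are collected, up to a fixed nesting depth
   (axioms_max_depth). Type classes, typedefs, quotient types, and codatatype
   bisimilarity contribute their own axioms; constructors and built-in
   constants, whose semantics are handled natively, contribute none. *)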
depth) (nondef_props_for_const thy false nondef_table x) |> is_funky_typedef thy (range_type T) ? fold (add_maybe_def_axiom depth) (nondef_props_for_const thy true (extra_table def_table s) x) else if is_rep_fun thy x then if is_quot_type thy (domain_type T) then raise NOT_SUPPORTED "\"Rep_\" function of quotient type" else accum |> fold (add_nondef_axiom depth) (nondef_props_for_const thy false nondef_table x) |> is_funky_typedef thy (range_type T) ? fold (add_maybe_def_axiom depth) (nondef_props_for_const thy true (extra_table def_table s) x) |> add_axioms_for_term depth (Const (mate_of_rep_fun thy x)) |> fold (add_def_axiom depth) (inverse_axioms_for_rep_fun thy x) else accum |> user_axioms <> SOME false ? fold (add_nondef_axiom depth) (nondef_props_for_const thy false nondef_table x) end) |> add_axioms_for_type depth T | Free (_, T) => add_axioms_for_type depth T accum | Var (_, T) => add_axioms_for_type depth T accum | Bound _ => accum | Abs (_, T, t) => accum |> add_axioms_for_term depth t |> add_axioms_for_type depth T (* int -> typ -> accumulator -> accumulator *) and add_axioms_for_type depth T = case T of Type ("fun", Ts) => fold (add_axioms_for_type depth) Ts | Type ("*", Ts) => fold (add_axioms_for_type depth) Ts | @{typ prop} => I | @{typ bool} => I | @{typ unit} => I | TFree (_, S) => add_axioms_for_sort depth T S | TVar (_, S) => add_axioms_for_sort depth T S | Type (z as (s, Ts)) => fold (add_axioms_for_type depth) Ts #> (if is_pure_typedef thy T then fold (add_maybe_def_axiom depth) (optimized_typedef_axioms thy z) else if is_quot_type thy T then fold (add_def_axiom depth) (optimized_quot_type_axioms thy z) else if max_bisim_depth >= 0 andalso is_codatatype thy T then fold (add_maybe_def_axiom depth) (codatatype_bisim_axioms ext_ctxt T) else I) (* int -> typ -> sort -> accumulator -> accumulator *) and add_axioms_for_sort depth T S = let val supers = Sign.complete_sort thy S val class_axioms = maps (fn class => map prop_of (AxClass.get_info thy class |> #axioms handle ERROR _ => [])) supers val monomorphic_class_axioms = map (fn t => case Term.add_tvars t [] of [] => t | [(x, S)] => Refute.monomorphic_term (Vartab.make [(x, (S, T))]) t | _ => raise TERM ("Nitpick_HOL.axioms_for_term.\ \add_axioms_for_sort", [t])) class_axioms in fold (add_nondef_axiom depth) monomorphic_class_axioms end val (mono_user_nondefs, poly_user_nondefs) = List.partition (null o Term.hidden_polymorphism) user_nondefs val eval_axioms = map2 eval_axiom_for_term (index_seq 0 (length evals)) evals val (xs, (defs, nondefs)) = ([], ([], [])) |> add_axioms_for_term 1 t |> fold_rev (add_def_axiom 1) eval_axioms |> user_axioms = SOME true ? 
fold (add_nondef_axiom 1) mono_user_nondefs val defs = defs @ special_congruence_axioms ext_ctxt xs in ((defs, nondefs), (user_axioms = SOME true orelse null mono_user_nondefs, null poly_user_nondefs)) end (* theory -> const_table -> styp -> int list *) fun const_format thy def_table (x as (s, T)) = if String.isPrefix unrolled_prefix s then const_format thy def_table (original_name s, range_type T) else if String.isPrefix skolem_prefix s then let val k = unprefix skolem_prefix s |> strip_first_name_sep |> fst |> space_explode "@" |> hd |> Int.fromString |> the in [k, num_binder_types T - k] end else if original_name s <> s then [num_binder_types T] else case def_of_const thy def_table x of SOME t' => if fixpoint_kind_of_rhs t' <> NoFp then let val k = length (strip_abs_vars t') in [k, num_binder_types T - k] end else [num_binder_types T] | NONE => [num_binder_types T] (* int list -> int list -> int list *) fun intersect_formats _ [] = [] | intersect_formats [] _ = [] | intersect_formats ks1 ks2 = let val ((ks1', k1), (ks2', k2)) = pairself split_last (ks1, ks2) in intersect_formats (ks1' @ (if k1 > k2 then [k1 - k2] else [])) (ks2' @ (if k2 > k1 then [k2 - k1] else [])) @ [Int.min (k1, k2)] end (* theory -> const_table -> (term option * int list) list -> term -> int list *) fun lookup_format thy def_table formats t = case AList.lookup (fn (SOME x, SOME y) => (term_match thy) (x, y) | _ => false) formats (SOME t) of SOME format => format | NONE => let val format = the (AList.lookup (op =) formats NONE) in case t of Const x => intersect_formats format (const_format thy def_table x) | _ => format end (* int list -> int list -> typ -> typ *) fun format_type default_format format T = let val T = unbit_and_unbox_type T val format = format |> filter (curry (op <) 0) in if forall (curry (op =) 1) format then T else let val (binder_Ts, body_T) = strip_type T val batched = binder_Ts |> map (format_type default_format default_format) |> rev |> chunk_list_unevenly (rev format) |> map (HOLogic.mk_tupleT o rev) in List.foldl (op -->) body_T batched end end (* theory -> const_table -> (term option * int list) list -> term -> typ *) fun format_term_type thy def_table formats t = format_type (the (AList.lookup (op =) formats NONE)) (lookup_format thy def_table formats t) (fastype_of t) (* int list -> int -> int list -> int list *) fun repair_special_format js m format = m - 1 downto 0 |> chunk_list_unevenly (rev format) |> map (rev o filter_out (member (op =) js)) |> filter_out null |> map length |> rev (* extended_context -> string * string -> (term option * int list) list -> styp -> term * typ *) fun user_friendly_const ({thy, evals, def_table, skolems, special_funs, ...} : extended_context) (base_name, step_name) formats = let val default_format = the (AList.lookup (op =) formats NONE) (* styp -> term * typ *) fun do_const (x as (s, T)) = (if String.isPrefix special_prefix s then let (* term -> term *) val do_term = map_aterms (fn Const x => fst (do_const x) | t' => t') val (x' as (_, T'), js, ts) = AList.find (op =) (!special_funs) (s, unbit_and_unbox_type T) |> the_single val max_j = List.last js val Ts = List.take (binder_types T', max_j + 1) val missing_js = filter_out (member (op =) js) (0 upto max_j) val missing_Ts = filter_indices missing_js Ts (* int -> indexname *) fun nth_missing_var n = ((arg_var_prefix ^ nat_subscript (n + 1), 0), nth missing_Ts n) val missing_vars = map nth_missing_var (0 upto length missing_js - 1) val vars = special_bounds ts @ missing_vars val ts' = map2 (fn T => fn j => 
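(* Note on the purpose of the enclosing function, roughly speaking: the model
   found by Kodkod mentions the constants introduced during preprocessing
   (specialized, uncurried, unrolled, Skolemized, ... copies). This function
   maps each of them back to a term phrased in terms of the original constant,
   so that the printed counterexample does not expose the internal encodings;
   a Skolem constant, for instance, is displayed as a function of the
   universally quantified variables it depends on. *)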
case AList.lookup (op =) (js ~~ ts) j of SOME t => do_term t | NONE => Var (nth missing_vars (find_index (curry (op =) j) missing_js))) Ts (0 upto max_j) val t = do_const x' |> fst val format = case AList.lookup (fn (SOME t1, SOME t2) => term_match thy (t1, t2) | _ => false) formats (SOME t) of SOME format => repair_special_format js (num_binder_types T') format | NONE => const_format thy def_table x' |> repair_special_format js (num_binder_types T') |> intersect_formats default_format in (list_comb (t, ts') |> fold_rev abs_var vars, format_type default_format format T) end else if String.isPrefix uncurry_prefix s then let val (ss, s') = unprefix uncurry_prefix s |> strip_first_name_sep |>> space_explode "@" in if String.isPrefix step_prefix s' then do_const (s', T) else let val k = the (Int.fromString (hd ss)) val j = the (Int.fromString (List.last ss)) val (before_Ts, (tuple_T, rest_T)) = strip_n_binders j T ||> (strip_n_binders 1 #>> hd) val T' = before_Ts ---> dest_n_tuple_type k tuple_T ---> rest_T in do_const (s', T') end end else if String.isPrefix unrolled_prefix s then let val t = Const (original_name s, range_type T) in (lambda (Free (iter_var_prefix, nat_T)) t, format_type default_format (lookup_format thy def_table formats t) T) end else if String.isPrefix base_prefix s then (Const (base_name, T --> T) $ Const (unprefix base_prefix s, T), format_type default_format default_format T) else if String.isPrefix step_prefix s then (Const (step_name, T --> T) $ Const (unprefix step_prefix s, T), format_type default_format default_format T) else if String.isPrefix skolem_prefix s then let val ss = the (AList.lookup (op =) (!skolems) s) val (Ts, Ts') = chop (length ss) (binder_types T) val frees = map Free (ss ~~ Ts) val s' = original_name s in (fold lambda frees (Const (s', Ts' ---> T)), format_type default_format (lookup_format thy def_table formats (Const x)) T) end else if String.isPrefix eval_prefix s then let val t = nth evals (the (Int.fromString (unprefix eval_prefix s))) in (t, format_term_type thy def_table formats t) end else if s = @{const_name undefined_fast_The} then (Const (nitpick_prefix ^ "The fallback", T), format_type default_format (lookup_format thy def_table formats (Const (@{const_name The}, (T --> bool_T) --> T))) T) else if s = @{const_name undefined_fast_Eps} then (Const (nitpick_prefix ^ "Eps fallback", T), format_type default_format (lookup_format thy def_table formats (Const (@{const_name Eps}, (T --> bool_T) --> T))) T) else let val t = Const (original_name s, T) in (t, format_term_type thy def_table formats t) end) |>> map_types unbit_and_unbox_type |>> shorten_names_in_term |>> shorten_abs_vars in do_const end (* styp -> string *) fun assign_operator_for_const (s, T) = if String.isPrefix ubfp_prefix s then if is_fun_type T then "\" else "\" else if String.isPrefix lbfp_prefix s then if is_fun_type T then "\" else "\" else if original_name s <> s then assign_operator_for_const (after_name_sep s, T) else "=" val binary_int_threshold = 4 (* term -> bool *) fun may_use_binary_ints (t1 $ t2) = may_use_binary_ints t1 andalso may_use_binary_ints t2 | may_use_binary_ints (t as Const (s, _)) = t <> @{const Suc} andalso not (member (op =) [@{const_name Abs_Frac}, @{const_name Rep_Frac}, @{const_name nat_gcd}, @{const_name nat_lcm}, @{const_name Frac}, @{const_name norm_frac}] s) | may_use_binary_ints (Abs (_, _, t')) = may_use_binary_ints t' | may_use_binary_ints _ = true fun should_use_binary_ints (t1 $ t2) = should_use_binary_ints t1 orelse should_use_binary_ints 
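(* How the integer encoding is chosen, informally: the binary (bit-word)
   encoding of "nat" and "int" is used only if it is safe for every formula
   involved (no "Suc", "gcd", "lcm", or fraction constants; cf.
   may_use_binary_ints) and, unless "binary_ints" is explicitly set to true,
   only if it also looks worthwhile (a multiplication, a division, or a
   numeral with absolute value >= binary_int_threshold occurs; cf.
   should_use_binary_ints). Otherwise the unary encoding is kept. *)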
t2 | should_use_binary_ints (Const (s, _)) = member (op =) [@{const_name times_nat_inst.times_nat}, @{const_name div_nat_inst.div_nat}, @{const_name times_int_inst.times_int}, @{const_name div_int_inst.div_int}] s orelse (String.isPrefix numeral_prefix s andalso let val n = the (Int.fromString (unprefix numeral_prefix s)) in n <= ~ binary_int_threshold orelse n >= binary_int_threshold end) | should_use_binary_ints (Abs (_, _, t')) = should_use_binary_ints t' | should_use_binary_ints _ = false (* typ -> typ *) fun binarize_nat_and_int_in_type @{typ nat} = @{typ "unsigned_bit word"} | binarize_nat_and_int_in_type @{typ int} = @{typ "signed_bit word"} | binarize_nat_and_int_in_type (Type (s, Ts)) = Type (s, map binarize_nat_and_int_in_type Ts) | binarize_nat_and_int_in_type T = T (* term -> term *) val binarize_nat_and_int_in_term = map_types binarize_nat_and_int_in_type (* extended_context -> term -> ((term list * term list) * (bool * bool)) * term *) fun preprocess_term (ext_ctxt as {thy, binary_ints, destroy_constrs, boxes, skolemize, uncurry, ...}) t = let val skolem_depth = if skolemize then 4 else ~1 val (((def_ts, nondef_ts), (got_all_mono_user_axioms, no_poly_user_axioms)), core_t) = t |> unfold_defs_in_term ext_ctxt |> Refute.close_form |> skolemize_term_and_more ext_ctxt skolem_depth |> specialize_consts_in_term ext_ctxt 0 |> `(axioms_for_term ext_ctxt) val binarize = case binary_ints of SOME false => false | _ => forall may_use_binary_ints (core_t :: def_ts @ nondef_ts) andalso (binary_ints = SOME true orelse exists should_use_binary_ints (core_t :: def_ts @ nondef_ts)) val box = exists (not_equal (SOME false) o snd) boxes val table = Termtab.empty |> uncurry ? fold (add_to_uncurry_table thy) (core_t :: def_ts @ nondef_ts) (* bool -> bool -> term -> term *) fun do_rest def core = binarize ? binarize_nat_and_int_in_term #> uncurry ? uncurry_term table #> box ? box_fun_and_pair_in_term ext_ctxt def #> destroy_constrs ? (pull_out_universal_constrs thy def #> pull_out_existential_constrs thy #> destroy_pulled_out_constrs ext_ctxt def) #> curry_assms #> destroy_universal_equalities #> destroy_existential_equalities thy #> simplify_constrs_and_sels thy #> distribute_quantifiers #> push_quantifiers_inward thy + #> Envir.eta_contract #> not core ? Refute.close_form #> shorten_abs_vars in (((map (do_rest true false) def_ts, map (do_rest false false) nondef_ts), (got_all_mono_user_axioms, no_poly_user_axioms)), do_rest false true core_t) end end; diff --git a/src/HOL/Tools/Nitpick/nitpick_isar.ML b/src/HOL/Tools/Nitpick/nitpick_isar.ML --- a/src/HOL/Tools/Nitpick/nitpick_isar.ML +++ b/src/HOL/Tools/Nitpick/nitpick_isar.ML @@ -1,484 +1,496 @@ (* Title: HOL/Tools/Nitpick/nitpick_isar.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Adds the "nitpick" and "nitpick_params" commands to Isabelle/Isar's outer syntax. 
*) signature NITPICK_ISAR = sig type params = Nitpick.params val auto: bool Unsynchronized.ref val default_params : theory -> (string * string) list -> params val setup : theory -> theory end structure Nitpick_Isar : NITPICK_ISAR = struct open Nitpick_Util open Nitpick_HOL open Nitpick_Rep open Nitpick_Nut open Nitpick val auto = Unsynchronized.ref false; val _ = ProofGeneralPgip.add_preference Preferences.category_tracing - (Preferences.bool_pref auto - "auto-nitpick" - "Whether to run Nitpick automatically.") + (Preferences.bool_pref auto "auto-nitpick" + "Whether to run Nitpick automatically.") type raw_param = string * string list val default_default_params = [("card", ["1\8"]), ("iter", ["0,1,2,4,8,12,16,24"]), ("bits", ["1,2,3,4,6,8,10,12"]), ("bisim_depth", ["7"]), ("box", ["smart"]), ("mono", ["smart"]), + ("std", ["true"]), ("wf", ["smart"]), ("sat_solver", ["smart"]), ("batch_size", ["smart"]), ("blocking", ["true"]), ("falsify", ["true"]), ("user_axioms", ["smart"]), ("assms", ["true"]), ("merge_type_vars", ["false"]), ("binary_ints", ["smart"]), ("destroy_constrs", ["true"]), ("specialize", ["true"]), ("skolemize", ["true"]), ("star_linear_preds", ["true"]), ("uncurry", ["true"]), ("fast_descrs", ["true"]), ("peephole_optim", ["true"]), ("timeout", ["30 s"]), ("tac_timeout", ["500 ms"]), ("sym_break", ["20"]), ("sharing_depth", ["3"]), ("flatten_props", ["false"]), ("max_threads", ["0"]), ("verbose", ["false"]), ("debug", ["false"]), ("overlord", ["false"]), ("show_all", ["false"]), ("show_skolems", ["true"]), ("show_datatypes", ["false"]), ("show_consts", ["false"]), ("format", ["1"]), ("max_potential", ["1"]), ("max_genuine", ["1"]), ("check_potential", ["false"]), ("check_genuine", ["false"])] val negated_params = [("dont_box", "box"), ("non_mono", "mono"), + ("non_std", "std"), ("non_wf", "wf"), ("non_blocking", "blocking"), ("satisfy", "falsify"), ("no_user_axioms", "user_axioms"), ("no_assms", "assms"), ("dont_merge_type_vars", "merge_type_vars"), ("unary_ints", "binary_ints"), ("dont_destroy_constrs", "destroy_constrs"), ("dont_specialize", "specialize"), ("dont_skolemize", "skolemize"), ("dont_star_linear_preds", "star_linear_preds"), ("dont_uncurry", "uncurry"), ("full_descrs", "fast_descrs"), ("no_peephole_optim", "peephole_optim"), ("dont_flatten_props", "flatten_props"), ("quiet", "verbose"), ("no_debug", "debug"), ("no_overlord", "overlord"), ("dont_show_all", "show_all"), ("hide_skolems", "show_skolems"), ("hide_datatypes", "show_datatypes"), ("hide_consts", "show_consts"), ("trust_potential", "check_potential"), ("trust_genuine", "check_genuine")] (* string -> bool *) fun is_known_raw_param s = AList.defined (op =) default_default_params s orelse AList.defined (op =) negated_params s orelse s = "max" orelse s = "eval" orelse s = "expect" orelse exists (fn p => String.isPrefix (p ^ " ") s) - ["card", "max", "iter", "box", "dont_box", "mono", "non_mono", "wf", - "non_wf", "format"] + ["card", "max", "iter", "box", "dont_box", "mono", "non_mono", "std", + "non_std", "wf", "non_wf", "format"] (* string * 'a -> unit *) fun check_raw_param (s, _) = if is_known_raw_param s then () else error ("Unknown parameter " ^ quote s ^ ".") (* string -> string option *) fun unnegate_param_name name = case AList.lookup (op =) negated_params name of NONE => if String.isPrefix "dont_" name then SOME (unprefix "dont_" name) else if String.isPrefix "non_" name then SOME (unprefix "non_" name) else NONE | some_name => some_name (* raw_param -> raw_param *) fun unnegate_raw_param (name, 
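(* Illustration of the unnegation convention (example values only): a negative
   alias is rewritten to the corresponding positive option with its value
   flipped, so "dont_box" becomes ("box", ["false"]), "non_mono 'a" becomes
   ("mono 'a", ["false"]), and "no_debug" becomes ("debug", ["false"]). *)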
value) = case unnegate_param_name name of SOME name' => (name', case value of ["false"] => ["true"] | ["true"] => ["false"] | [] => ["false"] | _ => value) | NONE => (name, value) structure Data = Theory_Data( type T = {params: raw_param list} val empty = {params = rev default_default_params} val extend = I fun merge ({params = ps1}, {params = ps2}) : T = {params = AList.merge (op =) (K true) (ps1, ps2)}) (* raw_param -> theory -> theory *) fun set_default_raw_param param thy = let val {params} = Data.get thy in Data.put {params = AList.update (op =) (unnegate_raw_param param) params} thy end (* theory -> raw_param list *) val default_raw_params = #params o Data.get (* string -> bool *) fun is_punctuation s = (s = "," orelse s = "-" orelse s = "\") (* string list -> string *) fun stringify_raw_param_value [] = "" | stringify_raw_param_value [s] = s | stringify_raw_param_value (s1 :: s2 :: ss) = s1 ^ (if is_punctuation s1 orelse is_punctuation s2 then "" else " ") ^ stringify_raw_param_value (s2 :: ss) (* bool -> string -> string -> bool option *) fun bool_option_from_string option name s = (case s of "smart" => if option then NONE else raise Option | "false" => SOME false | "true" => SOME true | "" => SOME true | s => raise Option) handle Option.Option => let val ss = map quote ((option ? cons "smart") ["true", "false"]) in error ("Parameter " ^ quote name ^ " must be assigned " ^ space_implode " " (serial_commas "or" ss) ^ ".") end (* bool -> raw_param list -> bool option -> string -> bool option *) fun general_lookup_bool option raw_params default_value name = case AList.lookup (op =) raw_params name of SOME s => s |> stringify_raw_param_value |> bool_option_from_string option name | NONE => default_value (* int -> string -> int *) fun maxed_int_from_string min_int s = Int.max (min_int, the (Int.fromString s)) (* Proof.context -> bool -> raw_param list -> raw_param list -> params *) fun extract_params ctxt auto default_params override_params = let val override_params = map unnegate_raw_param override_params val raw_params = rev override_params @ rev default_params val lookup = Option.map stringify_raw_param_value o AList.lookup (op =) raw_params (* string -> string *) fun lookup_string name = the_default "" (lookup name) (* string -> bool *) val lookup_bool = the o general_lookup_bool false raw_params (SOME false) (* string -> bool option *) val lookup_bool_option = general_lookup_bool true raw_params NONE (* string -> string option -> int *) fun do_int name value = case value of SOME s => (case Int.fromString s of SOME i => i | NONE => error ("Parameter " ^ quote name ^ " must be assigned an integer value.")) | NONE => 0 (* string -> int *) fun lookup_int name = do_int name (lookup name) (* string -> int option *) fun lookup_int_option name = case lookup name of SOME "smart" => NONE | value => SOME (do_int name value) (* string -> int -> string -> int list *) fun int_range_from_string name min_int s = let val (k1, k2) = (case space_explode "-" s of [s] => the_default (s, s) (first_field "\" s) | ["", s2] => ("-" ^ s2, "-" ^ s2) | [s1, s2] => (s1, s2) | _ => raise Option) |> pairself (maxed_int_from_string min_int) in if k1 <= k2 then k1 upto k2 else k1 downto k2 end handle Option.Option => error ("Parameter " ^ quote name ^ " must be assigned a sequence of integers.") (* string -> int -> string -> int list *) fun int_seq_from_string name min_int s = maps (int_range_from_string name min_int) (space_explode "," s) (* string -> int -> int list *) fun lookup_int_seq name min_int = case 
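(* Concrete examples of the sequence syntax handled by int_range_from_string
   and int_seq_from_string above: a value such as "1,3,6-8" denotes the list
   [1, 3, 6, 7, 8], a reversed range such as "10-1" is enumerated downwards,
   and values below the parameter's minimum are clamped to that minimum
   (cf. maxed_int_from_string). *)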
lookup name of SOME s => (case int_seq_from_string name min_int s of [] => [min_int] | value => value) | NONE => [min_int] (* (string -> 'a) -> int -> string -> ('a option * int list) list *) fun lookup_ints_assigns read prefix min_int = (NONE, lookup_int_seq prefix min_int) :: map (fn (name, value) => (SOME (read (String.extract (name, size prefix + 1, NONE))), value |> stringify_raw_param_value |> int_seq_from_string name min_int)) (filter (String.isPrefix (prefix ^ " ") o fst) raw_params) + (* (string -> 'a) -> string -> ('a option * bool) list *) + fun lookup_bool_assigns read prefix = + (NONE, lookup_bool prefix) + :: map (fn (name, value) => + (SOME (read (String.extract (name, size prefix + 1, NONE))), + value |> stringify_raw_param_value + |> bool_option_from_string false name |> the)) + (filter (String.isPrefix (prefix ^ " ") o fst) raw_params) (* (string -> 'a) -> string -> ('a option * bool option) list *) fun lookup_bool_option_assigns read prefix = (NONE, lookup_bool_option prefix) :: map (fn (name, value) => (SOME (read (String.extract (name, size prefix + 1, NONE))), value |> stringify_raw_param_value |> bool_option_from_string true name)) (filter (String.isPrefix (prefix ^ " ") o fst) raw_params) (* string -> Time.time option *) fun lookup_time name = case lookup name of NONE => NONE | SOME "none" => NONE | SOME s => let val msecs = case space_explode " " s of [s1, "min"] => 60000 * the (Int.fromString s1) | [s1, "s"] => 1000 * the (Int.fromString s1) | [s1, "ms"] => the (Int.fromString s1) | _ => 0 in if msecs <= 0 then error ("Parameter " ^ quote name ^ " must be assigned a positive \ \time value (e.g., \"60 s\", \"200 ms\") or \"none\".") else SOME (Time.fromMilliseconds msecs) end (* string -> term list *) val lookup_term_list = AList.lookup (op =) raw_params #> these #> Syntax.read_terms ctxt val read_type_polymorphic = Syntax.read_typ ctxt #> Logic.mk_type #> singleton (Variable.polymorphic ctxt) #> Logic.dest_type (* string -> term *) val read_term_polymorphic = Syntax.read_term ctxt #> singleton (Variable.polymorphic ctxt) (* string -> styp *) val read_const_polymorphic = read_term_polymorphic #> dest_Const val cards_assigns = lookup_ints_assigns read_type_polymorphic "card" 1 val maxes_assigns = lookup_ints_assigns read_const_polymorphic "max" ~1 val iters_assigns = lookup_ints_assigns read_const_polymorphic "iter" 0 val bitss = lookup_int_seq "bits" 1 val bisim_depths = lookup_int_seq "bisim_depth" ~1 val boxes = lookup_bool_option_assigns read_type_polymorphic "box" @ map_filter (fn (SOME T, _) => if is_fun_type T orelse is_pair_type T then SOME (SOME T, SOME true) else NONE | (NONE, _) => NONE) cards_assigns val monos = lookup_bool_option_assigns read_type_polymorphic "mono" + val stds = lookup_bool_assigns read_type_polymorphic "std" val wfs = lookup_bool_option_assigns read_const_polymorphic "wf" val sat_solver = lookup_string "sat_solver" val blocking = not auto andalso lookup_bool "blocking" val falsify = lookup_bool "falsify" val debug = not auto andalso lookup_bool "debug" val verbose = debug orelse (not auto andalso lookup_bool "verbose") val overlord = lookup_bool "overlord" val user_axioms = lookup_bool_option "user_axioms" val assms = lookup_bool "assms" val merge_type_vars = lookup_bool "merge_type_vars" val binary_ints = lookup_bool_option "binary_ints" val destroy_constrs = lookup_bool "destroy_constrs" val specialize = lookup_bool "specialize" val skolemize = lookup_bool "skolemize" val star_linear_preds = lookup_bool "star_linear_preds" val 
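(* Example of the per-type and per-constant assignments collected above (for
   illustration only): an invocation containing card "'a list" = 5 ends up in
   cards_assigns as an entry (SOME T, [5]), with T the parsed type, whereas a
   plain "card = 1-8" contributes the default entry keyed by NONE; "max",
   "iter", "box", "mono", "std", "wf", and "format" follow the same scheme for
   their respective value types. *)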
uncurry = lookup_bool "uncurry" val fast_descrs = lookup_bool "fast_descrs" val peephole_optim = lookup_bool "peephole_optim" val timeout = if auto then NONE else lookup_time "timeout" val tac_timeout = lookup_time "tac_timeout" val sym_break = Int.max (0, lookup_int "sym_break") val sharing_depth = Int.max (1, lookup_int "sharing_depth") val flatten_props = lookup_bool "flatten_props" val max_threads = Int.max (0, lookup_int "max_threads") val show_all = debug orelse lookup_bool "show_all" val show_skolems = show_all orelse lookup_bool "show_skolems" val show_datatypes = show_all orelse lookup_bool "show_datatypes" val show_consts = show_all orelse lookup_bool "show_consts" val formats = lookup_ints_assigns read_term_polymorphic "format" 0 val evals = lookup_term_list "eval" val max_potential = if auto then 0 else Int.max (0, lookup_int "max_potential") val max_genuine = Int.max (0, lookup_int "max_genuine") val check_potential = lookup_bool "check_potential" val check_genuine = lookup_bool "check_genuine" val batch_size = case lookup_int_option "batch_size" of SOME n => Int.max (1, n) | NONE => if debug then 1 else 64 val expect = lookup_string "expect" in {cards_assigns = cards_assigns, maxes_assigns = maxes_assigns, iters_assigns = iters_assigns, bitss = bitss, bisim_depths = bisim_depths, - boxes = boxes, monos = monos, wfs = wfs, sat_solver = sat_solver, - blocking = blocking, falsify = falsify, debug = debug, verbose = verbose, - overlord = overlord, user_axioms = user_axioms, assms = assms, + boxes = boxes, monos = monos, stds = stds, wfs = wfs, + sat_solver = sat_solver, blocking = blocking, falsify = falsify, + debug = debug, verbose = verbose, overlord = overlord, + user_axioms = user_axioms, assms = assms, merge_type_vars = merge_type_vars, binary_ints = binary_ints, destroy_constrs = destroy_constrs, specialize = specialize, skolemize = skolemize, star_linear_preds = star_linear_preds, uncurry = uncurry, fast_descrs = fast_descrs, peephole_optim = peephole_optim, timeout = timeout, tac_timeout = tac_timeout, sym_break = sym_break, sharing_depth = sharing_depth, flatten_props = flatten_props, max_threads = max_threads, show_skolems = show_skolems, show_datatypes = show_datatypes, show_consts = show_consts, formats = formats, evals = evals, max_potential = max_potential, max_genuine = max_genuine, check_potential = check_potential, check_genuine = check_genuine, batch_size = batch_size, expect = expect} end (* theory -> (string * string) list -> params *) fun default_params thy = extract_params (ProofContext.init thy) false (default_raw_params thy) o map (apsnd single) (* OuterParse.token list -> string * OuterParse.token list *) val scan_key = Scan.repeat1 OuterParse.typ_group >> space_implode " " (* OuterParse.token list -> string list * OuterParse.token list *) val scan_value = Scan.repeat1 (OuterParse.minus >> single || Scan.repeat1 (Scan.unless OuterParse.minus OuterParse.name) || OuterParse.$$$ "," |-- OuterParse.number >> prefix "," >> single) >> flat (* OuterParse.token list -> raw_param * OuterParse.token list *) val scan_param = scan_key -- (Scan.option (OuterParse.$$$ "=" |-- scan_value) >> these) (* OuterParse.token list -> raw_param list option * OuterParse.token list *) val scan_params = Scan.option (OuterParse.$$$ "[" |-- OuterParse.list scan_param --| OuterParse.$$$ "]") (* Proof.context -> ('a -> 'a) -> 'a -> 'a *) fun handle_exceptions ctxt f x = f x handle ARG (loc, details) => error ("Bad argument(s) to " ^ quote loc ^ ": " ^ details ^ ".") | BAD (loc, 
details) => error ("Internal error (" ^ quote loc ^ "): " ^ details ^ ".") | NOT_SUPPORTED details => (warning ("Unsupported case: " ^ details ^ "."); x) | NUT (loc, us) => error ("Invalid intermediate term" ^ plural_s_for_list us ^ " (" ^ quote loc ^ "): " ^ commas (map (string_for_nut ctxt) us) ^ ".") | REP (loc, Rs) => error ("Invalid representation" ^ plural_s_for_list Rs ^ " (" ^ quote loc ^ "): " ^ commas (map string_for_rep Rs) ^ ".") | TERM (loc, ts) => error ("Invalid term" ^ plural_s_for_list ts ^ " (" ^ quote loc ^ "): " ^ commas (map (Syntax.string_of_term ctxt) ts) ^ ".") | TOO_LARGE (_, details) => (warning ("Limit reached: " ^ details ^ "."); x) | TOO_SMALL (_, details) => (warning ("Limit reached: " ^ details ^ "."); x) | TYPE (loc, Ts, ts) => error ("Invalid type" ^ plural_s_for_list Ts ^ (if null ts then "" else " for term" ^ plural_s_for_list ts ^ " " ^ commas (map (quote o Syntax.string_of_term ctxt) ts)) ^ " (" ^ quote loc ^ "): " ^ commas (map (Syntax.string_of_typ ctxt) Ts) ^ ".") | Kodkod.SYNTAX (_, details) => (warning ("Ill-formed Kodkodi output: " ^ details ^ "."); x) | Refute.REFUTE (loc, details) => error ("Unhandled Refute error (" ^ quote loc ^ "): " ^ details ^ ".") -(* raw_param list -> bool -> int -> Proof.state -> bool * Proof.state *) -fun pick_nits override_params auto i state = + +(* raw_param list -> bool -> int -> int -> Proof.state -> bool * Proof.state *) +fun pick_nits override_params auto i step state = let val thy = Proof.theory_of state val ctxt = Proof.context_of state val thm = #goal (Proof.raw_goal state) val _ = List.app check_raw_param override_params val params as {blocking, debug, ...} = extract_params ctxt auto (default_raw_params thy) override_params (* unit -> bool * Proof.state *) fun go () = (false, state) |> (if auto then perhaps o try else if debug then fn f => fn x => f x else handle_exceptions ctxt) - (fn (_, state) => pick_nits_in_subgoal state params auto i + (fn (_, state) => pick_nits_in_subgoal state params auto i step |>> curry (op =) "genuine") in if auto orelse blocking then go () else (Toplevel.thread true (fn () => (go (); ())); (false, state)) end (* raw_param list option * int option -> Toplevel.transition -> Toplevel.transition *) fun nitpick_trans (opt_params, opt_i) = - Toplevel.keep (K () - o snd o pick_nits (these opt_params) false (the_default 1 opt_i) - o Toplevel.proof_of) + Toplevel.keep (fn st => + (pick_nits (these opt_params) false (the_default 1 opt_i) + (Toplevel.proof_position_of st) (Toplevel.proof_of st); ())) (* raw_param -> string *) fun string_for_raw_param (name, value) = name ^ " = " ^ stringify_raw_param_value value (* raw_param list option -> Toplevel.transition -> Toplevel.transition *) fun nitpick_params_trans opt_params = Toplevel.theory (fold set_default_raw_param (these opt_params) #> tap (fn thy => writeln ("Default parameters for Nitpick:\n" ^ (case rev (default_raw_params thy) of [] => "none" | params => (map check_raw_param params; params |> map string_for_raw_param |> sort_strings |> cat_lines))))) (* OuterParse.token list -> (Toplevel.transition -> Toplevel.transition) * OuterParse.token list *) val scan_nitpick_command = (scan_params -- Scan.option OuterParse.nat) #>> nitpick_trans val scan_nitpick_params_command = scan_params #>> nitpick_params_trans val _ = OuterSyntax.improper_command "nitpick" "try to find a counterexample for a given subgoal using Kodkod" OuterKeyword.diag scan_nitpick_command val _ = OuterSyntax.command "nitpick_params" "set and display the default parameters 
for Nitpick" OuterKeyword.thy_decl scan_nitpick_params_command (* Proof.state -> bool * Proof.state *) fun auto_nitpick state = - if not (!auto) then (false, state) else pick_nits [] true 1 state + if not (!auto) then (false, state) else pick_nits [] true 1 0 state val setup = Auto_Counterexample.register_tool ("nitpick", auto_nitpick) end; diff --git a/src/HOL/Tools/Nitpick/nitpick_kodkod.ML b/src/HOL/Tools/Nitpick/nitpick_kodkod.ML --- a/src/HOL/Tools/Nitpick/nitpick_kodkod.ML +++ b/src/HOL/Tools/Nitpick/nitpick_kodkod.ML @@ -1,1904 +1,1904 @@ (* Title: HOL/Tools/Nitpick/nitpick_kodkod.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Kodkod problem generator part of Kodkod. *) signature NITPICK_KODKOD = sig type extended_context = Nitpick_HOL.extended_context type dtype_spec = Nitpick_Scope.dtype_spec type kodkod_constrs = Nitpick_Peephole.kodkod_constrs type nut = Nitpick_Nut.nut type nfa_transition = Kodkod.rel_expr * typ type nfa_entry = typ * nfa_transition list type nfa_table = nfa_entry list structure NameTable : TABLE val univ_card : int -> int -> int -> Kodkod.bound list -> Kodkod.formula -> int val check_bits : int -> Kodkod.formula -> unit val check_arity : int -> int -> unit val kk_tuple : bool -> int -> int list -> Kodkod.tuple val tuple_set_from_atom_schema : (int * int) list -> Kodkod.tuple_set val sequential_int_bounds : int -> Kodkod.int_bound list val pow_of_two_int_bounds : int -> int -> int -> Kodkod.int_bound list val bounds_for_built_in_rels_in_formula : bool -> int -> int -> int -> int -> Kodkod.formula -> Kodkod.bound list val bound_for_plain_rel : Proof.context -> bool -> nut -> Kodkod.bound val bound_for_sel_rel : Proof.context -> bool -> dtype_spec list -> nut -> Kodkod.bound val merge_bounds : Kodkod.bound list -> Kodkod.bound list val declarative_axiom_for_plain_rel : kodkod_constrs -> nut -> Kodkod.formula val declarative_axioms_for_datatypes : extended_context -> int -> int Typtab.table -> kodkod_constrs -> nut NameTable.table -> dtype_spec list -> Kodkod.formula list val kodkod_formula_from_nut : int -> int Typtab.table -> bool -> kodkod_constrs -> nut -> Kodkod.formula end; structure Nitpick_Kodkod : NITPICK_KODKOD = struct open Nitpick_Util open Nitpick_HOL open Nitpick_Scope open Nitpick_Peephole open Nitpick_Rep open Nitpick_Nut structure KK = Kodkod type nfa_transition = KK.rel_expr * typ type nfa_entry = typ * nfa_transition list type nfa_table = nfa_entry list structure NfaGraph = Graph(type key = typ val ord = TermOrd.typ_ord) (* int -> KK.int_expr list *) fun flip_nums n = index_seq 1 n @ [0] |> map KK.Num (* int -> int -> int -> KK.bound list -> KK.formula -> int *) fun univ_card nat_card int_card main_j0 bounds formula = let (* KK.rel_expr -> int -> int *) fun rel_expr_func r k = Int.max (k, case r of KK.Atom j => j + 1 | KK.AtomSeq (k', j0) => j0 + k' | _ => 0) (* KK.tuple -> int -> int *) fun tuple_func t k = case t of KK.Tuple js => fold Integer.max (map (Integer.add 1) js) k | _ => k (* KK.tuple_set -> int -> int *) fun tuple_set_func ts k = Int.max (k, case ts of KK.TupleAtomSeq (k', j0) => j0 + k' | _ => 0) val expr_F = {formula_func = K I, rel_expr_func = rel_expr_func, int_expr_func = K I} val tuple_F = {tuple_func = tuple_func, tuple_set_func = tuple_set_func} val card = fold (KK.fold_bound expr_F tuple_F) bounds 1 |> KK.fold_formula expr_F formula in Int.max (main_j0 + fold Integer.max [2, nat_card, int_card] 0, card) end (* int -> KK.formula -> unit *) fun check_bits bits formula = let (* 
KK.int_expr -> unit -> unit *) fun int_expr_func (KK.Num k) () = if is_twos_complement_representable bits k then () else raise TOO_SMALL ("Nitpick_Kodkod.check_bits", "\"bits\" value " ^ string_of_int bits ^ " too small for problem") | int_expr_func _ () = () val expr_F = {formula_func = K I, rel_expr_func = K I, int_expr_func = int_expr_func} in KK.fold_formula expr_F formula () end (* int -> int -> unit *) fun check_arity univ_card n = if n > KK.max_arity univ_card then raise TOO_LARGE ("Nitpick_Kodkod.check_arity", "arity " ^ string_of_int n ^ " too large for universe of \ \cardinality " ^ string_of_int univ_card) else () (* bool -> int -> int list -> KK.tuple *) fun kk_tuple debug univ_card js = if debug then KK.Tuple js else KK.TupleIndex (length js, fold (fn j => fn accum => accum * univ_card + j) js 0) (* (int * int) list -> KK.tuple_set *) val tuple_set_from_atom_schema = foldl1 KK.TupleProduct o map KK.TupleAtomSeq (* rep -> KK.tuple_set *) val upper_bound_for_rep = tuple_set_from_atom_schema o atom_schema_of_rep (* int -> KK.tuple_set *) val single_atom = KK.TupleSet o single o KK.Tuple o single (* int -> KK.int_bound list *) fun sequential_int_bounds n = [(NONE, map single_atom (index_seq 0 n))] (* int -> int -> KK.int_bound list *) fun pow_of_two_int_bounds bits j0 univ_card = let (* int -> int -> int -> KK.int_bound list *) fun aux 0 _ _ = [] | aux 1 pow_of_two j = if j < univ_card then [(SOME (~ pow_of_two), [single_atom j])] else [] | aux iter pow_of_two j = (SOME pow_of_two, [single_atom j]) :: aux (iter - 1) (2 * pow_of_two) (j + 1) in aux (bits + 1) 1 j0 end (* KK.formula -> KK.n_ary_index list *) fun built_in_rels_in_formula formula = let (* KK.rel_expr -> KK.n_ary_index list -> KK.n_ary_index list *) fun rel_expr_func (r as KK.Rel (x as (n, j))) = if x = unsigned_bit_word_sel_rel orelse x = signed_bit_word_sel_rel then I else (case AList.lookup (op =) (#rels initial_pool) n of SOME k => j < k ? 
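(* Note on this group of helpers, stated roughly: only the built-in relations
   that actually occur in the generated Kodkod formula receive bounds (cf.
   built_in_rels_in_formula), and the integer atoms are given their values
   either sequentially (sequential_int_bounds, for the unary encoding) or as
   powers of two (pow_of_two_int_bounds, for the bit-word encoding). *)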
insert (op =) x | NONE => I) | rel_expr_func _ = I val expr_F = {formula_func = K I, rel_expr_func = rel_expr_func, int_expr_func = K I} in KK.fold_formula expr_F formula [] end val max_table_size = 65536 (* int -> unit *) fun check_table_size k = if k > max_table_size then raise TOO_LARGE ("Nitpick_Kodkod.check_table_size", "precomputed table too large (" ^ string_of_int k ^ ")") else () (* bool -> int -> int * int -> (int -> int) -> KK.tuple list *) fun tabulate_func1 debug univ_card (k, j0) f = (check_table_size k; map_filter (fn j1 => let val j2 = f j1 in if j2 >= 0 then SOME (kk_tuple debug univ_card [j1 + j0, j2 + j0]) else NONE end) (index_seq 0 k)) (* bool -> int -> int * int -> int -> (int * int -> int) -> KK.tuple list *) fun tabulate_op2 debug univ_card (k, j0) res_j0 f = (check_table_size (k * k); map_filter (fn j => let val j1 = j div k val j2 = j - j1 * k val j3 = f (j1, j2) in if j3 >= 0 then SOME (kk_tuple debug univ_card [j1 + j0, j2 + j0, j3 + res_j0]) else NONE end) (index_seq 0 (k * k))) (* bool -> int -> int * int -> int -> (int * int -> int * int) -> KK.tuple list *) fun tabulate_op2_2 debug univ_card (k, j0) res_j0 f = (check_table_size (k * k); map_filter (fn j => let val j1 = j div k val j2 = j - j1 * k val (j3, j4) = f (j1, j2) in if j3 >= 0 andalso j4 >= 0 then SOME (kk_tuple debug univ_card [j1 + j0, j2 + j0, j3 + res_j0, j4 + res_j0]) else NONE end) (index_seq 0 (k * k))) (* bool -> int -> int * int -> (int * int -> int) -> KK.tuple list *) fun tabulate_nat_op2 debug univ_card (k, j0) f = tabulate_op2 debug univ_card (k, j0) j0 (atom_for_nat (k, 0) o f) fun tabulate_int_op2 debug univ_card (k, j0) f = tabulate_op2 debug univ_card (k, j0) j0 (atom_for_int (k, 0) o f o pairself (int_for_atom (k, 0))) (* bool -> int -> int * int -> (int * int -> int * int) -> KK.tuple list *) fun tabulate_int_op2_2 debug univ_card (k, j0) f = tabulate_op2_2 debug univ_card (k, j0) j0 (pairself (atom_for_int (k, 0)) o f o pairself (int_for_atom (k, 0))) (* int * int -> int *) fun isa_div (m, n) = m div n handle General.Div => 0 fun isa_mod (m, n) = m mod n handle General.Div => m fun isa_gcd (m, 0) = m | isa_gcd (m, n) = isa_gcd (n, isa_mod (m, n)) fun isa_lcm (m, n) = isa_div (m * n, isa_gcd (m, n)) val isa_zgcd = isa_gcd o pairself abs (* int * int -> int * int *) fun isa_norm_frac (m, n) = if n < 0 then isa_norm_frac (~m, ~n) else if m = 0 orelse n = 0 then (0, 1) else let val p = isa_zgcd (m, n) in (isa_div (m, p), isa_div (n, p)) end (* bool -> int -> int -> int -> int -> int * int -> string * bool * KK.tuple list *) fun tabulate_built_in_rel debug univ_card nat_card int_card j0 (x as (n, _)) = (check_arity univ_card n; if x = not3_rel then ("not3", tabulate_func1 debug univ_card (2, j0) (curry (op -) 1)) else if x = suc_rel then ("suc", tabulate_func1 debug univ_card (univ_card - j0 - 1, j0) (Integer.add 1)) else if x = nat_add_rel then ("nat_add", tabulate_nat_op2 debug univ_card (nat_card, j0) (op +)) else if x = int_add_rel then ("int_add", tabulate_int_op2 debug univ_card (int_card, j0) (op +)) else if x = nat_subtract_rel then ("nat_subtract", tabulate_op2 debug univ_card (nat_card, j0) j0 (uncurry nat_minus)) else if x = int_subtract_rel then ("int_subtract", tabulate_int_op2 debug univ_card (int_card, j0) (op -)) else if x = nat_multiply_rel then ("nat_multiply", tabulate_nat_op2 debug univ_card (nat_card, j0) (op * )) else if x = int_multiply_rel then ("int_multiply", tabulate_int_op2 debug univ_card (int_card, j0) (op * )) else if x = nat_divide_rel then 
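(* Informal note on the tables built here: each built-in arithmetic relation
   (successor, addition, subtraction, multiplication, division, ordering, gcd,
   lcm, fraction normalization) is precomputed as an explicit set of tuples
   over the finite "nat" or "int" atoms of the scope. For a nat cardinality of
   4, for instance, the addition table contains (0, 0, 0), (0, 1, 1), ...,
   (1, 2, 3) but no triple whose sum falls outside the scope, and division by
   zero yields 0 (cf. isa_div). Oversized tables are rejected by
   check_table_size. *)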
("nat_divide", tabulate_nat_op2 debug univ_card (nat_card, j0) isa_div) else if x = int_divide_rel then ("int_divide", tabulate_int_op2 debug univ_card (int_card, j0) isa_div) else if x = nat_less_rel then ("nat_less", tabulate_nat_op2 debug univ_card (nat_card, j0) (int_for_bool o op <)) else if x = int_less_rel then ("int_less", tabulate_int_op2 debug univ_card (int_card, j0) (int_for_bool o op <)) else if x = gcd_rel then ("gcd", tabulate_nat_op2 debug univ_card (nat_card, j0) isa_gcd) else if x = lcm_rel then ("lcm", tabulate_nat_op2 debug univ_card (nat_card, j0) isa_lcm) else if x = norm_frac_rel then ("norm_frac", tabulate_int_op2_2 debug univ_card (int_card, j0) isa_norm_frac) else raise ARG ("Nitpick_Kodkod.tabulate_built_in_rel", "unknown relation")) (* bool -> int -> int -> int -> int -> int * int -> KK.rel_expr -> KK.bound *) fun bound_for_built_in_rel debug univ_card nat_card int_card j0 x = let val (nick, ts) = tabulate_built_in_rel debug univ_card nat_card int_card j0 x in ([(x, nick)], [KK.TupleSet ts]) end (* bool -> int -> int -> int -> int -> KK.formula -> KK.bound list *) fun bounds_for_built_in_rels_in_formula debug univ_card nat_card int_card j0 = map (bound_for_built_in_rel debug univ_card nat_card int_card j0) o built_in_rels_in_formula (* Proof.context -> bool -> string -> typ -> rep -> string *) fun bound_comment ctxt debug nick T R = short_name nick ^ - (if debug then " :: " ^ plain_string_from_yxml (Syntax.string_of_typ ctxt T) - else "") ^ " : " ^ string_for_rep R + (if debug then " :: " ^ unyxml (Syntax.string_of_typ ctxt T) else "") ^ + " : " ^ string_for_rep R (* Proof.context -> bool -> nut -> KK.bound *) fun bound_for_plain_rel ctxt debug (u as FreeRel (x, T, R, nick)) = ([(x, bound_comment ctxt debug nick T R)], if nick = @{const_name bisim_iterator_max} then case R of Atom (k, j0) => [single_atom (k - 1 + j0)] | _ => raise NUT ("Nitpick_Kodkod.bound_for_plain_rel", [u]) else [KK.TupleSet [], upper_bound_for_rep R]) | bound_for_plain_rel _ _ u = raise NUT ("Nitpick_Kodkod.bound_for_plain_rel", [u]) (* Proof.context -> bool -> dtype_spec list -> nut -> KK.bound *) fun bound_for_sel_rel ctxt debug dtypes (FreeRel (x, T as Type ("fun", [T1, T2]), R as Func (Atom (_, j0), R2), nick)) = let val constr as {delta, epsilon, exclusive, explicit_max, ...} = constr_spec dtypes (original_name nick, T1) in ([(x, bound_comment ctxt debug nick T R)], if explicit_max = 0 then [KK.TupleSet []] else let val ts = KK.TupleAtomSeq (epsilon - delta, delta + j0) in if R2 = Formula Neut then [ts] |> not exclusive ? 
cons (KK.TupleSet []) else [KK.TupleSet [], KK.TupleProduct (ts, upper_bound_for_rep R2)] end) end | bound_for_sel_rel _ _ _ u = raise NUT ("Nitpick_Kodkod.bound_for_sel_rel", [u]) (* KK.bound list -> KK.bound list *) fun merge_bounds bs = let (* KK.bound -> int *) fun arity (zs, _) = fst (fst (hd zs)) (* KK.bound list -> KK.bound -> KK.bound list -> KK.bound list *) fun add_bound ds b [] = List.revAppend (ds, [b]) | add_bound ds b (c :: cs) = if arity b = arity c andalso snd b = snd c then List.revAppend (ds, (fst c @ fst b, snd c) :: cs) else add_bound (c :: ds) b cs in fold (add_bound []) bs [] end (* int -> int -> KK.rel_expr list *) fun unary_var_seq j0 n = map (curry KK.Var 1) (index_seq j0 n) (* int list -> KK.rel_expr *) val singleton_from_combination = foldl1 KK.Product o map KK.Atom (* rep -> KK.rel_expr list *) fun all_singletons_for_rep R = if is_lone_rep R then all_combinations_for_rep R |> map singleton_from_combination else raise REP ("Nitpick_Kodkod.all_singletons_for_rep", [R]) (* KK.rel_expr -> KK.rel_expr list *) fun unpack_products (KK.Product (r1, r2)) = unpack_products r1 @ unpack_products r2 | unpack_products r = [r] fun unpack_joins (KK.Join (r1, r2)) = unpack_joins r1 @ unpack_joins r2 | unpack_joins r = [r] (* rep -> KK.rel_expr *) val empty_rel_for_rep = empty_n_ary_rel o arity_of_rep fun full_rel_for_rep R = case atom_schema_of_rep R of [] => raise REP ("Nitpick_Kodkod.full_rel_for_rep", [R]) | schema => foldl1 KK.Product (map KK.AtomSeq schema) (* int -> int list -> KK.decl list *) fun decls_for_atom_schema j0 schema = map2 (fn j => fn x => KK.DeclOne ((1, j), KK.AtomSeq x)) (index_seq j0 (length schema)) schema (* The type constraint below is a workaround for a Poly/ML bug. *) (* kodkod_constrs -> rep -> KK.rel_expr -> KK.formula *) fun d_n_ary_function ({kk_all, kk_join, kk_lone, kk_one, ...} : kodkod_constrs) R r = let val body_R = body_rep R in if is_lone_rep body_R then let val binder_schema = atom_schema_of_reps (binder_reps R) val body_schema = atom_schema_of_rep body_R val one = is_one_rep body_R val opt_x = case r of KK.Rel x => SOME x | _ => NONE in if opt_x <> NONE andalso length binder_schema = 1 andalso length body_schema = 1 then (if one then KK.Function else KK.Functional) (the opt_x, KK.AtomSeq (hd binder_schema), KK.AtomSeq (hd body_schema)) else let val decls = decls_for_atom_schema ~1 binder_schema val vars = unary_var_seq ~1 (length binder_schema) val kk_xone = if one then kk_one else kk_lone in kk_all decls (kk_xone (fold kk_join vars r)) end end else KK.True end fun kk_n_ary_function kk R (r as KK.Rel x) = if not (is_opt_rep R) then if x = suc_rel then KK.False else if x = nat_add_rel then formula_for_bool (card_of_rep (body_rep R) = 1) else if x = nat_multiply_rel then formula_for_bool (card_of_rep (body_rep R) <= 2) else d_n_ary_function kk R r else if x = nat_subtract_rel then KK.True else d_n_ary_function kk R r | kk_n_ary_function kk R r = d_n_ary_function kk R r (* kodkod_constrs -> KK.rel_expr list -> KK.formula *) fun kk_disjoint_sets _ [] = KK.True | kk_disjoint_sets (kk as {kk_and, kk_no, kk_intersect, ...} : kodkod_constrs) (r :: rs) = fold (kk_and o kk_no o kk_intersect r) rs (kk_disjoint_sets kk rs) (* int -> kodkod_constrs -> (KK.rel_expr -> KK.rel_expr) -> KK.rel_expr -> KK.rel_expr *) fun basic_rel_rel_let j ({kk_rel_let, ...} : kodkod_constrs) f r = if inline_rel_expr r then f r else let val x = (KK.arity_of_rel_expr r, j) in kk_rel_let [KK.AssignRelReg (x, r)] (f (KK.RelReg x)) end (* kodkod_constrs -> (KK.rel_expr -> 
KK.rel_expr) -> KK.rel_expr -> KK.rel_expr *) val single_rel_rel_let = basic_rel_rel_let 0 (* kodkod_constrs -> (KK.rel_expr -> KK.rel_expr -> KK.rel_expr) -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr *) fun double_rel_rel_let kk f r1 r2 = single_rel_rel_let kk (fn r1 => basic_rel_rel_let 1 kk (f r1) r2) r1 (* kodkod_constrs -> (KK.rel_expr -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr) -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr *) fun tripl_rel_rel_let kk f r1 r2 r3 = double_rel_rel_let kk (fn r1 => fn r2 => basic_rel_rel_let 2 kk (f r1 r2) r3) r1 r2 (* kodkod_constrs -> int -> KK.formula -> KK.rel_expr *) fun atom_from_formula ({kk_rel_if, ...} : kodkod_constrs) j0 f = kk_rel_if f (KK.Atom (j0 + 1)) (KK.Atom j0) (* kodkod_constrs -> rep -> KK.formula -> KK.rel_expr *) fun rel_expr_from_formula kk R f = case unopt_rep R of Atom (2, j0) => atom_from_formula kk j0 f | _ => raise REP ("Nitpick_Kodkod.rel_expr_from_formula", [R]) (* kodkod_constrs -> int -> int -> KK.rel_expr -> KK.rel_expr list *) fun unpack_vect_in_chunks ({kk_project_seq, ...} : kodkod_constrs) chunk_arity num_chunks r = List.tabulate (num_chunks, fn j => kk_project_seq r (j * chunk_arity) chunk_arity) (* kodkod_constrs -> bool -> rep -> rep -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr *) fun kk_n_fold_join (kk as {kk_intersect, kk_product, kk_join, kk_project_seq, ...}) one R1 res_R r1 r2 = case arity_of_rep R1 of 1 => kk_join r1 r2 | arity1 => let val unpacked_rs1 = if inline_rel_expr r1 then unpack_vect_in_chunks kk 1 arity1 r1 else unpack_products r1 in if one andalso length unpacked_rs1 = arity1 then fold kk_join unpacked_rs1 r2 else kk_project_seq (kk_intersect (kk_product r1 (full_rel_for_rep res_R)) r2) arity1 (arity_of_rep res_R) end (* kodkod_constrs -> rep -> rep -> KK.rel_expr -> KK.rel_expr list -> KK.rel_expr list -> KK.rel_expr *) fun kk_case_switch (kk as {kk_union, kk_product, ...}) R1 R2 r rs1 rs2 = if rs1 = rs2 then r else kk_n_fold_join kk true R1 R2 r (fold1 kk_union (map2 kk_product rs1 rs2)) val lone_rep_fallback_max_card = 4096 val some_j0 = 0 (* kodkod_constrs -> rep -> rep -> KK.rel_expr -> KK.rel_expr *) fun lone_rep_fallback kk new_R old_R r = if old_R = new_R then r else let val card = card_of_rep old_R in if is_lone_rep old_R andalso is_lone_rep new_R andalso card = card_of_rep new_R then if card >= lone_rep_fallback_max_card then raise TOO_LARGE ("Nitpick_Kodkod.lone_rep_fallback", "too high cardinality (" ^ string_of_int card ^ ")") else kk_case_switch kk old_R new_R r (all_singletons_for_rep old_R) (all_singletons_for_rep new_R) else raise REP ("Nitpick_Kodkod.lone_rep_fallback", [old_R, new_R]) end (* kodkod_constrs -> int * int -> rep -> KK.rel_expr -> KK.rel_expr *) and atom_from_rel_expr kk (x as (k, j0)) old_R r = case old_R of Func (R1, R2) => let val dom_card = card_of_rep R1 val R2' = case R2 of Atom _ => R2 | _ => Atom (card_of_rep R2, some_j0) in atom_from_rel_expr kk x (Vect (dom_card, R2')) (vect_from_rel_expr kk dom_card R2' old_R r) end | Opt _ => raise REP ("Nitpick_Kodkod.atom_from_rel_expr", [old_R]) | _ => lone_rep_fallback kk (Atom x) old_R r (* kodkod_constrs -> rep list -> rep -> KK.rel_expr -> KK.rel_expr *) and struct_from_rel_expr kk Rs old_R r = case old_R of Atom _ => lone_rep_fallback kk (Struct Rs) old_R r | Struct Rs' => let val Rs = filter (not_equal Unit) Rs val Rs' = filter (not_equal Unit) Rs' in if Rs' = Rs then r else if map card_of_rep Rs' = map card_of_rep Rs then let val old_arities = map arity_of_rep Rs' val old_offsets = offset_list
old_arities val old_rs = map2 (#kk_project_seq kk r) old_offsets old_arities in fold1 (#kk_product kk) (map3 (rel_expr_from_rel_expr kk) Rs Rs' old_rs) end else lone_rep_fallback kk (Struct Rs) old_R r end | _ => raise REP ("Nitpick_Kodkod.struct_from_rel_expr", [old_R]) (* kodkod_constrs -> int -> rep -> rep -> KK.rel_expr -> KK.rel_expr *) and vect_from_rel_expr kk k R old_R r = case old_R of Atom _ => lone_rep_fallback kk (Vect (k, R)) old_R r | Vect (k', R') => if k = k' andalso R = R' then r else lone_rep_fallback kk (Vect (k, R)) old_R r | Func (R1, Formula Neut) => if k = card_of_rep R1 then fold1 (#kk_product kk) (map (fn arg_r => rel_expr_from_formula kk R (#kk_subset kk arg_r r)) (all_singletons_for_rep R1)) else raise REP ("Nitpick_Kodkod.vect_from_rel_expr", [old_R]) | Func (Unit, R2) => rel_expr_from_rel_expr kk R R2 r | Func (R1, R2) => fold1 (#kk_product kk) (map (fn arg_r => rel_expr_from_rel_expr kk R R2 (kk_n_fold_join kk true R1 R2 arg_r r)) (all_singletons_for_rep R1)) | _ => raise REP ("Nitpick_Kodkod.vect_from_rel_expr", [old_R]) (* kodkod_constrs -> rep -> rep -> rep -> KK.rel_expr -> KK.rel_expr *) and func_from_no_opt_rel_expr kk R1 R2 (Atom x) r = let val dom_card = card_of_rep R1 val R2' = case R2 of Atom _ => R2 | _ => Atom (card_of_rep R2, some_j0) in func_from_no_opt_rel_expr kk R1 R2 (Vect (dom_card, R2')) (vect_from_rel_expr kk dom_card R2' (Atom x) r) end | func_from_no_opt_rel_expr kk Unit R2 old_R r = (case old_R of Vect (k, R') => rel_expr_from_rel_expr kk R2 R' r | Func (Unit, R2') => rel_expr_from_rel_expr kk R2 R2' r | Func (Atom (1, _), Formula Neut) => (case unopt_rep R2 of Atom (2, j0) => atom_from_formula kk j0 (#kk_some kk r) | _ => raise REP ("Nitpick_Kodkod.func_from_no_opt_rel_expr", [old_R, Func (Unit, R2)])) | Func (R1', R2') => rel_expr_from_rel_expr kk R2 R2' (#kk_project_seq kk r (arity_of_rep R1') (arity_of_rep R2')) | _ => raise REP ("Nitpick_Kodkod.func_from_no_opt_rel_expr", [old_R, Func (Unit, R2)])) | func_from_no_opt_rel_expr kk R1 (Formula Neut) old_R r = (case old_R of Vect (k, Atom (2, j0)) => let val args_rs = all_singletons_for_rep R1 val vals_rs = unpack_vect_in_chunks kk 1 k r (* KK.rel_expr -> KK.rel_expr -> KK.rel_expr *) fun empty_or_singleton_set_for arg_r val_r = #kk_join kk val_r (#kk_product kk (KK.Atom (j0 + 1)) arg_r) in fold1 (#kk_union kk) (map2 empty_or_singleton_set_for args_rs vals_rs) end | Func (R1', Formula Neut) => if R1 = R1' then r else let val schema = atom_schema_of_rep R1 val r1 = fold1 (#kk_product kk) (unary_var_seq ~1 (length schema)) |> rel_expr_from_rel_expr kk R1' R1 val kk_xeq = (if is_one_rep R1' then #kk_subset else #kk_rel_eq) kk in #kk_comprehension kk (decls_for_atom_schema ~1 schema) (kk_xeq r1 r) end | Func (Unit, (Atom (2, j0))) => #kk_rel_if kk (#kk_rel_eq kk r (KK.Atom (j0 + 1))) (full_rel_for_rep R1) (empty_rel_for_rep R1) | Func (R1', Atom (2, j0)) => func_from_no_opt_rel_expr kk R1 (Formula Neut) (Func (R1', Formula Neut)) (#kk_join kk r (KK.Atom (j0 + 1))) | _ => raise REP ("Nitpick_Kodkod.func_from_no_opt_rel_expr", [old_R, Func (R1, Formula Neut)])) | func_from_no_opt_rel_expr kk R1 R2 old_R r = case old_R of Vect (k, R) => let val args_rs = all_singletons_for_rep R1 val vals_rs = unpack_vect_in_chunks kk (arity_of_rep R) k r |> map (rel_expr_from_rel_expr kk R2 R) in fold1 (#kk_union kk) (map2 (#kk_product kk) args_rs vals_rs) end | Func (R1', Formula Neut) => (case R2 of Atom (x as (2, j0)) => let val schema = atom_schema_of_rep R1 in if length schema = 1 then #kk_override kk 
(#kk_product kk (KK.AtomSeq (hd schema)) (KK.Atom j0)) (#kk_product kk r (KK.Atom (j0 + 1))) else let val r1 = fold1 (#kk_product kk) (unary_var_seq ~1 (length schema)) |> rel_expr_from_rel_expr kk R1' R1 val r2 = KK.Var (1, ~(length schema) - 1) val r3 = atom_from_formula kk j0 (#kk_subset kk r1 r) in #kk_comprehension kk (decls_for_atom_schema ~1 (schema @ [x])) (#kk_subset kk r2 r3) end end | _ => raise REP ("Nitpick_Kodkod.func_from_no_opt_rel_expr", [old_R, Func (R1, R2)])) | Func (Unit, R2') => let val j0 = some_j0 in func_from_no_opt_rel_expr kk R1 R2 (Func (Atom (1, j0), R2')) (#kk_product kk (KK.Atom j0) r) end | Func (R1', R2') => if R1 = R1' andalso R2 = R2' then r else let val dom_schema = atom_schema_of_rep R1 val ran_schema = atom_schema_of_rep R2 val dom_prod = fold1 (#kk_product kk) (unary_var_seq ~1 (length dom_schema)) |> rel_expr_from_rel_expr kk R1' R1 val ran_prod = fold1 (#kk_product kk) (unary_var_seq (~(length dom_schema) - 1) (length ran_schema)) |> rel_expr_from_rel_expr kk R2' R2 val app = kk_n_fold_join kk true R1' R2' dom_prod r val kk_xeq = (if is_one_rep R2' then #kk_subset else #kk_rel_eq) kk in #kk_comprehension kk (decls_for_atom_schema ~1 (dom_schema @ ran_schema)) (kk_xeq ran_prod app) end | _ => raise REP ("Nitpick_Kodkod.func_from_no_opt_rel_expr", [old_R, Func (R1, R2)]) (* kodkod_constrs -> rep -> rep -> KK.rel_expr -> KK.rel_expr *) and rel_expr_from_rel_expr kk new_R old_R r = let val unopt_old_R = unopt_rep old_R val unopt_new_R = unopt_rep new_R in if unopt_old_R <> old_R andalso unopt_new_R = new_R then raise REP ("Nitpick_Kodkod.rel_expr_from_rel_expr", [old_R, new_R]) else if unopt_new_R = unopt_old_R then r else (case unopt_new_R of Atom x => atom_from_rel_expr kk x | Struct Rs => struct_from_rel_expr kk Rs | Vect (k, R') => vect_from_rel_expr kk k R' | Func (R1, R2) => func_from_no_opt_rel_expr kk R1 R2 | _ => raise REP ("Nitpick_Kodkod.rel_expr_from_rel_expr", [old_R, new_R])) unopt_old_R r end (* kodkod_constrs -> rep -> rep -> rep -> KK.rel_expr -> KK.rel_expr *) and rel_expr_to_func kk R1 R2 = rel_expr_from_rel_expr kk (Func (R1, R2)) (* kodkod_constrs -> typ -> KK.rel_expr -> KK.rel_expr *) fun bit_set_from_atom ({kk_join, ...} : kodkod_constrs) T r = kk_join r (KK.Rel (if T = @{typ "unsigned_bit word"} then unsigned_bit_word_sel_rel else signed_bit_word_sel_rel)) (* kodkod_constrs -> typ -> KK.rel_expr -> KK.int_expr *) val int_expr_from_atom = KK.SetSum ooo bit_set_from_atom (* kodkod_constrs -> typ -> rep -> KK.int_expr -> KK.rel_expr *) fun atom_from_int_expr (kk as {kk_rel_eq, kk_comprehension, ...} : kodkod_constrs) T R i = kk_comprehension (decls_for_atom_schema ~1 (atom_schema_of_rep R)) (kk_rel_eq (bit_set_from_atom kk T (KK.Var (1, ~1))) (KK.Bits i)) (* kodkod_constrs -> nut -> KK.formula *) fun declarative_axiom_for_plain_rel kk (FreeRel (x, _, R as Func _, nick)) = kk_n_ary_function kk (R |> nick = @{const_name List.set} ? 
unopt_rep) (KK.Rel x) | declarative_axiom_for_plain_rel ({kk_lone, kk_one, ...} : kodkod_constrs) (FreeRel (x, _, R, _)) = if is_one_rep R then kk_one (KK.Rel x) else if is_lone_rep R andalso card_of_rep R > 1 then kk_lone (KK.Rel x) else KK.True | declarative_axiom_for_plain_rel _ u = raise NUT ("Nitpick_Kodkod.declarative_axiom_for_plain_rel", [u]) (* nut NameTable.table -> styp -> KK.rel_expr * rep * int *) fun const_triple rel_table (x as (s, T)) = case the_name rel_table (ConstName (s, T, Any)) of FreeRel ((n, j), _, R, _) => (KK.Rel (n, j), R, n) | _ => raise TERM ("Nitpick_Kodkod.const_triple", [Const x]) (* nut NameTable.table -> styp -> KK.rel_expr *) fun discr_rel_expr rel_table = #1 o const_triple rel_table o discr_for_constr (* extended_context -> kodkod_constrs -> nut NameTable.table -> dtype_spec list -> styp -> int -> nfa_transition list *) fun nfa_transitions_for_sel ext_ctxt ({kk_project, ...} : kodkod_constrs) rel_table (dtypes : dtype_spec list) constr_x n = let val x as (_, T) = boxed_nth_sel_for_constr ext_ctxt constr_x n val (r, R, arity) = const_triple rel_table x val type_schema = type_schema_of_rep T R in map_filter (fn (j, T) => if forall (not_equal T o #typ) dtypes then NONE else SOME (kk_project r (map KK.Num [0, j]), T)) (index_seq 1 (arity - 1) ~~ tl type_schema) end (* extended_context -> kodkod_constrs -> nut NameTable.table -> dtype_spec list -> styp -> nfa_transition list *) fun nfa_transitions_for_constr ext_ctxt kk rel_table dtypes (x as (_, T)) = maps (nfa_transitions_for_sel ext_ctxt kk rel_table dtypes x) (index_seq 0 (num_sels_for_constr_type T)) (* extended_context -> kodkod_constrs -> nut NameTable.table -> dtype_spec list -> dtype_spec -> nfa_entry option *) fun nfa_entry_for_datatype _ _ _ _ ({co = true, ...} : dtype_spec) = NONE - | nfa_entry_for_datatype _ _ _ _ {shallow = true, ...} = NONE + | nfa_entry_for_datatype _ _ _ _ {deep = false, ...} = NONE | nfa_entry_for_datatype ext_ctxt kk rel_table dtypes {typ, constrs, ...} = SOME (typ, maps (nfa_transitions_for_constr ext_ctxt kk rel_table dtypes o #const) constrs) val empty_rel = KK.Product (KK.None, KK.None) (* nfa_table -> typ -> typ -> KK.rel_expr list *) fun direct_path_rel_exprs nfa start final = case AList.lookup (op =) nfa final of SOME trans => map fst (filter (curry (op =) start o snd) trans) | NONE => [] (* kodkod_constrs -> nfa_table -> typ list -> typ -> typ -> KK.rel_expr *) and any_path_rel_expr ({kk_union, ...} : kodkod_constrs) nfa [] start final = fold kk_union (direct_path_rel_exprs nfa start final) (if start = final then KK.Iden else empty_rel) | any_path_rel_expr (kk as {kk_union, ...}) nfa (q :: qs) start final = kk_union (any_path_rel_expr kk nfa qs start final) (knot_path_rel_expr kk nfa qs start q final) (* kodkod_constrs -> nfa_table -> typ list -> typ -> typ -> typ -> KK.rel_expr *) and knot_path_rel_expr (kk as {kk_join, kk_reflexive_closure, ...}) nfa qs start knot final = kk_join (kk_join (any_path_rel_expr kk nfa qs knot final) (kk_reflexive_closure (loop_path_rel_expr kk nfa qs knot))) (any_path_rel_expr kk nfa qs start knot) (* kodkod_constrs -> nfa_table -> typ list -> typ -> KK.rel_expr *) and loop_path_rel_expr ({kk_union, ...} : kodkod_constrs) nfa [] start = fold kk_union (direct_path_rel_exprs nfa start start) empty_rel | loop_path_rel_expr (kk as {kk_union, kk_closure, ...}) nfa (q :: qs) start = if start = q then kk_closure (loop_path_rel_expr kk nfa qs start) else kk_union (loop_path_rel_expr kk nfa qs start) (knot_path_rel_expr kk nfa qs start q 
start) (* nfa_table -> unit NfaGraph.T *) fun graph_for_nfa nfa = let (* typ -> unit NfaGraph.T -> unit NfaGraph.T *) fun new_node q = perhaps (try (NfaGraph.new_node (q, ()))) (* nfa_table -> unit NfaGraph.T -> unit NfaGraph.T *) fun add_nfa [] = I | add_nfa ((_, []) :: nfa) = add_nfa nfa | add_nfa ((q, ((_, q') :: transitions)) :: nfa) = add_nfa ((q, transitions) :: nfa) o NfaGraph.add_edge (q, q') o new_node q' o new_node q in add_nfa nfa NfaGraph.empty end (* nfa_table -> nfa_table list *) fun strongly_connected_sub_nfas nfa = nfa |> graph_for_nfa |> NfaGraph.strong_conn |> map (fn keys => filter (member (op =) keys o fst) nfa) (* dtype_spec list -> kodkod_constrs -> nfa_table -> typ -> KK.formula *) fun acyclicity_axiom_for_datatype dtypes kk nfa start = #kk_no kk (#kk_intersect kk (loop_path_rel_expr kk nfa (map fst nfa) start) KK.Iden) (* extended_context -> kodkod_constrs -> nut NameTable.table -> dtype_spec list -> KK.formula list *) fun acyclicity_axioms_for_datatypes ext_ctxt kk rel_table dtypes = map_filter (nfa_entry_for_datatype ext_ctxt kk rel_table dtypes) dtypes |> strongly_connected_sub_nfas |> maps (fn nfa => map (acyclicity_axiom_for_datatype dtypes kk nfa o fst) nfa) (* extended_context -> int -> kodkod_constrs -> nut NameTable.table -> KK.rel_expr -> constr_spec -> int -> KK.formula *) fun sel_axiom_for_sel ext_ctxt j0 (kk as {kk_all, kk_implies, kk_formula_if, kk_subset, kk_rel_eq, kk_no, kk_join, ...}) rel_table dom_r ({const, delta, epsilon, exclusive, explicit_max, ...} : constr_spec) n = let val x as (_, T) = boxed_nth_sel_for_constr ext_ctxt const n val (r, R, arity) = const_triple rel_table x val R2 = dest_Func R |> snd val z = (epsilon - delta, delta + j0) in if exclusive then kk_n_ary_function kk (Func (Atom z, R2)) r else let val r' = kk_join (KK.Var (1, 0)) r in kk_all [KK.DeclOne ((1, 0), KK.AtomSeq z)] (kk_formula_if (kk_subset (KK.Var (1, 0)) dom_r) (kk_n_ary_function kk R2 r') (kk_no r')) end end (* extended_context -> int -> int -> kodkod_constrs -> nut NameTable.table -> constr_spec -> KK.formula list *) fun sel_axioms_for_constr ext_ctxt bits j0 kk rel_table (constr as {const, delta, epsilon, explicit_max, ...}) = let val honors_explicit_max = explicit_max < 0 orelse epsilon - delta <= explicit_max in if explicit_max = 0 then [formula_for_bool honors_explicit_max] else let val ran_r = discr_rel_expr rel_table const val max_axiom = if honors_explicit_max then KK.True else if is_twos_complement_representable bits (epsilon - delta) then KK.LE (KK.Cardinality ran_r, KK.Num explicit_max) else raise TOO_SMALL ("Nitpick_Kodkod.sel_axioms_for_constr", "\"bits\" value " ^ string_of_int bits ^ " too small for \"max\"") in max_axiom :: map (sel_axiom_for_sel ext_ctxt j0 kk rel_table ran_r constr) (index_seq 0 (num_sels_for_constr_type (snd const))) end end (* extended_context -> int -> int -> kodkod_constrs -> nut NameTable.table -> dtype_spec -> KK.formula list *) fun sel_axioms_for_datatype ext_ctxt bits j0 kk rel_table ({constrs, ...} : dtype_spec) = maps (sel_axioms_for_constr ext_ctxt bits j0 kk rel_table) constrs (* extended_context -> kodkod_constrs -> nut NameTable.table -> constr_spec -> KK.formula list *) fun uniqueness_axiom_for_constr ext_ctxt ({kk_all, kk_implies, kk_and, kk_rel_eq, kk_lone, kk_join, ...} : kodkod_constrs) rel_table ({const, ...} : constr_spec) = let (* KK.rel_expr -> KK.formula *) fun conjunct_for_sel r = kk_rel_eq (kk_join (KK.Var (1, 0)) r) (kk_join (KK.Var (1, 1)) r) val num_sels = num_sels_for_constr_type (snd const) val 
triples = map (const_triple rel_table o boxed_nth_sel_for_constr ext_ctxt const) (~1 upto num_sels - 1) val j0 = case triples |> hd |> #2 of Func (Atom (_, j0), _) => j0 | R => raise REP ("Nitpick_Kodkod.uniqueness_axiom_for_constr", [R]) val set_r = triples |> hd |> #1 in if num_sels = 0 then kk_lone set_r else kk_all (map (KK.DeclOne o rpair set_r o pair 1) [0, 1]) (kk_implies (fold1 kk_and (map (conjunct_for_sel o #1) (tl triples))) (kk_rel_eq (KK.Var (1, 0)) (KK.Var (1, 1)))) end (* extended_context -> kodkod_constrs -> nut NameTable.table -> dtype_spec -> KK.formula list *) fun uniqueness_axioms_for_datatype ext_ctxt kk rel_table ({constrs, ...} : dtype_spec) = map (uniqueness_axiom_for_constr ext_ctxt kk rel_table) constrs (* constr_spec -> int *) fun effective_constr_max ({delta, epsilon, ...} : constr_spec) = epsilon - delta (* int -> kodkod_constrs -> nut NameTable.table -> dtype_spec -> KK.formula list *) fun partition_axioms_for_datatype j0 (kk as {kk_rel_eq, kk_union, ...}) rel_table ({card, constrs, ...} : dtype_spec) = if forall #exclusive constrs then [Integer.sum (map effective_constr_max constrs) = card |> formula_for_bool] else let val rs = map (discr_rel_expr rel_table o #const) constrs in [kk_rel_eq (fold1 kk_union rs) (KK.AtomSeq (card, j0)), kk_disjoint_sets kk rs] end (* extended_context -> int -> int Typtab.table -> kodkod_constrs -> nut NameTable.table -> dtype_spec -> KK.formula list *) -fun other_axioms_for_datatype _ _ _ _ _ {shallow = true, ...} = [] +fun other_axioms_for_datatype _ _ _ _ _ {deep = false, ...} = [] | other_axioms_for_datatype ext_ctxt bits ofs kk rel_table (dtype as {typ, ...}) = let val j0 = offset_of_type ofs typ in sel_axioms_for_datatype ext_ctxt bits j0 kk rel_table dtype @ uniqueness_axioms_for_datatype ext_ctxt kk rel_table dtype @ partition_axioms_for_datatype j0 kk rel_table dtype end (* extended_context -> int -> int Typtab.table -> kodkod_constrs -> nut NameTable.table -> dtype_spec list -> KK.formula list *) fun declarative_axioms_for_datatypes ext_ctxt bits ofs kk rel_table dtypes = acyclicity_axioms_for_datatypes ext_ctxt kk rel_table dtypes @ maps (other_axioms_for_datatype ext_ctxt bits ofs kk rel_table) dtypes (* int -> int Typtab.table -> bool -> kodkod_constrs -> nut -> KK.formula *) fun kodkod_formula_from_nut bits ofs liberal (kk as {kk_all, kk_exist, kk_formula_let, kk_formula_if, kk_or, kk_not, kk_iff, kk_implies, kk_and, kk_subset, kk_rel_eq, kk_no, kk_one, kk_some, kk_rel_let, kk_rel_if, kk_union, kk_difference, kk_intersect, kk_product, kk_join, kk_closure, kk_comprehension, kk_project, kk_project_seq, kk_not3, kk_nat_less, kk_int_less, ...}) u = let val main_j0 = offset_of_type ofs bool_T val bool_j0 = main_j0 val bool_atom_R = Atom (2, main_j0) val false_atom = KK.Atom bool_j0 val true_atom = KK.Atom (bool_j0 + 1) (* polarity -> int -> KK.rel_expr -> KK.formula *) fun formula_from_opt_atom polar j0 r = case polar of Neg => kk_not (kk_rel_eq r (KK.Atom j0)) | _ => kk_rel_eq r (KK.Atom (j0 + 1)) (* int -> KK.rel_expr -> KK.formula *) val formula_from_atom = formula_from_opt_atom Pos (* KK.formula -> KK.formula -> KK.formula *) fun kk_notimplies f1 f2 = kk_and f1 (kk_not f2) (* KK.rel_expr -> KK.rel_expr -> KK.rel_expr *) val kk_or3 = double_rel_rel_let kk (fn r1 => fn r2 => kk_rel_if (kk_subset true_atom (kk_union r1 r2)) true_atom (kk_intersect r1 r2)) val kk_and3 = double_rel_rel_let kk (fn r1 => fn r2 => kk_rel_if (kk_subset false_atom (kk_union r1 r2)) false_atom (kk_intersect r1 r2)) fun kk_notimplies3 r1 r2 = 
kk_and3 r1 (kk_not3 r2) (* int -> KK.rel_expr -> KK.formula list *) val unpack_formulas = map (formula_from_atom bool_j0) oo unpack_vect_in_chunks kk 1 (* (KK.formula -> KK.formula -> KK.formula) -> int -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr *) fun kk_vect_set_op connective k r1 r2 = fold1 kk_product (map2 (atom_from_formula kk bool_j0 oo connective) (unpack_formulas k r1) (unpack_formulas k r2)) (* (KK.formula -> KK.formula -> KK.formula) -> int -> KK.rel_expr -> KK.rel_expr -> KK.formula *) fun kk_vect_set_bool_op connective k r1 r2 = fold1 kk_and (map2 connective (unpack_formulas k r1) (unpack_formulas k r2)) (* nut -> KK.formula *) fun to_f u = case rep_of u of Formula polar => (case u of Cst (False, _, _) => KK.False | Cst (True, _, _) => KK.True | Op1 (Not, _, _, u1) => kk_not (to_f_with_polarity (flip_polarity polar) u1) | Op1 (Finite, _, _, u1) => let val opt1 = is_opt_rep (rep_of u1) in case polar of Neut => if opt1 then raise NUT ("Nitpick_Kodkod.to_f (Finite)", [u]) else KK.True | Pos => formula_for_bool (not opt1) | Neg => KK.True end | Op1 (IsUnknown, _, _, u1) => kk_no (to_r u1) | Op1 (Cast, _, _, u1) => to_f_with_polarity polar u1 | Op2 (All, _, _, u1, u2) => kk_all (untuple to_decl u1) (to_f_with_polarity polar u2) | Op2 (Exist, _, _, u1, u2) => kk_exist (untuple to_decl u1) (to_f_with_polarity polar u2) | Op2 (Or, _, _, u1, u2) => kk_or (to_f_with_polarity polar u1) (to_f_with_polarity polar u2) | Op2 (And, _, _, u1, u2) => kk_and (to_f_with_polarity polar u1) (to_f_with_polarity polar u2) | Op2 (Less, T, Formula polar, u1, u2) => formula_from_opt_atom polar bool_j0 (to_r (Op2 (Less, T, Opt bool_atom_R, u1, u2))) | Op2 (Subset, _, _, u1, u2) => let val dom_T = domain_type (type_of u1) val R1 = rep_of u1 val R2 = rep_of u2 val (dom_R, ran_R) = case min_rep R1 R2 of Func (Unit, R') => (Atom (1, offset_of_type ofs dom_T), R') | Func Rp => Rp | R => (Atom (card_of_domain_from_rep 2 R, offset_of_type ofs dom_T), if is_opt_rep R then Opt bool_atom_R else Formula Neut) val set_R = Func (dom_R, ran_R) in if not (is_opt_rep ran_R) then to_set_bool_op kk_implies kk_subset u1 u2 else if polar = Neut then raise NUT ("Nitpick_Kodkod.to_f (Subset)", [u]) else let (* FIXME: merge with similar code below *) (* bool -> nut -> KK.rel_expr *) fun set_to_r widen u = if widen then kk_difference (full_rel_for_rep dom_R) (kk_join (to_rep set_R u) false_atom) else kk_join (to_rep set_R u) true_atom val widen1 = (polar = Pos andalso is_opt_rep R1) val widen2 = (polar = Neg andalso is_opt_rep R2) in kk_subset (set_to_r widen1 u1) (set_to_r widen2 u2) end end | Op2 (DefEq, _, _, u1, u2) => (case min_rep (rep_of u1) (rep_of u2) of Unit => KK.True | Formula polar => kk_iff (to_f_with_polarity polar u1) (to_f_with_polarity polar u2) | min_R => let (* nut -> nut list *) fun args (Op2 (Apply, _, _, u1, u2)) = u2 :: args u1 | args (Tuple (_, _, us)) = us | args _ = [] val opt_arg_us = filter (is_opt_rep o rep_of) (args u1) in if null opt_arg_us orelse not (is_Opt min_R) orelse is_eval_name u1 then fold (kk_or o (kk_no o to_r)) opt_arg_us (kk_rel_eq (to_rep min_R u1) (to_rep min_R u2)) else kk_subset (to_rep min_R u1) (to_rep min_R u2) end) | Op2 (Eq, T, R, u1, u2) => (case min_rep (rep_of u1) (rep_of u2) of Unit => KK.True | Formula polar => kk_iff (to_f_with_polarity polar u1) (to_f_with_polarity polar u2) | min_R => if is_opt_rep min_R then if polar = Neut then (* continuation of hackish optimization *) kk_rel_eq (to_rep min_R u1) (to_rep min_R u2) else if is_Cst Unrep u1 then to_could_be_unrep 
(polar = Neg) u2 else if is_Cst Unrep u2 then to_could_be_unrep (polar = Neg) u1 else let val r1 = to_rep min_R u1 val r2 = to_rep min_R u2 val both_opt = forall (is_opt_rep o rep_of) [u1, u2] in (if polar = Pos then if not both_opt then kk_rel_eq r1 r2 else if is_lone_rep min_R andalso arity_of_rep min_R = 1 then kk_some (kk_intersect r1 r2) else raise SAME () else if is_lone_rep min_R then if arity_of_rep min_R = 1 then kk_subset (kk_product r1 r2) KK.Iden else if not both_opt then (r1, r2) |> is_opt_rep (rep_of u2) ? swap |-> kk_subset else raise SAME () else raise SAME ()) handle SAME () => formula_from_opt_atom polar bool_j0 (to_guard [u1, u2] bool_atom_R (rel_expr_from_formula kk bool_atom_R (kk_rel_eq r1 r2))) end else let val r1 = to_rep min_R u1 val r2 = to_rep min_R u2 in if is_one_rep min_R then let val rs1 = unpack_products r1 val rs2 = unpack_products r2 in if length rs1 = length rs2 andalso map KK.arity_of_rel_expr rs1 = map KK.arity_of_rel_expr rs2 then fold1 kk_and (map2 kk_subset rs1 rs2) else kk_subset r1 r2 end else kk_rel_eq r1 r2 end) | Op2 (The, T, _, u1, u2) => to_f_with_polarity polar (Op2 (The, T, Opt (Atom (2, bool_j0)), u1, u2)) | Op2 (Eps, T, _, u1, u2) => to_f_with_polarity polar (Op2 (Eps, T, Opt (Atom (2, bool_j0)), u1, u2)) | Op2 (Apply, T, _, u1, u2) => (case (polar, rep_of u1) of (Neg, Func (R, Formula Neut)) => kk_subset (to_opt R u2) (to_r u1) | _ => to_f_with_polarity polar (Op2 (Apply, T, Opt (Atom (2, offset_of_type ofs T)), u1, u2))) | Op3 (Let, _, _, u1, u2, u3) => kk_formula_let [to_expr_assign u1 u2] (to_f_with_polarity polar u3) | Op3 (If, _, _, u1, u2, u3) => kk_formula_if (to_f u1) (to_f_with_polarity polar u2) (to_f_with_polarity polar u3) | FormulaReg (j, _, _) => KK.FormulaReg j | _ => raise NUT ("Nitpick_Kodkod.to_f", [u])) | Atom (2, j0) => formula_from_atom j0 (to_r u) | _ => raise NUT ("Nitpick_Kodkod.to_f", [u]) (* polarity -> nut -> KK.formula *) and to_f_with_polarity polar u = case rep_of u of Formula _ => to_f u | Atom (2, j0) => formula_from_atom j0 (to_r u) | Opt (Atom (2, j0)) => formula_from_opt_atom polar j0 (to_r u) | _ => raise NUT ("Nitpick_Kodkod.to_f_with_polarity", [u]) (* nut -> KK.rel_expr *) and to_r u = case u of Cst (False, _, Atom _) => false_atom | Cst (True, _, Atom _) => true_atom | Cst (Iden, T, Func (Struct [R1, R2], Formula Neut)) => if R1 = R2 andalso arity_of_rep R1 = 1 then kk_intersect KK.Iden (kk_product (full_rel_for_rep R1) KK.Univ) else let val schema1 = atom_schema_of_rep R1 val schema2 = atom_schema_of_rep R2 val arity1 = length schema1 val arity2 = length schema2 val r1 = fold1 kk_product (unary_var_seq 0 arity1) val r2 = fold1 kk_product (unary_var_seq arity1 arity2) val min_R = min_rep R1 R2 in kk_comprehension (decls_for_atom_schema 0 (schema1 @ schema2)) (kk_rel_eq (rel_expr_from_rel_expr kk min_R R1 r1) (rel_expr_from_rel_expr kk min_R R2 r2)) end | Cst (Iden, T, Func (Atom (1, j0), Formula Neut)) => KK.Atom j0 | Cst (Iden, T as Type ("fun", [T1, _]), R as Func (R1, _)) => to_rep R (Cst (Iden, T, Func (one_rep ofs T1 R1, Formula Neut))) | Cst (Num j, T, R) => if is_word_type T then atom_from_int_expr kk T R (KK.Num j) else if T = int_T then case atom_for_int (card_of_rep R, offset_of_type ofs int_T) j of ~1 => if is_opt_rep R then KK.None else raise NUT ("Nitpick_Kodkod.to_r (Num)", [u]) | j' => KK.Atom j' else if j < card_of_rep R then KK.Atom (j + offset_of_type ofs T) else if is_opt_rep R then KK.None else raise NUT ("Nitpick_Kodkod.to_r (Num)", [u]) | Cst (Unknown, _, R) => 
empty_rel_for_rep R | Cst (Unrep, _, R) => empty_rel_for_rep R | Cst (Suc, T as @{typ "unsigned_bit word => unsigned_bit word"}, R) => to_bit_word_unary_op T R (curry KK.Add (KK.Num 1)) | Cst (Suc, @{typ "nat => nat"}, Func (Atom x, _)) => kk_intersect (KK.Rel suc_rel) (kk_product KK.Univ (KK.AtomSeq x)) | Cst (Suc, _, Func (Atom x, _)) => KK.Rel suc_rel | Cst (Add, Type ("fun", [@{typ nat}, _]), _) => KK.Rel nat_add_rel | Cst (Add, Type ("fun", [@{typ int}, _]), _) => KK.Rel int_add_rel | Cst (Add, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_binary_op T R NONE (SOME (curry KK.Add)) | Cst (Add, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => kk_implies (KK.LE (KK.Num 0, KK.BitXor (i1, i2))) (KK.LE (KK.Num 0, KK.BitXor (i2, i3))))) (SOME (curry KK.Add)) | Cst (Subtract, Type ("fun", [@{typ nat}, _]), _) => KK.Rel nat_subtract_rel | Cst (Subtract, Type ("fun", [@{typ int}, _]), _) => KK.Rel int_subtract_rel | Cst (Subtract, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_binary_op T R NONE (SOME (fn i1 => fn i2 => KK.IntIf (KK.LE (i1, i2), KK.Num 0, KK.Sub (i1, i2)))) | Cst (Subtract, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => kk_implies (KK.LT (KK.BitXor (i1, i2), KK.Num 0)) (KK.LT (KK.BitXor (i2, i3), KK.Num 0)))) (SOME (curry KK.Sub)) | Cst (Multiply, Type ("fun", [@{typ nat}, _]), _) => KK.Rel nat_multiply_rel | Cst (Multiply, Type ("fun", [@{typ int}, _]), _) => KK.Rel int_multiply_rel | Cst (Multiply, T as Type ("fun", [Type (@{type_name word}, [bit_T]), _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => kk_or (KK.IntEq (i2, KK.Num 0)) (KK.IntEq (KK.Div (i3, i2), i1) |> bit_T = @{typ signed_bit} ? 
kk_and (KK.LE (KK.Num 0, foldl1 KK.BitAnd [i1, i2, i3]))))) (SOME (curry KK.Mult)) | Cst (Divide, Type ("fun", [@{typ nat}, _]), _) => KK.Rel nat_divide_rel | Cst (Divide, Type ("fun", [@{typ int}, _]), _) => KK.Rel int_divide_rel | Cst (Divide, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_binary_op T R NONE (SOME (fn i1 => fn i2 => KK.IntIf (KK.IntEq (i2, KK.Num 0), KK.Num 0, KK.Div (i1, i2)))) | Cst (Divide, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => KK.LE (KK.Num 0, foldl1 KK.BitAnd [i1, i2, i3]))) (SOME (fn i1 => fn i2 => KK.IntIf (kk_and (KK.LT (i1, KK.Num 0)) (KK.LT (KK.Num 0, i2)), KK.Sub (KK.Div (KK.Add (i1, KK.Num 1), i2), KK.Num 1), KK.IntIf (kk_and (KK.LT (KK.Num 0, i1)) (KK.LT (i2, KK.Num 0)), KK.Sub (KK.Div (KK.Sub (i1, KK.Num 1), i2), KK.Num 1), KK.IntIf (KK.IntEq (i2, KK.Num 0), KK.Num 0, KK.Div (i1, i2)))))) | Cst (Gcd, _, _) => KK.Rel gcd_rel | Cst (Lcm, _, _) => KK.Rel lcm_rel | Cst (Fracs, _, Func (Atom (1, _), _)) => KK.None | Cst (Fracs, _, Func (Struct _, _)) => kk_project_seq (KK.Rel norm_frac_rel) 2 2 | Cst (NormFrac, _, _) => KK.Rel norm_frac_rel | Cst (NatToInt, Type ("fun", [@{typ nat}, _]), Func (Atom _, Atom _)) => KK.Iden | Cst (NatToInt, Type ("fun", [@{typ nat}, _]), Func (Atom (nat_k, nat_j0), Opt (Atom (int_k, int_j0)))) => if nat_j0 = int_j0 then kk_intersect KK.Iden (kk_product (KK.AtomSeq (max_int_for_card int_k + 1, nat_j0)) KK.Univ) else raise BAD ("Nitpick_Kodkod.to_r (NatToInt)", "\"nat_j0 <> int_j0\"") | Cst (NatToInt, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_unary_op T R I | Cst (IntToNat, Type ("fun", [@{typ int}, _]), Func (Atom (int_k, int_j0), nat_R)) => let val abs_card = max_int_for_card int_k + 1 val (nat_k, nat_j0) = the_single (atom_schema_of_rep nat_R) val overlap = Int.min (nat_k, abs_card) in if nat_j0 = int_j0 then kk_union (kk_product (KK.AtomSeq (int_k - abs_card, int_j0 + abs_card)) (KK.Atom nat_j0)) (kk_intersect KK.Iden (kk_product (KK.AtomSeq (overlap, int_j0)) KK.Univ)) else raise BAD ("Nitpick_Kodkod.to_r (IntToNat)", "\"nat_j0 <> int_j0\"") end | Cst (IntToNat, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => to_bit_word_unary_op T R (fn i => KK.IntIf (KK.LE (i, KK.Num 0), KK.Num 0, i)) | Op1 (Not, _, R, u1) => kk_not3 (to_rep R u1) | Op1 (Finite, _, Opt (Atom _), _) => KK.None | Op1 (Converse, T, R, u1) => let val (b_T, a_T) = HOLogic.dest_prodT (domain_type T) val (b_R, a_R) = case R of Func (Struct [R1, R2], _) => (R1, R2) | Func (R1, _) => if card_of_rep R1 <> 1 then raise REP ("Nitpick_Kodkod.to_r (Converse)", [R]) else pairself (Atom o pair 1 o offset_of_type ofs) (b_T, a_T) | _ => raise REP ("Nitpick_Kodkod.to_r (Converse)", [R]) val body_R = body_rep R val a_arity = arity_of_rep a_R val b_arity = arity_of_rep b_R val ab_arity = a_arity + b_arity val body_arity = arity_of_rep body_R in kk_project (to_rep (Func (Struct [a_R, b_R], body_R)) u1) (map KK.Num (index_seq a_arity b_arity @ index_seq 0 a_arity @ index_seq ab_arity body_arity)) |> rel_expr_from_rel_expr kk R (Func (Struct [b_R, a_R], body_R)) end | Op1 (Closure, _, R, u1) => if is_opt_rep R then let val T1 = type_of u1 val R' = rep_to_binary_rel_rep ofs T1 (unopt_rep (rep_of u1)) val R'' = opt_rep ofs T1 R' in single_rel_rel_let kk (fn r => let val true_r = kk_closure (kk_join r true_atom) val full_r = full_rel_for_rep R' val false_r = kk_difference full_r (kk_closure (kk_difference full_r (kk_join r false_atom))) in 
rel_expr_from_rel_expr kk R R'' (kk_union (kk_product true_r true_atom) (kk_product false_r false_atom)) end) (to_rep R'' u1) end else let val R' = rep_to_binary_rel_rep ofs (type_of u1) (rep_of u1) in rel_expr_from_rel_expr kk R R' (kk_closure (to_rep R' u1)) end | Op1 (SingletonSet, _, Func (R1, Opt _), Cst (Unrep, _, _)) => (if R1 = Unit then I else kk_product (full_rel_for_rep R1)) false_atom | Op1 (SingletonSet, _, R, u1) => (case R of Func (R1, Formula Neut) => to_rep R1 u1 | Func (Unit, Opt R) => to_guard [u1] R true_atom | Func (R1, R2 as Opt _) => single_rel_rel_let kk (fn r => kk_rel_if (kk_no r) (empty_rel_for_rep R) (rel_expr_to_func kk R1 bool_atom_R (Func (R1, Formula Neut)) r)) (to_opt R1 u1) | _ => raise NUT ("Nitpick_Kodkod.to_r (SingletonSet)", [u])) | Op1 (Tha, T, R, u1) => if is_opt_rep R then kk_join (to_rep (Func (unopt_rep R, Opt bool_atom_R)) u1) true_atom else to_rep (Func (R, Formula Neut)) u1 | Op1 (First, T, R, u1) => to_nth_pair_sel 0 T R u1 | Op1 (Second, T, R, u1) => to_nth_pair_sel 1 T R u1 | Op1 (Cast, _, R, u1) => ((case rep_of u1 of Formula _ => (case unopt_rep R of Atom (2, j0) => atom_from_formula kk j0 (to_f u1) | _ => raise SAME ()) | _ => raise SAME ()) handle SAME () => rel_expr_from_rel_expr kk R (rep_of u1) (to_r u1)) | Op2 (All, T, R as Opt _, u1, u2) => to_r (Op1 (Not, T, R, Op2 (Exist, T, R, u1, Op1 (Not, T, rep_of u2, u2)))) | Op2 (Exist, T, Opt _, u1, u2) => let val rs1 = untuple to_decl u1 in if not (is_opt_rep (rep_of u2)) then kk_rel_if (kk_exist rs1 (to_f u2)) true_atom KK.None else let val r2 = to_r u2 in kk_union (kk_rel_if (kk_exist rs1 (kk_rel_eq r2 true_atom)) true_atom KK.None) (kk_rel_if (kk_all rs1 (kk_rel_eq r2 false_atom)) false_atom KK.None) end end | Op2 (Or, _, _, u1, u2) => if is_opt_rep (rep_of u1) then kk_rel_if (to_f u2) true_atom (to_r u1) else kk_rel_if (to_f u1) true_atom (to_r u2) | Op2 (And, _, _, u1, u2) => if is_opt_rep (rep_of u1) then kk_rel_if (to_f u2) (to_r u1) false_atom else kk_rel_if (to_f u1) (to_r u2) false_atom | Op2 (Less, _, _, u1, u2) => (case type_of u1 of @{typ nat} => if is_Cst Unrep u1 then to_compare_with_unrep u2 false_atom else if is_Cst Unrep u2 then to_compare_with_unrep u1 true_atom else kk_nat_less (to_integer u1) (to_integer u2) | @{typ int} => kk_int_less (to_integer u1) (to_integer u2) | _ => double_rel_rel_let kk (fn r1 => fn r2 => kk_rel_if (fold kk_and (map_filter (fn (u, r) => if is_opt_rep (rep_of u) then SOME (kk_some r) else NONE) [(u1, r1), (u2, r2)]) KK.True) (atom_from_formula kk bool_j0 (KK.LT (pairself (int_expr_from_atom kk (type_of u1)) (r1, r2)))) KK.None) (to_r u1) (to_r u2)) | Op2 (The, T, R, u1, u2) => if is_opt_rep R then let val r1 = to_opt (Func (unopt_rep R, bool_atom_R)) u1 in kk_rel_if (kk_one (kk_join r1 true_atom)) (kk_join r1 true_atom) (kk_rel_if (kk_or (kk_some (kk_join r1 true_atom)) (kk_subset (full_rel_for_rep R) (kk_join r1 false_atom))) (to_rep R u2) (empty_rel_for_rep R)) end else let val r1 = to_rep (Func (R, Formula Neut)) u1 in kk_rel_if (kk_one r1) r1 (to_rep R u2) end | Op2 (Eps, T, R, u1, u2) => if is_opt_rep (rep_of u1) then let val r1 = to_rep (Func (unopt_rep R, Opt bool_atom_R)) u1 val r2 = to_rep R u2 in kk_union (kk_rel_if (kk_one (kk_join r1 true_atom)) (kk_join r1 true_atom) (empty_rel_for_rep R)) (kk_rel_if (kk_or (kk_subset r2 (kk_join r1 true_atom)) (kk_subset (full_rel_for_rep R) (kk_join r1 false_atom))) r2 (empty_rel_for_rep R)) end else let val r1 = to_rep (Func (unopt_rep R, Formula Neut)) u1 val r2 = to_rep R u2 in kk_union 
(kk_rel_if (kk_one r1) r1 (empty_rel_for_rep R)) (kk_rel_if (kk_or (kk_no r1) (kk_subset r2 r1)) r2 (empty_rel_for_rep R)) end | Op2 (Triad, T, Opt (Atom (2, j0)), u1, u2) => let val f1 = to_f u1 val f2 = to_f u2 in if f1 = f2 then atom_from_formula kk j0 f1 else kk_union (kk_rel_if f1 true_atom KK.None) (kk_rel_if f2 KK.None false_atom) end | Op2 (Union, _, R, u1, u2) => to_set_op kk_or kk_or3 kk_union kk_union kk_intersect false R u1 u2 | Op2 (SetDifference, _, R, u1, u2) => to_set_op kk_notimplies kk_notimplies3 kk_difference kk_intersect kk_union true R u1 u2 | Op2 (Intersect, _, R, u1, u2) => to_set_op kk_and kk_and3 kk_intersect kk_intersect kk_union false R u1 u2 | Op2 (Composition, _, R, u1, u2) => let val (a_T, b_T) = HOLogic.dest_prodT (domain_type (type_of u1)) val (_, c_T) = HOLogic.dest_prodT (domain_type (type_of u2)) val ab_k = card_of_domain_from_rep 2 (rep_of u1) val bc_k = card_of_domain_from_rep 2 (rep_of u2) val ac_k = card_of_domain_from_rep 2 R val a_k = exact_root 2 (ac_k * ab_k div bc_k) val b_k = exact_root 2 (ab_k * bc_k div ac_k) val c_k = exact_root 2 (bc_k * ac_k div ab_k) val a_R = Atom (a_k, offset_of_type ofs a_T) val b_R = Atom (b_k, offset_of_type ofs b_T) val c_R = Atom (c_k, offset_of_type ofs c_T) val body_R = body_rep R in (case body_R of Formula Neut => kk_join (to_rep (Func (Struct [a_R, b_R], Formula Neut)) u1) (to_rep (Func (Struct [b_R, c_R], Formula Neut)) u2) | Opt (Atom (2, _)) => let (* FIXME: merge with similar code above *) (* rep -> rep -> nut -> KK.rel_expr *) fun must R1 R2 u = kk_join (to_rep (Func (Struct [R1, R2], body_R)) u) true_atom fun may R1 R2 u = kk_difference (full_rel_for_rep (Struct [R1, R2])) (kk_join (to_rep (Func (Struct [R1, R2], body_R)) u) false_atom) in kk_union (kk_product (kk_join (must a_R b_R u1) (must b_R c_R u2)) true_atom) (kk_product (kk_difference (full_rel_for_rep (Struct [a_R, c_R])) (kk_join (may a_R b_R u1) (may b_R c_R u2))) false_atom) end | _ => raise NUT ("Nitpick_Kodkod.to_r (Composition)", [u])) |> rel_expr_from_rel_expr kk R (Func (Struct [a_R, c_R], body_R)) end | Op2 (Product, T, R, u1, u2) => let val (a_T, b_T) = HOLogic.dest_prodT (domain_type T) val a_k = card_of_domain_from_rep 2 (rep_of u1) val b_k = card_of_domain_from_rep 2 (rep_of u2) val a_R = Atom (a_k, offset_of_type ofs a_T) val b_R = Atom (b_k, offset_of_type ofs b_T) val body_R = body_rep R in (case body_R of Formula Neut => kk_product (to_rep (Func (a_R, Formula Neut)) u1) (to_rep (Func (b_R, Formula Neut)) u2) | Opt (Atom (2, _)) => let (* KK.rel_expr -> rep -> nut -> KK.rel_expr *) fun do_nut r R u = kk_join (to_rep (Func (R, body_R)) u) r (* KK.rel_expr -> KK.rel_expr *) fun do_term r = kk_product (kk_product (do_nut r a_R u1) (do_nut r b_R u2)) r in kk_union (do_term true_atom) (do_term false_atom) end | _ => raise NUT ("Nitpick_Kodkod.to_r (Product)", [u])) |> rel_expr_from_rel_expr kk R (Func (Struct [a_R, b_R], body_R)) end | Op2 (Image, T, R, u1, u2) => (case (rep_of u1, rep_of u2) of (Func (R11, R12), Func (R21, Formula Neut)) => if R21 = R11 andalso is_lone_rep R12 then let (* KK.rel_expr -> KK.rel_expr *) fun big_join r = kk_n_fold_join kk false R21 R12 r (to_r u1) val core_r = big_join (to_r u2) val core_R = Func (R12, Formula Neut) in if is_opt_rep R12 then let val schema = atom_schema_of_rep R21 val decls = decls_for_atom_schema ~1 schema val vars = unary_var_seq ~1 (length decls) val f = kk_some (big_join (fold1 kk_product vars)) in kk_rel_if (kk_all decls f) (rel_expr_from_rel_expr kk R core_R core_r) 
(rel_expr_from_rel_expr kk R (opt_rep ofs T core_R) (kk_product core_r true_atom)) end else rel_expr_from_rel_expr kk R core_R core_r end else raise NUT ("Nitpick_Kodkod.to_r (Image)", [u1, u2]) | _ => raise NUT ("Nitpick_Kodkod.to_r (Image)", [u1, u2])) | Op2 (Apply, @{typ nat}, _, Op2 (Apply, _, _, Cst (Subtract, _, _), u1), u2) => if is_Cst Unrep u2 andalso not (is_opt_rep (rep_of u1)) then KK.Atom (offset_of_type ofs nat_T) else fold kk_join (map to_integer [u1, u2]) (KK.Rel nat_subtract_rel) | Op2 (Apply, _, R, u1, u2) => if is_Cst Unrep u2 andalso is_set_type (type_of u1) andalso is_FreeName u1 then false_atom else to_apply R u1 u2 | Op2 (Lambda, T, R as Opt (Atom (1, j0)), u1, u2) => to_guard [u1, u2] R (KK.Atom j0) | Op2 (Lambda, T, Func (_, Formula Neut), u1, u2) => kk_comprehension (untuple to_decl u1) (to_f u2) | Op2 (Lambda, T, Func (_, R2), u1, u2) => let val dom_decls = untuple to_decl u1 val ran_schema = atom_schema_of_rep R2 val ran_decls = decls_for_atom_schema ~1 ran_schema val ran_vars = unary_var_seq ~1 (length ran_decls) in kk_comprehension (dom_decls @ ran_decls) (kk_subset (fold1 kk_product ran_vars) (to_rep R2 u2)) end | Op3 (Let, _, R, u1, u2, u3) => kk_rel_let [to_expr_assign u1 u2] (to_rep R u3) | Op3 (If, _, R, u1, u2, u3) => if is_opt_rep (rep_of u1) then tripl_rel_rel_let kk (fn r1 => fn r2 => fn r3 => let val empty_r = empty_rel_for_rep R in fold1 kk_union [kk_rel_if (kk_rel_eq r1 true_atom) r2 empty_r, kk_rel_if (kk_rel_eq r1 false_atom) r3 empty_r, kk_rel_if (kk_rel_eq r2 r3) (if inline_rel_expr r2 then r2 else r3) empty_r] end) (to_r u1) (to_rep R u2) (to_rep R u3) else kk_rel_if (to_f u1) (to_rep R u2) (to_rep R u3) | Tuple (_, R, us) => (case unopt_rep R of Struct Rs => to_product Rs us | Vect (k, R) => to_product (replicate k R) us | Atom (1, j0) => (case filter (not_equal Unit o rep_of) us of [] => KK.Atom j0 | us' => kk_rel_if (kk_some (fold1 kk_product (map to_r us'))) (KK.Atom j0) KK.None) | _ => raise NUT ("Nitpick_Kodkod.to_r (Tuple)", [u])) | Construct ([u'], _, _, []) => to_r u' | Construct (discr_u :: sel_us, T, R, arg_us) => let val set_rs = map2 (fn sel_u => fn arg_u => let val (R1, R2) = dest_Func (rep_of sel_u) val sel_r = to_r sel_u val arg_r = to_opt R2 arg_u in if is_one_rep R2 then kk_n_fold_join kk true R2 R1 arg_r (kk_project sel_r (flip_nums (arity_of_rep R2))) else kk_comprehension [KK.DeclOne ((1, ~1), to_r discr_u)] (kk_rel_eq (kk_join (KK.Var (1, ~1)) sel_r) arg_r) end) sel_us arg_us in fold1 kk_intersect set_rs end | BoundRel (x, _, _, _) => KK.Var x | FreeRel (x, _, _, _) => KK.Rel x | RelReg (j, _, R) => KK.RelReg (arity_of_rep R, j) | u => raise NUT ("Nitpick_Kodkod.to_r", [u]) (* nut -> KK.decl *) and to_decl (BoundRel (x, _, R, _)) = KK.DeclOne (x, KK.AtomSeq (the_single (atom_schema_of_rep R))) | to_decl u = raise NUT ("Nitpick_Kodkod.to_decl", [u]) (* nut -> KK.expr_assign *) and to_expr_assign (FormulaReg (j, _, R)) u = KK.AssignFormulaReg (j, to_f u) | to_expr_assign (RelReg (j, _, R)) u = KK.AssignRelReg ((arity_of_rep R, j), to_r u) | to_expr_assign u1 _ = raise NUT ("Nitpick_Kodkod.to_expr_assign", [u1]) (* int * int -> nut -> KK.rel_expr *) and to_atom (x as (k, j0)) u = case rep_of u of Formula _ => atom_from_formula kk j0 (to_f u) | Unit => if k = 1 then KK.Atom j0 else raise NUT ("Nitpick_Kodkod.to_atom", [u]) | R => atom_from_rel_expr kk x R (to_r u) (* rep list -> nut -> KK.rel_expr *) and to_struct Rs u = case rep_of u of Unit => full_rel_for_rep (Struct Rs) | R' => struct_from_rel_expr kk Rs R' (to_r u) (* 
int -> rep -> nut -> KK.rel_expr *) and to_vect k R u = case rep_of u of Unit => full_rel_for_rep (Vect (k, R)) | R' => vect_from_rel_expr kk k R R' (to_r u) (* rep -> rep -> nut -> KK.rel_expr *) and to_func R1 R2 u = case rep_of u of Unit => full_rel_for_rep (Func (R1, R2)) | R' => rel_expr_to_func kk R1 R2 R' (to_r u) (* rep -> nut -> KK.rel_expr *) and to_opt R u = let val old_R = rep_of u in if is_opt_rep old_R then rel_expr_from_rel_expr kk (Opt R) old_R (to_r u) else to_rep R u end (* rep -> nut -> KK.rel_expr *) and to_rep (Atom x) u = to_atom x u | to_rep (Struct Rs) u = to_struct Rs u | to_rep (Vect (k, R)) u = to_vect k R u | to_rep (Func (R1, R2)) u = to_func R1 R2 u | to_rep (Opt R) u = to_opt R u | to_rep R _ = raise REP ("Nitpick_Kodkod.to_rep", [R]) (* nut -> KK.rel_expr *) and to_integer u = to_opt (one_rep ofs (type_of u) (rep_of u)) u (* nut list -> rep -> KK.rel_expr -> KK.rel_expr *) and to_guard guard_us R r = let val unpacked_rs = unpack_joins r val plain_guard_rs = map to_r (filter (is_Opt o rep_of) guard_us) |> filter_out (member (op =) unpacked_rs) val func_guard_us = filter ((is_Func andf is_opt_rep) o rep_of) guard_us val func_guard_rs = map to_r func_guard_us val guard_fs = map kk_no plain_guard_rs @ map2 (kk_not oo kk_n_ary_function kk) (map (unopt_rep o rep_of) func_guard_us) func_guard_rs in if null guard_fs then r else kk_rel_if (fold1 kk_or guard_fs) (empty_rel_for_rep R) r end (* rep -> rep -> KK.rel_expr -> int -> KK.rel_expr *) and to_project new_R old_R r j0 = rel_expr_from_rel_expr kk new_R old_R (kk_project_seq r j0 (arity_of_rep old_R)) (* rep list -> nut list -> KK.rel_expr *) and to_product Rs us = case map (uncurry to_opt) (filter (not_equal Unit o fst) (Rs ~~ us)) of [] => raise REP ("Nitpick_Kodkod.to_product", Rs) | rs => fold1 kk_product rs (* int -> typ -> rep -> nut -> KK.rel_expr *) and to_nth_pair_sel n res_T res_R u = case u of Tuple (_, _, us) => to_rep res_R (nth us n) | _ => let val R = rep_of u val (a_T, b_T) = HOLogic.dest_prodT (type_of u) val Rs = case unopt_rep R of Struct (Rs as [_, _]) => Rs | _ => let val res_card = card_of_rep res_R val other_card = card_of_rep R div res_card val (a_card, b_card) = (res_card, other_card) |> n = 1 ? 
swap in [Atom (a_card, offset_of_type ofs a_T), Atom (b_card, offset_of_type ofs b_T)] end val nth_R = nth Rs n val j0 = if n = 0 then 0 else arity_of_rep (hd Rs) in case arity_of_rep nth_R of 0 => to_guard [u] res_R (to_rep res_R (Cst (Unity, res_T, Unit))) | arity => to_project res_R nth_R (to_rep (Opt (Struct Rs)) u) j0 end (* (KK.formula -> KK.formula -> KK.formula) -> (KK.rel_expr -> KK.rel_expr -> KK.formula) -> nut -> nut -> KK.formula *) and to_set_bool_op connective set_oper u1 u2 = let val min_R = min_rep (rep_of u1) (rep_of u2) val r1 = to_rep min_R u1 val r2 = to_rep min_R u2 in case min_R of Vect (k, Atom _) => kk_vect_set_bool_op connective k r1 r2 | Func (R1, Formula Neut) => set_oper r1 r2 | Func (Unit, Atom (2, j0)) => connective (formula_from_atom j0 r1) (formula_from_atom j0 r2) | Func (R1, Atom _) => set_oper (kk_join r1 true_atom) (kk_join r2 true_atom) | _ => raise REP ("Nitpick_Kodkod.to_set_bool_op", [min_R]) end (* (KK.formula -> KK.formula -> KK.formula) -> (KK.rel_expr -> KK.rel_expr -> KK.rel_expr) -> (KK.rel_expr -> KK.rel_expr -> KK.formula) -> (KK.rel_expr -> KK.rel_expr -> KK.formula) -> (KK.rel_expr -> KK.rel_expr -> KK.formula) -> bool -> rep -> nut -> nut -> KK.rel_expr *) and to_set_op connective connective3 set_oper true_set_oper false_set_oper neg_second R u1 u2 = let val min_R = min_rep (rep_of u1) (rep_of u2) val r1 = to_rep min_R u1 val r2 = to_rep min_R u2 val unopt_R = unopt_rep R in rel_expr_from_rel_expr kk unopt_R (unopt_rep min_R) (case min_R of Opt (Vect (k, Atom _)) => kk_vect_set_op connective k r1 r2 | Vect (k, Atom _) => kk_vect_set_op connective k r1 r2 | Func (_, Formula Neut) => set_oper r1 r2 | Func (Unit, _) => connective3 r1 r2 | Func (R1, _) => double_rel_rel_let kk (fn r1 => fn r2 => kk_union (kk_product (true_set_oper (kk_join r1 true_atom) (kk_join r2 (atom_for_bool bool_j0 (not neg_second)))) true_atom) (kk_product (false_set_oper (kk_join r1 false_atom) (kk_join r2 (atom_for_bool bool_j0 neg_second))) false_atom)) r1 r2 | _ => raise REP ("Nitpick_Kodkod.to_set_op", [min_R])) end (* typ -> rep -> (KK.int_expr -> KK.int_expr) -> KK.rel_expr *) and to_bit_word_unary_op T R oper = let val Ts = strip_type T ||> single |> op @ (* int -> KK.int_expr *) fun int_arg j = int_expr_from_atom kk (nth Ts j) (KK.Var (1, j)) in kk_comprehension (decls_for_atom_schema 0 (atom_schema_of_rep R)) (KK.FormulaLet (map (fn j => KK.AssignIntReg (j, int_arg j)) (0 upto 1), KK.IntEq (KK.IntReg 1, oper (KK.IntReg 0)))) end (* typ -> rep -> (KK.int_expr -> KK.int_expr -> KK.int_expr -> bool) option -> (KK.int_expr -> KK.int_expr -> KK.int_expr) option -> KK.rel_expr *) and to_bit_word_binary_op T R opt_guard opt_oper = let val Ts = strip_type T ||> single |> op @ (* int -> KK.int_expr *) fun int_arg j = int_expr_from_atom kk (nth Ts j) (KK.Var (1, j)) in kk_comprehension (decls_for_atom_schema 0 (atom_schema_of_rep R)) (KK.FormulaLet (map (fn j => KK.AssignIntReg (j, int_arg j)) (0 upto 2), fold1 kk_and ((case opt_guard of NONE => [] | SOME guard => [guard (KK.IntReg 0) (KK.IntReg 1) (KK.IntReg 2)]) @ (case opt_oper of NONE => [] | SOME oper => [KK.IntEq (KK.IntReg 2, oper (KK.IntReg 0) (KK.IntReg 1))])))) end (* rep -> rep -> KK.rel_expr -> nut -> KK.rel_expr *) and to_apply res_R func_u arg_u = case unopt_rep (rep_of func_u) of Unit => let val j0 = offset_of_type ofs (type_of func_u) in to_guard [arg_u] res_R (rel_expr_from_rel_expr kk res_R (Atom (1, j0)) (KK.Atom j0)) end | Atom (1, j0) => to_guard [arg_u] res_R (rel_expr_from_rel_expr kk res_R (Atom 
(1, j0)) (to_r func_u)) | Atom (k, j0) => let val dom_card = card_of_rep (rep_of arg_u) val ran_R = Atom (exact_root dom_card k, offset_of_type ofs (range_type (type_of func_u))) in to_apply_vect dom_card ran_R res_R (to_vect dom_card ran_R func_u) arg_u end | Vect (1, R') => to_guard [arg_u] res_R (rel_expr_from_rel_expr kk res_R R' (to_r func_u)) | Vect (k, R') => to_apply_vect k R' res_R (to_r func_u) arg_u | Func (R, Formula Neut) => to_guard [arg_u] res_R (rel_expr_from_formula kk res_R (kk_subset (to_opt R arg_u) (to_r func_u))) | Func (Unit, R2) => to_guard [arg_u] res_R (rel_expr_from_rel_expr kk res_R R2 (to_r func_u)) | Func (R1, R2) => rel_expr_from_rel_expr kk res_R R2 (kk_n_fold_join kk true R1 R2 (to_opt R1 arg_u) (to_r func_u)) |> body_rep R2 = Formula Neut ? to_guard [arg_u] res_R | _ => raise NUT ("Nitpick_Kodkod.to_apply", [func_u]) (* int -> rep -> rep -> KK.rel_expr -> nut -> KK.rel_expr *) and to_apply_vect k R' res_R func_r arg_u = let val arg_R = one_rep ofs (type_of arg_u) (unopt_rep (rep_of arg_u)) val vect_r = vect_from_rel_expr kk k res_R (Vect (k, R')) func_r val vect_rs = unpack_vect_in_chunks kk (arity_of_rep res_R) k vect_r in kk_case_switch kk arg_R res_R (to_opt arg_R arg_u) (all_singletons_for_rep arg_R) vect_rs end (* bool -> nut -> KK.formula *) and to_could_be_unrep neg u = if neg andalso is_opt_rep (rep_of u) then kk_no (to_r u) else KK.False (* nut -> KK.rel_expr -> KK.rel_expr *) and to_compare_with_unrep u r = if is_opt_rep (rep_of u) then kk_rel_if (kk_some (to_r u)) r (empty_rel_for_rep (rep_of u)) else r in to_f_with_polarity Pos u end end; diff --git a/src/HOL/Tools/Nitpick/nitpick_model.ML b/src/HOL/Tools/Nitpick/nitpick_model.ML --- a/src/HOL/Tools/Nitpick/nitpick_model.ML +++ b/src/HOL/Tools/Nitpick/nitpick_model.ML @@ -1,761 +1,772 @@ (* Title: HOL/Tools/Nitpick/nitpick_model.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2009 + Copyright 2009, 2010 Model reconstruction for Nitpick. *) signature NITPICK_MODEL = sig type styp = Nitpick_Util.styp type scope = Nitpick_Scope.scope type rep = Nitpick_Rep.rep type nut = Nitpick_Nut.nut type params = { show_skolems: bool, show_datatypes: bool, show_consts: bool} structure NameTable : TABLE val tuple_list_for_name : nut NameTable.table -> Kodkod.raw_bound list -> nut -> int list list val reconstruct_hol_model : params -> scope -> (term option * int list) list -> styp list -> nut list -> nut list -> nut list -> nut NameTable.table -> Kodkod.raw_bound list -> Pretty.T * bool val prove_hol_model : scope -> Time.time option -> nut list -> nut list -> nut NameTable.table -> Kodkod.raw_bound list -> term -> bool option end; structure Nitpick_Model : NITPICK_MODEL = struct open Nitpick_Util open Nitpick_HOL open Nitpick_Scope open Nitpick_Peephole open Nitpick_Rep open Nitpick_Nut structure KK = Kodkod type params = { show_skolems: bool, show_datatypes: bool, show_consts: bool} val unknown = "?" val unrep = "\<dots>" val maybe_mixfix = "_\<^sup>?" val base_mixfix = "_\<^bsub>base\<^esub>" val step_mixfix = "_\<^bsub>step\<^esub>" val abs_mixfix = "\<guillemotleft>_\<guillemotright>" -val non_opt_name = nitpick_prefix ^ "non_opt" +val opt_flag = nitpick_prefix ^ "opt" +val non_opt_flag = nitpick_prefix ^ "non_opt" (* string -> int -> string *) fun atom_suffix s j = nat_subscript (j + 1) |> (s <> "" andalso Symbol.is_ascii_digit (List.last (explode s))) ?
prefix "\<^isub>," (* string -> typ -> int -> string *) fun atom_name prefix (T as Type (s, _)) j = - prefix ^ substring (shortest_name s, 0, 1) ^ atom_suffix s j + let val s' = shortest_name s in + prefix ^ (if String.isPrefix "\\" s' then s' else substring (s', 0, 1)) ^ + atom_suffix s j + end | atom_name prefix (T as TFree (s, _)) j = prefix ^ perhaps (try (unprefix "'")) s ^ atom_suffix s j | atom_name _ T _ = raise TYPE ("Nitpick_Model.atom_name", [T], []) (* bool -> typ -> int -> term *) fun atom for_auto T j = if for_auto then Free (atom_name (hd (space_explode "." nitpick_prefix)) T j, T) else Const (atom_name "" T j, T) (* term -> real *) fun extract_real_number (Const (@{const_name Algebras.divide}, _) $ t1 $ t2) = real (snd (HOLogic.dest_number t1)) / real (snd (HOLogic.dest_number t2)) | extract_real_number t = real (snd (HOLogic.dest_number t)) (* term * term -> order *) fun nice_term_ord (Abs (_, _, t1), Abs (_, _, t2)) = nice_term_ord (t1, t2) | nice_term_ord tp = Real.compare (pairself extract_real_number tp) handle TERM ("dest_number", _) => case tp of (t11 $ t12, t21 $ t22) => (case nice_term_ord (t11, t21) of EQUAL => nice_term_ord (t12, t22) | ord => ord) | _ => TermOrd.fast_term_ord tp (* nut NameTable.table -> KK.raw_bound list -> nut -> int list list *) fun tuple_list_for_name rel_table bounds name = the (AList.lookup (op =) bounds (the_rel rel_table name)) handle NUT _ => [[]] (* term -> term *) fun unbit_and_unbox_term (Const (@{const_name FunBox}, _) $ t1) = unbit_and_unbox_term t1 | unbit_and_unbox_term (Const (@{const_name PairBox}, Type ("fun", [T1, Type ("fun", [T2, T3])])) $ t1 $ t2) = let val Ts = map unbit_and_unbox_type [T1, T2] in Const (@{const_name Pair}, Ts ---> Type ("*", Ts)) $ unbit_and_unbox_term t1 $ unbit_and_unbox_term t2 end | unbit_and_unbox_term (Const (s, T)) = Const (s, unbit_and_unbox_type T) | unbit_and_unbox_term (t1 $ t2) = unbit_and_unbox_term t1 $ unbit_and_unbox_term t2 | unbit_and_unbox_term (Free (s, T)) = Free (s, unbit_and_unbox_type T) | unbit_and_unbox_term (Var (x, T)) = Var (x, unbit_and_unbox_type T) | unbit_and_unbox_term (Bound j) = Bound j | unbit_and_unbox_term (Abs (s, T, t')) = Abs (s, unbit_and_unbox_type T, unbit_and_unbox_term t') (* typ -> typ -> (typ * typ) * (typ * typ) *) fun factor_out_types (T1 as Type ("*", [T11, T12])) (T2 as Type ("*", [T21, T22])) = let val (n1, n2) = pairself num_factors_in_type (T11, T21) in if n1 = n2 then let val ((T11', opt_T12'), (T21', opt_T22')) = factor_out_types T12 T22 in ((Type ("*", [T11, T11']), opt_T12'), (Type ("*", [T21, T21']), opt_T22')) end else if n1 < n2 then case factor_out_types T1 T21 of (p1, (T21', NONE)) => (p1, (T21', SOME T22)) | (p1, (T21', SOME T22')) => (p1, (T21', SOME (Type ("*", [T22', T22])))) else swap (factor_out_types T2 T1) end | factor_out_types (Type ("*", [T11, T12])) T2 = ((T11, SOME T12), (T2, NONE)) | factor_out_types T1 (Type ("*", [T21, T22])) = ((T1, NONE), (T21, SOME T22)) | factor_out_types T1 T2 = ((T1, NONE), (T2, NONE)) (* bool -> typ -> typ -> (term * term) list -> term *) fun make_plain_fun maybe_opt T1 T2 = let (* typ -> typ -> (term * term) list -> term *) fun aux T1 T2 [] = - Const (if maybe_opt orelse T2 <> bool_T then @{const_name undefined} - else non_opt_name, T1 --> T2) + Const (if maybe_opt then opt_flag else non_opt_flag, T1 --> T2) | aux T1 T2 ((t1, t2) :: ps) = Const (@{const_name fun_upd}, (T1 --> T2) --> T1 --> T2 --> T1 --> T2) $ aux T1 T2 ps $ t1 $ t2 in aux T1 T2 o rev end (* term -> bool *) -fun is_plain_fun (Const 
(s, _)) = - (s = @{const_name undefined} orelse s = non_opt_name) +fun is_plain_fun (Const (s, _)) = (s = opt_flag orelse s = non_opt_flag) | is_plain_fun (Const (@{const_name fun_upd}, _) $ t0 $ _ $ _) = is_plain_fun t0 | is_plain_fun _ = false (* term -> bool * (term list * term list) *) val dest_plain_fun = let - (* term -> term list * term list *) - fun aux (Const (s, _)) = (s <> non_opt_name, ([], [])) + (* term -> bool * (term list * term list) *) + fun aux (Const (s, _)) = (s <> non_opt_flag, ([], [])) | aux (Const (@{const_name fun_upd}, _) $ t0 $ t1 $ t2) = let val (s, (ts1, ts2)) = aux t0 in (s, (t1 :: ts1, t2 :: ts2)) end | aux t = raise TERM ("Nitpick_Model.dest_plain_fun", [t]) in apsnd (pairself rev) o aux end (* typ -> typ -> typ -> term -> term * term *) fun break_in_two T T1 T2 t = let val ps = HOLogic.flat_tupleT_paths T val cut = length (HOLogic.strip_tupleT T1) val (ps1, ps2) = pairself HOLogic.flat_tupleT_paths (T1, T2) val (ts1, ts2) = t |> HOLogic.strip_ptuple ps |> chop cut in (HOLogic.mk_ptuple ps1 T1 ts1, HOLogic.mk_ptuple ps2 T2 ts2) end (* typ -> term -> term -> term *) fun pair_up (Type ("*", [T1', T2'])) (t1 as Const (@{const_name Pair}, Type ("fun", [_, Type ("fun", [_, T1])])) $ t11 $ t12) t2 = if T1 = T1' then HOLogic.mk_prod (t1, t2) else HOLogic.mk_prod (t11, pair_up T2' t12 t2) | pair_up _ t1 t2 = HOLogic.mk_prod (t1, t2) (* typ -> term -> term list * term list -> (term * term) list*) fun multi_pair_up T1 t1 (ts2, ts3) = map2 (pair o pair_up T1 t1) ts2 ts3 (* typ -> typ -> typ -> term -> term *) fun typecast_fun (Type ("fun", [T1', T2'])) T1 T2 t = let (* typ -> typ -> typ -> typ -> term -> term *) fun do_curry T1 T1a T1b T2 t = let val (maybe_opt, ps) = dest_plain_fun t val ps = ps |>> map (break_in_two T1 T1a T1b) |> uncurry (map2 (fn (t1a, t1b) => fn t2 => (t1a, (t1b, t2)))) |> AList.coalesce (op =) |> map (apsnd (make_plain_fun maybe_opt T1b T2)) in make_plain_fun maybe_opt T1a (T1b --> T2) ps end (* typ -> typ -> term -> term *) and do_uncurry T1 T2 t = let val (maybe_opt, tsp) = dest_plain_fun t val ps = tsp |> op ~~ |> maps (fn (t1, t2) => multi_pair_up T1 t1 (snd (dest_plain_fun t2))) in make_plain_fun maybe_opt T1 T2 ps end (* typ -> typ -> typ -> typ -> term -> term *) and do_arrow T1' T2' _ _ (Const (s, _)) = Const (s, T1' --> T2') | do_arrow T1' T2' T1 T2 (Const (@{const_name fun_upd}, _) $ t0 $ t1 $ t2) = Const (@{const_name fun_upd}, (T1' --> T2') --> T1' --> T2' --> T1' --> T2') $ do_arrow T1' T2' T1 T2 t0 $ do_term T1' T1 t1 $ do_term T2' T2 t2 | do_arrow _ _ _ _ t = raise TERM ("Nitpick_Model.typecast_fun.do_arrow", [t]) and do_fun T1' T2' T1 T2 t = case factor_out_types T1' T1 of ((_, NONE), (_, NONE)) => t |> do_arrow T1' T2' T1 T2 | ((_, NONE), (T1a, SOME T1b)) => t |> do_curry T1 T1a T1b T2 |> do_arrow T1' T2' T1a (T1b --> T2) | ((T1a', SOME T1b'), (_, NONE)) => t |> do_arrow T1a' (T1b' --> T2') T1 T2 |> do_uncurry T1' T2' | _ => raise TYPE ("Nitpick_Model.typecast_fun.do_fun", [T1, T1'], []) (* typ -> typ -> term -> term *) and do_term (Type ("fun", [T1', T2'])) (Type ("fun", [T1, T2])) t = do_fun T1' T2' T1 T2 t | do_term (T' as Type ("*", Ts' as [T1', T2'])) (Type ("*", [T1, T2])) (Const (@{const_name Pair}, _) $ t1 $ t2) = Const (@{const_name Pair}, Ts' ---> T') $ do_term T1' T1 t1 $ do_term T2' T2 t2 | do_term T' T t = if T = T' then t else raise TYPE ("Nitpick_Model.typecast_fun.do_term", [T, T'], []) in if T1' = T1 andalso T2' = T2 then t else do_fun T1' T2' T1 T2 t end | typecast_fun T' _ _ _ = raise TYPE 
("Nitpick_Model.typecast_fun", [T'], []) (* term -> string *) fun truth_const_sort_key @{const True} = "0" | truth_const_sort_key @{const False} = "2" | truth_const_sort_key _ = "1" (* typ -> term list -> term *) fun mk_tuple (Type ("*", [T1, T2])) ts = HOLogic.mk_prod (mk_tuple T1 ts, mk_tuple T2 (List.drop (ts, length (HOLogic.flatten_tupleT T1)))) | mk_tuple _ (t :: _) = t | mk_tuple T [] = raise TYPE ("Nitpick_Model.mk_tuple", [T], []) (* string * string * string * string -> scope -> nut list -> nut list -> nut list -> nut NameTable.table -> KK.raw_bound list -> typ -> typ -> rep -> int list list -> term *) fun reconstruct_term (maybe_name, base_name, step_name, abs_name) ({ext_ctxt as {thy, ctxt, ...}, card_assigns, bits, datatypes, ofs, ...} : scope) sel_names rel_table bounds = let val for_auto = (maybe_name = "") (* int list list -> int *) fun value_of_bits jss = let val j0 = offset_of_type ofs @{typ unsigned_bit} val js = map (Integer.add (~ j0) o the_single) jss in fold (fn j => Integer.add (reasonable_power 2 j |> j = bits ? op ~)) js 0 end (* bool -> typ -> typ -> (term * term) list -> term *) fun make_set maybe_opt T1 T2 = let val empty_const = Const (@{const_name Set.empty}, T1 --> T2) val insert_const = Const (@{const_name insert}, T1 --> (T1 --> T2) --> T1 --> T2) (* (term * term) list -> term *) fun aux [] = if maybe_opt andalso not (is_complete_type datatypes T1) then insert_const $ Const (unrep, T1) $ empty_const else empty_const | aux ((t1, t2) :: zs) = aux zs |> t2 <> @{const False} ? curry (op $) (insert_const $ (t1 |> t2 <> @{const True} ? curry (op $) (Const (maybe_name, T1 --> T1)))) in aux end (* typ -> typ -> typ -> (term * term) list -> term *) fun make_map T1 T2 T2' = let val update_const = Const (@{const_name fun_upd}, (T1 --> T2) --> T1 --> T2 --> T1 --> T2) (* (term * term) list -> term *) fun aux' [] = Const (@{const_name Map.empty}, T1 --> T2) | aux' ((t1, t2) :: ps) = (case t2 of Const (@{const_name None}, _) => aux' ps | _ => update_const $ aux' ps $ t1 $ t2) fun aux ps = if not (is_complete_type datatypes T1) then update_const $ aux' ps $ Const (unrep, T1) $ (Const (@{const_name Some}, T2' --> T2) $ Const (unknown, T2')) else aux' ps in aux end (* typ list -> term -> term *) - fun setify_mapify_funs Ts t = + fun polish_funs Ts t = (case fastype_of1 (Ts, t) of Type ("fun", [T1, T2]) => if is_plain_fun t then case T2 of @{typ bool} => let val (maybe_opt, ts_pair) = - dest_plain_fun t ||> pairself (map (setify_mapify_funs Ts)) + dest_plain_fun t ||> pairself (map (polish_funs Ts)) in make_set maybe_opt T1 T2 (sort_wrt (truth_const_sort_key o snd) (op ~~ ts_pair)) end | Type (@{type_name option}, [T2']) => let val ts_pair = snd (dest_plain_fun t) - |> pairself (map (setify_mapify_funs Ts)) + |> pairself (map (polish_funs Ts)) in make_map T1 T2 T2' (rev (op ~~ ts_pair)) end | _ => raise SAME () else raise SAME () | _ => raise SAME ()) handle SAME () => case t of - t1 $ t2 => setify_mapify_funs Ts t1 $ setify_mapify_funs Ts t2 - | Abs (s, T, t') => Abs (s, T, setify_mapify_funs (T :: Ts) t') - | _ => t + (t1 as Const (@{const_name fun_upd}, _) $ t11 $ _) + $ (t2 as Const (s, _)) => + if s = unknown then polish_funs Ts t11 + else polish_funs Ts t1 $ polish_funs Ts t2 + | t1 $ t2 => polish_funs Ts t1 $ polish_funs Ts t2 + | Abs (s, T, t') => Abs (s, T, polish_funs (T :: Ts) t') + | Const (s, Type ("fun", [T1, T2])) => + if s = opt_flag orelse s = non_opt_flag then + Abs ("x", T1, Const (unknown, T2)) + else + t + | t => t (* bool -> typ -> typ -> typ -> term 
list -> term list -> term *) fun make_fun maybe_opt T1 T2 T' ts1 ts2 = ts1 ~~ ts2 |> sort (nice_term_ord o pairself fst) |> make_plain_fun (maybe_opt andalso not for_auto) T1 T2 |> unbit_and_unbox_term |> typecast_fun (unbit_and_unbox_type T') (unbit_and_unbox_type T1) (unbit_and_unbox_type T2) (* (typ * int) list -> typ -> typ -> int -> term *) fun term_for_atom seen (T as Type ("fun", [T1, T2])) T' j = let val k1 = card_of_type card_assigns T1 val k2 = card_of_type card_assigns T2 in term_for_rep seen T T' (Vect (k1, Atom (k2, 0))) [nth_combination (replicate k1 (k2, 0)) j] handle General.Subscript => raise ARG ("Nitpick_Model.reconstruct_term.term_for_atom", signed_string_of_int j ^ " for " ^ string_for_rep (Vect (k1, Atom (k2, 0)))) end | term_for_atom seen (Type ("*", [T1, T2])) _ j = let val k1 = card_of_type card_assigns T1 in list_comb (HOLogic.pair_const T1 T2, map2 (fn T => term_for_atom seen T T) [T1, T2] [j div k1, j mod k1]) end | term_for_atom seen @{typ prop} _ j = HOLogic.mk_Trueprop (term_for_atom seen bool_T bool_T j) | term_for_atom _ @{typ bool} _ j = if j = 0 then @{const False} else @{const True} | term_for_atom _ @{typ unit} _ _ = @{const Unity} | term_for_atom seen T _ j = if T = nat_T then HOLogic.mk_number nat_T j else if T = int_T then HOLogic.mk_number int_T (int_for_atom (card_of_type card_assigns int_T, 0) j) else if is_fp_iterator_type T then HOLogic.mk_number nat_T (card_of_type card_assigns T - j - 1) else if T = @{typ bisim_iterator} then HOLogic.mk_number nat_T j else case datatype_spec datatypes T of NONE => atom for_auto T j - | SOME {shallow = true, ...} => atom for_auto T j + | SOME {deep = false, ...} => atom for_auto T j | SOME {co, constrs, ...} => let (* styp -> int list *) fun tuples_for_const (s, T) = tuple_list_for_name rel_table bounds (ConstName (s, T, Any)) (* unit -> indexname * typ *) fun var () = ((atom_name "" T j, 0), T) val discr_jsss = map (tuples_for_const o discr_for_constr o #const) constrs val real_j = j + offset_of_type ofs T val constr_x as (constr_s, constr_T) = get_first (fn (jss, {const, ...}) => if member (op =) jss [real_j] then SOME const else NONE) (discr_jsss ~~ constrs) |> the val arg_Ts = curried_binder_types constr_T val sel_xs = map (boxed_nth_sel_for_constr ext_ctxt constr_x) (index_seq 0 (length arg_Ts)) val sel_Rs = map (fn x => get_first (fn ConstName (s', T', R) => if (s', T') = x then SOME R else NONE | u => raise NUT ("Nitpick_Model.reconstruct_\ \term.term_for_atom", [u])) sel_names |> the) sel_xs val arg_Rs = map (snd o dest_Func) sel_Rs val sel_jsss = map tuples_for_const sel_xs val arg_jsss = map (map_filter (fn js => if hd js = real_j then SOME (tl js) else NONE)) sel_jsss val uncur_arg_Ts = binder_types constr_T in if co andalso member (op =) seen (T, j) then Var (var ()) else if constr_s = @{const_name Word} then HOLogic.mk_number (if T = @{typ "unsigned_bit word"} then nat_T else int_T) (value_of_bits (the_single arg_jsss)) else let val seen = seen |> co ? 
cons (T, j) val ts = if length arg_Ts = 0 then [] else map3 (fn Ts => term_for_rep seen Ts Ts) arg_Ts arg_Rs arg_jsss |> mk_tuple (HOLogic.mk_tupleT uncur_arg_Ts) |> dest_n_tuple (length uncur_arg_Ts) val t = if constr_s = @{const_name Abs_Frac} then let val num_T = body_type T (* int -> term *) val mk_num = HOLogic.mk_number num_T in case ts of [Const (@{const_name Pair}, _) $ t1 $ t2] => (case snd (HOLogic.dest_number t1) of 0 => mk_num 0 | n1 => case HOLogic.dest_number t2 |> snd of 1 => mk_num n1 | n2 => Const (@{const_name Algebras.divide}, num_T --> num_T --> num_T) $ mk_num n1 $ mk_num n2) - | _ => raise TERM ("Nitpick_Model.reconstruct_term.term_\ - \for_atom (Abs_Frac)", ts) + | _ => raise TERM ("Nitpick_Model.reconstruct_term.\ + \term_for_atom (Abs_Frac)", ts) end else if not for_auto andalso (is_abs_fun thy constr_x orelse constr_s = @{const_name Quot}) then Const (abs_name, constr_T) $ the_single ts + else if not for_auto andalso + constr_s = @{const_name NonStd} then + Const (fst (dest_Const (the_single ts)), T) else list_comb (Const constr_x, ts) in if co then let val var = var () in if exists_subterm (curry (op =) (Var var)) t then Const (@{const_name The}, (T --> bool_T) --> T) $ Abs ("\", T, Const (@{const_name "op ="}, T --> T --> bool_T) $ Bound 0 $ abstract_over (Var var, t)) else t end else t end end (* (typ * int) list -> int -> rep -> typ -> typ -> typ -> int list -> term *) and term_for_vect seen k R T1 T2 T' js = make_fun true T1 T2 T' (map (term_for_atom seen T1 T1) (index_seq 0 k)) (map (term_for_rep seen T2 T2 R o single) (batch_list (arity_of_rep R) js)) (* (typ * int) list -> typ -> typ -> rep -> int list list -> term *) and term_for_rep seen T T' Unit [[]] = term_for_atom seen T T' 0 | term_for_rep seen T T' (R as Atom (k, j0)) [[j]] = if j >= j0 andalso j < j0 + k then term_for_atom seen T T' (j - j0) else raise REP ("Nitpick_Model.reconstruct_term.term_for_rep", [R]) | term_for_rep seen (Type ("*", [T1, T2])) _ (Struct [R1, R2]) [js] = let val arity1 = arity_of_rep R1 val (js1, js2) = chop arity1 js in list_comb (HOLogic.pair_const T1 T2, map3 (fn T => term_for_rep seen T T) [T1, T2] [R1, R2] [[js1], [js2]]) end | term_for_rep seen (Type ("fun", [T1, T2])) T' (R as Vect (k, R')) [js] = term_for_vect seen k R' T1 T2 T' js | term_for_rep seen (Type ("fun", [T1, T2])) T' (Func (R1, Formula Neut)) jss = let val jss1 = all_combinations_for_rep R1 val ts1 = map (term_for_rep seen T1 T1 R1 o single) jss1 val ts2 = map (fn js => term_for_rep seen T2 T2 (Atom (2, 0)) [[int_for_bool (member (op =) jss js)]]) jss1 in make_fun false T1 T2 T' ts1 ts2 end | term_for_rep seen (Type ("fun", [T1, T2])) T' (Func (R1, R2)) jss = let val arity1 = arity_of_rep R1 val jss1 = all_combinations_for_rep R1 val ts1 = map (term_for_rep seen T1 T1 R1 o single) jss1 val grouped_jss2 = AList.group (op =) (map (chop arity1) jss) val ts2 = map (term_for_rep seen T2 T2 R2 o the_default [] o AList.lookup (op =) grouped_jss2) jss1 in make_fun true T1 T2 T' ts1 ts2 end | term_for_rep seen T T' (Opt R) jss = if null jss then Const (unknown, T) else term_for_rep seen T T' R jss | term_for_rep seen T _ R jss = raise ARG ("Nitpick_Model.reconstruct_term.term_for_rep", Refute.string_of_typ T ^ " " ^ string_for_rep R ^ " " ^ string_of_int (length jss)) in - (not for_auto ? setify_mapify_funs []) o unbit_and_unbox_term - oooo term_for_rep [] + (not for_auto ? 
polish_funs []) o unbit_and_unbox_term oooo term_for_rep [] end (* scope -> nut list -> nut NameTable.table -> KK.raw_bound list -> nut -> term *) fun term_for_name scope sel_names rel_table bounds name = let val T = type_of name in tuple_list_for_name rel_table bounds name |> reconstruct_term ("", "", "", "") scope sel_names rel_table bounds T T (rep_of name) end -(* Proof.context - -> (string * string * string * string * string) * Proof.context *) +(* Proof.context -> (string * string * string * string) * Proof.context *) fun add_wacky_syntax ctxt = let (* term -> string *) val name_of = fst o dest_Const val thy = ProofContext.theory_of ctxt |> Context.reject_draft val (maybe_t, thy) = Sign.declare_const ((@{binding nitpick_maybe}, @{typ "'a => 'a"}), Mixfix (maybe_mixfix, [1000], 1000)) thy val (base_t, thy) = Sign.declare_const ((@{binding nitpick_base}, @{typ "'a => 'a"}), Mixfix (base_mixfix, [1000], 1000)) thy val (step_t, thy) = Sign.declare_const ((@{binding nitpick_step}, @{typ "'a => 'a"}), Mixfix (step_mixfix, [1000], 1000)) thy val (abs_t, thy) = Sign.declare_const ((@{binding nitpick_abs}, @{typ "'a => 'b"}), Mixfix (abs_mixfix, [40], 40)) thy in ((name_of maybe_t, name_of base_t, name_of step_t, name_of abs_t), ProofContext.transfer_syntax thy ctxt) end (* term -> term *) fun unfold_outer_the_binders (t as Const (@{const_name The}, _) $ Abs (s, T, Const (@{const_name "op ="}, _) $ Bound 0 $ t')) = betapply (Abs (s, T, t'), t) |> unfold_outer_the_binders | unfold_outer_the_binders t = t (* typ list -> int -> term * term -> bool *) fun bisimilar_values _ 0 _ = true | bisimilar_values coTs max_depth (t1, t2) = let val T = fastype_of t1 in if exists_subtype (member (op =) coTs) T then let val ((head1, args1), (head2, args2)) = pairself (strip_comb o unfold_outer_the_binders) (t1, t2) val max_depth = max_depth - (if member (op =) coTs T then 1 else 0) in head1 = head2 andalso forall (bisimilar_values coTs max_depth) (args1 ~~ args2) end else t1 = t2 end (* params -> scope -> (term option * int list) list -> styp list -> nut list -> nut list -> nut list -> nut NameTable.table -> KK.raw_bound list -> Pretty.T * bool *) fun reconstruct_hol_model {show_skolems, show_datatypes, show_consts} - ({ext_ctxt as {thy, ctxt, max_bisim_depth, boxes, wfs, user_axioms, - debug, binary_ints, destroy_constrs, specialize, - skolemize, star_linear_preds, uncurry, fast_descrs, - tac_timeout, evals, case_names, def_table, nondef_table, - user_nondefs, simp_table, psimp_table, intro_table, - ground_thm_table, ersatz_table, skolems, special_funs, - unrolled_preds, wf_cache, constr_cache}, + ({ext_ctxt as {thy, ctxt, max_bisim_depth, boxes, stds, wfs, + user_axioms, debug, binary_ints, destroy_constrs, + specialize, skolemize, star_linear_preds, uncurry, + fast_descrs, tac_timeout, evals, case_names, def_table, + nondef_table, user_nondefs, simp_table, psimp_table, + intro_table, ground_thm_table, ersatz_table, skolems, + special_funs, unrolled_preds, wf_cache, constr_cache}, card_assigns, bits, bisim_depth, datatypes, ofs} : scope) formats all_frees free_names sel_names nonsel_names rel_table bounds = let val (wacky_names as (_, base_name, step_name, _), ctxt) = add_wacky_syntax ctxt val ext_ctxt = {thy = thy, ctxt = ctxt, max_bisim_depth = max_bisim_depth, boxes = boxes, - wfs = wfs, user_axioms = user_axioms, debug = debug, + stds = stds, wfs = wfs, user_axioms = user_axioms, debug = debug, binary_ints = binary_ints, destroy_constrs = destroy_constrs, specialize = specialize, skolemize = skolemize, 
star_linear_preds = star_linear_preds, uncurry = uncurry, fast_descrs = fast_descrs, tac_timeout = tac_timeout, evals = evals, case_names = case_names, def_table = def_table, nondef_table = nondef_table, user_nondefs = user_nondefs, simp_table = simp_table, psimp_table = psimp_table, intro_table = intro_table, ground_thm_table = ground_thm_table, ersatz_table = ersatz_table, skolems = skolems, special_funs = special_funs, unrolled_preds = unrolled_preds, wf_cache = wf_cache, constr_cache = constr_cache} val scope = {ext_ctxt = ext_ctxt, card_assigns = card_assigns, bits = bits, bisim_depth = bisim_depth, datatypes = datatypes, ofs = ofs} (* typ -> typ -> rep -> int list list -> term *) val term_for_rep = reconstruct_term wacky_names scope sel_names rel_table bounds (* nat -> typ -> nat -> typ *) fun nth_value_of_type card T n = term_for_rep T T (Atom (card, 0)) [[n]] (* nat -> typ -> typ list *) fun all_values_of_type card T = index_seq 0 card |> map (nth_value_of_type card T) |> sort nice_term_ord (* dtype_spec list -> dtype_spec -> bool *) fun is_codatatype_wellformed (cos : dtype_spec list) ({typ, card, ...} : dtype_spec) = let val ts = all_values_of_type card typ val max_depth = Integer.sum (map #card cos) in forall (not o bisimilar_values (map #typ cos) max_depth) (all_distinct_unordered_pairs_of ts) end (* string -> Pretty.T *) fun pretty_for_assign name = let val (oper, (t1, T'), T) = case name of FreeName (s, T, _) => let val t = Free (s, unbit_and_unbox_type T) in ("=", (t, format_term_type thy def_table formats t), T) end | ConstName (s, T, _) => (assign_operator_for_const (s, T), user_friendly_const ext_ctxt (base_name, step_name) formats (s, T), T) | _ => raise NUT ("Nitpick_Model.reconstruct_hol_model.\ \pretty_for_assign", [name]) val t2 = if rep_of name = Any then Const (@{const_name undefined}, T') else tuple_list_for_name rel_table bounds name |> term_for_rep T T' (rep_of name) in Pretty.block (Pretty.breaks [setmp_show_all_types (Syntax.pretty_term ctxt) t1, Pretty.str oper, Syntax.pretty_term ctxt t2]) end (* dtype_spec -> Pretty.T *) fun pretty_for_datatype ({typ, card, complete, ...} : dtype_spec) = Pretty.block (Pretty.breaks [Syntax.pretty_typ ctxt (unbit_and_unbox_type typ), Pretty.str "=", Pretty.enum "," "{" "}" - (map (Syntax.pretty_term ctxt) (all_values_of_type card typ) - @ (if complete then [] else [Pretty.str unrep]))]) + (map (Syntax.pretty_term ctxt) (all_values_of_type card typ) @ + (if complete then [] else [Pretty.str unrep]))]) (* typ -> dtype_spec list *) fun integer_datatype T = [{typ = T, card = card_of_type card_assigns T, co = false, - complete = false, concrete = true, shallow = false, constrs = []}] + complete = false, concrete = true, deep = true, constrs = []}] handle TYPE ("Nitpick_HOL.card_of_type", _, _) => [] val (codatatypes, datatypes) = - datatypes |> filter_out #shallow - |> List.partition #co + datatypes |> filter #deep |> List.partition #co ||> append (integer_datatype nat_T @ integer_datatype int_T) val block_of_datatypes = if show_datatypes andalso not (null datatypes) then [Pretty.big_list ("Datatype" ^ plural_s_for_list datatypes ^ ":") (map pretty_for_datatype datatypes)] else [] val block_of_codatatypes = if show_datatypes andalso not (null codatatypes) then [Pretty.big_list ("Codatatype" ^ plural_s_for_list codatatypes ^ ":") (map pretty_for_datatype codatatypes)] else [] (* bool -> string -> nut list -> Pretty.T list *) fun block_of_names show title names = if show andalso not (null names) then Pretty.str (title ^ 
plural_s_for_list names ^ ":") :: map (Pretty.indent indent_size o pretty_for_assign) (sort_wrt (original_name o nickname_of) names) else [] val (skolem_names, nonskolem_nonsel_names) = List.partition is_skolem_name nonsel_names val (eval_names, noneval_nonskolem_nonsel_names) = List.partition (String.isPrefix eval_prefix o nickname_of) nonskolem_nonsel_names ||> filter_out (curry (op =) @{const_name bisim_iterator_max} o nickname_of) val free_names = map (fn x as (s, T) => case filter (curry (op =) x o pairf nickname_of (unbit_and_unbox_type o type_of)) free_names of [name] => name | [] => FreeName (s, T, Any) | _ => raise TERM ("Nitpick_Model.reconstruct_hol_model", [Const x])) all_frees val chunks = block_of_names true "Free variable" free_names @ block_of_names show_skolems "Skolem constant" skolem_names @ block_of_names true "Evaluated term" eval_names @ block_of_datatypes @ block_of_codatatypes @ block_of_names show_consts "Constant" noneval_nonskolem_nonsel_names in (Pretty.chunks (if null chunks then [Pretty.str "Empty assignment"] else chunks), bisim_depth >= 0 orelse forall (is_codatatype_wellformed codatatypes) codatatypes) end (* scope -> Time.time option -> nut list -> nut list -> nut NameTable.table -> KK.raw_bound list -> term -> bool option *) fun prove_hol_model (scope as {ext_ctxt as {thy, ctxt, ...}, card_assigns, ...}) auto_timeout free_names sel_names rel_table bounds prop = let (* typ * int -> term *) fun free_type_assm (T, k) = let (* int -> term *) val atom = atom true T fun equation_for_atom j = HOLogic.eq_const T $ Bound 0 $ atom j val eqs = map equation_for_atom (index_seq 0 k) val compreh_assm = Const (@{const_name All}, (T --> bool_T) --> bool_T) $ Abs ("x", T, foldl1 HOLogic.mk_disj eqs) val distinct_assm = distinctness_formula T (map atom (index_seq 0 k)) in HOLogic.mk_conj (compreh_assm, distinct_assm) end (* nut -> term *) fun free_name_assm name = HOLogic.mk_eq (Free (nickname_of name, type_of name), term_for_name scope sel_names rel_table bounds name) val freeT_assms = map free_type_assm (filter (is_TFree o fst) card_assigns) val model_assms = map free_name_assm free_names val assm = List.foldr HOLogic.mk_conj @{const True} (freeT_assms @ model_assms) (* bool -> bool *) fun try_out negate = let val concl = (negate ? curry (op $) @{const Not}) (ObjectLogic.atomize_term thy prop) val goal = HOLogic.mk_Trueprop (HOLogic.mk_imp (assm, concl)) |> map_types (map_type_tfree (fn (s, []) => TFree (s, HOLogic.typeS) | x => TFree x)) |> cterm_of thy |> Goal.init in (goal |> SINGLE (DETERM_TIMEOUT auto_timeout (auto_tac (clasimpset_of ctxt))) |> the |> Goal.finish ctxt; true) handle THM _ => false | TimeLimit.TimeOut => false end in if try_out false then SOME true else if try_out true then SOME false else NONE end end; diff --git a/src/HOL/Tools/Nitpick/nitpick_mono.ML b/src/HOL/Tools/Nitpick/nitpick_mono.ML --- a/src/HOL/Tools/Nitpick/nitpick_mono.ML +++ b/src/HOL/Tools/Nitpick/nitpick_mono.ML @@ -1,946 +1,959 @@ (* Title: HOL/Tools/Nitpick/nitpick_mono.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2009 + Copyright 2009, 2010 Monotonicity predicate for higher-order logic. 
*)

signature NITPICK_MONO =
sig
+  datatype sign = Plus | Minus
  type extended_context = Nitpick_HOL.extended_context

  val formulas_monotonic :
-    extended_context -> typ -> term list -> term list -> term -> bool
+    extended_context -> typ -> sign -> term list -> term list -> term -> bool
end;

structure Nitpick_Mono : NITPICK_MONO =
struct

open Nitpick_Util
open Nitpick_HOL

type var = int

-datatype sign = Pos | Neg
+datatype sign = Plus | Minus
datatype sign_atom = S of sign | V of var

type literal = var * sign

datatype ctype =
  CAlpha |
  CFun of ctype * sign_atom * ctype |
  CPair of ctype * ctype |
  CType of string * ctype list |
  CRec of string * typ list

type cdata =
  {ext_ctxt: extended_context,
   alpha_T: typ,
   max_fresh: int Unsynchronized.ref,
   datatype_cache: ((string * typ list) * ctype) list Unsynchronized.ref,
   constr_cache: (styp * ctype) list Unsynchronized.ref}

exception CTYPE of string * ctype list

(* string -> unit *)
fun print_g (s : string) = ()

(* var -> string *)
val string_for_var = signed_string_of_int
(* string -> var list -> string *)
fun string_for_vars sep [] = "0\<^bsub>" ^ sep ^ "\<^esub>"
  | string_for_vars sep xs = space_implode sep (map string_for_var xs)
fun subscript_string_for_vars sep xs =
  if null xs then "" else "\<^bsub>" ^ string_for_vars sep xs ^ "\<^esub>"

(* sign -> string *)
-fun string_for_sign Pos = "+"
-  | string_for_sign Neg = "-"
+fun string_for_sign Plus = "+"
+  | string_for_sign Minus = "-"

(* sign -> sign -> sign *)
-fun xor sn1 sn2 = if sn1 = sn2 then Pos else Neg
+fun xor sn1 sn2 = if sn1 = sn2 then Plus else Minus
(* sign -> sign *)
-val negate = xor Neg
+val negate = xor Minus

(* sign_atom -> string *)
fun string_for_sign_atom (S sn) = string_for_sign sn
  | string_for_sign_atom (V j) = string_for_var j

(* literal -> string *)
fun string_for_literal (x, sn) = string_for_var x ^ " = " ^ string_for_sign sn

val bool_C = CType (@{type_name bool}, [])

(* ctype -> bool *)
fun is_CRec (CRec _) = true
  | is_CRec _ = false

val no_prec = 100
val prec_CFun = 1
val prec_CPair = 2

(* tuple_set -> int *)
fun precedence_of_ctype (CFun _) = prec_CFun
  | precedence_of_ctype (CPair _) = prec_CPair
  | precedence_of_ctype _ = no_prec

(* ctype -> string *)
val string_for_ctype =
  let
    (* int -> ctype -> string *)
    fun aux outer_prec C =
      let
        val prec = precedence_of_ctype C
        val need_parens = (prec < outer_prec)
      in
        (if need_parens then "(" else "") ^
        (case C of
           CAlpha => "\"
         | CFun (C1, a, C2) =>
           aux (prec + 1) C1 ^ " \\<^bsup>" ^ string_for_sign_atom a ^
           "\<^esup> " ^ aux prec C2
         | CPair (C1, C2) => aux (prec + 1) C1 ^ " \ " ^ aux prec C2
         | CType (s, []) =>
           if s = @{type_name prop} orelse s = @{type_name bool} then "o"
           else s
         | CType (s, Cs) => "(" ^ commas (map (aux 0) Cs) ^ ") " ^ s
         | CRec (s, _) => "[" ^ s ^ "]") ^
        (if need_parens then ")" else "")
      end
  in aux 0 end

(* ctype -> ctype list *)
fun flatten_ctype (CPair (C1, C2)) = maps flatten_ctype [C1, C2]
  | flatten_ctype (CType (_, Cs)) = maps flatten_ctype Cs
  | flatten_ctype C = [C]

(* extended_context -> typ -> cdata *)
fun initial_cdata ext_ctxt alpha_T =
  ({ext_ctxt = ext_ctxt, alpha_T = alpha_T, max_fresh = Unsynchronized.ref 0,
    datatype_cache = Unsynchronized.ref [],
    constr_cache = Unsynchronized.ref []} : cdata)

(* typ -> typ -> bool *)
fun could_exist_alpha_subtype alpha_T (T as Type (_, Ts)) =
    T = alpha_T orelse (not (is_fp_iterator_type T) andalso
                        exists (could_exist_alpha_subtype alpha_T) Ts)
  | could_exist_alpha_subtype alpha_T T = (T = alpha_T)
(* theory -> typ -> typ -> bool *)
fun could_exist_alpha_sub_ctype _
(alpha_T as TFree _) T = could_exist_alpha_subtype alpha_T T | could_exist_alpha_sub_ctype thy alpha_T T = (T = alpha_T orelse is_datatype thy T) (* ctype -> bool *) fun exists_alpha_sub_ctype CAlpha = true | exists_alpha_sub_ctype (CFun (C1, _, C2)) = exists exists_alpha_sub_ctype [C1, C2] | exists_alpha_sub_ctype (CPair (C1, C2)) = exists exists_alpha_sub_ctype [C1, C2] | exists_alpha_sub_ctype (CType (_, Cs)) = exists exists_alpha_sub_ctype Cs | exists_alpha_sub_ctype (CRec _) = true (* ctype -> bool *) fun exists_alpha_sub_ctype_fresh CAlpha = true | exists_alpha_sub_ctype_fresh (CFun (_, V _, _)) = true | exists_alpha_sub_ctype_fresh (CFun (_, _, C2)) = exists_alpha_sub_ctype_fresh C2 | exists_alpha_sub_ctype_fresh (CPair (C1, C2)) = exists exists_alpha_sub_ctype_fresh [C1, C2] | exists_alpha_sub_ctype_fresh (CType (_, Cs)) = exists exists_alpha_sub_ctype_fresh Cs | exists_alpha_sub_ctype_fresh (CRec _) = true (* string * typ list -> ctype list -> ctype *) fun constr_ctype_for_binders z Cs = - fold_rev (fn C => curry3 CFun C (S Neg)) Cs (CRec z) + fold_rev (fn C => curry3 CFun C (S Minus)) Cs (CRec z) (* ((string * typ list) * ctype) list -> ctype list -> ctype -> ctype *) fun repair_ctype _ _ CAlpha = CAlpha | repair_ctype cache seen (CFun (C1, a, C2)) = CFun (repair_ctype cache seen C1, a, repair_ctype cache seen C2) | repair_ctype cache seen (CPair Cp) = CPair (pairself (repair_ctype cache seen) Cp) | repair_ctype cache seen (CType (s, Cs)) = CType (s, maps (flatten_ctype o repair_ctype cache seen) Cs) | repair_ctype cache seen (CRec (z as (s, _))) = case AList.lookup (op =) cache z |> the of CRec _ => CType (s, []) | C => if member (op =) seen C then CType (s, []) else repair_ctype cache (C :: seen) C (* ((string * typ list) * ctype) list Unsynchronized.ref -> unit *) fun repair_datatype_cache cache = let (* (string * typ list) * ctype -> unit *) fun repair_one (z, C) = Unsynchronized.change cache (AList.update (op =) (z, repair_ctype (!cache) [] C)) in List.app repair_one (rev (!cache)) end (* (typ * ctype) list -> (styp * ctype) list Unsynchronized.ref -> unit *) fun repair_constr_cache dtype_cache constr_cache = let (* styp * ctype -> unit *) fun repair_one (x, C) = Unsynchronized.change constr_cache (AList.update (op =) (x, repair_ctype dtype_cache [] C)) in List.app repair_one (!constr_cache) end (* cdata -> typ -> ctype *) fun fresh_ctype_for_type ({ext_ctxt as {thy, ...}, alpha_T, max_fresh, datatype_cache, constr_cache, ...} : cdata) = let (* typ -> typ -> ctype *) fun do_fun T1 T2 = let val C1 = do_type T1 val C2 = do_type T2 val a = if is_boolean_type (body_type T2) andalso exists_alpha_sub_ctype_fresh C1 then V (Unsynchronized.inc max_fresh) else - S Neg + S Minus in CFun (C1, a, C2) end (* typ -> ctype *) and do_type T = if T = alpha_T then CAlpha else case T of Type ("fun", [T1, T2]) => do_fun T1 T2 | Type (@{type_name fun_box}, [T1, T2]) => do_fun T1 T2 | Type ("*", [T1, T2]) => CPair (pairself do_type (T1, T2)) | Type (z as (s, _)) => if could_exist_alpha_sub_ctype thy alpha_T T then case AList.lookup (op =) (!datatype_cache) z of SOME C => C | NONE => let val _ = Unsynchronized.change datatype_cache (cons (z, CRec z)) val xs = datatype_constrs ext_ctxt T val (all_Cs, constr_Cs) = fold_rev (fn (_, T') => fn (all_Cs, constr_Cs) => let val binder_Cs = map do_type (binder_types T') val new_Cs = filter exists_alpha_sub_ctype_fresh binder_Cs val constr_C = constr_ctype_for_binders z binder_Cs in (union (op =) new_Cs all_Cs, constr_C :: constr_Cs) end) xs ([], []) val C = 
CType (s, all_Cs) val _ = Unsynchronized.change datatype_cache (AList.update (op =) (z, C)) val _ = Unsynchronized.change constr_cache (append (xs ~~ constr_Cs)) in if forall (not o is_CRec o snd) (!datatype_cache) then (repair_datatype_cache datatype_cache; repair_constr_cache (!datatype_cache) constr_cache; AList.lookup (op =) (!datatype_cache) z |> the) else C end else CType (s, []) | _ => CType (Refute.string_of_typ T, []) in do_type end (* ctype -> ctype list *) fun prodC_factors (CPair (C1, C2)) = maps prodC_factors [C1, C2] | prodC_factors C = [C] (* ctype -> ctype list * ctype *) -fun curried_strip_ctype (CFun (C1, S Neg, C2)) = +fun curried_strip_ctype (CFun (C1, S Minus, C2)) = curried_strip_ctype C2 |>> append (prodC_factors C1) | curried_strip_ctype C = ([], C) (* string -> ctype -> ctype *) fun sel_ctype_from_constr_ctype s C = let val (arg_Cs, dataC) = curried_strip_ctype C in - CFun (dataC, S Neg, + CFun (dataC, S Minus, case sel_no_from_name s of ~1 => bool_C | n => nth arg_Cs n) end (* cdata -> styp -> ctype *) fun ctype_for_constr (cdata as {ext_ctxt as {thy, ...}, alpha_T, constr_cache, ...}) (x as (_, T)) = if could_exist_alpha_sub_ctype thy alpha_T T then case AList.lookup (op =) (!constr_cache) x of SOME C => C - | NONE => (fresh_ctype_for_type cdata (body_type T); - AList.lookup (op =) (!constr_cache) x |> the) + | NONE => if T = alpha_T then + let val C = fresh_ctype_for_type cdata T in + (Unsynchronized.change constr_cache (cons (x, C)); C) + end + else + (fresh_ctype_for_type cdata (body_type T); + AList.lookup (op =) (!constr_cache) x |> the) else fresh_ctype_for_type cdata T fun ctype_for_sel (cdata as {ext_ctxt, ...}) (x as (s, _)) = x |> boxed_constr_for_sel ext_ctxt |> ctype_for_constr cdata |> sel_ctype_from_constr_ctype s (* literal list -> ctype -> ctype *) fun instantiate_ctype lits = let (* ctype -> ctype *) fun aux CAlpha = CAlpha | aux (CFun (C1, V x, C2)) = let val a = case AList.lookup (op =) lits x of SOME sn => S sn | NONE => V x in CFun (aux C1, a, aux C2) end | aux (CFun (C1, a, C2)) = CFun (aux C1, a, aux C2) | aux (CPair Cp) = CPair (pairself aux Cp) | aux (CType (s, Cs)) = CType (s, map aux Cs) | aux (CRec z) = CRec z in aux end datatype comp_op = Eq | Leq type comp = sign_atom * sign_atom * comp_op * var list type sign_expr = literal list datatype constraint_set = UnsolvableCSet | CSet of literal list * comp list * sign_expr list (* comp_op -> string *) fun string_for_comp_op Eq = "=" | string_for_comp_op Leq = "\" (* sign_expr -> string *) fun string_for_sign_expr [] = "\" | string_for_sign_expr lits = space_implode " \ " (map string_for_literal lits) (* constraint_set *) val slack = CSet ([], [], []) (* literal -> literal list option -> literal list option *) fun do_literal _ NONE = NONE | do_literal (x, sn) (SOME lits) = case AList.lookup (op =) lits x of SOME sn' => if sn = sn' then SOME lits else NONE | NONE => SOME ((x, sn) :: lits) (* comp_op -> var list -> sign_atom -> sign_atom -> literal list * comp list -> (literal list * comp list) option *) fun do_sign_atom_comp Eq [] a1 a2 (accum as (lits, comps)) = (case (a1, a2) of (S sn1, S sn2) => if sn1 = sn2 then SOME accum else NONE | (V x1, S sn2) => Option.map (rpair comps) (do_literal (x1, sn2) (SOME lits)) | (V _, V _) => SOME (lits, insert (op =) (a1, a2, Eq, []) comps) | _ => do_sign_atom_comp Eq [] a2 a1 accum) | do_sign_atom_comp Leq [] a1 a2 (accum as (lits, comps)) = (case (a1, a2) of - (_, S Neg) => SOME accum - | (S Pos, _) => SOME accum - | (S Neg, S Pos) => NONE + (_, S 
Minus) => SOME accum + | (S Plus, _) => SOME accum + | (S Minus, S Plus) => NONE | (V _, V _) => SOME (lits, insert (op =) (a1, a2, Leq, []) comps) | _ => do_sign_atom_comp Eq [] a1 a2 accum) | do_sign_atom_comp cmp xs a1 a2 (accum as (lits, comps)) = SOME (lits, insert (op =) (a1, a2, cmp, xs) comps) (* comp -> var list -> ctype -> ctype -> (literal list * comp list) option -> (literal list * comp list) option *) fun do_ctype_comp _ _ _ _ NONE = NONE | do_ctype_comp _ _ CAlpha CAlpha accum = accum | do_ctype_comp Eq xs (CFun (C11, a1, C12)) (CFun (C21, a2, C22)) (SOME accum) = accum |> do_sign_atom_comp Eq xs a1 a2 |> do_ctype_comp Eq xs C11 C21 |> do_ctype_comp Eq xs C12 C22 | do_ctype_comp Leq xs (CFun (C11, a1, C12)) (CFun (C21, a2, C22)) (SOME accum) = (if exists_alpha_sub_ctype C11 then accum |> do_sign_atom_comp Leq xs a1 a2 |> do_ctype_comp Leq xs C21 C11 |> (case a2 of - S Neg => I - | S Pos => do_ctype_comp Leq xs C11 C21 + S Minus => I + | S Plus => do_ctype_comp Leq xs C11 C21 | V x => do_ctype_comp Leq (x :: xs) C11 C21) else SOME accum) |> do_ctype_comp Leq xs C12 C22 | do_ctype_comp cmp xs (C1 as CPair (C11, C12)) (C2 as CPair (C21, C22)) accum = (accum |> fold (uncurry (do_ctype_comp cmp xs)) [(C11, C21), (C12, C22)] handle Library.UnequalLengths => raise CTYPE ("Nitpick_Mono.do_ctype_comp", [C1, C2])) | do_ctype_comp cmp xs (CType _) (CType _) accum = accum (* no need to compare them thanks to the cache *) | do_ctype_comp _ _ C1 C2 _ = raise CTYPE ("Nitpick_Mono.do_ctype_comp", [C1, C2]) (* comp_op -> ctype -> ctype -> constraint_set -> constraint_set *) fun add_ctype_comp _ _ _ UnsolvableCSet = UnsolvableCSet | add_ctype_comp cmp C1 C2 (CSet (lits, comps, sexps)) = (print_g ("*** Add " ^ string_for_ctype C1 ^ " " ^ string_for_comp_op cmp ^ " " ^ string_for_ctype C2); case do_ctype_comp cmp [] C1 C2 (SOME (lits, comps)) of NONE => (print_g "**** Unsolvable"; UnsolvableCSet) | SOME (lits, comps) => CSet (lits, comps, sexps)) (* ctype -> ctype -> constraint_set -> constraint_set *) val add_ctypes_equal = add_ctype_comp Eq val add_is_sub_ctype = add_ctype_comp Leq (* sign -> sign_expr -> ctype -> (literal list * sign_expr list) option -> (literal list * sign_expr list) option *) fun do_notin_ctype_fv _ _ _ NONE = NONE - | do_notin_ctype_fv Neg _ CAlpha accum = accum - | do_notin_ctype_fv Pos [] CAlpha _ = NONE - | do_notin_ctype_fv Pos [(x, sn)] CAlpha (SOME (lits, sexps)) = + | do_notin_ctype_fv Minus _ CAlpha accum = accum + | do_notin_ctype_fv Plus [] CAlpha _ = NONE + | do_notin_ctype_fv Plus [(x, sn)] CAlpha (SOME (lits, sexps)) = SOME lits |> do_literal (x, sn) |> Option.map (rpair sexps) - | do_notin_ctype_fv Pos sexp CAlpha (SOME (lits, sexps)) = + | do_notin_ctype_fv Plus sexp CAlpha (SOME (lits, sexps)) = SOME (lits, insert (op =) sexp sexps) | do_notin_ctype_fv sn sexp (CFun (C1, S sn', C2)) accum = - accum |> (if sn' = Pos andalso sn = Pos then do_notin_ctype_fv Pos sexp C1 - else I) - |> (if sn' = Neg orelse sn = Pos then do_notin_ctype_fv Neg sexp C1 - else I) + accum |> (if sn' = Plus andalso sn = Plus then + do_notin_ctype_fv Plus sexp C1 + else + I) + |> (if sn' = Minus orelse sn = Plus then + do_notin_ctype_fv Minus sexp C1 + else + I) |> do_notin_ctype_fv sn sexp C2 - | do_notin_ctype_fv Pos sexp (CFun (C1, V x, C2)) accum = - accum |> (case do_literal (x, Neg) (SOME sexp) of + | do_notin_ctype_fv Plus sexp (CFun (C1, V x, C2)) accum = + accum |> (case do_literal (x, Minus) (SOME sexp) of NONE => I - | SOME sexp' => do_notin_ctype_fv Pos sexp' C1) - |> 
do_notin_ctype_fv Neg sexp C1 - |> do_notin_ctype_fv Pos sexp C2 - | do_notin_ctype_fv Neg sexp (CFun (C1, V x, C2)) accum = - accum |> (case do_literal (x, Pos) (SOME sexp) of + | SOME sexp' => do_notin_ctype_fv Plus sexp' C1) + |> do_notin_ctype_fv Minus sexp C1 + |> do_notin_ctype_fv Plus sexp C2 + | do_notin_ctype_fv Minus sexp (CFun (C1, V x, C2)) accum = + accum |> (case do_literal (x, Plus) (SOME sexp) of NONE => I - | SOME sexp' => do_notin_ctype_fv Pos sexp' C1) - |> do_notin_ctype_fv Neg sexp C2 + | SOME sexp' => do_notin_ctype_fv Plus sexp' C1) + |> do_notin_ctype_fv Minus sexp C2 | do_notin_ctype_fv sn sexp (CPair (C1, C2)) accum = accum |> fold (do_notin_ctype_fv sn sexp) [C1, C2] | do_notin_ctype_fv sn sexp (CType (_, Cs)) accum = accum |> fold (do_notin_ctype_fv sn sexp) Cs | do_notin_ctype_fv _ _ C _ = raise CTYPE ("Nitpick_Mono.do_notin_ctype_fv", [C]) (* sign -> ctype -> constraint_set -> constraint_set *) fun add_notin_ctype_fv _ _ UnsolvableCSet = UnsolvableCSet | add_notin_ctype_fv sn C (CSet (lits, comps, sexps)) = (print_g ("*** Add " ^ string_for_ctype C ^ " is right-" ^ - (case sn of Neg => "unique" | Pos => "total") ^ "."); + (case sn of Minus => "unique" | Plus => "total") ^ "."); case do_notin_ctype_fv sn [] C (SOME (lits, sexps)) of NONE => (print_g "**** Unsolvable"; UnsolvableCSet) | SOME (lits, sexps) => CSet (lits, comps, sexps)) (* ctype -> constraint_set -> constraint_set *) -val add_ctype_is_right_unique = add_notin_ctype_fv Neg -val add_ctype_is_right_total = add_notin_ctype_fv Pos +val add_ctype_is_right_unique = add_notin_ctype_fv Minus +val add_ctype_is_right_total = add_notin_ctype_fv Plus (* constraint_set -> constraint_set -> constraint_set *) fun unite (CSet (lits1, comps1, sexps1)) (CSet (lits2, comps2, sexps2)) = (case SOME lits1 |> fold do_literal lits2 of NONE => (print_g "**** Unsolvable"; UnsolvableCSet) | SOME lits => CSet (lits, comps1 @ comps2, sexps1 @ sexps2)) | unite _ _ = UnsolvableCSet (* sign -> bool *) -fun bool_from_sign Pos = false - | bool_from_sign Neg = true +fun bool_from_sign Plus = false + | bool_from_sign Minus = true (* bool -> sign *) -fun sign_from_bool false = Pos - | sign_from_bool true = Neg +fun sign_from_bool false = Plus + | sign_from_bool true = Minus (* literal -> PropLogic.prop_formula *) fun prop_for_literal (x, sn) = (not (bool_from_sign sn) ? 
PropLogic.Not) (PropLogic.BoolVar x) (* sign_atom -> PropLogic.prop_formula *) fun prop_for_sign_atom_eq (S sn', sn) = if sn = sn' then PropLogic.True else PropLogic.False | prop_for_sign_atom_eq (V x, sn) = prop_for_literal (x, sn) (* sign_expr -> PropLogic.prop_formula *) fun prop_for_sign_expr xs = PropLogic.exists (map prop_for_literal xs) (* var list -> sign -> PropLogic.prop_formula *) fun prop_for_exists_eq xs sn = PropLogic.exists (map (fn x => prop_for_literal (x, sn)) xs) (* comp -> PropLogic.prop_formula *) fun prop_for_comp (a1, a2, Eq, []) = PropLogic.SAnd (prop_for_comp (a1, a2, Leq, []), prop_for_comp (a2, a1, Leq, [])) | prop_for_comp (a1, a2, Leq, []) = - PropLogic.SOr (prop_for_sign_atom_eq (a1, Pos), - prop_for_sign_atom_eq (a2, Neg)) + PropLogic.SOr (prop_for_sign_atom_eq (a1, Plus), + prop_for_sign_atom_eq (a2, Minus)) | prop_for_comp (a1, a2, cmp, xs) = - PropLogic.SOr (prop_for_exists_eq xs Neg, prop_for_comp (a1, a2, cmp, [])) + PropLogic.SOr (prop_for_exists_eq xs Minus, prop_for_comp (a1, a2, cmp, [])) (* var -> (int -> bool option) -> literal list -> literal list *) fun literals_from_assignments max_var assigns lits = fold (fn x => fn accum => if AList.defined (op =) lits x then accum else case assigns x of SOME b => (x, sign_from_bool b) :: accum | NONE => accum) (max_var downto 1) lits (* literal list -> sign_atom -> sign option *) fun lookup_sign_atom _ (S sn) = SOME sn | lookup_sign_atom lit (V x) = AList.lookup (op =) lit x (* comp -> string *) fun string_for_comp (a1, a2, cmp, xs) = string_for_sign_atom a1 ^ " " ^ string_for_comp_op cmp ^ subscript_string_for_vars " \ " xs ^ " " ^ string_for_sign_atom a2 (* literal list -> comp list -> sign_expr list -> unit *) fun print_problem lits comps sexps = print_g ("*** Problem:\n" ^ cat_lines (map string_for_literal lits @ map string_for_comp comps @ map string_for_sign_expr sexps)) (* literal list -> unit *) fun print_solution lits = - let val (pos, neg) = List.partition (curry (op =) Pos o snd) lits in + let val (pos, neg) = List.partition (curry (op =) Plus o snd) lits in print_g ("*** Solution:\n" ^ "+: " ^ commas (map (string_for_var o fst) pos) ^ "\n" ^ "-: " ^ commas (map (string_for_var o fst) neg)) end (* var -> constraint_set -> literal list list option *) fun solve _ UnsolvableCSet = (print_g "*** Problem: Unsolvable"; NONE) | solve max_var (CSet (lits, comps, sexps)) = let val _ = print_problem lits comps sexps val prop = PropLogic.all (map prop_for_literal lits @ map prop_for_comp comps @ map prop_for_sign_expr sexps) (* use the first ML solver (to avoid startup overhead) *) val solvers = !SatSolver.solvers |> filter (member (op =) ["dptsat", "dpll"] o fst) in case snd (hd solvers) prop of SatSolver.SATISFIABLE assigns => SOME (literals_from_assignments max_var assigns lits |> tap print_solution) | _ => NONE end (* var -> constraint_set -> bool *) val is_solvable = is_some oo solve type ctype_schema = ctype * constraint_set type ctype_context = {bounds: ctype list, frees: (styp * ctype) list, - consts: (styp * ctype_schema) list} + consts: (styp * ctype) list} type accumulator = ctype_context * constraint_set val initial_gamma = {bounds = [], frees = [], consts = []} val unsolvable_accum = (initial_gamma, UnsolvableCSet) (* ctype -> ctype_context -> ctype_context *) fun push_bound C {bounds, frees, consts} = {bounds = C :: bounds, frees = frees, consts = consts} (* ctype_context -> ctype_context *) fun pop_bound {bounds, frees, consts} = {bounds = tl bounds, frees = frees, consts = consts} handle 
List.Empty => initial_gamma (* cdata -> term -> accumulator -> ctype * accumulator *) fun consider_term (cdata as {ext_ctxt as {ctxt, thy, def_table, ...}, alpha_T, max_fresh, ...}) = let (* typ -> ctype *) val ctype_for = fresh_ctype_for_type cdata (* ctype -> ctype *) fun pos_set_ctype_for_dom C = - CFun (C, S (if exists_alpha_sub_ctype C then Pos else Neg), bool_C) + CFun (C, S (if exists_alpha_sub_ctype C then Plus else Minus), bool_C) (* typ -> accumulator -> ctype * accumulator *) fun do_quantifier T (gamma, cset) = let val abs_C = ctype_for (domain_type (domain_type T)) val body_C = ctype_for (range_type T) in - (CFun (CFun (abs_C, S Neg, body_C), S Neg, body_C), + (CFun (CFun (abs_C, S Minus, body_C), S Minus, body_C), (gamma, cset |> add_ctype_is_right_total abs_C)) end fun do_equals T (gamma, cset) = let val C = ctype_for (domain_type T) in - (CFun (C, S Neg, CFun (C, V (Unsynchronized.inc max_fresh), - ctype_for (nth_range_type 2 T))), + (CFun (C, S Minus, CFun (C, V (Unsynchronized.inc max_fresh), + ctype_for (nth_range_type 2 T))), (gamma, cset |> add_ctype_is_right_unique C)) end fun do_robust_set_operation T (gamma, cset) = let val set_T = domain_type T val C1 = ctype_for set_T val C2 = ctype_for set_T val C3 = ctype_for set_T in - (CFun (C1, S Neg, CFun (C2, S Neg, C3)), + (CFun (C1, S Minus, CFun (C2, S Minus, C3)), (gamma, cset |> add_is_sub_ctype C1 C3 |> add_is_sub_ctype C2 C3)) end fun do_fragile_set_operation T (gamma, cset) = let val set_T = domain_type T val set_C = ctype_for set_T (* typ -> ctype *) fun custom_ctype_for (T as Type ("fun", [T1, T2])) = if T = set_T then set_C - else CFun (custom_ctype_for T1, S Neg, custom_ctype_for T2) + else CFun (custom_ctype_for T1, S Minus, custom_ctype_for T2) | custom_ctype_for T = ctype_for T in (custom_ctype_for T, (gamma, cset |> add_ctype_is_right_unique set_C)) end (* typ -> accumulator -> ctype * accumulator *) fun do_pair_constr T accum = case ctype_for (nth_range_type 2 T) of C as CPair (a_C, b_C) => - (CFun (a_C, S Neg, CFun (b_C, S Neg, C)), accum) + (CFun (a_C, S Minus, CFun (b_C, S Minus, C)), accum) | C => raise CTYPE ("Nitpick_Mono.consider_term.do_pair_constr", [C]) (* int -> typ -> accumulator -> ctype * accumulator *) fun do_nth_pair_sel n T = case ctype_for (domain_type T) of C as CPair (a_C, b_C) => - pair (CFun (C, S Neg, if n = 0 then a_C else b_C)) + pair (CFun (C, S Minus, if n = 0 then a_C else b_C)) | C => raise CTYPE ("Nitpick_Mono.consider_term.do_nth_pair_sel", [C]) val unsolvable = (CType ("unsolvable", []), unsolvable_accum) (* typ -> term -> accumulator -> ctype * accumulator *) fun do_bounded_quantifier abs_T bound_t body_t accum = let val abs_C = ctype_for abs_T val (bound_C, accum) = accum |>> push_bound abs_C |> do_term bound_t val expected_bound_C = pos_set_ctype_for_dom abs_C in accum ||> add_ctypes_equal expected_bound_C bound_C |> do_term body_t ||> apfst pop_bound end (* term -> accumulator -> ctype * accumulator *) and do_term _ (_, UnsolvableCSet) = unsolvable | do_term t (accum as (gamma as {bounds, frees, consts}, cset)) = (case t of Const (x as (s, T)) => (case AList.lookup (op =) consts x of - SOME (C, cset') => (C, (gamma, cset |> unite cset')) + SOME C => (C, accum) | NONE => if not (could_exist_alpha_subtype alpha_T T) then (ctype_for T, accum) else case s of @{const_name all} => do_quantifier T accum | @{const_name "=="} => do_equals T accum | @{const_name All} => do_quantifier T accum | @{const_name Ex} => do_quantifier T accum | @{const_name "op ="} => do_equals T accum | 
@{const_name The} => (print_g "*** The"; unsolvable) | @{const_name Eps} => (print_g "*** Eps"; unsolvable) | @{const_name If} => do_robust_set_operation (range_type T) accum - |>> curry3 CFun bool_C (S Neg) + |>> curry3 CFun bool_C (S Minus) | @{const_name Pair} => do_pair_constr T accum | @{const_name fst} => do_nth_pair_sel 0 T accum | @{const_name snd} => do_nth_pair_sel 1 T accum | @{const_name Id} => - (CFun (ctype_for (domain_type T), S Neg, bool_C), accum) + (CFun (ctype_for (domain_type T), S Minus, bool_C), accum) | @{const_name insert} => let val set_T = domain_type (range_type T) val C1 = ctype_for (domain_type set_T) val C1' = pos_set_ctype_for_dom C1 val C2 = ctype_for set_T val C3 = ctype_for set_T in - (CFun (C1, S Neg, CFun (C2, S Neg, C3)), + (CFun (C1, S Minus, CFun (C2, S Minus, C3)), (gamma, cset |> add_ctype_is_right_unique C1 |> add_is_sub_ctype C1' C3 |> add_is_sub_ctype C2 C3)) end | @{const_name converse} => let val x = Unsynchronized.inc max_fresh (* typ -> ctype *) fun ctype_for_set T = CFun (ctype_for (domain_type T), V x, bool_C) val ab_set_C = domain_type T |> ctype_for_set val ba_set_C = range_type T |> ctype_for_set - in (CFun (ab_set_C, S Neg, ba_set_C), accum) end + in (CFun (ab_set_C, S Minus, ba_set_C), accum) end | @{const_name trancl} => do_fragile_set_operation T accum | @{const_name rtrancl} => (print_g "*** rtrancl"; unsolvable) | @{const_name lower_semilattice_fun_inst.inf_fun} => do_robust_set_operation T accum | @{const_name upper_semilattice_fun_inst.sup_fun} => do_robust_set_operation T accum | @{const_name finite} => let val C1 = ctype_for (domain_type (domain_type T)) in - (CFun (pos_set_ctype_for_dom C1, S Neg, bool_C), accum) + (CFun (pos_set_ctype_for_dom C1, S Minus, bool_C), accum) end | @{const_name rel_comp} => let val x = Unsynchronized.inc max_fresh (* typ -> ctype *) fun ctype_for_set T = CFun (ctype_for (domain_type T), V x, bool_C) val bc_set_C = domain_type T |> ctype_for_set val ab_set_C = domain_type (range_type T) |> ctype_for_set val ac_set_C = nth_range_type 2 T |> ctype_for_set in - (CFun (bc_set_C, S Neg, CFun (ab_set_C, S Neg, ac_set_C)), + (CFun (bc_set_C, S Minus, CFun (ab_set_C, S Minus, ac_set_C)), accum) end | @{const_name image} => let val a_C = ctype_for (domain_type (domain_type T)) val b_C = ctype_for (range_type (domain_type T)) in - (CFun (CFun (a_C, S Neg, b_C), S Neg, - CFun (pos_set_ctype_for_dom a_C, S Neg, + (CFun (CFun (a_C, S Minus, b_C), S Minus, + CFun (pos_set_ctype_for_dom a_C, S Minus, pos_set_ctype_for_dom b_C)), accum) end | @{const_name Sigma} => let val x = Unsynchronized.inc max_fresh (* typ -> ctype *) fun ctype_for_set T = CFun (ctype_for (domain_type T), V x, bool_C) val a_set_T = domain_type T val a_C = ctype_for (domain_type a_set_T) val b_set_C = ctype_for_set (range_type (domain_type (range_type T))) val a_set_C = ctype_for_set a_set_T - val a_to_b_set_C = CFun (a_C, S Neg, b_set_C) + val a_to_b_set_C = CFun (a_C, S Minus, b_set_C) val ab_set_C = ctype_for_set (nth_range_type 2 T) in - (CFun (a_set_C, S Neg, CFun (a_to_b_set_C, S Neg, ab_set_C)), - accum) + (CFun (a_set_C, S Minus, + CFun (a_to_b_set_C, S Minus, ab_set_C)), accum) end | @{const_name minus_fun_inst.minus_fun} => let val set_T = domain_type T val left_set_C = ctype_for set_T val right_set_C = ctype_for set_T in - (CFun (left_set_C, S Neg, - CFun (right_set_C, S Neg, left_set_C)), + (CFun (left_set_C, S Minus, + CFun (right_set_C, S Minus, left_set_C)), (gamma, cset |> add_ctype_is_right_unique right_set_C |> 
add_is_sub_ctype right_set_C left_set_C)) end | @{const_name ord_fun_inst.less_eq_fun} => do_fragile_set_operation T accum | @{const_name Tha} => let val a_C = ctype_for (domain_type (domain_type T)) val a_set_C = pos_set_ctype_for_dom a_C - in (CFun (a_set_C, S Neg, a_C), accum) end + in (CFun (a_set_C, S Minus, a_C), accum) end | @{const_name FunBox} => let val dom_C = ctype_for (domain_type T) in - (CFun (dom_C, S Neg, dom_C), accum) + (CFun (dom_C, S Minus, dom_C), accum) end | _ => if is_sel s then if constr_name_for_sel_like s = @{const_name FunBox} then let val dom_C = ctype_for (domain_type T) in - (CFun (dom_C, S Neg, dom_C), accum) + (CFun (dom_C, S Minus, dom_C), accum) end else (ctype_for_sel cdata x, accum) else if is_constr thy x then (ctype_for_constr cdata x, accum) else if is_built_in_const true x then case def_of_const thy def_table x of SOME t' => do_term t' accum | NONE => (print_g ("*** built-in " ^ s); unsolvable) else - (ctype_for T, accum)) + let val C = ctype_for T in + (C, ({bounds = bounds, frees = frees, + consts = (x, C) :: consts}, cset)) + end) | Free (x as (_, T)) => (case AList.lookup (op =) frees x of SOME C => (C, accum) | NONE => let val C = ctype_for T in (C, ({bounds = bounds, frees = (x, C) :: frees, consts = consts}, cset)) end) | Var _ => (print_g "*** Var"; unsolvable) | Bound j => (nth bounds j, accum) | Abs (_, T, @{const False}) => (ctype_for (T --> bool_T), accum) | Abs (s, T, t') => let val C = ctype_for T val (C', accum) = do_term t' (accum |>> push_bound C) - in (CFun (C, S Neg, C'), accum |>> pop_bound) end + in (CFun (C, S Minus, C'), accum |>> pop_bound) end | Const (@{const_name All}, _) $ Abs (_, T', @{const "op -->"} $ (t1 $ Bound 0) $ t2) => do_bounded_quantifier T' t1 t2 accum | Const (@{const_name Ex}, _) $ Abs (_, T', @{const "op &"} $ (t1 $ Bound 0) $ t2) => do_bounded_quantifier T' t1 t2 accum | Const (@{const_name Let}, _) $ t1 $ t2 => do_term (betapply (t2, t1)) accum | t1 $ t2 => let val (C1, accum) = do_term t1 accum val (C2, accum) = do_term t2 accum in case accum of (_, UnsolvableCSet) => unsolvable | _ => case C1 of CFun (C11, _, C12) => (C12, accum ||> add_is_sub_ctype C2 C11) | _ => raise CTYPE ("Nitpick_Mono.consider_term.do_term \ \(op $)", [C1]) end) |> tap (fn (C, _) => print_g (" \ \ " ^ Syntax.string_of_term ctxt t ^ " : " ^ string_for_ctype C)) in do_term end (* cdata -> sign -> term -> accumulator -> accumulator *) fun consider_general_formula (cdata as {ext_ctxt as {ctxt, ...}, ...}) = let (* typ -> ctype *) val ctype_for = fresh_ctype_for_type cdata (* term -> accumulator -> ctype * accumulator *) val do_term = consider_term cdata (* sign -> term -> accumulator -> accumulator *) fun do_formula _ _ (_, UnsolvableCSet) = unsolvable_accum | do_formula sn t (accum as (gamma as {bounds, frees, consts}, cset)) = let (* term -> accumulator -> accumulator *) val do_co_formula = do_formula sn val do_contra_formula = do_formula (negate sn) (* string -> typ -> term -> accumulator *) fun do_quantifier quant_s abs_T body_t = let val abs_C = ctype_for abs_T - val side_cond = ((sn = Neg) = (quant_s = @{const_name Ex})) + val side_cond = ((sn = Minus) = (quant_s = @{const_name Ex})) val cset = cset |> side_cond ? 
add_ctype_is_right_total abs_C in (gamma |> push_bound abs_C, cset) |> do_co_formula body_t |>> pop_bound end (* typ -> term -> accumulator *) fun do_bounded_quantifier abs_T body_t = accum |>> push_bound (ctype_for abs_T) |> do_co_formula body_t |>> pop_bound (* term -> term -> accumulator *) fun do_equals t1 t2 = case sn of - Pos => do_term t accum |> snd - | Neg => let - val (C1, accum) = do_term t1 accum - val (C2, accum) = do_term t2 accum - in accum ||> add_ctypes_equal C1 C2 end + Plus => do_term t accum |> snd + | Minus => let + val (C1, accum) = do_term t1 accum + val (C2, accum) = do_term t2 accum + in accum ||> add_ctypes_equal C1 C2 end in case t of Const (s0 as @{const_name all}, _) $ Abs (_, T1, t1) => do_quantifier s0 T1 t1 | Const (@{const_name "=="}, _) $ t1 $ t2 => do_equals t1 t2 | @{const "==>"} $ t1 $ t2 => accum |> do_contra_formula t1 |> do_co_formula t2 | @{const Trueprop} $ t1 => do_co_formula t1 accum | @{const Not} $ t1 => do_contra_formula t1 accum | Const (@{const_name All}, _) $ Abs (_, T1, t1 as @{const "op -->"} $ (_ $ Bound 0) $ _) => do_bounded_quantifier T1 t1 | Const (s0 as @{const_name All}, _) $ Abs (_, T1, t1) => do_quantifier s0 T1 t1 | Const (@{const_name Ex}, _) $ Abs (_, T1, t1 as @{const "op &"} $ (_ $ Bound 0) $ _) => do_bounded_quantifier T1 t1 | Const (s0 as @{const_name Ex}, _) $ Abs (_, T1, t1) => do_quantifier s0 T1 t1 | Const (@{const_name "op ="}, _) $ t1 $ t2 => do_equals t1 t2 | @{const "op &"} $ t1 $ t2 => accum |> do_co_formula t1 |> do_co_formula t2 | @{const "op |"} $ t1 $ t2 => accum |> do_co_formula t1 |> do_co_formula t2 | @{const "op -->"} $ t1 $ t2 => accum |> do_contra_formula t1 |> do_co_formula t2 | Const (@{const_name If}, _) $ t1 $ t2 $ t3 => accum |> do_term t1 |> snd |> fold do_co_formula [t2, t3] | Const (@{const_name Let}, _) $ t1 $ t2 => do_co_formula (betapply (t2, t1)) accum | _ => do_term t accum |> snd end |> tap (fn _ => print_g ("\ \ " ^ Syntax.string_of_term ctxt t ^ " : o\<^sup>" ^ string_for_sign sn)) in do_formula end (* The harmless axiom optimization below is somewhat too aggressive in the face of (rather peculiar) user-defined axioms. *) val harmless_consts = [@{const_name ord_class.less}, @{const_name ord_class.less_eq}] val bounteous_consts = [@{const_name bisim}] (* term -> bool *) fun is_harmless_axiom t = Term.add_consts t [] |> filter_out (is_built_in_const true) |> (forall (member (op =) harmless_consts o original_name o fst) orf exists (member (op =) bounteous_consts o fst)) (* cdata -> sign -> term -> accumulator -> accumulator *) fun consider_nondefinitional_axiom cdata sn t = not (is_harmless_axiom t) ? 
consider_general_formula cdata sn t (* cdata -> term -> accumulator -> accumulator *) fun consider_definitional_axiom (cdata as {ext_ctxt as {thy, ...}, ...}) t = if not (is_constr_pattern_formula thy t) then - consider_nondefinitional_axiom cdata Pos t + consider_nondefinitional_axiom cdata Plus t else if is_harmless_axiom t then I else let (* term -> accumulator -> ctype * accumulator *) val do_term = consider_term cdata (* typ -> term -> accumulator -> accumulator *) fun do_all abs_T body_t accum = let val abs_C = fresh_ctype_for_type cdata abs_T in accum |>> push_bound abs_C |> do_formula body_t |>> pop_bound end (* term -> term -> accumulator -> accumulator *) and do_implies t1 t2 = do_term t1 #> snd #> do_formula t2 and do_equals t1 t2 accum = let val (C1, accum) = do_term t1 accum val (C2, accum) = do_term t2 accum in accum ||> add_ctypes_equal C1 C2 end (* term -> accumulator -> accumulator *) and do_formula _ (_, UnsolvableCSet) = unsolvable_accum | do_formula t accum = case t of Const (@{const_name all}, _) $ Abs (_, T1, t1) => do_all T1 t1 accum | @{const Trueprop} $ t1 => do_formula t1 accum | Const (@{const_name "=="}, _) $ t1 $ t2 => do_equals t1 t2 accum | @{const "==>"} $ t1 $ t2 => do_implies t1 t2 accum | @{const Pure.conjunction} $ t1 $ t2 => accum |> do_formula t1 |> do_formula t2 | Const (@{const_name All}, _) $ Abs (_, T1, t1) => do_all T1 t1 accum | Const (@{const_name "op ="}, _) $ t1 $ t2 => do_equals t1 t2 accum | @{const "op &"} $ t1 $ t2 => accum |> do_formula t1 |> do_formula t2 | @{const "op -->"} $ t1 $ t2 => do_implies t1 t2 accum | _ => raise TERM ("Nitpick_Mono.consider_definitional_axiom.\ \do_formula", [t]) in do_formula t end (* Proof.context -> literal list -> term -> ctype -> string *) fun string_for_ctype_of_term ctxt lits t C = Syntax.string_of_term ctxt t ^ " : " ^ string_for_ctype (instantiate_ctype lits C) (* theory -> literal list -> ctype_context -> unit *) fun print_ctype_context ctxt lits ({frees, consts, ...} : ctype_context) = map (fn (x, C) => string_for_ctype_of_term ctxt lits (Free x) C) frees @ - map (fn (x, (C, _)) => string_for_ctype_of_term ctxt lits (Const x) C) consts + map (fn (x, C) => string_for_ctype_of_term ctxt lits (Const x) C) consts |> cat_lines |> print_g -(* extended_context -> typ -> term list -> term list -> term -> bool *) -fun formulas_monotonic (ext_ctxt as {ctxt, ...}) alpha_T def_ts nondef_ts +(* extended_context -> typ -> sign -> term list -> term list -> term -> bool *) +fun formulas_monotonic (ext_ctxt as {ctxt, ...}) alpha_T sn def_ts nondef_ts core_t = let val _ = print_g ("****** " ^ string_for_ctype CAlpha ^ " is " ^ Syntax.string_of_typ ctxt alpha_T) val cdata as {max_fresh, ...} = initial_cdata ext_ctxt alpha_T val (gamma, cset) = (initial_gamma, slack) |> fold (consider_definitional_axiom cdata) def_ts - |> fold (consider_nondefinitional_axiom cdata Pos) nondef_ts - |> consider_general_formula cdata Pos core_t + |> fold (consider_nondefinitional_axiom cdata Plus) nondef_ts + |> consider_general_formula cdata sn core_t in case solve (!max_fresh) cset of SOME lits => (print_ctype_context ctxt lits gamma; true) | _ => false end handle CTYPE (loc, Cs) => raise BAD (loc, commas (map string_for_ctype Cs)) end; diff --git a/src/HOL/Tools/Nitpick/nitpick_nut.ML b/src/HOL/Tools/Nitpick/nitpick_nut.ML --- a/src/HOL/Tools/Nitpick/nitpick_nut.ML +++ b/src/HOL/Tools/Nitpick/nitpick_nut.ML @@ -1,1394 +1,1394 @@ (* Title: HOL/Tools/Nitpick/nitpick_nut.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + 
Copyright 2008, 2009, 2010 Nitpick underlying terms (nuts). *) signature NITPICK_NUT = sig type special_fun = Nitpick_HOL.special_fun type extended_context = Nitpick_HOL.extended_context type scope = Nitpick_Scope.scope type name_pool = Nitpick_Peephole.name_pool type rep = Nitpick_Rep.rep datatype cst = Unity | False | True | Iden | Num of int | Unknown | Unrep | Suc | Add | Subtract | Multiply | Divide | Gcd | Lcm | Fracs | NormFrac | NatToInt | IntToNat datatype op1 = Not | Finite | Converse | Closure | SingletonSet | IsUnknown | Tha | First | Second | Cast datatype op2 = All | Exist | Or | And | Less | Subset | DefEq | Eq | The | Eps | Triad | Union | SetDifference | Intersect | Composition | Product | Image | Apply | Lambda datatype op3 = Let | If datatype nut = Cst of cst * typ * rep | Op1 of op1 * typ * rep * nut | Op2 of op2 * typ * rep * nut * nut | Op3 of op3 * typ * rep * nut * nut * nut | Tuple of typ * rep * nut list | Construct of nut list * typ * rep * nut list | BoundName of int * typ * rep * string | FreeName of string * typ * rep | ConstName of string * typ * rep | BoundRel of Kodkod.n_ary_index * typ * rep * string | FreeRel of Kodkod.n_ary_index * typ * rep * string | RelReg of int * typ * rep | FormulaReg of int * typ * rep structure NameTable : TABLE exception NUT of string * nut list val string_for_nut : Proof.context -> nut -> string val inline_nut : nut -> bool val type_of : nut -> typ val rep_of : nut -> rep val nickname_of : nut -> string val is_skolem_name : nut -> bool val is_eval_name : nut -> bool val is_FreeName : nut -> bool val is_Cst : cst -> nut -> bool val fold_nut : (nut -> 'a -> 'a) -> nut -> 'a -> 'a val map_nut : (nut -> nut) -> nut -> nut val untuple : (nut -> 'a) -> nut -> 'a list val add_free_and_const_names : nut -> nut list * nut list -> nut list * nut list val name_ord : (nut * nut) -> order val the_name : 'a NameTable.table -> nut -> 'a val the_rel : nut NameTable.table -> nut -> Kodkod.n_ary_index val nut_from_term : extended_context -> op2 -> term -> nut val choose_reps_for_free_vars : scope -> nut list -> rep NameTable.table -> nut list * rep NameTable.table val choose_reps_for_consts : scope -> bool -> nut list -> rep NameTable.table -> nut list * rep NameTable.table val choose_reps_for_all_sels : scope -> rep NameTable.table -> nut list * rep NameTable.table val choose_reps_in_nut : scope -> bool -> rep NameTable.table -> bool -> nut -> nut val rename_free_vars : nut list -> name_pool -> nut NameTable.table -> nut list * name_pool * nut NameTable.table val rename_vars_in_nut : name_pool -> nut NameTable.table -> nut -> nut end; structure Nitpick_Nut : NITPICK_NUT = struct open Nitpick_Util open Nitpick_HOL open Nitpick_Scope open Nitpick_Peephole open Nitpick_Rep structure KK = Kodkod datatype cst = Unity | False | True | Iden | Num of int | Unknown | Unrep | Suc | Add | Subtract | Multiply | Divide | Gcd | Lcm | Fracs | NormFrac | NatToInt | IntToNat datatype op1 = Not | Finite | Converse | Closure | SingletonSet | IsUnknown | Tha | First | Second | Cast datatype op2 = All | Exist | Or | And | Less | Subset | DefEq | Eq | The | Eps | Triad | Union | SetDifference | Intersect | Composition | Product | Image | Apply | Lambda datatype op3 = Let | If datatype nut = Cst of cst * typ * rep | Op1 of op1 * typ * rep * nut | Op2 of op2 * typ * rep * nut * nut | Op3 of op3 * typ * rep * nut * nut * nut | Tuple of typ * rep * nut list | Construct of nut list * typ * rep * nut list | BoundName of int * typ * rep * string | FreeName of string * typ 
* rep | ConstName of string * typ * rep | BoundRel of KK.n_ary_index * typ * rep * string | FreeRel of KK.n_ary_index * typ * rep * string | RelReg of int * typ * rep | FormulaReg of int * typ * rep exception NUT of string * nut list (* cst -> string *) fun string_for_cst Unity = "Unity" | string_for_cst False = "False" | string_for_cst True = "True" | string_for_cst Iden = "Iden" | string_for_cst (Num j) = "Num " ^ signed_string_of_int j | string_for_cst Unknown = "Unknown" | string_for_cst Unrep = "Unrep" | string_for_cst Suc = "Suc" | string_for_cst Add = "Add" | string_for_cst Subtract = "Subtract" | string_for_cst Multiply = "Multiply" | string_for_cst Divide = "Divide" | string_for_cst Gcd = "Gcd" | string_for_cst Lcm = "Lcm" | string_for_cst Fracs = "Fracs" | string_for_cst NormFrac = "NormFrac" | string_for_cst NatToInt = "NatToInt" | string_for_cst IntToNat = "IntToNat" (* op1 -> string *) fun string_for_op1 Not = "Not" | string_for_op1 Finite = "Finite" | string_for_op1 Converse = "Converse" | string_for_op1 Closure = "Closure" | string_for_op1 SingletonSet = "SingletonSet" | string_for_op1 IsUnknown = "IsUnknown" | string_for_op1 Tha = "Tha" | string_for_op1 First = "First" | string_for_op1 Second = "Second" | string_for_op1 Cast = "Cast" (* op2 -> string *) fun string_for_op2 All = "All" | string_for_op2 Exist = "Exist" | string_for_op2 Or = "Or" | string_for_op2 And = "And" | string_for_op2 Less = "Less" | string_for_op2 Subset = "Subset" | string_for_op2 DefEq = "DefEq" | string_for_op2 Eq = "Eq" | string_for_op2 The = "The" | string_for_op2 Eps = "Eps" | string_for_op2 Triad = "Triad" | string_for_op2 Union = "Union" | string_for_op2 SetDifference = "SetDifference" | string_for_op2 Intersect = "Intersect" | string_for_op2 Composition = "Composition" | string_for_op2 Product = "Product" | string_for_op2 Image = "Image" | string_for_op2 Apply = "Apply" | string_for_op2 Lambda = "Lambda" (* op3 -> string *) fun string_for_op3 Let = "Let" | string_for_op3 If = "If" (* int -> Proof.context -> nut -> string *) fun basic_string_for_nut indent ctxt u = let (* nut -> string *) val sub = basic_string_for_nut (indent + 1) ctxt in (if indent = 0 then "" else "\n" ^ implode (replicate (2 * indent) " ")) ^ "(" ^ (case u of Cst (c, T, R) => "Cst " ^ string_for_cst c ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R | Op1 (oper, T, R, u1) => "Op1 " ^ string_for_op1 oper ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ sub u1 | Op2 (oper, T, R, u1, u2) => "Op2 " ^ string_for_op2 oper ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ sub u1 ^ " " ^ sub u2 | Op3 (oper, T, R, u1, u2, u3) => "Op3 " ^ string_for_op3 oper ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ sub u1 ^ " " ^ sub u2 ^ " " ^ sub u3 | Tuple (T, R, us) => "Tuple " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ implode (map sub us) | Construct (us', T, R, us) => "Construct " ^ implode (map sub us') ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ implode (map sub us) | BoundName (j, T, R, nick) => "BoundName " ^ signed_string_of_int j ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ nick | FreeName (s, T, R) => "FreeName " ^ s ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R | ConstName (s, T, R) => "ConstName " ^ s ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R | BoundRel ((n, j), T, R, nick) => "BoundRel " ^ string_of_int n ^ "." 
^ signed_string_of_int j ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ nick | FreeRel ((n, j), T, R, nick) => "FreeRel " ^ string_of_int n ^ "." ^ signed_string_of_int j ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ nick | RelReg (j, T, R) => "RelReg " ^ signed_string_of_int j ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R | FormulaReg (j, T, R) => "FormulaReg " ^ signed_string_of_int j ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R) ^ ")" end (* Proof.context -> nut -> string *) val string_for_nut = basic_string_for_nut 0 (* nut -> bool *) fun inline_nut (Op1 _) = false | inline_nut (Op2 _) = false | inline_nut (Op3 _) = false | inline_nut (Tuple (_, _, us)) = forall inline_nut us | inline_nut _ = true (* nut -> typ *) fun type_of (Cst (_, T, _)) = T | type_of (Op1 (_, T, _, _)) = T | type_of (Op2 (_, T, _, _, _)) = T | type_of (Op3 (_, T, _, _, _, _)) = T | type_of (Tuple (T, _, _)) = T | type_of (Construct (_, T, _, _)) = T | type_of (BoundName (_, T, _, _)) = T | type_of (FreeName (_, T, _)) = T | type_of (ConstName (_, T, _)) = T | type_of (BoundRel (_, T, _, _)) = T | type_of (FreeRel (_, T, _, _)) = T | type_of (RelReg (_, T, _)) = T | type_of (FormulaReg (_, T, _)) = T (* nut -> rep *) fun rep_of (Cst (_, _, R)) = R | rep_of (Op1 (_, _, R, _)) = R | rep_of (Op2 (_, _, R, _, _)) = R | rep_of (Op3 (_, _, R, _, _, _)) = R | rep_of (Tuple (_, R, _)) = R | rep_of (Construct (_, _, R, _)) = R | rep_of (BoundName (_, _, R, _)) = R | rep_of (FreeName (_, _, R)) = R | rep_of (ConstName (_, _, R)) = R | rep_of (BoundRel (_, _, R, _)) = R | rep_of (FreeRel (_, _, R, _)) = R | rep_of (RelReg (_, _, R)) = R | rep_of (FormulaReg (_, _, R)) = R (* nut -> string *) fun nickname_of (BoundName (_, _, _, nick)) = nick | nickname_of (FreeName (s, _, _)) = s | nickname_of (ConstName (s, _, _)) = s | nickname_of (BoundRel (_, _, _, nick)) = nick | nickname_of (FreeRel (_, _, _, nick)) = nick | nickname_of u = raise NUT ("Nitpick_Nut.nickname_of", [u]) (* nut -> bool *) fun is_skolem_name u = space_explode name_sep (nickname_of u) |> exists (String.isPrefix skolem_prefix) handle NUT ("Nitpick_Nut.nickname_of", _) => false fun is_eval_name u = String.isPrefix eval_prefix (nickname_of u) handle NUT ("Nitpick_Nut.nickname_of", _) => false fun is_FreeName (FreeName _) = true | is_FreeName _ = false (* cst -> nut -> bool *) fun is_Cst cst (Cst (cst', _, _)) = (cst = cst') | is_Cst _ _ = false (* (nut -> 'a -> 'a) -> nut -> 'a -> 'a *) fun fold_nut f u = case u of Op1 (_, _, _, u1) => fold_nut f u1 | Op2 (_, _, _, u1, u2) => fold_nut f u1 #> fold_nut f u2 | Op3 (_, _, _, u1, u2, u3) => fold_nut f u1 #> fold_nut f u2 #> fold_nut f u3 | Tuple (_, _, us) => fold (fold_nut f) us | Construct (us', _, _, us) => fold (fold_nut f) us #> fold (fold_nut f) us' | _ => f u (* (nut -> nut) -> nut -> nut *) fun map_nut f u = case u of Op1 (oper, T, R, u1) => Op1 (oper, T, R, map_nut f u1) | Op2 (oper, T, R, u1, u2) => Op2 (oper, T, R, map_nut f u1, map_nut f u2) | Op3 (oper, T, R, u1, u2, u3) => Op3 (oper, T, R, map_nut f u1, map_nut f u2, map_nut f u3) | Tuple (T, R, us) => Tuple (T, R, map (map_nut f) us) | Construct (us', T, R, us) => Construct (map (map_nut f) us', T, R, map (map_nut f) us) | _ => f u (* nut * nut -> order *) fun name_ord (BoundName (j1, _, _, _), BoundName (j2, _, _, _)) = int_ord (j1, j2) | name_ord (BoundName _, _) = LESS | name_ord (_, BoundName _) = GREATER | name_ord (FreeName (s1, T1, _), FreeName (s2, T2, _)) = (case 
fast_string_ord (s1, s2) of EQUAL => TermOrd.typ_ord (T1, T2) | ord => ord) | name_ord (FreeName _, _) = LESS | name_ord (_, FreeName _) = GREATER | name_ord (ConstName (s1, T1, _), ConstName (s2, T2, _)) = (case fast_string_ord (s1, s2) of EQUAL => TermOrd.typ_ord (T1, T2) | ord => ord) | name_ord (u1, u2) = raise NUT ("Nitpick_Nut.name_ord", [u1, u2]) (* nut -> nut -> int *) fun num_occs_in_nut needle_u stack_u = fold_nut (fn u => if u = needle_u then Integer.add 1 else I) stack_u 0 (* nut -> nut -> bool *) val is_subterm_of = not_equal 0 oo num_occs_in_nut (* nut -> nut -> nut -> nut *) fun substitute_in_nut needle_u needle_u' = map_nut (fn u => if u = needle_u then needle_u' else u) (* nut -> nut list * nut list -> nut list * nut list *) val add_free_and_const_names = fold_nut (fn u => case u of FreeName _ => apfst (insert (op =) u) | ConstName _ => apsnd (insert (op =) u) | _ => I) (* nut -> rep -> nut *) fun modify_name_rep (BoundName (j, T, _, nick)) R = BoundName (j, T, R, nick) | modify_name_rep (FreeName (s, T, _)) R = FreeName (s, T, R) | modify_name_rep (ConstName (s, T, _)) R = ConstName (s, T, R) | modify_name_rep u _ = raise NUT ("Nitpick_Nut.modify_name_rep", [u]) structure NameTable = Table(type key = nut val ord = name_ord) (* 'a NameTable.table -> nut -> 'a *) fun the_name table name = case NameTable.lookup table name of SOME u => u | NONE => raise NUT ("Nitpick_Nut.the_name", [name]) (* nut NameTable.table -> nut -> KK.n_ary_index *) fun the_rel table name = case the_name table name of FreeRel (x, _, _, _) => x | u => raise NUT ("Nitpick_Nut.the_rel", [u]) (* typ * term -> typ * term *) fun mk_fst (_, Const (@{const_name Pair}, T) $ t1 $ _) = (domain_type T, t1) | mk_fst (T, t) = let val res_T = fst (HOLogic.dest_prodT T) in (res_T, Const (@{const_name fst}, T --> res_T) $ t) end fun mk_snd (_, Const (@{const_name Pair}, T) $ _ $ t2) = (domain_type (range_type T), t2) | mk_snd (T, t) = let val res_T = snd (HOLogic.dest_prodT T) in (res_T, Const (@{const_name snd}, T --> res_T) $ t) end (* typ * term -> (typ * term) list *) fun factorize (z as (Type ("*", _), _)) = maps factorize [mk_fst z, mk_snd z] | factorize z = [z] (* extended_context -> op2 -> term -> nut *) fun nut_from_term (ext_ctxt as {thy, fast_descrs, special_funs, ...}) eq = let (* string list -> typ list -> term -> nut *) fun aux eq ss Ts t = let (* term -> nut *) val sub = aux Eq ss Ts val sub' = aux eq ss Ts (* string -> typ -> term -> nut *) fun sub_abs s T = aux eq (s :: ss) (T :: Ts) (* typ -> term -> term -> nut *) fun sub_equals T t1 t2 = let val (binder_Ts, body_T) = strip_type (domain_type T) val n = length binder_Ts in if eq = Eq andalso n > 0 then let val t1 = incr_boundvars n t1 val t2 = incr_boundvars n t2 val xs = map Bound (n - 1 downto 0) val equation = Const (@{const_name "op ="}, body_T --> body_T --> bool_T) $ betapplys (t1, xs) $ betapplys (t2, xs) val t = fold_rev (fn T => fn (t, j) => (Const (@{const_name All}, T --> bool_T) $ Abs ("x" ^ nat_subscript j, T, t), j - 1)) binder_Ts (equation, n) |> fst in sub' t end else Op2 (eq, bool_T, Any, aux Eq ss Ts t1, aux Eq ss Ts t2) end (* op2 -> string -> typ -> term -> nut *) fun do_quantifier quant s T t1 = let val bound_u = BoundName (length Ts, T, Any, s) val body_u = sub_abs s T t1 in if is_subterm_of bound_u body_u then Op2 (quant, bool_T, Any, bound_u, body_u) else body_u end (* term -> term list -> nut *) fun do_apply t0 ts = let val (ts', t2) = split_last ts val t1 = list_comb (t0, ts') val T1 = fastype_of1 (Ts, t1) in Op2 (Apply, 
range_type T1, Any, sub t1, sub t2) end in case strip_comb t of (Const (@{const_name all}, _), [Abs (s, T, t1)]) => do_quantifier All s T t1 | (t0 as Const (@{const_name all}, T), [t1]) => sub' (t0 $ eta_expand Ts t1 1) | (Const (@{const_name "=="}, T), [t1, t2]) => sub_equals T t1 t2 | (Const (@{const_name "==>"}, _), [t1, t2]) => Op2 (Or, prop_T, Any, Op1 (Not, prop_T, Any, sub t1), sub' t2) | (Const (@{const_name Pure.conjunction}, _), [t1, t2]) => Op2 (And, prop_T, Any, sub' t1, sub' t2) | (Const (@{const_name Trueprop}, _), [t1]) => sub' t1 | (Const (@{const_name Not}, _), [t1]) => (case sub t1 of Op1 (Not, _, _, u11) => u11 | u1 => Op1 (Not, bool_T, Any, u1)) | (Const (@{const_name False}, T), []) => Cst (False, T, Any) | (Const (@{const_name True}, T), []) => Cst (True, T, Any) | (Const (@{const_name All}, _), [Abs (s, T, t1)]) => do_quantifier All s T t1 | (t0 as Const (@{const_name All}, T), [t1]) => sub' (t0 $ eta_expand Ts t1 1) | (Const (@{const_name Ex}, _), [Abs (s, T, t1)]) => do_quantifier Exist s T t1 | (t0 as Const (@{const_name Ex}, T), [t1]) => sub' (t0 $ eta_expand Ts t1 1) | (t0 as Const (@{const_name The}, T), [t1]) => if fast_descrs then Op2 (The, range_type T, Any, sub t1, sub (Const (@{const_name undefined_fast_The}, range_type T))) else do_apply t0 [t1] | (t0 as Const (@{const_name Eps}, T), [t1]) => if fast_descrs then Op2 (Eps, range_type T, Any, sub t1, sub (Const (@{const_name undefined_fast_Eps}, range_type T))) else do_apply t0 [t1] | (Const (@{const_name "op ="}, T), [t1, t2]) => sub_equals T t1 t2 | (Const (@{const_name "op &"}, _), [t1, t2]) => Op2 (And, bool_T, Any, sub' t1, sub' t2) | (Const (@{const_name "op |"}, _), [t1, t2]) => Op2 (Or, bool_T, Any, sub t1, sub t2) | (Const (@{const_name "op -->"}, _), [t1, t2]) => Op2 (Or, bool_T, Any, Op1 (Not, bool_T, Any, sub t1), sub' t2) | (Const (@{const_name If}, T), [t1, t2, t3]) => Op3 (If, nth_range_type 3 T, Any, sub t1, sub t2, sub t3) | (Const (@{const_name Let}, T), [t1, Abs (s, T', t2)]) => Op3 (Let, nth_range_type 2 T, Any, BoundName (length Ts, T', Any, s), sub t1, sub_abs s T' t2) | (t0 as Const (@{const_name Let}, T), [t1, t2]) => sub (t0 $ t1 $ eta_expand Ts t2 1) | (@{const Unity}, []) => Cst (Unity, @{typ unit}, Any) | (Const (@{const_name Pair}, T), [t1, t2]) => Tuple (nth_range_type 2 T, Any, map sub [t1, t2]) | (Const (@{const_name fst}, T), [t1]) => Op1 (First, range_type T, Any, sub t1) | (Const (@{const_name snd}, T), [t1]) => Op1 (Second, range_type T, Any, sub t1) | (Const (@{const_name Id}, T), []) => Cst (Iden, T, Any) | (Const (@{const_name insert}, T), [t1, t2]) => (case t2 of Abs (_, _, @{const False}) => Op1 (SingletonSet, nth_range_type 2 T, Any, sub t1) | _ => Op2 (Union, nth_range_type 2 T, Any, Op1 (SingletonSet, nth_range_type 2 T, Any, sub t1), sub t2)) | (Const (@{const_name converse}, T), [t1]) => Op1 (Converse, range_type T, Any, sub t1) | (Const (@{const_name trancl}, T), [t1]) => Op1 (Closure, range_type T, Any, sub t1) | (Const (@{const_name rel_comp}, T), [t1, t2]) => Op2 (Composition, nth_range_type 2 T, Any, sub t1, sub t2) | (Const (@{const_name Sigma}, T), [t1, Abs (s, T', t2')]) => Op2 (Product, nth_range_type 2 T, Any, sub t1, sub_abs s T' t2') | (Const (@{const_name image}, T), [t1, t2]) => Op2 (Image, nth_range_type 2 T, Any, sub t1, sub t2) | (Const (@{const_name Suc}, T), []) => Cst (Suc, T, Any) | (Const (@{const_name finite}, T), [t1]) => (if is_finite_type ext_ctxt (domain_type T) then Cst (True, bool_T, Any) else case t1 of Const (@{const_name top}, _) 
=> Cst (False, bool_T, Any) | _ => Op1 (Finite, bool_T, Any, sub t1)) | (Const (@{const_name nat}, T), []) => Cst (IntToNat, T, Any) | (Const (@{const_name zero_nat_inst.zero_nat}, T), []) => Cst (Num 0, T, Any) | (Const (@{const_name one_nat_inst.one_nat}, T), []) => Cst (Num 1, T, Any) | (Const (@{const_name plus_nat_inst.plus_nat}, T), []) => Cst (Add, T, Any) | (Const (@{const_name minus_nat_inst.minus_nat}, T), []) => Cst (Subtract, T, Any) | (Const (@{const_name times_nat_inst.times_nat}, T), []) => Cst (Multiply, T, Any) | (Const (@{const_name div_nat_inst.div_nat}, T), []) => Cst (Divide, T, Any) | (Const (@{const_name ord_nat_inst.less_nat}, T), [t1, t2]) => Op2 (Less, bool_T, Any, sub t1, sub t2) | (Const (@{const_name ord_nat_inst.less_eq_nat}, T), [t1, t2]) => Op1 (Not, bool_T, Any, Op2 (Less, bool_T, Any, sub t2, sub t1)) | (Const (@{const_name nat_gcd}, T), []) => Cst (Gcd, T, Any) | (Const (@{const_name nat_lcm}, T), []) => Cst (Lcm, T, Any) | (Const (@{const_name zero_int_inst.zero_int}, T), []) => Cst (Num 0, T, Any) | (Const (@{const_name one_int_inst.one_int}, T), []) => Cst (Num 1, T, Any) | (Const (@{const_name plus_int_inst.plus_int}, T), []) => Cst (Add, T, Any) | (Const (@{const_name minus_int_inst.minus_int}, T), []) => Cst (Subtract, T, Any) | (Const (@{const_name times_int_inst.times_int}, T), []) => Cst (Multiply, T, Any) | (Const (@{const_name div_int_inst.div_int}, T), []) => Cst (Divide, T, Any) | (Const (@{const_name uminus_int_inst.uminus_int}, T), []) => let val num_T = domain_type T in Op2 (Apply, num_T --> num_T, Any, Cst (Subtract, num_T --> num_T --> num_T, Any), Cst (Num 0, num_T, Any)) end | (Const (@{const_name ord_int_inst.less_int}, T), [t1, t2]) => Op2 (Less, bool_T, Any, sub t1, sub t2) | (Const (@{const_name ord_int_inst.less_eq_int}, T), [t1, t2]) => Op1 (Not, bool_T, Any, Op2 (Less, bool_T, Any, sub t2, sub t1)) | (Const (@{const_name unknown}, T), []) => Cst (Unknown, T, Any) | (Const (@{const_name is_unknown}, T), [t1]) => Op1 (IsUnknown, bool_T, Any, sub t1) | (Const (@{const_name Tha}, Type ("fun", [_, T2])), [t1]) => Op1 (Tha, T2, Any, sub t1) | (Const (@{const_name Frac}, T), []) => Cst (Fracs, T, Any) | (Const (@{const_name norm_frac}, T), []) => Cst (NormFrac, T, Any) | (Const (@{const_name of_nat}, T as @{typ "nat => int"}), []) => Cst (NatToInt, T, Any) | (Const (@{const_name of_nat}, T as @{typ "unsigned_bit word => signed_bit word"}), []) => Cst (NatToInt, T, Any) | (Const (@{const_name lower_semilattice_fun_inst.inf_fun}, T), [t1, t2]) => Op2 (Intersect, nth_range_type 2 T, Any, sub t1, sub t2) | (Const (@{const_name upper_semilattice_fun_inst.sup_fun}, T), [t1, t2]) => Op2 (Union, nth_range_type 2 T, Any, sub t1, sub t2) | (t0 as Const (@{const_name minus_fun_inst.minus_fun}, T), [t1, t2]) => Op2 (SetDifference, nth_range_type 2 T, Any, sub t1, sub t2) | (t0 as Const (@{const_name ord_fun_inst.less_eq_fun}, T), [t1, t2]) => Op2 (Subset, bool_T, Any, sub t1, sub t2) | (t0 as Const (x as (s, T)), ts) => if is_constr thy x then case num_binder_types T - length ts of 0 => Construct (map ((fn (s, T) => ConstName (s, T, Any)) o nth_sel_for_constr x) (~1 upto num_sels_for_constr_type T - 1), body_type T, Any, ts |> map (`(curry fastype_of1 Ts)) |> maps factorize |> map (sub o snd)) | k => sub (eta_expand Ts t k) else if String.isPrefix numeral_prefix s then Cst (Num (the (Int.fromString (unprefix numeral_prefix s))), T, Any) else (case arity_of_built_in_const fast_descrs x of SOME n => (case n - length ts of 0 => raise TERM 
("Nitpick_Nut.nut_from_term.aux", [t]) | k => if k > 0 then sub (eta_expand Ts t k) else do_apply t0 ts) | NONE => if null ts then ConstName (s, T, Any) else do_apply t0 ts) | (Free (s, T), []) => FreeName (s, T, Any) | (Var _, []) => raise TERM ("Nitpick_Nut.nut_from_term.aux", [t]) | (Bound j, []) => BoundName (length Ts - j - 1, nth Ts j, Any, nth ss j) | (Abs (s, T, t1), []) => Op2 (Lambda, T --> fastype_of1 (T :: Ts, t1), Any, BoundName (length Ts, T, Any, s), sub_abs s T t1) | (t0, ts) => do_apply t0 ts end in aux eq [] [] end (* scope -> typ -> rep *) fun rep_for_abs_fun scope T = let val (R1, R2) = best_non_opt_symmetric_reps_for_fun_type scope T in Func (R1, (card_of_rep R1 <> card_of_rep R2 ? Opt) R2) end (* scope -> nut -> nut list * rep NameTable.table -> nut list * rep NameTable.table *) fun choose_rep_for_free_var scope v (vs, table) = let val R = best_non_opt_set_rep_for_type scope (type_of v) val v = modify_name_rep v R in (v :: vs, NameTable.update (v, R) table) end (* scope -> bool -> nut -> nut list * rep NameTable.table -> nut list * rep NameTable.table *) fun choose_rep_for_const (scope as {ext_ctxt as {thy, ctxt, ...}, datatypes, ofs, ...}) all_exact v (vs, table) = let val x as (s, T) = (nickname_of v, type_of v) val R = (if is_abs_fun thy x then rep_for_abs_fun else if is_rep_fun thy x then Func oo best_non_opt_symmetric_reps_for_fun_type else if all_exact orelse is_skolem_name v orelse member (op =) [@{const_name undefined_fast_The}, @{const_name undefined_fast_Eps}, @{const_name bisim}, @{const_name bisim_iterator_max}] (original_name s) then best_non_opt_set_rep_for_type else if member (op =) [@{const_name set}, @{const_name distinct}, @{const_name ord_class.less}, @{const_name ord_class.less_eq}] (original_name s) then best_set_rep_for_type else best_opt_set_rep_for_type) scope T val v = modify_name_rep v R in (v :: vs, NameTable.update (v, R) table) end (* scope -> nut list -> rep NameTable.table -> nut list * rep NameTable.table *) fun choose_reps_for_free_vars scope vs table = fold (choose_rep_for_free_var scope) vs ([], table) (* scope -> bool -> nut list -> rep NameTable.table -> nut list * rep NameTable.table *) fun choose_reps_for_consts scope all_exact vs table = fold (choose_rep_for_const scope all_exact) vs ([], table) (* scope -> styp -> int -> nut list * rep NameTable.table -> nut list * rep NameTable.table *) fun choose_rep_for_nth_sel_for_constr (scope as {ext_ctxt, ...}) (x as (_, T)) n (vs, table) = let val (s', T') = boxed_nth_sel_for_constr ext_ctxt x n val R' = if n = ~1 orelse is_word_type (body_type T) orelse (is_fun_type (range_type T') andalso is_boolean_type (body_type T')) then best_non_opt_set_rep_for_type scope T' else best_opt_set_rep_for_type scope T' |> unopt_rep val v = ConstName (s', T', R') in (v :: vs, NameTable.update (v, R') table) end (* scope -> styp -> nut list * rep NameTable.table -> nut list * rep NameTable.table *) fun choose_rep_for_sels_for_constr scope (x as (_, T)) = fold_rev (choose_rep_for_nth_sel_for_constr scope x) (~1 upto num_sels_for_constr_type T - 1) (* scope -> dtype_spec -> nut list * rep NameTable.table -> nut list * rep NameTable.table *) -fun choose_rep_for_sels_of_datatype _ ({shallow = true, ...} : dtype_spec) = I +fun choose_rep_for_sels_of_datatype _ ({deep = false, ...} : dtype_spec) = I | choose_rep_for_sels_of_datatype scope {constrs, ...} = fold_rev (choose_rep_for_sels_for_constr scope o #const) constrs (* scope -> rep NameTable.table -> nut list * rep NameTable.table *) fun 
choose_reps_for_all_sels (scope as {datatypes, ...}) = fold (choose_rep_for_sels_of_datatype scope) datatypes o pair [] (* scope -> nut -> rep NameTable.table -> rep NameTable.table *) fun choose_rep_for_bound_var scope v table = let val R = best_one_rep_for_type scope (type_of v) in NameTable.update (v, R) table end (* A nut is said to be constructive if whenever it evaluates to unknown in our three-valued logic, it would evaluate to a unrepresentable value ("unrep") according to the HOL semantics. For example, "Suc n" is constructive if "n" is representable or "Unrep", because unknown implies unrep. *) (* nut -> bool *) fun is_constructive u = is_Cst Unrep u orelse (not (is_fun_type (type_of u)) andalso not (is_opt_rep (rep_of u))) orelse case u of Cst (Num _, _, _) => true | Cst (cst, T, _) => cst = Suc orelse (body_type T = nat_T andalso cst = Add) | Op2 (Apply, _, _, u1, u2) => forall is_constructive [u1, u2] | Op3 (If, _, _, u1, u2, u3) => not (is_opt_rep (rep_of u1)) andalso forall is_constructive [u2, u3] | Tuple (_, _, us) => forall is_constructive us | Construct (_, _, _, us) => forall is_constructive us | _ => false (* nut -> nut *) fun optimize_unit u = if rep_of u = Unit then Cst (Unity, type_of u, Unit) else u (* typ -> rep -> nut *) fun unknown_boolean T R = Cst (case R of Formula Pos => False | Formula Neg => True | _ => Unknown, T, R) (* op1 -> typ -> rep -> nut -> nut *) fun s_op1 oper T R u1 = ((if oper = Not then if is_Cst True u1 then Cst (False, T, R) else if is_Cst False u1 then Cst (True, T, R) else raise SAME () else raise SAME ()) handle SAME () => Op1 (oper, T, R, u1)) |> optimize_unit (* op2 -> typ -> rep -> nut -> nut -> nut *) fun s_op2 oper T R u1 u2 = ((case oper of Or => if exists (is_Cst True) [u1, u2] then Cst (True, T, unopt_rep R) else if is_Cst False u1 then u2 else if is_Cst False u2 then u1 else raise SAME () | And => if exists (is_Cst False) [u1, u2] then Cst (False, T, unopt_rep R) else if is_Cst True u1 then u2 else if is_Cst True u2 then u1 else raise SAME () | Eq => (case pairself (is_Cst Unrep) (u1, u2) of (true, true) => unknown_boolean T R | (false, false) => raise SAME () | _ => if forall (is_opt_rep o rep_of) [u1, u2] then raise SAME () else Cst (False, T, Formula Neut)) | Triad => if is_Cst True u1 then u1 else if is_Cst False u2 then u2 else raise SAME () | Apply => if is_Cst Unrep u1 then Cst (Unrep, T, R) else if is_Cst Unrep u2 then if is_constructive u1 then Cst (Unrep, T, R) else if is_boolean_type T then if is_FreeName u1 then Cst (False, T, Formula Neut) else unknown_boolean T R else case u1 of Op2 (Apply, _, _, ConstName (@{const_name List.append}, _, _), _) => Cst (Unrep, T, R) | _ => raise SAME () else raise SAME () | _ => raise SAME ()) handle SAME () => Op2 (oper, T, R, u1, u2)) |> optimize_unit (* op3 -> typ -> rep -> nut -> nut -> nut -> nut *) fun s_op3 oper T R u1 u2 u3 = ((case oper of Let => if inline_nut u2 orelse num_occs_in_nut u1 u3 < 2 then substitute_in_nut u1 u2 u3 else raise SAME () | _ => raise SAME ()) handle SAME () => Op3 (oper, T, R, u1, u2, u3)) |> optimize_unit (* typ -> rep -> nut list -> nut *) fun s_tuple T R us = (if exists (is_Cst Unrep) us then Cst (Unrep, T, R) else Tuple (T, R, us)) |> optimize_unit (* theory -> nut -> nut *) fun optimize_nut u = case u of Op1 (oper, T, R, u1) => s_op1 oper T R (optimize_nut u1) | Op2 (oper, T, R, u1, u2) => s_op2 oper T R (optimize_nut u1) (optimize_nut u2) | Op3 (oper, T, R, u1, u2, u3) => s_op3 oper T R (optimize_nut u1) (optimize_nut u2) (optimize_nut u3) | 
Tuple (T, R, us) => s_tuple T R (map optimize_nut us) | Construct (us', T, R, us) => Construct (us', T, R, map optimize_nut us) | _ => optimize_unit u (* (nut -> 'a) -> nut -> 'a list *) fun untuple f (Tuple (_, _, us)) = maps (untuple f) us | untuple f u = if rep_of u = Unit then [] else [f u] (* scope -> bool -> rep NameTable.table -> bool -> nut -> nut *) fun choose_reps_in_nut (scope as {ext_ctxt as {thy, ctxt, ...}, card_assigns, bits, datatypes, ofs, ...}) liberal table def = let val bool_atom_R = Atom (2, offset_of_type ofs bool_T) (* polarity -> bool -> rep *) fun bool_rep polar opt = if polar = Neut andalso opt then Opt bool_atom_R else Formula polar (* nut -> nut -> nut *) fun triad u1 u2 = s_op2 Triad (type_of u1) (Opt bool_atom_R) u1 u2 (* (polarity -> nut) -> nut *) fun triad_fn f = triad (f Pos) (f Neg) (* rep NameTable.table -> bool -> polarity -> nut -> nut -> nut *) fun unrepify_nut_in_nut table def polar needle_u = let val needle_T = type_of needle_u in substitute_in_nut needle_u (Cst (if is_fun_type needle_T then Unknown else Unrep, needle_T, Any)) #> aux table def polar end (* rep NameTable.table -> bool -> polarity -> nut -> nut *) and aux table def polar u = let (* bool -> polarity -> nut -> nut *) val gsub = aux table (* nut -> nut *) val sub = gsub false Neut in case u of Cst (False, T, _) => Cst (False, T, Formula Neut) | Cst (True, T, _) => Cst (True, T, Formula Neut) | Cst (Num j, T, _) => if is_word_type T then Cst (if is_twos_complement_representable bits j then Num j else Unrep, T, best_opt_set_rep_for_type scope T) else (case spec_of_type scope T of (1, j0) => if j = 0 then Cst (Unity, T, Unit) else Cst (Unrep, T, Opt (Atom (1, j0))) | (k, j0) => let val ok = (if T = int_T then atom_for_int (k, j0) j <> ~1 else j < k) in if ok then Cst (Num j, T, Atom (k, j0)) else Cst (Unrep, T, Opt (Atom (k, j0))) end) | Cst (Suc, T as Type ("fun", [T1, _]), _) => let val R = Atom (spec_of_type scope T1) in Cst (Suc, T, Func (R, Opt R)) end | Cst (Fracs, T, _) => Cst (Fracs, T, best_non_opt_set_rep_for_type scope T) | Cst (NormFrac, T, _) => let val R1 = Atom (spec_of_type scope (domain_type T)) in Cst (NormFrac, T, Func (R1, Func (R1, Opt (Struct [R1, R1])))) end | Cst (cst, T, _) => if cst = Unknown orelse cst = Unrep then case (is_boolean_type T, polar) of (true, Pos) => Cst (False, T, Formula Pos) | (true, Neg) => Cst (True, T, Formula Neg) | _ => Cst (cst, T, best_opt_set_rep_for_type scope T) else if member (op =) [Add, Subtract, Multiply, Divide, Gcd, Lcm] cst then let val T1 = domain_type T val R1 = Atom (spec_of_type scope T1) val total = T1 = nat_T andalso (cst = Subtract orelse cst = Divide orelse cst = Gcd) in Cst (cst, T, Func (R1, Func (R1, (not total ? Opt) R1))) end else if cst = NatToInt orelse cst = IntToNat then let val (dom_card, dom_j0) = spec_of_type scope (domain_type T) val (ran_card, ran_j0) = spec_of_type scope (range_type T) val total = not (is_word_type (domain_type T)) andalso (if cst = NatToInt then max_int_for_card ran_card >= dom_card + 1 else max_int_for_card dom_card < ran_card) in Cst (cst, T, Func (Atom (dom_card, dom_j0), Atom (ran_card, ran_j0) |> not total ? 
Opt)) end else Cst (cst, T, best_set_rep_for_type scope T) | Op1 (Not, T, _, u1) => (case gsub def (flip_polarity polar) u1 of Op2 (Triad, T, R, u11, u12) => triad (s_op1 Not T (Formula Pos) u12) (s_op1 Not T (Formula Neg) u11) | u1' => s_op1 Not T (flip_rep_polarity (rep_of u1')) u1') | Op1 (IsUnknown, T, _, u1) => let val u1 = sub u1 in if is_opt_rep (rep_of u1) then Op1 (IsUnknown, T, Formula Neut, u1) else Cst (False, T, Formula Neut) end | Op1 (oper, T, _, u1) => let val u1 = sub u1 val R1 = rep_of u1 val R = case oper of Finite => bool_rep polar (is_opt_rep R1) | _ => (if is_opt_rep R1 then best_opt_set_rep_for_type else best_non_opt_set_rep_for_type) scope T in s_op1 oper T R u1 end | Op2 (Less, T, _, u1, u2) => let val u1 = sub u1 val u2 = sub u2 val R = bool_rep polar (exists (is_opt_rep o rep_of) [u1, u2]) in s_op2 Less T R u1 u2 end | Op2 (Subset, T, _, u1, u2) => let val u1 = sub u1 val u2 = sub u2 val opt = exists (is_opt_rep o rep_of) [u1, u2] val R = bool_rep polar opt in if is_opt_rep R then triad_fn (fn polar => s_op2 Subset T (Formula polar) u1 u2) else if opt andalso polar = Pos andalso not (is_concrete_type datatypes (type_of u1)) then Cst (False, T, Formula Pos) else s_op2 Subset T R u1 u2 end | Op2 (DefEq, T, _, u1, u2) => s_op2 DefEq T (Formula Neut) (sub u1) (sub u2) | Op2 (Eq, T, _, u1, u2) => let val u1' = sub u1 val u2' = sub u2 (* unit -> nut *) fun non_opt_case () = s_op2 Eq T (Formula polar) u1' u2' (* unit -> nut *) fun opt_opt_case () = if polar = Neut then triad_fn (fn polar => s_op2 Eq T (Formula polar) u1' u2') else non_opt_case () (* nut -> nut *) fun hybrid_case u = (* hackish optimization *) if is_constructive u then s_op2 Eq T (Formula Neut) u1' u2' else opt_opt_case () in if liberal orelse polar = Neg orelse is_concrete_type datatypes (type_of u1) then case (is_opt_rep (rep_of u1'), is_opt_rep (rep_of u2')) of (true, true) => opt_opt_case () | (true, false) => hybrid_case u1' | (false, true) => hybrid_case u2' | (false, false) => non_opt_case () else Cst (False, T, Formula Pos) |> polar = Neut ? (fn pos_u => triad pos_u (gsub def Neg u)) end | Op2 (Image, T, _, u1, u2) => let val u1' = sub u1 val u2' = sub u2 in (case (rep_of u1', rep_of u2') of (Func (R11, R12), Func (R21, Formula Neut)) => if R21 = R11 andalso is_lone_rep R12 then let val R = best_non_opt_set_rep_for_type scope T |> exists (is_opt_rep o rep_of) [u1', u2'] ? opt_rep ofs T in s_op2 Image T R u1' u2' end else raise SAME () | _ => raise SAME ()) handle SAME () => let val T1 = type_of u1 val dom_T = domain_type T1 val ran_T = range_type T1 val x_u = BoundName (~1, dom_T, Any, "image.x") val y_u = BoundName (~2, ran_T, Any, "image.y") in Op2 (Lambda, T, Any, y_u, Op2 (Exist, bool_T, Any, x_u, Op2 (And, bool_T, Any, case u2 of Op2 (Lambda, _, _, u21, u22) => if num_occs_in_nut u21 u22 = 0 then u22 else Op2 (Apply, bool_T, Any, u2, x_u) | _ => Op2 (Apply, bool_T, Any, u2, x_u), Op2 (Eq, bool_T, Any, y_u, Op2 (Apply, ran_T, Any, u1, x_u))))) |> sub end end | Op2 (Apply, T, _, u1, u2) => let val u1 = sub u1 val u2 = sub u2 val T1 = type_of u1 val R1 = rep_of u1 val R2 = rep_of u2 val opt = case (u1, is_opt_rep R2) of (ConstName (@{const_name set}, _, _), false) => false | _ => exists is_opt_rep [R1, R2] val ran_R = if is_boolean_type T then bool_rep polar opt else smart_range_rep ofs T1 (fn () => card_of_type card_assigns T) (unopt_rep R1) |> opt ? 
opt_rep ofs T in s_op2 Apply T ran_R u1 u2 end | Op2 (Lambda, T, _, u1, u2) => (case best_set_rep_for_type scope T of Unit => Cst (Unity, T, Unit) | R as Func (R1, _) => let val table' = NameTable.update (u1, R1) table val u1' = aux table' false Neut u1 val u2' = aux table' false Neut u2 val R = if is_opt_rep (rep_of u2') orelse (range_type T = bool_T andalso not (is_Cst False (unrepify_nut_in_nut table false Neut u1 u2 |> optimize_nut))) then opt_rep ofs T R else unopt_rep R in s_op2 Lambda T R u1' u2' end | R => raise NUT ("Nitpick_Nut.aux.choose_reps_in_nut", [u])) | Op2 (oper, T, _, u1, u2) => if oper = All orelse oper = Exist then let val table' = fold (choose_rep_for_bound_var scope) (untuple I u1) table val u1' = aux table' def polar u1 val u2' = aux table' def polar u2 in if polar = Neut andalso is_opt_rep (rep_of u2') then triad_fn (fn polar => gsub def polar u) else let val quant_u = s_op2 oper T (Formula polar) u1' u2' in if def orelse (liberal andalso (polar = Pos) = (oper = All)) orelse is_complete_type datatypes (type_of u1) then quant_u else let val connective = if oper = All then And else Or val unrepified_u = unrepify_nut_in_nut table def polar u1 u2 in s_op2 connective T (min_rep (rep_of quant_u) (rep_of unrepified_u)) quant_u unrepified_u end end end else if oper = Or orelse oper = And then let val u1' = gsub def polar u1 val u2' = gsub def polar u2 in (if polar = Neut then case (is_opt_rep (rep_of u1'), is_opt_rep (rep_of u2')) of (true, true) => triad_fn (fn polar => gsub def polar u) | (true, false) => s_op2 oper T (Opt bool_atom_R) (triad_fn (fn polar => gsub def polar u1)) u2' | (false, true) => s_op2 oper T (Opt bool_atom_R) u1' (triad_fn (fn polar => gsub def polar u2)) | (false, false) => raise SAME () else raise SAME ()) handle SAME () => s_op2 oper T (Formula polar) u1' u2' end else if oper = The orelse oper = Eps then let val u1' = sub u1 val opt1 = is_opt_rep (rep_of u1') val opt = (oper = Eps orelse opt1) val unopt_R = best_one_rep_for_type scope T |> optable_rep ofs T val R = if is_boolean_type T then bool_rep polar opt else unopt_R |> opt ? opt_rep ofs T val u = Op2 (oper, T, R, u1', sub u2) in if is_complete_type datatypes T orelse not opt1 then u else let val x_u = BoundName (~1, T, unopt_R, "descr.x") val R = R |> opt_rep ofs T in Op3 (If, T, R, Op2 (Exist, bool_T, Formula Pos, x_u, s_op2 Apply bool_T (Formula Pos) (gsub false Pos u1) x_u), u, Cst (Unknown, T, R)) end end else let val u1 = sub u1 val u2 = sub u2 val R = best_non_opt_set_rep_for_type scope T |> exists (is_opt_rep o rep_of) [u1, u2] ? opt_rep ofs T in s_op2 oper T R u1 u2 end | Op3 (Let, T, _, u1, u2, u3) => let val u2 = sub u2 val R2 = rep_of u2 val table' = NameTable.update (u1, R2) table val u1 = modify_name_rep u1 R2 val u3 = aux table' false polar u3 in s_op3 Let T (rep_of u3) u1 u2 u3 end | Op3 (If, T, _, u1, u2, u3) => let val u1 = sub u1 val u2 = gsub def polar u2 val u3 = gsub def polar u3 val min_R = min_rep (rep_of u2) (rep_of u3) val R = min_R |> is_opt_rep (rep_of u1) ? opt_rep ofs T in s_op3 If T R u1 u2 u3 end | Tuple (T, _, us) => let val Rs = map (best_one_rep_for_type scope o type_of) us val us = map sub us val R = if forall (curry (op =) Unit) Rs then Unit else Struct Rs val R' = (exists (is_opt_rep o rep_of) us ? 
opt_rep ofs T) R in s_tuple T R' us end | Construct (us', T, _, us) => let val us = map sub us val Rs = map rep_of us val R = best_one_rep_for_type scope T val {total, ...} = constr_spec datatypes (original_name (nickname_of (hd us')), T) val opt = exists is_opt_rep Rs orelse not total in Construct (map sub us', T, R |> opt ? Opt, us) end | _ => let val u = modify_name_rep u (the_name table u) in if polar = Neut orelse not (is_boolean_type (type_of u)) orelse not (is_opt_rep (rep_of u)) then u else s_op1 Cast (type_of u) (Formula polar) u end end |> optimize_unit in aux table def Pos end (* int -> KK.n_ary_index list -> KK.n_ary_index list -> int * KK.n_ary_index list *) fun fresh_n_ary_index n [] ys = (0, (n, 1) :: ys) | fresh_n_ary_index n ((m, j) :: xs) ys = if m = n then (j, ys @ ((m, j + 1) :: xs)) else fresh_n_ary_index n xs ((m, j) :: ys) (* int -> name_pool -> int * name_pool *) fun fresh_rel n {rels, vars, formula_reg, rel_reg} = let val (j, rels') = fresh_n_ary_index n rels [] in (j, {rels = rels', vars = vars, formula_reg = formula_reg, rel_reg = rel_reg}) end (* int -> name_pool -> int * name_pool *) fun fresh_var n {rels, vars, formula_reg, rel_reg} = let val (j, vars') = fresh_n_ary_index n vars [] in (j, {rels = rels, vars = vars', formula_reg = formula_reg, rel_reg = rel_reg}) end (* int -> name_pool -> int * name_pool *) fun fresh_formula_reg {rels, vars, formula_reg, rel_reg} = (formula_reg, {rels = rels, vars = vars, formula_reg = formula_reg + 1, rel_reg = rel_reg}) (* int -> name_pool -> int * name_pool *) fun fresh_rel_reg {rels, vars, formula_reg, rel_reg} = (rel_reg, {rels = rels, vars = vars, formula_reg = formula_reg, rel_reg = rel_reg + 1}) (* nut -> nut list * name_pool * nut NameTable.table -> nut list * name_pool * nut NameTable.table *) fun rename_plain_var v (ws, pool, table) = let val is_formula = (rep_of v = Formula Neut) val fresh = if is_formula then fresh_formula_reg else fresh_rel_reg val (j, pool) = fresh pool val constr = if is_formula then FormulaReg else RelReg val w = constr (j, type_of v, rep_of v) in (w :: ws, pool, NameTable.update (v, w) table) end (* typ -> rep -> nut list -> nut *) fun shape_tuple (T as Type ("*", [T1, T2])) (R as Struct [R1, R2]) us = let val arity1 = arity_of_rep R1 in Tuple (T, R, [shape_tuple T1 R1 (List.take (us, arity1)), shape_tuple T2 R2 (List.drop (us, arity1))]) end | shape_tuple (T as Type ("fun", [_, T2])) (R as Vect (k, R')) us = Tuple (T, R, map (shape_tuple T2 R') (batch_list (length us div k) us)) | shape_tuple T R [u] = if type_of u = T then u else raise NUT ("Nitpick_Nut.shape_tuple", [u]) | shape_tuple T Unit [] = Cst (Unity, T, Unit) | shape_tuple _ _ us = raise NUT ("Nitpick_Nut.shape_tuple", us) (* bool -> nut -> nut list * name_pool * nut NameTable.table -> nut list * name_pool * nut NameTable.table *) fun rename_n_ary_var rename_free v (ws, pool, table) = let val T = type_of v val R = rep_of v val arity = arity_of_rep R val nick = nickname_of v val (constr, fresh) = if rename_free then (FreeRel, fresh_rel) else (BoundRel, fresh_var) in if not rename_free andalso arity > 1 then let val atom_schema = atom_schema_of_rep R val type_schema = type_schema_of_rep T R val (js, pool) = funpow arity (fn (js, pool) => let val (j, pool) = fresh 1 pool in (j :: js, pool) end) ([], pool) val ws' = map3 (fn j => fn x => fn T => constr ((1, j), T, Atom x, nick ^ " [" ^ string_of_int j ^ "]")) (rev js) atom_schema type_schema in (ws' @ ws, pool, NameTable.update (v, shape_tuple T R ws') table) end else let val (j, 
pool) = case v of ConstName _ => if is_sel_like_and_no_discr nick then case domain_type T of @{typ "unsigned_bit word"} => (snd unsigned_bit_word_sel_rel, pool) | @{typ "signed_bit word"} => (snd signed_bit_word_sel_rel, pool) | _ => fresh arity pool else fresh arity pool | _ => fresh arity pool val w = constr ((arity, j), T, R, nick) in (w :: ws, pool, NameTable.update (v, w) table) end end (* nut list -> name_pool -> nut NameTable.table -> nut list * name_pool * nut NameTable.table *) fun rename_free_vars vs pool table = let val vs = filter (not_equal Unit o rep_of) vs val (vs, pool, table) = fold (rename_n_ary_var true) vs ([], pool, table) in (rev vs, pool, table) end (* name_pool -> nut NameTable.table -> nut -> nut *) fun rename_vars_in_nut pool table u = case u of Cst _ => u | Op1 (oper, T, R, u1) => Op1 (oper, T, R, rename_vars_in_nut pool table u1) | Op2 (oper, T, R, u1, u2) => if oper = All orelse oper = Exist orelse oper = Lambda then let val (_, pool, table) = fold (rename_n_ary_var false) (untuple I u1) ([], pool, table) in Op2 (oper, T, R, rename_vars_in_nut pool table u1, rename_vars_in_nut pool table u2) end else Op2 (oper, T, R, rename_vars_in_nut pool table u1, rename_vars_in_nut pool table u2) | Op3 (Let, T, R, u1, u2, u3) => if rep_of u2 = Unit orelse inline_nut u2 then let val u2 = rename_vars_in_nut pool table u2 val table = NameTable.update (u1, u2) table in rename_vars_in_nut pool table u3 end else let val bs = untuple I u1 val (_, pool, table') = fold rename_plain_var bs ([], pool, table) val u11 = rename_vars_in_nut pool table' u1 in Op3 (Let, T, R, rename_vars_in_nut pool table' u1, rename_vars_in_nut pool table u2, rename_vars_in_nut pool table' u3) end | Op3 (oper, T, R, u1, u2, u3) => Op3 (oper, T, R, rename_vars_in_nut pool table u1, rename_vars_in_nut pool table u2, rename_vars_in_nut pool table u3) | Tuple (T, R, us) => Tuple (T, R, map (rename_vars_in_nut pool table) us) | Construct (us', T, R, us) => Construct (map (rename_vars_in_nut pool table) us', T, R, map (rename_vars_in_nut pool table) us) | _ => the_name table u end; diff --git a/src/HOL/Tools/Nitpick/nitpick_peephole.ML b/src/HOL/Tools/Nitpick/nitpick_peephole.ML --- a/src/HOL/Tools/Nitpick/nitpick_peephole.ML +++ b/src/HOL/Tools/Nitpick/nitpick_peephole.ML @@ -1,660 +1,660 @@ (* Title: HOL/Tools/Nitpick/nitpick_peephole.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Peephole optimizer for Nitpick. 
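It simplifies Kodkod formulas and relational expressions as they are built, for instance by folding connectives applied to "True" or "False", pushing negations inward, and merging nested quantifiers, so that smaller problems reach Kodkod.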
*) signature NITPICK_PEEPHOLE = sig type n_ary_index = Kodkod.n_ary_index type formula = Kodkod.formula type int_expr = Kodkod.int_expr type rel_expr = Kodkod.rel_expr type decl = Kodkod.decl type expr_assign = Kodkod.expr_assign type name_pool = { rels: n_ary_index list, vars: n_ary_index list, formula_reg: int, rel_reg: int} val initial_pool : name_pool val not3_rel : n_ary_index val suc_rel : n_ary_index val unsigned_bit_word_sel_rel : n_ary_index val signed_bit_word_sel_rel : n_ary_index val nat_add_rel : n_ary_index val int_add_rel : n_ary_index val nat_subtract_rel : n_ary_index val int_subtract_rel : n_ary_index val nat_multiply_rel : n_ary_index val int_multiply_rel : n_ary_index val nat_divide_rel : n_ary_index val int_divide_rel : n_ary_index val nat_less_rel : n_ary_index val int_less_rel : n_ary_index val gcd_rel : n_ary_index val lcm_rel : n_ary_index val norm_frac_rel : n_ary_index val atom_for_bool : int -> bool -> rel_expr val formula_for_bool : bool -> formula val atom_for_nat : int * int -> int -> int val min_int_for_card : int -> int val max_int_for_card : int -> int val int_for_atom : int * int -> int -> int val atom_for_int : int * int -> int -> int val is_twos_complement_representable : int -> int -> bool val inline_rel_expr : rel_expr -> bool val empty_n_ary_rel : int -> rel_expr val num_seq : int -> int -> int_expr list val s_and : formula -> formula -> formula type kodkod_constrs = { kk_all: decl list -> formula -> formula, kk_exist: decl list -> formula -> formula, kk_formula_let: expr_assign list -> formula -> formula, kk_formula_if: formula -> formula -> formula -> formula, kk_or: formula -> formula -> formula, kk_not: formula -> formula, kk_iff: formula -> formula -> formula, kk_implies: formula -> formula -> formula, kk_and: formula -> formula -> formula, kk_subset: rel_expr -> rel_expr -> formula, kk_rel_eq: rel_expr -> rel_expr -> formula, kk_no: rel_expr -> formula, kk_lone: rel_expr -> formula, kk_one: rel_expr -> formula, kk_some: rel_expr -> formula, kk_rel_let: expr_assign list -> rel_expr -> rel_expr, kk_rel_if: formula -> rel_expr -> rel_expr -> rel_expr, kk_union: rel_expr -> rel_expr -> rel_expr, kk_difference: rel_expr -> rel_expr -> rel_expr, kk_override: rel_expr -> rel_expr -> rel_expr, kk_intersect: rel_expr -> rel_expr -> rel_expr, kk_product: rel_expr -> rel_expr -> rel_expr, kk_join: rel_expr -> rel_expr -> rel_expr, kk_closure: rel_expr -> rel_expr, kk_reflexive_closure: rel_expr -> rel_expr, kk_comprehension: decl list -> formula -> rel_expr, kk_project: rel_expr -> int_expr list -> rel_expr, kk_project_seq: rel_expr -> int -> int -> rel_expr, kk_not3: rel_expr -> rel_expr, kk_nat_less: rel_expr -> rel_expr -> rel_expr, kk_int_less: rel_expr -> rel_expr -> rel_expr } val kodkod_constrs : bool -> int -> int -> int -> kodkod_constrs end; structure Nitpick_Peephole : NITPICK_PEEPHOLE = struct open Kodkod open Nitpick_Util type name_pool = { rels: n_ary_index list, vars: n_ary_index list, formula_reg: int, rel_reg: int} (* If you add new built-in relations, make sure to increment the counters here as well to avoid name clashes (which fortunately would be detected by Kodkodi). 
*) val initial_pool = {rels = [(2, 10), (3, 20), (4, 10)], vars = [], formula_reg = 10, rel_reg = 10} val not3_rel = (2, 0) val suc_rel = (2, 1) val unsigned_bit_word_sel_rel = (2, 2) val signed_bit_word_sel_rel = (2, 3) val nat_add_rel = (3, 0) val int_add_rel = (3, 1) val nat_subtract_rel = (3, 2) val int_subtract_rel = (3, 3) val nat_multiply_rel = (3, 4) val int_multiply_rel = (3, 5) val nat_divide_rel = (3, 6) val int_divide_rel = (3, 7) val nat_less_rel = (3, 8) val int_less_rel = (3, 9) val gcd_rel = (3, 10) val lcm_rel = (3, 11) val norm_frac_rel = (4, 0) (* int -> bool -> rel_expr *) fun atom_for_bool j0 = Atom o Integer.add j0 o int_for_bool (* bool -> formula *) fun formula_for_bool b = if b then True else False (* int * int -> int -> int *) fun atom_for_nat (k, j0) n = if n < 0 orelse n >= k then ~1 else n + j0 (* int -> int *) fun min_int_for_card k = ~k div 2 + 1 fun max_int_for_card k = k div 2 (* int * int -> int -> int *) fun int_for_atom (k, j0) j = let val j = j - j0 in if j <= max_int_for_card k then j else j - k end fun atom_for_int (k, j0) n = if n < min_int_for_card k orelse n > max_int_for_card k then ~1 else if n < 0 then n + k + j0 else n + j0 (* int -> int -> bool *) fun is_twos_complement_representable bits n = let val max = reasonable_power 2 bits in n >= ~ max andalso n < max end (* rel_expr -> bool *) fun is_none_product (Product (r1, r2)) = is_none_product r1 orelse is_none_product r2 | is_none_product None = true | is_none_product _ = false (* rel_expr -> bool *) fun is_one_rel_expr (Atom _) = true | is_one_rel_expr (AtomSeq (1, _)) = true | is_one_rel_expr (Var _) = true | is_one_rel_expr _ = false (* rel_expr -> bool *) fun inline_rel_expr (Product (r1, r2)) = inline_rel_expr r1 andalso inline_rel_expr r2 | inline_rel_expr Iden = true | inline_rel_expr Ints = true | inline_rel_expr None = true | inline_rel_expr Univ = true | inline_rel_expr (Atom _) = true | inline_rel_expr (AtomSeq _) = true | inline_rel_expr (Rel _) = true | inline_rel_expr (Var _) = true | inline_rel_expr (RelReg _) = true | inline_rel_expr _ = false (* rel_expr -> rel_expr -> bool option *) fun rel_expr_equal None (Atom _) = SOME false | rel_expr_equal None (AtomSeq (k, _)) = SOME (k = 0) | rel_expr_equal (Atom _) None = SOME false | rel_expr_equal (AtomSeq (k, _)) None = SOME (k = 0) | rel_expr_equal (Atom j1) (Atom j2) = SOME (j1 = j2) | rel_expr_equal (Atom j) (AtomSeq (k, j0)) = SOME (j = j0 andalso k = 1) | rel_expr_equal (AtomSeq (k, j0)) (Atom j) = SOME (j = j0 andalso k = 1) | rel_expr_equal (AtomSeq x1) (AtomSeq x2) = SOME (x1 = x2) | rel_expr_equal r1 r2 = if r1 = r2 then SOME true else NONE (* rel_expr -> rel_expr -> bool option *) fun rel_expr_intersects (Atom j1) (Atom j2) = SOME (j1 = j2) | rel_expr_intersects (Atom j) (AtomSeq (k, j0)) = SOME (j < j0 + k) | rel_expr_intersects (AtomSeq (k, j0)) (Atom j) = SOME (j < j0 + k) | rel_expr_intersects (AtomSeq (k1, j01)) (AtomSeq (k2, j02)) = SOME (k1 > 0 andalso k2 > 0 andalso j01 + k1 > j02 andalso j02 + k2 > j01) | rel_expr_intersects r1 r2 = if is_none_product r1 orelse is_none_product r2 then SOME false else NONE (* int -> rel_expr *) fun empty_n_ary_rel 0 = raise ARG ("Nitpick_Peephole.empty_n_ary_rel", "0") | empty_n_ary_rel n = funpow (n - 1) (curry Product None) None (* decl -> rel_expr *) fun decl_one_set (DeclOne (_, r)) = r | decl_one_set _ = raise ARG ("Nitpick_Peephole.decl_one_set", "not \"DeclOne\"") (* int_expr -> bool *) fun is_Num (Num _) = true | is_Num _ = false (* int_expr -> int *) fun dest_Num (Num k) = 
k | dest_Num _ = raise ARG ("Nitpick_Peephole.dest_Num", "not \"Num\"") (* int -> int -> int_expr list *) fun num_seq j0 n = map Num (index_seq j0 n) (* rel_expr -> rel_expr -> bool *) fun occurs_in_union r (Union (r1, r2)) = occurs_in_union r r1 orelse occurs_in_union r r2 | occurs_in_union r r' = (r = r') (* formula -> formula -> formula *) fun s_and True f2 = f2 | s_and False _ = False | s_and f1 True = f1 | s_and _ False = False | s_and f1 f2 = And (f1, f2) type kodkod_constrs = { kk_all: decl list -> formula -> formula, kk_exist: decl list -> formula -> formula, kk_formula_let: expr_assign list -> formula -> formula, kk_formula_if: formula -> formula -> formula -> formula, kk_or: formula -> formula -> formula, kk_not: formula -> formula, kk_iff: formula -> formula -> formula, kk_implies: formula -> formula -> formula, kk_and: formula -> formula -> formula, kk_subset: rel_expr -> rel_expr -> formula, kk_rel_eq: rel_expr -> rel_expr -> formula, kk_no: rel_expr -> formula, kk_lone: rel_expr -> formula, kk_one: rel_expr -> formula, kk_some: rel_expr -> formula, kk_rel_let: expr_assign list -> rel_expr -> rel_expr, kk_rel_if: formula -> rel_expr -> rel_expr -> rel_expr, kk_union: rel_expr -> rel_expr -> rel_expr, kk_difference: rel_expr -> rel_expr -> rel_expr, kk_override: rel_expr -> rel_expr -> rel_expr, kk_intersect: rel_expr -> rel_expr -> rel_expr, kk_product: rel_expr -> rel_expr -> rel_expr, kk_join: rel_expr -> rel_expr -> rel_expr, kk_closure: rel_expr -> rel_expr, kk_reflexive_closure: rel_expr -> rel_expr, kk_comprehension: decl list -> formula -> rel_expr, kk_project: rel_expr -> int_expr list -> rel_expr, kk_project_seq: rel_expr -> int -> int -> rel_expr, kk_not3: rel_expr -> rel_expr, kk_nat_less: rel_expr -> rel_expr -> rel_expr, kk_int_less: rel_expr -> rel_expr -> rel_expr } (* We assume throughout that Kodkod variables have a "one" constraint. This is always the case if Kodkod's skolemization is disabled.
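In particular, the simplification functions below ("s_no", "s_one", "s_some", etc.) rely on "is_one_rel_expr" treating "Var" as a singleton relation, which is sound only under this assumption.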
*) (* bool -> int -> int -> int -> kodkod_constrs *) fun kodkod_constrs optim nat_card int_card main_j0 = let val false_atom = Atom main_j0 val true_atom = Atom (main_j0 + 1) (* bool -> int *) val from_bool = atom_for_bool main_j0 (* int -> rel_expr *) fun from_nat n = Atom (n + main_j0) val from_int = Atom o atom_for_int (int_card, main_j0) (* int -> int *) fun to_nat j = j - main_j0 val to_int = int_for_atom (int_card, main_j0) (* decl list -> formula -> formula *) fun s_all _ True = True | s_all _ False = False | s_all [] f = f | s_all ds (All (ds', f)) = All (ds @ ds', f) | s_all ds f = All (ds, f) fun s_exist _ True = True | s_exist _ False = False | s_exist [] f = f | s_exist ds (Exist (ds', f)) = Exist (ds @ ds', f) | s_exist ds f = Exist (ds, f) (* expr_assign list -> formula -> formula *) fun s_formula_let _ True = True | s_formula_let _ False = False | s_formula_let assigns f = FormulaLet (assigns, f) (* formula -> formula *) fun s_not True = False | s_not False = True | s_not (All (ds, f)) = Exist (ds, s_not f) | s_not (Exist (ds, f)) = All (ds, s_not f) | s_not (Or (f1, f2)) = And (s_not f1, s_not f2) | s_not (Implies (f1, f2)) = And (f1, s_not f2) | s_not (And (f1, f2)) = Or (s_not f1, s_not f2) | s_not (Not f) = f | s_not (No r) = Some r | s_not (Some r) = No r | s_not f = Not f (* formula -> formula -> formula *) fun s_or True _ = True | s_or False f2 = f2 | s_or _ True = True | s_or f1 False = f1 | s_or f1 f2 = if f1 = f2 then f1 else Or (f1, f2) fun s_iff True f2 = f2 | s_iff False f2 = s_not f2 | s_iff f1 True = f1 | s_iff f1 False = s_not f1 | s_iff f1 f2 = if f1 = f2 then True else Iff (f1, f2) fun s_implies True f2 = f2 | s_implies False _ = True | s_implies _ True = True | s_implies f1 False = s_not f1 | s_implies f1 f2 = if f1 = f2 then True else Implies (f1, f2) (* formula -> formula -> formula -> formula *) fun s_formula_if True f2 _ = f2 | s_formula_if False _ f3 = f3 | s_formula_if f1 True f3 = s_or f1 f3 | s_formula_if f1 False f3 = s_and (s_not f1) f3 | s_formula_if f1 f2 True = s_implies f1 f2 | s_formula_if f1 f2 False = s_and f1 f2 | s_formula_if f f1 f2 = FormulaIf (f, f1, f2) (* rel_expr -> int_expr list -> rel_expr *) fun s_project r is = (case r of Project (r1, is') => if forall is_Num is then s_project r1 (map (nth is' o dest_Num) is) else raise SAME () | _ => raise SAME ()) handle SAME () => let val n = length is in if arity_of_rel_expr r = n andalso is = num_seq 0 n then r else Project (r, is) end (* rel_expr -> formula *) fun s_no None = True | s_no (Product (r1, r2)) = s_or (s_no r1) (s_no r2) | s_no (Intersect (Closure (Rel x), Iden)) = Acyclic x | s_no r = if is_one_rel_expr r then False else No r fun s_lone None = True | s_lone r = if is_one_rel_expr r then True else Lone r fun s_one None = False | s_one r = if is_one_rel_expr r then True else if inline_rel_expr r then case arity_of_rel_expr r of 1 => One r | arity => foldl1 And (map (One o s_project r o single o Num) (index_seq 0 arity)) else One r fun s_some None = False | s_some (Atom _) = True | s_some (Product (r1, r2)) = s_and (s_some r1) (s_some r2) | s_some r = if is_one_rel_expr r then True else Some r (* rel_expr -> rel_expr *) fun s_not3 (Atom j) = Atom (if j = main_j0 then j + 1 else j - 1) | s_not3 (r as Join (r1, r2)) = if r2 = Rel not3_rel then r1 else Join (r, Rel not3_rel) | s_not3 r = Join (r, Rel not3_rel) (* rel_expr -> rel_expr -> formula *) fun s_rel_eq r1 r2 = (case (r1, r2) of (Join (r11, Rel x), _) => if x = not3_rel then s_rel_eq r11 (s_not3 r2) else raise SAME () | (_, 
Join (r21, Rel x)) => if x = not3_rel then s_rel_eq r21 (s_not3 r1) else raise SAME () | (RelIf (f, r11, r12), _) => if inline_rel_expr r2 then s_formula_if f (s_rel_eq r11 r2) (s_rel_eq r12 r2) else raise SAME () | (_, RelIf (f, r21, r22)) => if inline_rel_expr r1 then s_formula_if f (s_rel_eq r1 r21) (s_rel_eq r1 r22) else raise SAME () | (RelLet (bs, r1'), Atom _) => s_formula_let bs (s_rel_eq r1' r2) | (Atom _, RelLet (bs, r2')) => s_formula_let bs (s_rel_eq r1 r2') | _ => raise SAME ()) handle SAME () => case rel_expr_equal r1 r2 of SOME true => True | SOME false => False | NONE => case (r1, r2) of (_, RelIf (f, r21, r22)) => if inline_rel_expr r1 then s_formula_if f (s_rel_eq r1 r21) (s_rel_eq r1 r22) else RelEq (r1, r2) | (RelIf (f, r11, r12), _) => if inline_rel_expr r2 then s_formula_if f (s_rel_eq r11 r2) (s_rel_eq r12 r2) else RelEq (r1, r2) | (_, None) => s_no r1 | (None, _) => s_no r2 | _ => RelEq (r1, r2) fun s_subset (Atom j1) (Atom j2) = formula_for_bool (j1 = j2) | s_subset (Atom j) (AtomSeq (k, j0)) = formula_for_bool (j >= j0 andalso j < j0 + k) | s_subset (r1 as Union (r11, r12)) r2 = s_and (s_subset r11 r2) (s_subset r12 r2) | s_subset r1 (r2 as Union (r21, r22)) = if is_one_rel_expr r1 then s_or (s_subset r1 r21) (s_subset r1 r22) else if s_subset r1 r21 = True orelse s_subset r1 r22 = True orelse r1 = r2 then True else Subset (r1, r2) | s_subset r1 r2 = if r1 = r2 orelse is_none_product r1 then True else if is_none_product r2 then s_no r1 else if forall is_one_rel_expr [r1, r2] then s_rel_eq r1 r2 else Subset (r1, r2) (* expr_assign list -> rel_expr -> rel_expr *) fun s_rel_let [b as AssignRelReg (x', r')] (r as RelReg x) = if x = x' then r' else RelLet ([b], r) | s_rel_let bs r = RelLet (bs, r) (* formula -> rel_expr -> rel_expr -> rel_expr *) fun s_rel_if f r1 r2 = (case (f, r1, r2) of (True, _, _) => r1 | (False, _, _) => r2 | (No r1', None, RelIf (One r2', r3', r4')) => if r1' = r2' andalso r2' = r3' then s_rel_if (Lone r1') r1' r4' else raise SAME () | _ => raise SAME ()) handle SAME () => if r1 = r2 then r1 else RelIf (f, r1, r2) (* rel_expr -> rel_expr -> rel_expr *) fun s_union r1 (Union (r21, r22)) = s_union (s_union r1 r21) r22 | s_union r1 r2 = if is_none_product r1 then r2 else if is_none_product r2 then r1 else if r1 = r2 then r1 else if occurs_in_union r2 r1 then r1 else Union (r1, r2) fun s_difference r1 r2 = if is_none_product r1 orelse is_none_product r2 then r1 else if r1 = r2 then empty_n_ary_rel (arity_of_rel_expr r1) else Difference (r1, r2) fun s_override r1 r2 = if is_none_product r2 then r1 else if is_none_product r1 then r2 else Override (r1, r2) fun s_intersect r1 r2 = case rel_expr_intersects r1 r2 of SOME true => if r1 = r2 then r1 else Intersect (r1, r2) | SOME false => empty_n_ary_rel (arity_of_rel_expr r1) | NONE => if is_none_product r1 then r1 else if is_none_product r2 then r2 else Intersect (r1, r2) fun s_product r1 r2 = if is_none_product r1 then Product (r1, empty_n_ary_rel (arity_of_rel_expr r2)) else if is_none_product r2 then Product (empty_n_ary_rel (arity_of_rel_expr r1), r2) else Product (r1, r2) fun s_join r1 (Product (Product (r211, r212), r22)) = Product (s_join r1 (Product (r211, r212)), r22) | s_join (Product (r11, Product (r121, r122))) r2 = Product (r11, s_join (Product (r121, r122)) r2) | s_join None r = empty_n_ary_rel (arity_of_rel_expr r - 1) | s_join r None = empty_n_ary_rel (arity_of_rel_expr r - 1) | s_join (Product (None, None)) r = empty_n_ary_rel (arity_of_rel_expr r) | s_join r (Product (None, None)) = 
empty_n_ary_rel (arity_of_rel_expr r) | s_join Iden r2 = r2 | s_join r1 Iden = r1 | s_join (Product (r1, r2)) Univ = if arity_of_rel_expr r2 = 1 then r1 else Product (r1, s_join r2 Univ) | s_join Univ (Product (r1, r2)) = if arity_of_rel_expr r1 = 1 then r2 else Product (s_join Univ r1, r2) | s_join r1 (r2 as Product (r21, r22)) = if arity_of_rel_expr r1 = 1 then case rel_expr_intersects r1 r21 of SOME true => r22 | SOME false => empty_n_ary_rel (arity_of_rel_expr r2 - 1) | NONE => Join (r1, r2) else Join (r1, r2) | s_join (r1 as Product (r11, r12)) r2 = if arity_of_rel_expr r2 = 1 then case rel_expr_intersects r2 r12 of SOME true => r11 | SOME false => empty_n_ary_rel (arity_of_rel_expr r1 - 1) | NONE => Join (r1, r2) else Join (r1, r2) | s_join r1 (r2 as RelIf (f, r21, r22)) = if inline_rel_expr r1 then s_rel_if f (s_join r1 r21) (s_join r1 r22) else Join (r1, r2) | s_join (r1 as RelIf (f, r11, r12)) r2 = if inline_rel_expr r2 then s_rel_if f (s_join r11 r2) (s_join r12 r2) else Join (r1, r2) | s_join (r1 as Atom j1) (r2 as Rel (x as (2, j2))) = if x = suc_rel then let val n = to_nat j1 + 1 in if n < nat_card then from_nat n else None end else Join (r1, r2) | s_join r1 (r2 as Project (r21, Num k :: is)) = if k = arity_of_rel_expr r21 - 1 andalso arity_of_rel_expr r1 = 1 then s_project (s_join r21 r1) is else Join (r1, r2) | s_join r1 (Join (r21, r22 as Rel (x as (3, j22)))) = ((if x = nat_add_rel then case (r21, r1) of (Atom j1, Atom j2) => let val n = to_nat j1 + to_nat j2 in if n < nat_card then from_nat n else None end | (Atom j, r) => (case to_nat j of 0 => r | 1 => s_join r (Rel suc_rel) | _ => raise SAME ()) | (r, Atom j) => (case to_nat j of 0 => r | 1 => s_join r (Rel suc_rel) | _ => raise SAME ()) | _ => raise SAME () else if x = nat_subtract_rel then case (r21, r1) of (Atom j1, Atom j2) => from_nat (nat_minus (to_nat j1) (to_nat j2)) | _ => raise SAME () else if x = nat_multiply_rel then case (r21, r1) of (Atom j1, Atom j2) => let val n = to_nat j1 * to_nat j2 in if n < nat_card then from_nat n else None end | (Atom j, r) => (case to_nat j of 0 => Atom j | 1 => r | _ => raise SAME ()) | (r, Atom j) => (case to_nat j of 0 => Atom j | 1 => r | _ => raise SAME ()) | _ => raise SAME () else raise SAME ()) handle SAME () => List.foldr Join r22 [r1, r21]) | s_join r1 r2 = Join (r1, r2) (* rel_expr -> rel_expr *) fun s_closure Iden = Iden | s_closure r = if is_none_product r then r else Closure r fun s_reflexive_closure Iden = Iden | s_reflexive_closure r = if is_none_product r then Iden else ReflexiveClosure r (* decl list -> formula -> rel_expr *) fun s_comprehension ds False = empty_n_ary_rel (length ds) | s_comprehension ds True = fold1 s_product (map decl_one_set ds) | s_comprehension [d as DeclOne ((1, j1), r)] (f as RelEq (Var (1, j2), Atom j)) = if j1 = j2 andalso rel_expr_intersects (Atom j) r = SOME true then Atom j else Comprehension ([d], f) | s_comprehension ds f = Comprehension (ds, f) (* rel_expr -> int -> int -> rel_expr *) fun s_project_seq r = let (* int -> rel_expr -> int -> int -> rel_expr *) fun aux arity r j0 n = if j0 = 0 andalso arity = n then r else case r of RelIf (f, r1, r2) => s_rel_if f (aux arity r1 j0 n) (aux arity r2 j0 n) | Product (r1, r2) => let val arity2 = arity_of_rel_expr r2 val arity1 = arity - arity2 val n1 = Int.min (nat_minus arity1 j0, n) val n2 = n - n1 (* unit -> rel_expr *) fun one () = aux arity1 r1 j0 n1 fun two () = aux arity2 r2 (nat_minus j0 arity1) n2 in case (n1, n2) of (0, _) => s_rel_if (s_some r1) (two ()) (empty_n_ary_rel n2) | 
(_, 0) => s_rel_if (s_some r2) (one ()) (empty_n_ary_rel n1) | _ => s_product (one ()) (two ()) end | _ => s_project r (num_seq j0 n) in aux (arity_of_rel_expr r) r end (* rel_expr -> rel_expr -> rel_expr *) fun s_nat_subtract r1 r2 = fold s_join [r1, r2] (Rel nat_subtract_rel) fun s_nat_less (Atom j1) (Atom j2) = from_bool (j1 < j2) | s_nat_less r1 r2 = fold s_join [r1, r2] (Rel nat_less_rel) fun s_int_less (Atom j1) (Atom j2) = from_bool (to_int j1 < to_int j2) | s_int_less r1 r2 = fold s_join [r1, r2] (Rel int_less_rel) (* rel_expr -> int -> int -> rel_expr *) fun d_project_seq r j0 n = Project (r, num_seq j0 n) (* rel_expr -> rel_expr *) fun d_not3 r = Join (r, Rel not3_rel) (* rel_expr -> rel_expr -> rel_expr *) fun d_nat_subtract r1 r2 = List.foldl Join (Rel nat_subtract_rel) [r1, r2] fun d_nat_less r1 r2 = List.foldl Join (Rel nat_less_rel) [r1, r2] fun d_int_less r1 r2 = List.foldl Join (Rel int_less_rel) [r1, r2] in if optim then {kk_all = s_all, kk_exist = s_exist, kk_formula_let = s_formula_let, kk_formula_if = s_formula_if, kk_or = s_or, kk_not = s_not, kk_iff = s_iff, kk_implies = s_implies, kk_and = s_and, kk_subset = s_subset, kk_rel_eq = s_rel_eq, kk_no = s_no, kk_lone = s_lone, kk_one = s_one, kk_some = s_some, kk_rel_let = s_rel_let, kk_rel_if = s_rel_if, kk_union = s_union, kk_difference = s_difference, kk_override = s_override, kk_intersect = s_intersect, kk_product = s_product, kk_join = s_join, kk_closure = s_closure, kk_reflexive_closure = s_reflexive_closure, kk_comprehension = s_comprehension, kk_project = s_project, kk_project_seq = s_project_seq, kk_not3 = s_not3, kk_nat_less = s_nat_less, kk_int_less = s_int_less} else {kk_all = curry All, kk_exist = curry Exist, kk_formula_let = curry FormulaLet, kk_formula_if = curry3 FormulaIf, kk_or = curry Or,kk_not = Not, kk_iff = curry Iff, kk_implies = curry Implies, kk_and = curry And, kk_subset = curry Subset, kk_rel_eq = curry RelEq, kk_no = No, kk_lone = Lone, kk_one = One, kk_some = Some, kk_rel_let = curry RelLet, kk_rel_if = curry3 RelIf, kk_union = curry Union, kk_difference = curry Difference, kk_override = curry Override, kk_intersect = curry Intersect, kk_product = curry Product, kk_join = curry Join, kk_closure = Closure, kk_reflexive_closure = ReflexiveClosure, kk_comprehension = curry Comprehension, kk_project = curry Project, kk_project_seq = d_project_seq, kk_not3 = d_not3, kk_nat_less = d_nat_less, kk_int_less = d_int_less} end end; diff --git a/src/HOL/Tools/Nitpick/nitpick_rep.ML b/src/HOL/Tools/Nitpick/nitpick_rep.ML --- a/src/HOL/Tools/Nitpick/nitpick_rep.ML +++ b/src/HOL/Tools/Nitpick/nitpick_rep.ML @@ -1,344 +1,344 @@ (* Title: HOL/Tools/Nitpick/nitpick_rep.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Kodkod representations of Nitpick terms. 
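A representation ("rep") describes how a HOL value is encoded on the Kodkod side: as a formula, a single atom, a tuple of atoms ("Struct"), a vector ("Vect"), a relation encoding a function ("Func"), or the trivial encoding "Unit"; the "Opt" wrapper marks encodings whose value may be partially unknown.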
*) signature NITPICK_REP = sig type polarity = Nitpick_Util.polarity type scope = Nitpick_Scope.scope datatype rep = Any | Formula of polarity | Unit | Atom of int * int | Struct of rep list | Vect of int * rep | Func of rep * rep | Opt of rep exception REP of string * rep list val string_for_polarity : polarity -> string val string_for_rep : rep -> string val is_Func : rep -> bool val is_Opt : rep -> bool val is_opt_rep : rep -> bool val flip_rep_polarity : rep -> rep val card_of_rep : rep -> int val arity_of_rep : rep -> int val min_univ_card_of_rep : rep -> int val is_one_rep : rep -> bool val is_lone_rep : rep -> bool val dest_Func : rep -> rep * rep val smart_range_rep : int Typtab.table -> typ -> (unit -> int) -> rep -> rep val binder_reps : rep -> rep list val body_rep : rep -> rep val one_rep : int Typtab.table -> typ -> rep -> rep val optable_rep : int Typtab.table -> typ -> rep -> rep val opt_rep : int Typtab.table -> typ -> rep -> rep val unopt_rep : rep -> rep val min_rep : rep -> rep -> rep val min_reps : rep list -> rep list -> rep list val card_of_domain_from_rep : int -> rep -> int val rep_to_binary_rel_rep : int Typtab.table -> typ -> rep -> rep val best_one_rep_for_type : scope -> typ -> rep val best_opt_set_rep_for_type : scope -> typ -> rep val best_non_opt_set_rep_for_type : scope -> typ -> rep val best_set_rep_for_type : scope -> typ -> rep val best_non_opt_symmetric_reps_for_fun_type : scope -> typ -> rep * rep val atom_schema_of_rep : rep -> (int * int) list val atom_schema_of_reps : rep list -> (int * int) list val type_schema_of_rep : typ -> rep -> typ list val type_schema_of_reps : typ list -> rep list -> typ list val all_combinations_for_rep : rep -> int list list val all_combinations_for_reps : rep list -> int list list end; structure Nitpick_Rep : NITPICK_REP = struct open Nitpick_Util open Nitpick_HOL open Nitpick_Scope datatype rep = Any | Formula of polarity | Unit | Atom of int * int | Struct of rep list | Vect of int * rep | Func of rep * rep | Opt of rep exception REP of string * rep list (* polarity -> string *) fun string_for_polarity Pos = "+" | string_for_polarity Neg = "-" | string_for_polarity Neut = "=" (* rep -> string *) fun atomic_string_for_rep rep = let val s = string_for_rep rep in if String.isPrefix "[" s orelse not (is_substring_of " " s) then s else "(" ^ s ^ ")" end (* rep -> string *) and string_for_rep Any = "X" | string_for_rep (Formula polar) = "F" ^ string_for_polarity polar | string_for_rep Unit = "U" | string_for_rep (Atom (k, j0)) = "A" ^ string_of_int k ^ (if j0 = 0 then "" else "@" ^ string_of_int j0) | string_for_rep (Struct rs) = "[" ^ commas (map string_for_rep rs) ^ "]" | string_for_rep (Vect (k, R)) = string_of_int k ^ " x " ^ atomic_string_for_rep R | string_for_rep (Func (R1, R2)) = atomic_string_for_rep R1 ^ " => " ^ string_for_rep R2 | string_for_rep (Opt R) = atomic_string_for_rep R ^ "?" 
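(* Examples of the concrete syntax produced by "string_for_rep": "Atom (2, 0)" is rendered as "A2", "Atom (3, 4)" as "A3@4", "Vect (2, Atom (3, 0))" as "2 x A3", "Struct [Atom (2, 0), Unit]" as "[A2, U]", and "Func (Atom (2, 0), Opt (Atom (3, 4)))" as "A2 => A3@4?". *)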
(* rep -> bool *) fun is_Func (Func _) = true | is_Func _ = false fun is_Opt (Opt _) = true | is_Opt _ = false fun is_opt_rep (Func (_, R2)) = is_opt_rep R2 | is_opt_rep (Opt _) = true | is_opt_rep _ = false (* rep -> int *) fun card_of_rep Any = raise REP ("Nitpick_Rep.card_of_rep", [Any]) | card_of_rep (Formula _) = 2 | card_of_rep Unit = 1 | card_of_rep (Atom (k, _)) = k | card_of_rep (Struct rs) = Integer.prod (map card_of_rep rs) | card_of_rep (Vect (k, R)) = reasonable_power (card_of_rep R) k | card_of_rep (Func (R1, R2)) = reasonable_power (card_of_rep R2) (card_of_rep R1) | card_of_rep (Opt R) = card_of_rep R fun arity_of_rep Any = raise REP ("Nitpick_Rep.arity_of_rep", [Any]) | arity_of_rep (Formula _) = 0 | arity_of_rep Unit = 0 | arity_of_rep (Atom _) = 1 | arity_of_rep (Struct Rs) = Integer.sum (map arity_of_rep Rs) | arity_of_rep (Vect (k, R)) = k * arity_of_rep R | arity_of_rep (Func (R1, R2)) = arity_of_rep R1 + arity_of_rep R2 | arity_of_rep (Opt R) = arity_of_rep R fun min_univ_card_of_rep Any = raise REP ("Nitpick_Rep.min_univ_card_of_rep", [Any]) | min_univ_card_of_rep (Formula _) = 0 | min_univ_card_of_rep Unit = 0 | min_univ_card_of_rep (Atom (k, j0)) = k + j0 + 1 | min_univ_card_of_rep (Struct Rs) = fold Integer.max (map min_univ_card_of_rep Rs) 0 | min_univ_card_of_rep (Vect (_, R)) = min_univ_card_of_rep R | min_univ_card_of_rep (Func (R1, R2)) = Int.max (min_univ_card_of_rep R1, min_univ_card_of_rep R2) | min_univ_card_of_rep (Opt R) = min_univ_card_of_rep R (* rep -> bool *) fun is_one_rep Unit = true | is_one_rep (Atom _) = true | is_one_rep (Struct _) = true | is_one_rep (Vect _) = true | is_one_rep _ = false fun is_lone_rep (Opt R) = is_one_rep R | is_lone_rep R = is_one_rep R (* rep -> rep * rep *) fun dest_Func (Func z) = z | dest_Func R = raise REP ("Nitpick_Rep.dest_Func", [R]) (* int Typtab.table -> typ -> (unit -> int) -> rep -> rep *) fun smart_range_rep _ _ _ Unit = Unit | smart_range_rep _ _ _ (Vect (_, R)) = R | smart_range_rep _ _ _ (Func (_, R2)) = R2 | smart_range_rep ofs T ran_card (Opt R) = Opt (smart_range_rep ofs T ran_card R) | smart_range_rep ofs (Type ("fun", [_, T2])) _ (Atom (1, _)) = Atom (1, offset_of_type ofs T2) | smart_range_rep ofs (Type ("fun", [_, T2])) ran_card (Atom _) = Atom (ran_card (), offset_of_type ofs T2) | smart_range_rep _ _ _ R = raise REP ("Nitpick_Rep.smart_range_rep", [R]) (* rep -> rep list *) fun binder_reps (Func (R1, R2)) = R1 :: binder_reps R2 | binder_reps R = [] (* rep -> rep *) fun body_rep (Func (_, R2)) = body_rep R2 | body_rep R = R (* rep -> rep *) fun flip_rep_polarity (Formula polar) = Formula (flip_polarity polar) | flip_rep_polarity (Func (R1, R2)) = Func (R1, flip_rep_polarity R2) | flip_rep_polarity R = R (* int Typtab.table -> rep -> rep *) fun one_rep _ _ Any = raise REP ("Nitpick_Rep.one_rep", [Any]) | one_rep _ _ (Atom x) = Atom x | one_rep _ _ (Struct Rs) = Struct Rs | one_rep _ _ (Vect z) = Vect z | one_rep ofs T (Opt R) = one_rep ofs T R | one_rep ofs T R = Atom (card_of_rep R, offset_of_type ofs T) fun optable_rep ofs (Type ("fun", [_, T2])) (Func (R1, R2)) = Func (R1, optable_rep ofs T2 R2) | optable_rep ofs T R = one_rep ofs T R fun opt_rep ofs (Type ("fun", [_, T2])) (Func (R1, R2)) = Func (R1, opt_rep ofs T2 R2) | opt_rep ofs T R = Opt (optable_rep ofs T R) (* rep -> rep *) fun unopt_rep (Func (R1, R2)) = Func (R1, unopt_rep R2) | unopt_rep (Opt R) = R | unopt_rep R = R (* polarity -> polarity -> polarity *) fun min_polarity polar1 polar2 = if polar1 = polar2 then polar1 else if polar1 = 
Neut then polar2 else if polar2 = Neut then polar1 else raise ARG ("Nitpick_Rep.min_polarity", commas (map (quote o string_for_polarity) [polar1, polar2])) (* It's important that Func is before Vect, because if the range is Opt we could lose information by converting a Func to a Vect. *) (* rep -> rep -> rep *) fun min_rep (Opt R1) (Opt R2) = Opt (min_rep R1 R2) | min_rep (Opt R) _ = Opt R | min_rep _ (Opt R) = Opt R | min_rep (Formula polar1) (Formula polar2) = Formula (min_polarity polar1 polar2) | min_rep (Formula polar) _ = Formula polar | min_rep _ (Formula polar) = Formula polar | min_rep Unit _ = Unit | min_rep _ Unit = Unit | min_rep (Atom x) _ = Atom x | min_rep _ (Atom x) = Atom x | min_rep (Struct Rs1) (Struct Rs2) = Struct (min_reps Rs1 Rs2) | min_rep (Struct Rs) _ = Struct Rs | min_rep _ (Struct Rs) = Struct Rs | min_rep (R1 as Func (R11, R12)) (R2 as Func (R21, R22)) = (case pairself is_opt_rep (R12, R22) of (true, false) => R1 | (false, true) => R2 | _ => if R11 = R21 then Func (R11, min_rep R12 R22) else if min_rep R11 R21 = R11 then R1 else R2) | min_rep (Func z) _ = Func z | min_rep _ (Func z) = Func z | min_rep (Vect (k1, R1)) (Vect (k2, R2)) = if k1 < k2 then Vect (k1, R1) else if k1 > k2 then Vect (k2, R2) else Vect (k1, min_rep R1 R2) | min_rep R1 R2 = raise REP ("Nitpick_Rep.min_rep", [R1, R2]) (* rep list -> rep list -> rep list *) and min_reps [] _ = [] | min_reps _ [] = [] | min_reps (R1 :: Rs1) (R2 :: Rs2) = if R1 = R2 then R1 :: min_reps Rs1 Rs2 else if min_rep R1 R2 = R1 then R1 :: Rs1 else R2 :: Rs2 (* int -> rep -> int *) fun card_of_domain_from_rep ran_card R = case R of Unit => 1 | Atom (k, _) => exact_log ran_card k | Vect (k, _) => k | Func (R1, _) => card_of_rep R1 | Opt R => card_of_domain_from_rep ran_card R | _ => raise REP ("Nitpick_Rep.card_of_domain_from_rep", [R]) (* int Typtab.table -> typ -> rep -> rep *) fun rep_to_binary_rel_rep ofs T R = let val k = exact_root 2 (card_of_domain_from_rep 2 R) val j0 = offset_of_type ofs (fst (HOLogic.dest_prodT (domain_type T))) in Func (Struct [Atom (k, j0), Atom (k, j0)], Formula Neut) end (* scope -> typ -> rep *) fun best_one_rep_for_type (scope as {card_assigns, ...} : scope) (Type ("fun", [T1, T2])) = (case best_one_rep_for_type scope T2 of Unit => Unit | R2 => Vect (card_of_type card_assigns T1, R2)) | best_one_rep_for_type scope (Type ("*", [T1, T2])) = (case (best_one_rep_for_type scope T1, best_one_rep_for_type scope T2) of (Unit, Unit) => Unit | (R1, R2) => Struct [R1, R2]) | best_one_rep_for_type (scope as {card_assigns, datatypes, ofs, ...}) T = (case card_of_type card_assigns T of 1 => if is_some (datatype_spec datatypes T) orelse is_fp_iterator_type T then Atom (1, offset_of_type ofs T) else Unit | k => Atom (k, offset_of_type ofs T)) (* Datatypes are never represented by Unit, because it would confuse "nfa_transitions_for_ctor". 
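This is why "best_one_rep_for_type" above falls back on "Atom (1, ...)" rather than "Unit" for datatypes and fixed-point iterator types of cardinality 1.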
*) (* scope -> typ -> rep *) fun best_opt_set_rep_for_type scope (Type ("fun", [T1, T2])) = Func (best_one_rep_for_type scope T1, best_opt_set_rep_for_type scope T2) | best_opt_set_rep_for_type (scope as {ofs, ...}) T = opt_rep ofs T (best_one_rep_for_type scope T) fun best_non_opt_set_rep_for_type (scope as {ofs, ...}) (Type ("fun", [T1, T2])) = (case (best_one_rep_for_type scope T1, best_non_opt_set_rep_for_type scope T2) of (_, Unit) => Unit | (Unit, Atom (2, _)) => Func (Atom (1, offset_of_type ofs T1), Formula Neut) | (R1, Atom (2, _)) => Func (R1, Formula Neut) | z => Func z) | best_non_opt_set_rep_for_type scope T = best_one_rep_for_type scope T fun best_set_rep_for_type (scope as {datatypes, ...}) T = (if is_exact_type datatypes T then best_non_opt_set_rep_for_type else best_opt_set_rep_for_type) scope T fun best_non_opt_symmetric_reps_for_fun_type (scope as {ofs, ...}) (Type ("fun", [T1, T2])) = (optable_rep ofs T1 (best_one_rep_for_type scope T1), optable_rep ofs T2 (best_one_rep_for_type scope T2)) | best_non_opt_symmetric_reps_for_fun_type _ T = raise TYPE ("Nitpick_Rep.best_non_opt_symmetric_reps_for_fun_type", [T], []) (* rep -> (int * int) list *) fun atom_schema_of_rep Any = raise REP ("Nitpick_Rep.atom_schema_of_rep", [Any]) | atom_schema_of_rep (Formula _) = [] | atom_schema_of_rep Unit = [] | atom_schema_of_rep (Atom x) = [x] | atom_schema_of_rep (Struct Rs) = atom_schema_of_reps Rs | atom_schema_of_rep (Vect (k, R)) = replicate_list k (atom_schema_of_rep R) | atom_schema_of_rep (Func (R1, R2)) = atom_schema_of_rep R1 @ atom_schema_of_rep R2 | atom_schema_of_rep (Opt R) = atom_schema_of_rep R (* rep list -> (int * int) list *) and atom_schema_of_reps Rs = maps atom_schema_of_rep Rs (* typ -> rep -> typ list *) fun type_schema_of_rep _ (Formula _) = [] | type_schema_of_rep _ Unit = [] | type_schema_of_rep T (Atom _) = [T] | type_schema_of_rep (Type ("*", [T1, T2])) (Struct [R1, R2]) = type_schema_of_reps [T1, T2] [R1, R2] | type_schema_of_rep (Type ("fun", [_, T2])) (Vect (k, R)) = replicate_list k (type_schema_of_rep T2 R) | type_schema_of_rep (Type ("fun", [T1, T2])) (Func (R1, R2)) = type_schema_of_rep T1 R1 @ type_schema_of_rep T2 R2 | type_schema_of_rep T (Opt R) = type_schema_of_rep T R | type_schema_of_rep T R = raise REP ("Nitpick_Rep.type_schema_of_rep", [R]) (* typ list -> rep list -> typ list *) and type_schema_of_reps Ts Rs = flat (map2 type_schema_of_rep Ts Rs) (* rep -> int list list *) val all_combinations_for_rep = all_combinations o atom_schema_of_rep (* rep list -> int list list *) val all_combinations_for_reps = all_combinations o atom_schema_of_reps end; diff --git a/src/HOL/Tools/Nitpick/nitpick_scope.ML b/src/HOL/Tools/Nitpick/nitpick_scope.ML --- a/src/HOL/Tools/Nitpick/nitpick_scope.ML +++ b/src/HOL/Tools/Nitpick/nitpick_scope.ML @@ -1,546 +1,546 @@ (* Title: HOL/Tools/Nitpick/nitpick_scope.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Scope enumerator for Nitpick. 
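A scope assigns a cardinality to each relevant type and an optional maximum to each constructor, and it fixes the number of bits available for bounded arithmetic as well as the bisimilarity depth used for coinductive datatypes.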
*) signature NITPICK_SCOPE = sig type styp = Nitpick_Util.styp type extended_context = Nitpick_HOL.extended_context type constr_spec = { const: styp, delta: int, epsilon: int, exclusive: bool, explicit_max: int, total: bool} type dtype_spec = { typ: typ, card: int, co: bool, complete: bool, concrete: bool, - shallow: bool, + deep: bool, constrs: constr_spec list} type scope = { ext_ctxt: extended_context, card_assigns: (typ * int) list, bits: int, bisim_depth: int, datatypes: dtype_spec list, ofs: int Typtab.table} val datatype_spec : dtype_spec list -> typ -> dtype_spec option val constr_spec : dtype_spec list -> styp -> constr_spec val is_complete_type : dtype_spec list -> typ -> bool val is_concrete_type : dtype_spec list -> typ -> bool val is_exact_type : dtype_spec list -> typ -> bool val offset_of_type : int Typtab.table -> typ -> int val spec_of_type : scope -> typ -> int * int val pretties_for_scope : scope -> bool -> Pretty.T list val multiline_string_for_scope : scope -> string val scopes_equivalent : scope -> scope -> bool val scope_less_eq : scope -> scope -> bool val all_scopes : extended_context -> int -> (typ option * int list) list -> (styp option * int list) list -> (styp option * int list) list -> int list -> int list -> typ list -> typ list -> typ list -> int * scope list end; structure Nitpick_Scope : NITPICK_SCOPE = struct open Nitpick_Util open Nitpick_HOL type constr_spec = { const: styp, delta: int, epsilon: int, exclusive: bool, explicit_max: int, total: bool} type dtype_spec = { typ: typ, card: int, co: bool, complete: bool, concrete: bool, - shallow: bool, + deep: bool, constrs: constr_spec list} type scope = { ext_ctxt: extended_context, card_assigns: (typ * int) list, bits: int, bisim_depth: int, datatypes: dtype_spec list, ofs: int Typtab.table} datatype row_kind = Card of typ | Max of styp type row = row_kind * int list type block = row list (* dtype_spec list -> typ -> dtype_spec option *) fun datatype_spec (dtypes : dtype_spec list) T = List.find (curry (op =) T o #typ) dtypes (* dtype_spec list -> styp -> constr_spec *) fun constr_spec [] x = raise TERM ("Nitpick_Scope.constr_spec", [Const x]) | constr_spec ({constrs, ...} :: dtypes : dtype_spec list) (x as (s, T)) = case List.find (curry (op =) (s, body_type T) o (apsnd body_type o #const)) constrs of SOME c => c | NONE => constr_spec dtypes x (* dtype_spec list -> typ -> bool *) fun is_complete_type dtypes (Type ("fun", [T1, T2])) = is_concrete_type dtypes T1 andalso is_complete_type dtypes T2 | is_complete_type dtypes (Type ("*", Ts)) = forall (is_complete_type dtypes) Ts | is_complete_type dtypes T = not (is_integer_type T) andalso not (is_bit_type T) andalso #complete (the (datatype_spec dtypes T)) handle Option.Option => true and is_concrete_type dtypes (Type ("fun", [T1, T2])) = is_complete_type dtypes T1 andalso is_concrete_type dtypes T2 | is_concrete_type dtypes (Type ("*", Ts)) = forall (is_concrete_type dtypes) Ts | is_concrete_type dtypes T = #concrete (the (datatype_spec dtypes T)) handle Option.Option => true fun is_exact_type dtypes = is_complete_type dtypes andf is_concrete_type dtypes (* int Typtab.table -> typ -> int *) fun offset_of_type ofs T = case Typtab.lookup ofs T of SOME j0 => j0 | NONE => Typtab.lookup ofs dummyT |> the_default 0 (* scope -> typ -> int * int *) fun spec_of_type ({card_assigns, ofs, ...} : scope) T = (card_of_type card_assigns T handle TYPE ("Nitpick_HOL.card_of_type", _, _) => ~1, offset_of_type ofs T) (* (string -> string) -> scope -> string list * string list 
* string list * string list * string list *) fun quintuple_for_scope quote ({ext_ctxt as {thy, ctxt, ...}, card_assigns, bits, bisim_depth, datatypes, ...} : scope) = let - val boring_Ts = [@{typ unsigned_bit}, @{typ signed_bit}, + val boring_Ts = [@{typ unsigned_bit}, @{typ signed_bit}, @{typ \}, @{typ bisim_iterator}] val (iter_assigns, card_assigns) = card_assigns |> filter_out (member (op =) boring_Ts o fst) |> List.partition (is_fp_iterator_type o fst) val (secondary_card_assigns, primary_card_assigns) = card_assigns |> List.partition ((is_integer_type orf is_datatype thy) o fst) val cards = map (fn (T, k) => quote (string_for_type ctxt T) ^ " = " ^ string_of_int k) fun maxes () = maps (map_filter (fn {const, explicit_max, ...} => if explicit_max < 0 then NONE else SOME (Syntax.string_of_term ctxt (Const const) ^ " = " ^ string_of_int explicit_max)) o #constrs) datatypes fun iters () = map (fn (T, k) => quote (Syntax.string_of_term ctxt (Const (const_for_iterator_type T))) ^ " = " ^ string_of_int (k - 1)) iter_assigns fun miscs () = (if bits = 0 then [] else ["bits = " ^ string_of_int bits]) @ (if bisim_depth < 0 andalso forall (not o #co) datatypes then [] else ["bisim_depth = " ^ string_of_int bisim_depth]) in setmp_show_all_types (fn () => (cards primary_card_assigns, cards secondary_card_assigns, maxes (), iters (), miscs ())) () end (* scope -> bool -> Pretty.T list *) fun pretties_for_scope scope verbose = let val (primary_cards, secondary_cards, maxes, iters, bisim_depths) = quintuple_for_scope maybe_quote scope val ss = map (prefix "card ") primary_cards @ (if verbose then map (prefix "card ") secondary_cards @ map (prefix "max ") maxes @ map (prefix "iter ") iters @ bisim_depths else []) in if null ss then [] else serial_commas "and" ss |> map Pretty.str |> Pretty.breaks end (* scope -> string *) fun multiline_string_for_scope scope = let val (primary_cards, secondary_cards, maxes, iters, bisim_depths) = quintuple_for_scope I scope val cards = primary_cards @ secondary_cards in case (if null cards then [] else ["card: " ^ commas cards]) @ (if null maxes then [] else ["max: " ^ commas maxes]) @ (if null iters then [] else ["iter: " ^ commas iters]) @ bisim_depths of [] => "empty" | lines => space_implode "\n" lines end (* scope -> scope -> bool *) fun scopes_equivalent (s1 : scope) (s2 : scope) = #datatypes s1 = #datatypes s2 andalso #card_assigns s1 = #card_assigns s2 fun scope_less_eq (s1 : scope) (s2 : scope) = (s1, s2) |> pairself (map snd o #card_assigns) |> op ~~ |> forall (op <=) (* row -> int *) fun rank_of_row (_, ks) = length ks (* block -> int *) fun rank_of_block block = fold Integer.max (map rank_of_row block) 1 (* int -> typ * int list -> typ * int list *) fun project_row column (y, ks) = (y, [nth ks (Int.min (column, length ks - 1))]) (* int -> block -> block *) fun project_block (column, block) = map (project_row column) block (* (''a * ''a -> bool) -> (''a option * int list) list -> ''a -> int list *) fun lookup_ints_assign eq assigns key = case triple_lookup eq assigns key of SOME ks => ks | NONE => raise ARG ("Nitpick_Scope.lookup_ints_assign", "") (* theory -> (typ option * int list) list -> typ -> int list *) fun lookup_type_ints_assign thy assigns T = map (curry Int.max 1) (lookup_ints_assign (type_match thy) assigns T) handle ARG ("Nitpick_Scope.lookup_ints_assign", _) => raise TYPE ("Nitpick_Scope.lookup_type_ints_assign", [T], []) (* theory -> (styp option * int list) list -> styp -> int list *) fun lookup_const_ints_assign thy assigns x = 
lookup_ints_assign (const_match thy) assigns x handle ARG ("Nitpick_Scope.lookup_ints_assign", _) => raise TERM ("Nitpick_Scope.lookup_const_ints_assign", [Const x]) (* theory -> (styp option * int list) list -> styp -> row option *) fun row_for_constr thy maxes_assigns constr = SOME (Max constr, lookup_const_ints_assign thy maxes_assigns constr) handle TERM ("lookup_const_ints_assign", _) => NONE val max_bits = 31 (* Kodkod limit *) (* extended_context -> (typ option * int list) list -> (styp option * int list) list -> (styp option * int list) list -> int list -> int list -> typ -> block *) fun block_for_type (ext_ctxt as {thy, ...}) cards_assigns maxes_assigns iters_assigns bitss bisim_depths T = if T = @{typ unsigned_bit} then [(Card T, map (Integer.min max_bits o Integer.max 1) bitss)] else if T = @{typ signed_bit} then [(Card T, map (Integer.add 1 o Integer.min max_bits o Integer.max 1) bitss)] else if T = @{typ "unsigned_bit word"} then [(Card T, lookup_type_ints_assign thy cards_assigns nat_T)] else if T = @{typ "signed_bit word"} then [(Card T, lookup_type_ints_assign thy cards_assigns int_T)] else if T = @{typ bisim_iterator} then [(Card T, map (Integer.add 1 o Integer.max 0) bisim_depths)] else if is_fp_iterator_type T then [(Card T, map (Integer.add 1 o Integer.max 0) (lookup_const_ints_assign thy iters_assigns (const_for_iterator_type T)))] else (Card T, lookup_type_ints_assign thy cards_assigns T) :: (case datatype_constrs ext_ctxt T of [_] => [] | constrs => map_filter (row_for_constr thy maxes_assigns) constrs) (* extended_context -> (typ option * int list) list -> (styp option * int list) list -> (styp option * int list) list -> int list -> int list -> typ list -> typ list -> block list *) fun blocks_for_types ext_ctxt cards_assigns maxes_assigns iters_assigns bitss bisim_depths mono_Ts nonmono_Ts = let (* typ -> block *) val block_for = block_for_type ext_ctxt cards_assigns maxes_assigns iters_assigns bitss bisim_depths val mono_block = maps block_for mono_Ts val nonmono_blocks = map block_for nonmono_Ts in mono_block :: nonmono_blocks end val sync_threshold = 5 (* int list -> int list list *) fun all_combinations_ordered_smartly ks = let (* int list -> int *) fun cost_with_monos [] = 0 | cost_with_monos (k :: ks) = if k < sync_threshold andalso forall (curry (op =) k) ks then k - sync_threshold else k * (k + 1) div 2 + Integer.sum ks fun cost_without_monos [] = 0 | cost_without_monos [k] = k | cost_without_monos (_ :: k :: ks) = if k < sync_threshold andalso forall (curry (op =) k) ks then k - sync_threshold else Integer.sum (k :: ks) in ks |> all_combinations |> map (`(if fst (hd ks) > 1 then cost_with_monos else cost_without_monos)) |> sort (int_ord o pairself fst) |> map snd end (* typ -> bool *) fun is_self_recursive_constr_type T = exists (exists_subtype (curry (op =) (body_type T))) (binder_types T) (* (styp * int) list -> styp -> int *) fun constr_max maxes x = the_default ~1 (AList.lookup (op =) maxes x) type scope_desc = (typ * int) list * (styp * int) list (* extended_context -> scope_desc -> typ * int -> bool *) fun is_surely_inconsistent_card_assign ext_ctxt (card_assigns, max_assigns) (T, k) = case datatype_constrs ext_ctxt T of [] => false | xs => let val dom_cards = map (Integer.prod o map (bounded_card_of_type k ~1 card_assigns) o binder_types o snd) xs val maxes = map (constr_max max_assigns) xs (* int -> int -> int *) fun effective_max card ~1 = card | effective_max card max = Int.min (card, max) val max = map2 effective_max dom_cards maxes |> 
Integer.sum in max < k end (* extended_context -> (typ * int) list -> (typ * int) list -> (styp * int) list -> bool *) fun is_surely_inconsistent_scope_description ext_ctxt seen rest max_assigns = exists (is_surely_inconsistent_card_assign ext_ctxt (seen @ rest, max_assigns)) seen (* extended_context -> scope_desc -> (typ * int) list option *) fun repair_card_assigns ext_ctxt (card_assigns, max_assigns) = let (* (typ * int) list -> (typ * int) list -> (typ * int) list option *) fun aux seen [] = SOME seen | aux seen ((T, 0) :: _) = NONE | aux seen ((T, k) :: rest) = (if is_surely_inconsistent_scope_description ext_ctxt ((T, k) :: seen) rest max_assigns then raise SAME () else case aux ((T, k) :: seen) rest of SOME assigns => SOME assigns | NONE => raise SAME ()) handle SAME () => aux seen ((T, k - 1) :: rest) in aux [] (rev card_assigns) end (* theory -> (typ * int) list -> typ * int -> typ * int *) fun repair_iterator_assign thy assigns (T as Type (s, Ts), k) = (T, if T = @{typ bisim_iterator} then let val co_cards = map snd (filter (is_codatatype thy o fst) assigns) in Int.min (k, Integer.sum co_cards) end else if is_fp_iterator_type T then case Ts of [] => 1 | _ => bounded_card_of_type k ~1 assigns (foldr1 HOLogic.mk_prodT Ts) else k) | repair_iterator_assign _ _ assign = assign (* row -> scope_desc -> scope_desc *) fun add_row_to_scope_descriptor (kind, ks) (card_assigns, max_assigns) = case kind of Card T => ((T, the_single ks) :: card_assigns, max_assigns) | Max x => (card_assigns, (x, the_single ks) :: max_assigns) (* block -> scope_desc *) fun scope_descriptor_from_block block = fold_rev add_row_to_scope_descriptor block ([], []) (* extended_context -> block list -> int list -> scope_desc option *) fun scope_descriptor_from_combination (ext_ctxt as {thy, ...}) blocks columns = let val (card_assigns, max_assigns) = maps project_block (columns ~~ blocks) |> scope_descriptor_from_block val card_assigns = repair_card_assigns ext_ctxt (card_assigns, max_assigns) |> the in SOME (map (repair_iterator_assign thy card_assigns) card_assigns, max_assigns) end handle Option.Option => NONE (* theory -> (typ * int) list -> dtype_spec list -> int Typtab.table *) fun offset_table_for_card_assigns thy assigns dtypes = let (* int -> (int * int) list -> (typ * int) list -> int Typtab.table -> int Typtab.table *) fun aux next _ [] = Typtab.update_new (dummyT, next) | aux next reusable ((T, k) :: assigns) = if k = 1 orelse is_integer_type T orelse is_bit_type T then aux next reusable assigns else if length (these (Option.map #constrs (datatype_spec dtypes T))) > 1 then Typtab.update_new (T, next) #> aux (next + k) reusable assigns else case AList.lookup (op =) reusable k of SOME j0 => Typtab.update_new (T, j0) #> aux next reusable assigns | NONE => Typtab.update_new (T, next) #> aux (next + k) ((k, next) :: reusable) assigns in aux 0 [] assigns Typtab.empty end (* int -> (typ * int) list -> typ -> int *) fun domain_card max card_assigns = Integer.prod o map (bounded_card_of_type max max card_assigns) o binder_types (* scope_desc -> bool -> int -> (int -> int) -> int -> int -> bool * styp -> constr_spec list -> constr_spec list *) fun add_constr_spec (card_assigns, max_assigns) co card sum_dom_cards num_self_recs num_non_self_recs (self_rec, x as (s, T)) constrs = let val max = constr_max max_assigns x (* int -> int *) fun bound k = Int.min (card, (max >= 0 ? 
curry Int.min max) k) (* unit -> int *) fun next_delta () = if null constrs then 0 else #epsilon (hd constrs) val {delta, epsilon, exclusive, total} = if max = 0 then let val delta = next_delta () in {delta = delta, epsilon = delta, exclusive = true, total = false} end else if not co andalso num_self_recs > 0 then if not self_rec andalso num_non_self_recs = 1 andalso domain_card 2 card_assigns T = 1 then - {delta = 0, epsilon = 1, exclusive = (s = @{const_name Nil}), + {delta = 0, epsilon = 1, + exclusive = (s = @{const_name Nil} andalso length constrs = 2), total = true} - else if s = @{const_name Cons} then + else if s = @{const_name Cons} andalso length constrs = 2 then {delta = 1, epsilon = card, exclusive = true, total = false} else {delta = 0, epsilon = card, exclusive = false, total = false} else if card = sum_dom_cards (card + 1) then let val delta = next_delta () in {delta = delta, epsilon = delta + domain_card card card_assigns T, exclusive = true, total = true} end else {delta = 0, epsilon = card, exclusive = (num_self_recs + num_non_self_recs = 1), total = false} in {const = x, delta = delta, epsilon = epsilon, exclusive = exclusive, explicit_max = max, total = total} :: constrs end (* extended_context -> (typ * int) list -> typ -> bool *) fun has_exact_card ext_ctxt card_assigns T = let val card = card_of_type card_assigns T in card = bounded_exact_card_of_type ext_ctxt (card + 1) 0 card_assigns T end (* extended_context -> typ list -> scope_desc -> typ * int -> dtype_spec *) -fun datatype_spec_from_scope_descriptor (ext_ctxt as {thy, ...}) shallow_dataTs +fun datatype_spec_from_scope_descriptor (ext_ctxt as {thy, ...}) deep_dataTs (desc as (card_assigns, _)) (T, card) = let - val shallow = member (op =) shallow_dataTs T + val deep = member (op =) deep_dataTs T val co = is_codatatype thy T val xs = boxed_datatype_constrs ext_ctxt T val self_recs = map (is_self_recursive_constr_type o snd) xs val (num_self_recs, num_non_self_recs) = List.partition I self_recs |> pairself length val complete = has_exact_card ext_ctxt card_assigns T val concrete = xs |> maps (binder_types o snd) |> maps binder_types |> forall (has_exact_card ext_ctxt card_assigns) (* int -> int *) fun sum_dom_cards max = map (domain_card max card_assigns o snd) xs |> Integer.sum val constrs = fold_rev (add_constr_spec desc co card sum_dom_cards num_self_recs num_non_self_recs) (self_recs ~~ xs) [] in {typ = T, card = card, co = co, complete = complete, concrete = concrete, - shallow = shallow, constrs = constrs} + deep = deep, constrs = constrs} end (* int -> int *) fun min_bits_for_nat_value n = if n <= 0 then 0 else IntInf.log2 n + 1 (* scope_desc -> int *) fun min_bits_for_max_assigns (_, []) = 0 | min_bits_for_max_assigns (card_assigns, max_assigns) = min_bits_for_nat_value (fold Integer.max (map snd card_assigns @ map snd max_assigns) 0) (* extended_context -> int -> typ list -> scope_desc -> scope *) -fun scope_from_descriptor (ext_ctxt as {thy, ...}) sym_break shallow_dataTs +fun scope_from_descriptor (ext_ctxt as {thy, ...}) sym_break deep_dataTs (desc as (card_assigns, _)) = let val datatypes = - map (datatype_spec_from_scope_descriptor ext_ctxt shallow_dataTs desc) + map (datatype_spec_from_scope_descriptor ext_ctxt deep_dataTs desc) (filter (is_datatype thy o fst) card_assigns) val bits = card_of_type card_assigns @{typ signed_bit} - 1 handle TYPE ("Nitpick_HOL.card_of_type", _, _) => card_of_type card_assigns @{typ unsigned_bit} handle TYPE ("Nitpick_HOL.card_of_type", _, _) => 0 val bisim_depth = 
card_of_type card_assigns @{typ bisim_iterator} - 1 in {ext_ctxt = ext_ctxt, card_assigns = card_assigns, datatypes = datatypes, bits = bits, bisim_depth = bisim_depth, ofs = if sym_break <= 0 then Typtab.empty else offset_table_for_card_assigns thy card_assigns datatypes} end (* theory -> typ list -> (typ option * int list) list -> (typ option * int list) list *) fun repair_cards_assigns_wrt_boxing _ _ [] = [] | repair_cards_assigns_wrt_boxing thy Ts ((SOME T, ks) :: cards_assigns) = (if is_fun_type T orelse is_pair_type T then Ts |> filter (curry (type_match thy o swap) T o unbit_and_unbox_type) |> map (rpair ks o SOME) else [(SOME T, ks)]) @ repair_cards_assigns_wrt_boxing thy Ts cards_assigns | repair_cards_assigns_wrt_boxing thy Ts ((NONE, ks) :: cards_assigns) = (NONE, ks) :: repair_cards_assigns_wrt_boxing thy Ts cards_assigns val max_scopes = 4096 val distinct_threshold = 512 (* extended_context -> int -> (typ option * int list) list -> (styp option * int list) list -> (styp option * int list) list -> int list -> typ list -> typ list -> typ list -> int * scope list *) fun all_scopes (ext_ctxt as {thy, ...}) sym_break cards_assigns maxes_assigns - iters_assigns bitss bisim_depths mono_Ts nonmono_Ts - shallow_dataTs = + iters_assigns bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs = let val cards_assigns = repair_cards_assigns_wrt_boxing thy mono_Ts cards_assigns val blocks = blocks_for_types ext_ctxt cards_assigns maxes_assigns iters_assigns bitss bisim_depths mono_Ts nonmono_Ts val ranks = map rank_of_block blocks val all = all_combinations_ordered_smartly (map (rpair 0) ranks) val head = take max_scopes all val descs = map_filter (scope_descriptor_from_combination ext_ctxt blocks) head in (length all - length head, descs |> length descs <= distinct_threshold ? distinct (op =) - |> map (scope_from_descriptor ext_ctxt sym_break shallow_dataTs)) + |> map (scope_from_descriptor ext_ctxt sym_break deep_dataTs)) end end; diff --git a/src/HOL/Tools/Nitpick/nitpick_tests.ML b/src/HOL/Tools/Nitpick/nitpick_tests.ML --- a/src/HOL/Tools/Nitpick/nitpick_tests.ML +++ b/src/HOL/Tools/Nitpick/nitpick_tests.ML @@ -1,334 +1,334 @@ (* Title: HOL/Tools/Nitpick/nitpick_tests.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 Unit tests for Nitpick. 
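Most tests below cast a free variable from one Kodkod representation to another and back (e.g., between "Vect" and "Func" or between "Struct" and "Atom") and assert that the result equals the original; each test is compiled to a small Kodkod problem whose negation must be unsatisfiable.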
*) signature NITPICK_TESTS = sig val run_all_tests : unit -> unit end structure Nitpick_Tests = struct open Nitpick_Util open Nitpick_Peephole open Nitpick_Rep open Nitpick_Nut open Nitpick_Kodkod val settings = [("solver", "\"zChaff\""), ("skolem_depth", "-1")] fun cast_to_rep R u = Op1 (Cast, type_of u, R, u) val unit_T = @{typ unit} val dummy_T = @{typ 'a} val unity = Cst (Unity, unit_T, Unit) val atom1_v1 = FreeName ("atom1_v1", dummy_T, Atom (1, 0)) val atom2_v1 = FreeName ("atom2_v1", dummy_T, Atom (2, 0)) val atom6_v1 = FreeName ("atom6_v1", dummy_T, Atom (6, 0)) val atom16_v1 = FreeName ("atom16_v1", dummy_T, Atom (16, 0)) val atom24_v1 = FreeName ("atom24_v1", dummy_T, Atom (24, 0)) val atom36_v1 = FreeName ("atom36_v1", dummy_T, Atom (36, 0)) val atom81_v1 = FreeName ("atom81_v1", dummy_T, Atom (81, 0)) val struct_atom1_atom1_v1 = FreeName ("struct_atom1_atom1_v1", dummy_T, Struct [Atom (1, 0), Atom (1, 0)]) val struct_atom1_unit_v1 = FreeName ("struct_atom1_atom1_v1", dummy_T, Struct [Atom (1, 0), Unit]) val struct_unit_atom1_v1 = FreeName ("struct_atom1_atom1_v1", dummy_T, Struct [Unit, Atom (1, 0)]) (* Formula Unit Atom Struct Vect Func Formula X N/A X X N/A N/A Unit N/A N/A N/A N/A N/A N/A Atom X N/A X X X X Struct N/A N/A X X N/A N/A Vect N/A N/A X N/A X X Func N/A N/A X N/A X X *) val tests = [("rep_conversion_formula_formula", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Formula Neut) (cast_to_rep (Formula Neut) atom2_v1), atom2_v1)), ("rep_conversion_atom_atom", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Atom (16, 0)) atom16_v1), atom16_v1)), ("rep_conversion_struct_struct_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (4, 0), Atom (6, 0)]) (cast_to_rep (Struct [Atom (4, 0), Atom (6, 0)]) atom24_v1), atom24_v1)), ("rep_conversion_struct_struct_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (4, 0), Struct [Atom (2, 0), Atom (3, 0)]]) (cast_to_rep (Struct [Atom (4, 0), Atom (6, 0)]) atom24_v1), atom24_v1)), ("rep_conversion_struct_struct_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (4, 0), Atom (6, 0)]) (cast_to_rep (Struct [Atom (4, 0), Struct [Atom (2, 0), Atom (3, 0)]]) atom24_v1), atom24_v1)), ("rep_conversion_struct_struct_4", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (24, 0), Unit]) (cast_to_rep (Struct [Atom (24, 0), Atom (1, 0)]) atom24_v1), atom24_v1)), ("rep_conversion_struct_struct_5", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (24, 0), Atom (1, 0)]) (cast_to_rep (Struct [Atom (24, 0), Unit]) atom24_v1), atom24_v1)), ("rep_conversion_struct_struct_6", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (1, 0), Atom (1, 0)]) (cast_to_rep (Struct [Atom (1, 0), Unit]) (cast_to_rep (Struct [Unit, Atom (1, 0)]) atom1_v1)), atom1_v1)), ("rep_conversion_vect_vect_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (2, Atom (4, 0))) (cast_to_rep (Vect (2, Struct [Atom (2, 0), Atom (2, 0)])) atom16_v1)), atom16_v1)), ("rep_conversion_vect_vect_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (2, Struct [Atom (2, 0), Atom (2, 0)])) (cast_to_rep (Vect (2, Atom (4, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_vect_vect_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (2, Atom (4, 0))) (cast_to_rep (Vect (2, Vect (2, Atom (2, 0)))) atom16_v1)), atom16_v1)), ("rep_conversion_vect_vect_4", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (2, Vect (2, 
Atom (2, 0)))) (cast_to_rep (Vect (2, Atom (4, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_func_func_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Atom (2, 0), Struct [Atom (2, 0), Atom (3, 0)])) (cast_to_rep (Func (Atom (2, 0), Atom (6, 0))) atom36_v1)), atom36_v1)), ("rep_conversion_func_func_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Atom (2, 0), Atom (6, 0))) (cast_to_rep (Func (Atom (2, 0), Struct [Atom (2, 0), Atom (3, 0)])) atom36_v1)), atom36_v1)), ("rep_conversion_func_func_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Unit, Struct [Atom (6, 0), Atom (6, 0)])) (cast_to_rep (Func (Atom (1, 0), Atom (36, 0))) atom36_v1)), atom36_v1)), ("rep_conversion_func_func_4", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Atom (1, 0), Atom (36, 0))) (cast_to_rep (Func (Unit, Struct [Atom (6, 0), Atom (6, 0)])) atom36_v1)), atom36_v1)), ("rep_conversion_func_func_5", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Unit, Vect (2, Atom (6, 0)))) (cast_to_rep (Func (Atom (1, 0), Atom (36, 0))) atom36_v1)), atom36_v1)), ("rep_conversion_func_func_6", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Atom (1, 0), Atom (36, 0))) (cast_to_rep (Func (Unit, Vect (2, Atom (6, 0)))) atom36_v1)), atom36_v1)), ("rep_conversion_func_func_7", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (2, 0)) (cast_to_rep (Func (Unit, Atom (2, 0))) (cast_to_rep (Func (Atom (1, 0), Formula Neut)) atom2_v1)), atom2_v1)), ("rep_conversion_func_func_8", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (2, 0)) (cast_to_rep (Func (Atom (1, 0), Formula Neut)) (cast_to_rep (Func (Unit, Atom (2, 0))) atom2_v1)), atom2_v1)), ("rep_conversion_atom_formula_atom", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (2, 0)) (cast_to_rep (Formula Neut) atom2_v1), atom2_v1)), ("rep_conversion_unit_atom", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (1, 0)) unity, unity)), ("rep_conversion_atom_struct_atom1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (6, 0)) (cast_to_rep (Struct [Atom (3, 0), Atom (2, 0)]) atom6_v1), atom6_v1)), ("rep_conversion_atom_struct_atom_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (24, 0)) (cast_to_rep (Struct [Struct [Atom (3, 0), Atom (4, 0)], Atom (2, 0)]) atom24_v1), atom24_v1)), ("rep_conversion_atom_struct_atom_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (6, 0)) (cast_to_rep (Struct [Atom (6, 0), Unit]) atom6_v1), atom6_v1)), ("rep_conversion_atom_struct_atom_4", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (6, 0)) (cast_to_rep (Struct [Struct [Atom (3, 0), Unit], Atom (2, 0)]) atom6_v1), atom6_v1)), ("rep_conversion_atom_vect_func_atom_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (4, Atom (2, 0))) (cast_to_rep (Func (Atom (4, 0), Atom (2, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_vect_func_atom_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (4, Atom (2, 0))) (cast_to_rep (Func (Atom (4, 0), Atom (2, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_vect_func_atom_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (4, Atom (2, 0))) (cast_to_rep (Func (Atom (4, 0), Formula Neut)) atom16_v1)), atom16_v1)), ("rep_conversion_atom_vect_func_atom_4", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (1, Atom (16, 0))) (cast_to_rep (Func (Unit, 
Atom (16, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_vect_func_atom_5", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Vect (1, Atom (16, 0))) (cast_to_rep (Func (Unit, Atom (16, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_func_vect_atom_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Func (Atom (4, 0), Atom (2, 0))) (cast_to_rep (Vect (4, Atom (2, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_func_vect_atom_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Func (Atom (4, 0), Atom (2, 0))) (cast_to_rep (Vect (4, Atom (2, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_func_vect_atom_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Func (Atom (4, 0), Formula Neut)) (cast_to_rep (Vect (4, Atom (2, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_func_vect_atom_4", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Func (Unit, Atom (16, 0))) (cast_to_rep (Vect (1, Atom (16, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_func_vect_atom_5", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (16, 0)) (cast_to_rep (Func (Atom (1, 0), Atom (16, 0))) (cast_to_rep (Vect (1, Atom (16, 0))) atom16_v1)), atom16_v1)), ("rep_conversion_atom_vect_atom", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Vect (2, Struct [Atom (2, 0), Atom (3, 0)])) atom36_v1), atom36_v1)), ("rep_conversion_atom_func_atom", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Atom (36, 0)) (cast_to_rep (Func (Atom (2, 0), Struct [Atom (2, 0), Atom (3, 0)])) atom36_v1), atom36_v1)), ("rep_conversion_struct_atom1_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (1, 0), Atom (1, 0)]) atom1_v1, struct_atom1_atom1_v1)), ("rep_conversion_struct_atom1_2", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (1, 0), Unit]) atom1_v1, struct_atom1_unit_v1)), ("rep_conversion_struct_atom1_3", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Unit, Atom (1, 0)]) atom1_v1, struct_unit_atom1_v1)) (* ("rep_conversion_struct_formula_struct_1", Op2 (Eq, bool_T, Formula Neut, cast_to_rep (Struct [Atom (2, 0), Unit]) (cast_to_rep (Formula Neut) struct_atom_2_unit_v1), struct_atom_2_unit_v1)) *) ] fun problem_for_nut ctxt name u = let val debug = false val peephole_optim = true val nat_card = 4 val int_card = 9 val bits = 8 val j0 = 0 val constrs = kodkod_constrs peephole_optim nat_card int_card j0 val (free_rels, pool, table) = rename_free_vars (fst (add_free_and_const_names u ([], []))) initial_pool NameTable.empty val u = Op1 (Not, type_of u, rep_of u, u) |> rename_vars_in_nut pool table val formula = kodkod_formula_from_nut bits Typtab.empty false constrs u val bounds = map (bound_for_plain_rel ctxt debug) free_rels val univ_card = univ_card nat_card int_card j0 bounds formula val declarative_axioms = map (declarative_axiom_for_plain_rel constrs) free_rels val formula = fold_rev s_and declarative_axioms formula in {comment = name, settings = settings, univ_card = univ_card, tuple_assigns = [], bounds = bounds, int_bounds = [], expr_assigns = [], formula = formula} end (* string -> unit *) fun run_test name = case Kodkod.solve_any_problem false NONE 0 1 [problem_for_nut @{context} name (the (AList.lookup (op =) tests name))] of Kodkod.Normal ([], _) => () | _ => warning ("Test " ^ quote name ^ " failed") (* unit -> unit *) fun run_all_tests () = List.app run_test (map fst tests) end; diff --git a/src/HOL/Tools/Nitpick/nitpick_util.ML 
b/src/HOL/Tools/Nitpick/nitpick_util.ML --- a/src/HOL/Tools/Nitpick/nitpick_util.ML +++ b/src/HOL/Tools/Nitpick/nitpick_util.ML @@ -1,292 +1,292 @@ (* Title: HOL/Tools/Nitpick/nitpick_util.ML Author: Jasmin Blanchette, TU Muenchen - Copyright 2008, 2009 + Copyright 2008, 2009, 2010 General-purpose functions used by the Nitpick modules. *) signature NITPICK_UTIL = sig type styp = string * typ datatype polarity = Pos | Neg | Neut exception ARG of string * string exception BAD of string * string exception TOO_SMALL of string * string exception TOO_LARGE of string * string exception NOT_SUPPORTED of string exception SAME of unit val nitpick_prefix : string val curry3 : ('a * 'b * 'c -> 'd) -> 'a -> 'b -> 'c -> 'd val pairf : ('a -> 'b) -> ('a -> 'c) -> 'a -> 'b * 'c val int_for_bool : bool -> int val nat_minus : int -> int -> int val reasonable_power : int -> int -> int val exact_log : int -> int -> int val exact_root : int -> int -> int val offset_list : int list -> int list val index_seq : int -> int -> int list val filter_indices : int list -> 'a list -> 'a list val filter_out_indices : int list -> 'a list -> 'a list val fold1 : ('a -> 'a -> 'a) -> 'a list -> 'a val replicate_list : int -> 'a list -> 'a list val n_fold_cartesian_product : 'a list list -> 'a list list val all_distinct_unordered_pairs_of : ''a list -> (''a * ''a) list val nth_combination : (int * int) list -> int -> int list val all_combinations : (int * int) list -> int list list val all_permutations : 'a list -> 'a list list val batch_list : int -> 'a list -> 'a list list val chunk_list_unevenly : int list -> 'a list -> 'a list list val map3 : ('a -> 'b -> 'c -> 'd) -> 'a list -> 'b list -> 'c list -> 'd list val double_lookup : ('a * 'a -> bool) -> ('a option * 'b) list -> 'a -> 'b option val triple_lookup : (''a * ''a -> bool) -> (''a option * 'b) list -> ''a -> 'b option val is_substring_of : string -> string -> bool val serial_commas : string -> string list -> string list val plural_s : int -> string val plural_s_for_list : 'a list -> string val flip_polarity : polarity -> polarity val prop_T : typ val bool_T : typ val nat_T : typ val int_T : typ val nat_subscript : int -> string val time_limit : Time.time option -> ('a -> 'b) -> 'a -> 'b val DETERM_TIMEOUT : Time.time option -> tactic -> tactic val setmp_show_all_types : ('a -> 'b) -> 'a -> 'b val indent_size : int val pstrs : string -> Pretty.T list - val plain_string_from_yxml : string -> string + val unyxml : string -> string val maybe_quote : string -> string end structure Nitpick_Util : NITPICK_UTIL = struct type styp = string * typ datatype polarity = Pos | Neg | Neut exception ARG of string * string exception BAD of string * string exception TOO_SMALL of string * string exception TOO_LARGE of string * string exception NOT_SUPPORTED of string exception SAME of unit val nitpick_prefix = "Nitpick." 
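(* Editor's illustration (not part of the original sources): the arithmetic and
   list helpers defined below in this module are pure functions, so their
   intended behavior can be summarized by a few sample evaluations, worked out
   by hand from the definitions that follow:

     reasonable_power 2 10          (* = 1024; square-and-multiply, rejecting
                                       negative and oversized exponents *)
     exact_log 2 1024               (* = 10; raises ARG if the log is inexact *)
     exact_root 2 49                (* = 7; checked by re-exponentiating *)
     nat_minus 3 5                  (* = 0; truncated subtraction on naturals *)
     offset_list [2, 3, 4]          (* = [0, 2, 5]; starting offsets of blocks
                                       of the given sizes *)
     index_seq 3 4                  (* = [3, 4, 5, 6] *)
     batch_list 2 [1, 2, 3, 4, 5]   (* = [[1, 2], [3, 4], [5]] *)
*)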
(* ('a * 'b * 'c -> 'd) -> 'a -> 'b -> 'c -> 'd *) fun curry3 f = fn x => fn y => fn z => f (x, y, z) (* ('a -> 'b) -> ('a -> 'c) -> 'a -> 'b * 'c *) fun pairf f g x = (f x, g x) (* bool -> int *) fun int_for_bool b = if b then 1 else 0 (* int -> int -> int *) fun nat_minus i j = if i > j then i - j else 0 val max_exponent = 16384 (* int -> int -> int *) fun reasonable_power a 0 = 1 | reasonable_power a 1 = a | reasonable_power 0 _ = 0 | reasonable_power 1 _ = 1 | reasonable_power a b = if b < 0 then raise ARG ("Nitpick_Util.reasonable_power", "negative exponent (" ^ signed_string_of_int b ^ ")") else if b > max_exponent then raise TOO_LARGE ("Nitpick_Util.reasonable_power", "too large exponent (" ^ signed_string_of_int b ^ ")") else let val c = reasonable_power a (b div 2) in c * c * reasonable_power a (b mod 2) end (* int -> int -> int *) fun exact_log m n = let val r = Math.ln (Real.fromInt n) / Math.ln (Real.fromInt m) |> Real.round in if reasonable_power m r = n then r else raise ARG ("Nitpick_Util.exact_log", commas (map signed_string_of_int [m, n])) end (* int -> int -> int *) fun exact_root m n = let val r = Math.pow (Real.fromInt n, 1.0 / (Real.fromInt m)) |> Real.round in if reasonable_power r m = n then r else raise ARG ("Nitpick_Util.exact_root", commas (map signed_string_of_int [m, n])) end (* ('a -> 'a -> 'a) -> 'a list -> 'a *) fun fold1 f = foldl1 (uncurry f) (* int -> 'a list -> 'a list *) fun replicate_list 0 _ = [] | replicate_list n xs = xs @ replicate_list (n - 1) xs (* int list -> int list *) fun offset_list ns = rev (tl (fold (fn x => fn xs => (x + hd xs) :: xs) ns [0])) (* int -> int -> int list *) fun index_seq j0 n = if j0 < 0 then j0 downto j0 - n + 1 else j0 upto j0 + n - 1 (* int list -> 'a list -> 'a list *) fun filter_indices js xs = let (* int -> int list -> 'a list -> 'a list *) fun aux _ [] _ = [] | aux i (j :: js) (x :: xs) = if i = j then x :: aux (i + 1) js xs else aux (i + 1) (j :: js) xs | aux _ _ _ = raise ARG ("Nitpick_Util.filter_indices", "indices unordered or out of range") in aux 0 js xs end fun filter_out_indices js xs = let (* int -> int list -> 'a list -> 'a list *) fun aux _ [] xs = xs | aux i (j :: js) (x :: xs) = if i = j then aux (i + 1) js xs else x :: aux (i + 1) (j :: js) xs | aux _ _ _ = raise ARG ("Nitpick_Util.filter_out_indices", "indices unordered or out of range") in aux 0 js xs end (* 'a list -> 'a list list -> 'a list list *) fun cartesian_product [] _ = [] | cartesian_product (x :: xs) yss = map (cons x) yss @ cartesian_product xs yss (* 'a list list -> 'a list list *) fun n_fold_cartesian_product xss = fold_rev cartesian_product xss [[]] (* ''a list -> (''a * ''a) list *) fun all_distinct_unordered_pairs_of [] = [] | all_distinct_unordered_pairs_of (x :: xs) = map (pair x) xs @ all_distinct_unordered_pairs_of xs (* (int * int) list -> int -> int list *) val nth_combination = let (* (int * int) list -> int -> int list * int *) fun aux [] n = ([], n) | aux ((k, j0) :: xs) n = let val (js, n) = aux xs n in ((n mod k) + j0 :: js, n div k) end in fst oo aux end (* (int * int) list -> int list list *) val all_combinations = n_fold_cartesian_product o map (uncurry index_seq o swap) (* 'a list -> 'a list list *) fun all_permutations [] = [[]] | all_permutations xs = maps (fn j => map (cons (nth xs j)) (all_permutations (nth_drop j xs))) (index_seq 0 (length xs)) (* int -> 'a list -> 'a list list *) fun batch_list _ [] = [] | batch_list k xs = if length xs <= k then [xs] else List.take (xs, k) :: batch_list k (List.drop (xs, k)) (* 
int list -> 'a list -> 'a list list *) fun chunk_list_unevenly _ [] = [] | chunk_list_unevenly [] ys = map single ys | chunk_list_unevenly (k :: ks) ys = let val (ys1, ys2) = chop k ys in ys1 :: chunk_list_unevenly ks ys2 end (* ('a -> 'b -> 'c -> 'd) -> 'a list -> 'b list -> 'c list -> 'd list *) fun map3 _ [] [] [] = [] | map3 f (x :: xs) (y :: ys) (z :: zs) = f x y z :: map3 f xs ys zs | map3 _ _ _ _ = raise UnequalLengths (* ('a * 'a -> bool) -> ('a option * 'b) list -> 'a -> 'b option *) fun double_lookup eq ps key = case AList.lookup (fn (SOME x, SOME y) => eq (x, y) | _ => false) ps (SOME key) of SOME z => SOME z | NONE => ps |> find_first (is_none o fst) |> Option.map snd (* (''a * ''a -> bool) -> (''a option * 'b) list -> ''a -> 'b option *) fun triple_lookup eq ps key = case AList.lookup (op =) ps (SOME key) of SOME z => SOME z | NONE => double_lookup eq ps key (* string -> string -> bool *) fun is_substring_of needle stack = not (Substring.isEmpty (snd (Substring.position needle (Substring.full stack)))) (* string -> string list -> string list *) fun serial_commas _ [] = ["??"] | serial_commas _ [s] = [s] | serial_commas conj [s1, s2] = [s1, conj, s2] | serial_commas conj [s1, s2, s3] = [s1 ^ ",", s2 ^ ",", conj, s3] | serial_commas conj (s :: ss) = s ^ "," :: serial_commas conj ss (* int -> string *) fun plural_s n = if n = 1 then "" else "s" (* 'a list -> string *) fun plural_s_for_list xs = plural_s (length xs) (* polarity -> polarity *) fun flip_polarity Pos = Neg | flip_polarity Neg = Pos | flip_polarity Neut = Neut val prop_T = @{typ prop} val bool_T = @{typ bool} val nat_T = @{typ nat} val int_T = @{typ int} (* string -> string *) val subscript = implode o map (prefix "\<^isub>") o explode (* int -> string *) fun nat_subscript n = (* cheap trick to ensure proper alphanumeric ordering for one- and two-digit numbers *) if n <= 9 then "\<^bsub>" ^ signed_string_of_int n ^ "\<^esub>" else subscript (string_of_int n) (* Time.time option -> ('a -> 'b) -> 'a -> 'b *) fun time_limit NONE = I | time_limit (SOME delay) = TimeLimit.timeLimit delay (* Time.time option -> tactic -> tactic *) fun DETERM_TIMEOUT delay tac st = Seq.of_list (the_list (time_limit delay (fn () => SINGLE tac st) ())) (* ('a -> 'b) -> 'a -> 'b *) fun setmp_show_all_types f = setmp_CRITICAL show_all_types (! show_types orelse ! show_sorts orelse ! show_all_types) f val indent_size = 2 (* string -> Pretty.T list *) val pstrs = Pretty.breaks o map Pretty.str o space_explode " " (* XML.tree -> string *) fun plain_string_from_xml_tree t = Buffer.empty |> XML.add_content t |> Buffer.content (* string -> string *) -val plain_string_from_yxml = plain_string_from_xml_tree o YXML.parse +val unyxml = plain_string_from_xml_tree o YXML.parse (* string -> bool *) val is_long_identifier = forall Syntax.is_identifier o space_explode "." (* string -> string *) fun maybe_quote y = - let val s = plain_string_from_yxml y in + let val s = unyxml y in y |> (not (is_long_identifier (perhaps (try (unprefix "'")) s)) orelse OuterKeyword.is_keyword s) ? quote end end; diff --git a/src/Pure/Isar/proof_context.ML b/src/Pure/Isar/proof_context.ML --- a/src/Pure/Isar/proof_context.ML +++ b/src/Pure/Isar/proof_context.ML @@ -1,1384 +1,1385 @@ (* Title: Pure/Isar/proof_context.ML Author: Markus Wenzel, TU Muenchen The key concept of Isar proof contexts: elevates primitive local reasoning Gamma |- phi to a structured concept, with generic context elements. See also structure Variable and Assumption. 
*) signature PROOF_CONTEXT = sig val theory_of: Proof.context -> theory val init: theory -> Proof.context type mode val mode_default: mode val mode_stmt: mode val mode_pattern: mode val mode_schematic: mode val mode_abbrev: mode val set_mode: mode -> Proof.context -> Proof.context val get_mode: Proof.context -> mode val restore_mode: Proof.context -> Proof.context -> Proof.context val abbrev_mode: Proof.context -> bool val set_stmt: bool -> Proof.context -> Proof.context val local_naming: Name_Space.naming val map_naming: (Name_Space.naming -> Name_Space.naming) -> Proof.context -> Proof.context val naming_of: Proof.context -> Name_Space.naming val restore_naming: Proof.context -> Proof.context -> Proof.context val full_name: Proof.context -> binding -> string val consts_of: Proof.context -> Consts.T val const_syntax_name: Proof.context -> string -> string val the_const_constraint: Proof.context -> string -> typ val mk_const: Proof.context -> string * typ list -> term val set_syntax_mode: Syntax.mode -> Proof.context -> Proof.context val restore_syntax_mode: Proof.context -> Proof.context -> Proof.context val facts_of: Proof.context -> Facts.T + val cases_of: Proof.context -> (string * (Rule_Cases.T * bool)) list val transfer_syntax: theory -> Proof.context -> Proof.context val transfer: theory -> Proof.context -> Proof.context val theory: (theory -> theory) -> Proof.context -> Proof.context val theory_result: (theory -> 'a * theory) -> Proof.context -> 'a * Proof.context val extern_fact: Proof.context -> string -> xstring val pretty_term_abbrev: Proof.context -> term -> Pretty.T val pretty_fact_aux: Proof.context -> bool -> string * thm list -> Pretty.T val pretty_fact: Proof.context -> string * thm list -> Pretty.T val read_typ: Proof.context -> string -> typ val read_typ_syntax: Proof.context -> string -> typ val read_typ_abbrev: Proof.context -> string -> typ val cert_typ: Proof.context -> typ -> typ val cert_typ_syntax: Proof.context -> typ -> typ val cert_typ_abbrev: Proof.context -> typ -> typ val get_skolem: Proof.context -> string -> string val revert_skolem: Proof.context -> string -> string val infer_type: Proof.context -> string -> typ val inferred_param: string -> Proof.context -> typ * Proof.context val inferred_fixes: Proof.context -> (string * typ) list * Proof.context val read_tyname: Proof.context -> string -> typ val read_const_proper: Proof.context -> string -> term val read_const: Proof.context -> string -> term val allow_dummies: Proof.context -> Proof.context val decode_term: Proof.context -> term -> term val standard_infer_types: Proof.context -> term list -> term list val read_term_pattern: Proof.context -> string -> term val read_term_schematic: Proof.context -> string -> term val read_term_abbrev: Proof.context -> string -> term val expand_abbrevs: Proof.context -> term -> term val cert_term: Proof.context -> term -> term val cert_prop: Proof.context -> term -> term val goal_export: Proof.context -> Proof.context -> thm list -> thm list val export: Proof.context -> Proof.context -> thm list -> thm list val export_morphism: Proof.context -> Proof.context -> morphism val norm_export_morphism: Proof.context -> Proof.context -> morphism val bind_terms: (indexname * term option) list -> Proof.context -> Proof.context val auto_bind_goal: term list -> Proof.context -> Proof.context val auto_bind_facts: term list -> Proof.context -> Proof.context val match_bind: bool -> (string list * string) list -> Proof.context -> term list * Proof.context val match_bind_i: bool -> 
(term list * term) list -> Proof.context -> term list * Proof.context val read_propp: Proof.context * (string * string list) list list -> Proof.context * (term * term list) list list val cert_propp: Proof.context * (term * term list) list list -> Proof.context * (term * term list) list list val read_propp_schematic: Proof.context * (string * string list) list list -> Proof.context * (term * term list) list list val cert_propp_schematic: Proof.context * (term * term list) list list -> Proof.context * (term * term list) list list val bind_propp: Proof.context * (string * string list) list list -> Proof.context * (term list list * (Proof.context -> Proof.context)) val bind_propp_i: Proof.context * (term * term list) list list -> Proof.context * (term list list * (Proof.context -> Proof.context)) val bind_propp_schematic: Proof.context * (string * string list) list list -> Proof.context * (term list list * (Proof.context -> Proof.context)) val bind_propp_schematic_i: Proof.context * (term * term list) list list -> Proof.context * (term list list * (Proof.context -> Proof.context)) val fact_tac: thm list -> int -> tactic val some_fact_tac: Proof.context -> int -> tactic val get_fact: Proof.context -> Facts.ref -> thm list val get_fact_single: Proof.context -> Facts.ref -> thm val get_thms: Proof.context -> xstring -> thm list val get_thm: Proof.context -> xstring -> thm val note_thmss: string -> (Thm.binding * (thm list * attribute list) list) list -> Proof.context -> (string * thm list) list * Proof.context val put_thms: bool -> string * thm list option -> Proof.context -> Proof.context val read_vars: (binding * string option * mixfix) list -> Proof.context -> (binding * typ option * mixfix) list * Proof.context val cert_vars: (binding * typ option * mixfix) list -> Proof.context -> (binding * typ option * mixfix) list * Proof.context val add_fixes: (binding * typ option * mixfix) list -> Proof.context -> string list * Proof.context val auto_fixes: Proof.context * (term list list * 'a) -> Proof.context * (term list list * 'a) val bind_fixes: string list -> Proof.context -> (term -> term) * Proof.context val add_assms: Assumption.export -> (Thm.binding * (string * string list) list) list -> Proof.context -> (string * thm list) list * Proof.context val add_assms_i: Assumption.export -> (Thm.binding * (term * term list) list) list -> Proof.context -> (string * thm list) list * Proof.context val add_cases: bool -> (string * Rule_Cases.T option) list -> Proof.context -> Proof.context val apply_case: Rule_Cases.T -> Proof.context -> (string * term list) list * Proof.context val get_case: Proof.context -> string -> string option list -> Rule_Cases.T val notation: bool -> Syntax.mode -> (term * mixfix) list -> Proof.context -> Proof.context val target_notation: bool -> Syntax.mode -> (term * mixfix) list -> morphism -> Context.generic -> Context.generic val add_const_constraint: string * typ option -> Proof.context -> Proof.context val add_abbrev: string -> binding * term -> Proof.context -> (term * term) * Proof.context val revert_abbrev: string -> string -> Proof.context -> Proof.context val verbose: bool Unsynchronized.ref val setmp_verbose_CRITICAL: ('a -> 'b) -> 'a -> 'b val print_syntax: Proof.context -> unit val print_abbrevs: Proof.context -> unit val print_binds: Proof.context -> unit val print_lthms: Proof.context -> unit val print_cases: Proof.context -> unit val debug: bool Unsynchronized.ref val prems_limit: int Unsynchronized.ref val pretty_ctxt: Proof.context -> Pretty.T list val 
pretty_context: Proof.context -> Pretty.T list end; structure ProofContext: PROOF_CONTEXT = struct open ProofContext; (** inner syntax mode **) datatype mode = Mode of {stmt: bool, (*inner statement mode*) pattern: bool, (*pattern binding schematic variables*) schematic: bool, (*term referencing loose schematic variables*) abbrev: bool}; (*abbrev mode -- no normalization*) fun make_mode (stmt, pattern, schematic, abbrev) = Mode {stmt = stmt, pattern = pattern, schematic = schematic, abbrev = abbrev}; val mode_default = make_mode (false, false, false, false); val mode_stmt = make_mode (true, false, false, false); val mode_pattern = make_mode (false, true, false, false); val mode_schematic = make_mode (false, false, true, false); val mode_abbrev = make_mode (false, false, false, true); (** Isar proof context information **) datatype ctxt = Ctxt of {mode: mode, (*inner syntax mode*) naming: Name_Space.naming, (*local naming conventions*) syntax: Local_Syntax.T, (*local syntax*) consts: Consts.T * Consts.T, (*local/global consts*) facts: Facts.T, (*local facts*) cases: (string * (Rule_Cases.T * bool)) list}; (*named case contexts*) fun make_ctxt (mode, naming, syntax, consts, facts, cases) = Ctxt {mode = mode, naming = naming, syntax = syntax, consts = consts, facts = facts, cases = cases}; val local_naming = Name_Space.default_naming |> Name_Space.add_path "local"; structure ContextData = Proof_Data ( type T = ctxt; fun init thy = make_ctxt (mode_default, local_naming, Local_Syntax.init thy, (Sign.consts_of thy, Sign.consts_of thy), Facts.empty, []); ); fun rep_context ctxt = ContextData.get ctxt |> (fn Ctxt args => args); fun map_context f = ContextData.map (fn Ctxt {mode, naming, syntax, consts, facts, cases} => make_ctxt (f (mode, naming, syntax, consts, facts, cases))); fun set_mode mode = map_context (fn (_, naming, syntax, consts, facts, cases) => (mode, naming, syntax, consts, facts, cases)); fun map_mode f = map_context (fn (Mode {stmt, pattern, schematic, abbrev}, naming, syntax, consts, facts, cases) => (make_mode (f (stmt, pattern, schematic, abbrev)), naming, syntax, consts, facts, cases)); fun map_naming f = map_context (fn (mode, naming, syntax, consts, facts, cases) => (mode, f naming, syntax, consts, facts, cases)); fun map_syntax f = map_context (fn (mode, naming, syntax, consts, facts, cases) => (mode, naming, f syntax, consts, facts, cases)); fun map_consts f = map_context (fn (mode, naming, syntax, consts, facts, cases) => (mode, naming, syntax, f consts, facts, cases)); fun map_facts f = map_context (fn (mode, naming, syntax, consts, facts, cases) => (mode, naming, syntax, consts, f facts, cases)); fun map_cases f = map_context (fn (mode, naming, syntax, consts, facts, cases) => (mode, naming, syntax, consts, facts, f cases)); val get_mode = #mode o rep_context; val restore_mode = set_mode o get_mode; val abbrev_mode = get_mode #> (fn Mode {abbrev, ...} => abbrev); fun set_stmt stmt = map_mode (fn (_, pattern, schematic, abbrev) => (stmt, pattern, schematic, abbrev)); val naming_of = #naming o rep_context; val restore_naming = map_naming o K o naming_of val full_name = Name_Space.full_name o naming_of; val syntax_of = #syntax o rep_context; val syn_of = Local_Syntax.syn_of o syntax_of; val set_syntax_mode = map_syntax o Local_Syntax.set_mode; val restore_syntax_mode = map_syntax o Local_Syntax.restore_mode o syntax_of; val consts_of = #1 o #consts o rep_context; val const_syntax_name = Consts.syntax_name o consts_of; val the_const_constraint = Consts.the_constraint o 
consts_of; fun mk_const ctxt (c, Ts) = Const (c, Consts.instance (consts_of ctxt) (c, Ts)); val facts_of = #facts o rep_context; val cases_of = #cases o rep_context; (* theory transfer *) fun transfer_syntax thy = map_syntax (Local_Syntax.rebuild thy) #> map_consts (fn consts as (local_consts, global_consts) => let val thy_consts = Sign.consts_of thy in if Consts.eq_consts (thy_consts, global_consts) then consts else (Consts.merge (local_consts, thy_consts), thy_consts) end); fun transfer thy = Context.raw_transfer thy #> transfer_syntax thy; fun theory f ctxt = transfer (f (theory_of ctxt)) ctxt; fun theory_result f ctxt = let val (res, thy') = f (theory_of ctxt) in (res, ctxt |> transfer thy') end; (** pretty printing **) (* extern *) fun extern_fact ctxt name = let val local_facts = facts_of ctxt; val global_facts = PureThy.facts_of (theory_of ctxt); in if is_some (Facts.lookup (Context.Proof ctxt) local_facts name) then Facts.extern local_facts name else Facts.extern global_facts name end; (* pretty *) fun pretty_term_abbrev ctxt = Syntax.pretty_term (set_mode mode_abbrev ctxt); fun pretty_fact_name ctxt a = Pretty.block [Pretty.markup (Markup.fact a) [Pretty.str (extern_fact ctxt a)], Pretty.str ":"]; fun pretty_fact_aux ctxt flag ("", ths) = Display.pretty_thms_aux ctxt flag ths | pretty_fact_aux ctxt flag (a, [th]) = Pretty.block [pretty_fact_name ctxt a, Pretty.brk 1, Display.pretty_thm_aux ctxt flag th] | pretty_fact_aux ctxt flag (a, ths) = Pretty.block (Pretty.fbreaks (pretty_fact_name ctxt a :: map (Display.pretty_thm_aux ctxt flag) ths)); fun pretty_fact ctxt = pretty_fact_aux ctxt true; (** prepare types **) (* read_typ *) fun read_typ_mode mode ctxt s = Syntax.read_typ (Type.set_mode mode ctxt) s; val read_typ = read_typ_mode Type.mode_default; val read_typ_syntax = read_typ_mode Type.mode_syntax; val read_typ_abbrev = read_typ_mode Type.mode_abbrev; (* cert_typ *) fun cert_typ_mode mode ctxt T = Sign.certify_typ_mode mode (theory_of ctxt) T handle TYPE (msg, _, _) => error msg; val cert_typ = cert_typ_mode Type.mode_default; val cert_typ_syntax = cert_typ_mode Type.mode_syntax; val cert_typ_abbrev = cert_typ_mode Type.mode_abbrev; (** prepare variables **) (* internalize Skolem constants *) val lookup_skolem = AList.lookup (op =) o Variable.fixes_of; fun get_skolem ctxt x = the_default x (lookup_skolem ctxt x); fun no_skolem internal x = if can Name.dest_skolem x then error ("Illegal reference to internal Skolem constant: " ^ quote x) else if not internal andalso can Name.dest_internal x then error ("Illegal reference to internal variable: " ^ quote x) else x; (* revert Skolem constants -- if possible *) fun revert_skolem ctxt x = (case find_first (fn (_, y) => y = x) (Variable.fixes_of ctxt) of SOME (x', _) => if lookup_skolem ctxt x' = SOME x then x' else x | NONE => x); (* default token translations *) local fun free_or_skolem ctxt x = (if can Name.dest_skolem x then Pretty.mark Markup.skolem (Pretty.str (revert_skolem ctxt x)) else Pretty.mark Markup.free (Pretty.str x)) |> Pretty.mark (if Variable.is_fixed ctxt x orelse Syntax.is_pretty_global ctxt then Markup.fixed x else Markup.hilite); fun var_or_skolem _ s = (case Lexicon.read_variable s of SOME (x, i) => (case try Name.dest_skolem x of NONE => Pretty.mark Markup.var (Pretty.str s) | SOME x' => Pretty.mark Markup.skolem (Pretty.str (setmp_CRITICAL show_question_marks true Term.string_of_vname (x', i)))) | NONE => Pretty.mark Markup.var (Pretty.str s)); fun class_markup _ c = (* FIXME authentic name *) Pretty.mark 
(Markup.tclassN, []) (Pretty.str c); fun plain_markup m _ s = Pretty.mark m (Pretty.str s); val token_trans = Syntax.tokentrans_mode "" [("class", class_markup), ("tfree", plain_markup Markup.tfree), ("tvar", plain_markup Markup.tvar), ("free", free_or_skolem), ("bound", plain_markup Markup.bound), ("var", var_or_skolem), ("numeral", plain_markup Markup.numeral), ("inner_string", plain_markup Markup.inner_string)]; in val _ = Context.>> (Context.map_theory (Sign.add_tokentrfuns token_trans)) end; (** prepare terms and propositions **) (* inferred types of parameters *) fun infer_type ctxt x = Term.fastype_of (singleton (Syntax.check_terms (set_mode mode_schematic ctxt)) (Free (x, dummyT))); fun inferred_param x ctxt = let val T = infer_type ctxt x in (T, ctxt |> Variable.declare_term (Free (x, T))) end; fun inferred_fixes ctxt = let val xs = rev (map #2 (Variable.fixes_of ctxt)); val (Ts, ctxt') = fold_map inferred_param xs ctxt; in (xs ~~ Ts, ctxt') end; (* type and constant names *) local val token_content = Syntax.read_token #>> Symbol_Pos.content; fun prep_const_proper ctxt (c, pos) = let val t as (Const (d, _)) = (case Variable.lookup_const ctxt c of SOME d => Const (d, Consts.type_scheme (consts_of ctxt) d handle TYPE (msg, _, _) => error msg) | NONE => Consts.read_const (consts_of ctxt) c) in Position.report (Markup.const d) pos; t end; in fun read_tyname ctxt str = let val thy = theory_of ctxt; val (c, pos) = token_content str; in if Syntax.is_tid c then (Position.report Markup.tfree pos; TFree (c, the_default (Sign.defaultS thy) (Variable.def_sort ctxt (c, ~1)))) else let val d = Sign.intern_type thy c; val _ = Position.report (Markup.tycon d) pos; in Type (d, replicate (Sign.arity_number thy d) dummyT) end end; fun read_const_proper ctxt = prep_const_proper ctxt o token_content; fun read_const ctxt str = let val (c, pos) = token_content str in (case (lookup_skolem ctxt c, Variable.is_const ctxt c) of (SOME x, false) => (Position.report (Markup.name x (if can Name.dest_skolem x then Markup.skolem else Markup.free)) pos; Free (x, infer_type ctxt x)) | _ => prep_const_proper ctxt (c, pos)) end; end; (* read_term *) fun read_term_mode mode ctxt = Syntax.read_term (set_mode mode ctxt); val read_term_pattern = read_term_mode mode_pattern; val read_term_schematic = read_term_mode mode_schematic; val read_term_abbrev = read_term_mode mode_abbrev; (* local abbreviations *) val tsig_of = Sign.tsig_of o ProofContext.theory_of; local fun certify_consts ctxt = Consts.certify (Syntax.pp ctxt) (tsig_of ctxt) (not (abbrev_mode ctxt)) (consts_of ctxt); fun reject_schematic (Var (xi, _)) = error ("Unbound schematic variable: " ^ Term.string_of_vname xi) | reject_schematic (Abs (_, _, t)) = reject_schematic t | reject_schematic (t $ u) = (reject_schematic t; reject_schematic u) | reject_schematic _ = (); fun expand_binds ctxt = let val Mode {pattern, schematic, ...} = get_mode ctxt in if pattern then I else Variable.expand_binds ctxt #> (if schematic then I else tap reject_schematic) end; in fun expand_abbrevs ctxt = certify_consts ctxt #> expand_binds ctxt; end; fun contract_abbrevs ctxt t = let val thy = theory_of ctxt; val consts = consts_of ctxt; val Mode {abbrev, ...} = get_mode ctxt; val retrieve = Consts.retrieve_abbrevs consts (print_mode_value () @ [""]); fun match_abbrev u = Option.map #1 (get_first (Pattern.match_rew thy u) (retrieve u)); in if abbrev orelse print_mode_active "no_abbrevs" orelse not (can Term.type_of t) then t else Pattern.rewrite_term thy [] [match_abbrev] t end; (* 
patterns *) fun prepare_patternT ctxt T = let val Mode {pattern, schematic, ...} = get_mode ctxt; val _ = pattern orelse schematic orelse T |> Term.exists_subtype (fn TVar (xi, _) => not (TypeInfer.is_param xi) andalso error ("Illegal schematic type variable: " ^ Term.string_of_vname xi) | _ => false) in T end; local structure Allow_Dummies = Proof_Data(type T = bool fun init _ = false); fun check_dummies ctxt t = if Allow_Dummies.get ctxt then t else Term.no_dummy_patterns t handle TERM _ => error "Illegal dummy pattern(s) in term"; fun prepare_dummies ts = #1 (fold_map Term.replace_dummy_patterns ts 1); in val allow_dummies = Allow_Dummies.put true; fun prepare_patterns ctxt = let val Mode {pattern, ...} = get_mode ctxt in TypeInfer.fixate_params (Variable.names_of ctxt) #> pattern ? Variable.polymorphic ctxt #> (map o Term.map_types) (prepare_patternT ctxt) #> (if pattern then prepare_dummies else map (check_dummies ctxt)) end; end; (* decoding raw terms (syntax trees) *) (* types *) fun get_sort thy def_sort raw_env = let val tsig = Sign.tsig_of thy; fun eq ((xi, S), (xi', S')) = Term.eq_ix (xi, xi') andalso Type.eq_sort tsig (S, S'); val env = distinct eq raw_env; val _ = (case duplicates (eq_fst (op =)) env of [] => () | dups => error ("Inconsistent sort constraints for type variable(s) " ^ commas_quote (map (Term.string_of_vname' o fst) dups))); fun get xi = (case (AList.lookup (op =) env xi, def_sort xi) of (NONE, NONE) => Type.defaultS tsig | (NONE, SOME S) => S | (SOME S, NONE) => S | (SOME S, SOME S') => if Type.eq_sort tsig (S, S') then S' else error ("Sort constraint " ^ Syntax.string_of_sort_global thy S ^ " inconsistent with default " ^ Syntax.string_of_sort_global thy S' ^ " for type variable " ^ quote (Term.string_of_vname' xi))); in get end; local fun intern_skolem ctxt def_type x = let val _ = no_skolem false x; val sko = lookup_skolem ctxt x; val is_const = can (read_const_proper ctxt) x orelse Long_Name.is_qualified x; val is_declared = is_some (def_type (x, ~1)); in if Variable.is_const ctxt x then NONE else if is_some sko then sko else if not is_const orelse is_declared then SOME x else NONE end; in fun term_context ctxt = let val thy = theory_of ctxt in {get_sort = get_sort thy (Variable.def_sort ctxt), map_const = fn a => ((true, #1 (Term.dest_Const (read_const_proper ctxt a))) handle ERROR _ => (false, Consts.intern (consts_of ctxt) a)), map_free = intern_skolem ctxt (Variable.def_type ctxt false), map_type = Sign.intern_tycons thy, map_sort = Sign.intern_sort thy} end; fun decode_term ctxt = let val {get_sort, map_const, map_free, map_type, map_sort} = term_context ctxt in Syntax.decode_term get_sort map_const map_free map_type map_sort end; end; (* certify terms *) local fun gen_cert prop ctxt t = t |> expand_abbrevs ctxt |> (fn t' => #1 (Sign.certify' prop (Syntax.pp ctxt) false (consts_of ctxt) (theory_of ctxt) t') handle TYPE (msg, _, _) => error msg | TERM (msg, _) => error msg); in val cert_term = gen_cert false; val cert_prop = gen_cert true; end; (* type checking/inference *) fun standard_infer_types ctxt ts = let val Mode {pattern, ...} = get_mode ctxt in TypeInfer.infer_types (Syntax.pp ctxt) (tsig_of ctxt) (Syntax.check_typs ctxt) (try (Consts.the_constraint (consts_of ctxt))) (Variable.def_type ctxt pattern) (Variable.names_of ctxt) (Variable.maxidx_of ctxt) ts handle TYPE (msg, _, _) => error msg end; local fun standard_typ_check ctxt = map (cert_typ_mode (Type.get_mode ctxt) ctxt) #> map (prepare_patternT ctxt); fun standard_term_check ctxt = 
standard_infer_types ctxt #> map (expand_abbrevs ctxt); fun standard_term_uncheck ctxt = map (contract_abbrevs ctxt); fun add eq what f = Context.>> (what (fn xs => fn ctxt => let val xs' = f ctxt xs in if eq_list eq (xs, xs') then NONE else SOME (xs', ctxt) end)); in val _ = add (op =) (Syntax.add_typ_check 0 "standard") standard_typ_check; val _ = add (op aconv) (Syntax.add_term_check 0 "standard") standard_term_check; val _ = add (op aconv) (Syntax.add_term_check 100 "fixate") prepare_patterns; val _ = add (op aconv) (Syntax.add_term_uncheck 0 "standard") standard_term_uncheck; end; (** inner syntax operations **) local fun parse_sort ctxt text = let val (syms, pos) = Syntax.parse_token Markup.sort text; val S = Syntax.standard_parse_sort ctxt (syn_of ctxt) (Sign.intern_sort (theory_of ctxt)) (syms, pos) handle ERROR msg => cat_error msg ("Failed to parse sort" ^ Position.str_of pos) in S end; fun parse_typ ctxt text = let val thy = ProofContext.theory_of ctxt; val get_sort = get_sort thy (Variable.def_sort ctxt); val (syms, pos) = Syntax.parse_token Markup.typ text; val T = Sign.intern_tycons thy (Syntax.standard_parse_typ ctxt (syn_of ctxt) get_sort (Sign.intern_sort thy) (syms, pos)) handle ERROR msg => cat_error msg ("Failed to parse type" ^ Position.str_of pos); in T end; fun parse_term T ctxt text = let val thy = theory_of ctxt; val {get_sort, map_const, map_free, map_type, map_sort} = term_context ctxt; val (T', _) = TypeInfer.paramify_dummies T 0; val (markup, kind) = if T' = propT then (Markup.prop, "proposition") else (Markup.term, "term"); val (syms, pos) = Syntax.parse_token markup text; fun check t = (Syntax.check_term ctxt (TypeInfer.constrain T' t); NONE) handle ERROR msg => SOME msg; val t = Syntax.standard_parse_term (Syntax.pp ctxt) check get_sort map_const map_free map_type map_sort ctxt (Sign.is_logtype thy) (syn_of ctxt) T' (syms, pos) handle ERROR msg => cat_error msg ("Failed to parse " ^ kind ^ Position.str_of pos); in t end; fun unparse_sort ctxt S = Syntax.standard_unparse_sort ctxt (syn_of ctxt) (Sign.extern_sort (theory_of ctxt) S); fun unparse_typ ctxt T = Syntax.standard_unparse_typ ctxt (syn_of ctxt) (Sign.extern_typ (theory_of ctxt) T); fun unparse_term ctxt t = let val thy = theory_of ctxt; val syntax = syntax_of ctxt; val consts = consts_of ctxt; in t |> Sign.extern_term (Consts.extern_early consts) thy |> Local_Syntax.extern_term syntax |> Syntax.standard_unparse_term (Consts.extern consts) ctxt (Local_Syntax.syn_of syntax) (not (PureThy.old_appl_syntax thy)) end; in val _ = Syntax.install_operations {parse_sort = parse_sort, parse_typ = parse_typ, parse_term = parse_term dummyT, parse_prop = parse_term propT, unparse_sort = unparse_sort, unparse_typ = unparse_typ, unparse_term = unparse_term}; end; (** export results **) fun common_export is_goal inner outer = map (Assumption.export is_goal inner outer) #> Variable.export inner outer; val goal_export = common_export true; val export = common_export false; fun export_morphism inner outer = Assumption.export_morphism inner outer $> Variable.export_morphism inner outer; fun norm_export_morphism inner outer = export_morphism inner outer $> Morphism.thm_morphism Goal.norm_result; (** term bindings **) (* simult_matches *) fun simult_matches ctxt (t, pats) = (case Seq.pull (Unify.matchers (theory_of ctxt) (map (rpair t) pats)) of NONE => error "Pattern match failed!" 
| SOME (env, _) => Vartab.fold (fn (v, (_, t)) => cons (v, t)) (Envir.term_env env) []); (* bind_terms *) val bind_terms = fold (fn (xi, t) => fn ctxt => ctxt |> Variable.bind_term (xi, Option.map (cert_term (set_mode mode_default ctxt)) t)); (* auto_bind *) fun drop_schematic (b as (xi, SOME t)) = if Term.exists_subterm is_Var t then (xi, NONE) else b | drop_schematic b = b; fun auto_bind f ts ctxt = ctxt |> bind_terms (map drop_schematic (f (theory_of ctxt) ts)); val auto_bind_goal = auto_bind Auto_Bind.goal; val auto_bind_facts = auto_bind Auto_Bind.facts; (* match_bind(_i) *) local fun gen_bind prep_terms gen raw_binds ctxt = let fun prep_bind (raw_pats, t) ctxt1 = let val T = Term.fastype_of t; val ctxt2 = Variable.declare_term t ctxt1; val pats = prep_terms (set_mode mode_pattern ctxt2) T raw_pats; val binds = simult_matches ctxt2 (t, pats); in (binds, ctxt2) end; val ts = prep_terms ctxt dummyT (map snd raw_binds); val (binds, ctxt') = apfst flat (fold_map prep_bind (map fst raw_binds ~~ ts) ctxt); val binds' = if gen then map #1 binds ~~ Variable.exportT_terms ctxt' ctxt (map #2 binds) else binds; val binds'' = map (apsnd SOME) binds'; val ctxt'' = tap (Variable.warn_extra_tfrees ctxt) (if gen then ctxt (*sic!*) |> fold Variable.declare_term (map #2 binds') |> bind_terms binds'' else ctxt' |> bind_terms binds''); in (ts, ctxt'') end; in fun read_terms ctxt T = map (Syntax.parse_term ctxt #> TypeInfer.constrain T) #> Syntax.check_terms ctxt; val match_bind = gen_bind read_terms; val match_bind_i = gen_bind (fn ctxt => fn _ => map (cert_term ctxt)); end; (* propositions with patterns *) local fun prep_propp mode prep_props (context, args) = let fun prep (_, raw_pats) (ctxt, prop :: props) = let val ctxt' = Variable.declare_term prop ctxt in ((prop, prep_props (set_mode mode_pattern ctxt') raw_pats), (ctxt', props)) end; val (propp, (context', _)) = (fold_map o fold_map) prep args (context, prep_props (set_mode mode context) (maps (map fst) args)); in (context', propp) end; fun gen_bind_propp mode parse_prop (ctxt, raw_args) = let val (ctxt', args) = prep_propp mode parse_prop (ctxt, raw_args); val binds = flat (flat (map (map (simult_matches ctxt')) args)); val propss = map (map #1) args; (*generalize result: context evaluated now, binds added later*) val gen = Variable.exportT_terms ctxt' ctxt; fun gen_binds c = c |> bind_terms (map #1 binds ~~ map SOME (gen (map #2 binds))); in (ctxt' |> bind_terms (map (apsnd SOME) binds), (propss, gen_binds)) end; in val read_propp = prep_propp mode_default Syntax.read_props; val cert_propp = prep_propp mode_default (map o cert_prop); val read_propp_schematic = prep_propp mode_schematic Syntax.read_props; val cert_propp_schematic = prep_propp mode_schematic (map o cert_prop); val bind_propp = gen_bind_propp mode_default Syntax.read_props; val bind_propp_i = gen_bind_propp mode_default (map o cert_prop); val bind_propp_schematic = gen_bind_propp mode_schematic Syntax.read_props; val bind_propp_schematic_i = gen_bind_propp mode_schematic (map o cert_prop); end; (** theorems **) (* fact_tac *) fun comp_incr_tac [] _ = no_tac | comp_incr_tac (th :: ths) i = (fn st => Goal.compose_hhf_tac (Drule.incr_indexes st th) i st) APPEND comp_incr_tac ths i; fun fact_tac facts = Goal.norm_hhf_tac THEN' comp_incr_tac facts; fun potential_facts ctxt prop = Facts.could_unify (facts_of ctxt) (Term.strip_all_body prop); fun some_fact_tac ctxt = SUBGOAL (fn (goal, i) => fact_tac (potential_facts ctxt goal) i); (* get_thm(s) *) local fun retrieve_thms pick ctxt 
(Facts.Fact s) = let val (_, pos) = Syntax.read_token s; val prop = Syntax.read_prop (set_mode mode_default ctxt) s |> singleton (Variable.polymorphic ctxt); fun prove_fact th = Goal.prove ctxt [] [] prop (K (ALLGOALS (fact_tac [th]))); val res = (case get_first (try prove_fact) (potential_facts ctxt prop) of SOME res => res | NONE => error ("Failed to retrieve literal fact" ^ Position.str_of pos ^ ":\n" ^ Syntax.string_of_term ctxt prop)) in pick "" [res] end | retrieve_thms pick ctxt xthmref = let val thy = theory_of ctxt; val local_facts = facts_of ctxt; val thmref = Facts.map_name_of_ref (Facts.intern local_facts) xthmref; val name = Facts.name_of_ref thmref; val pos = Facts.pos_of_ref xthmref; val thms = if name = "" then [Thm.transfer thy Drule.dummy_thm] else (case Facts.lookup (Context.Proof ctxt) local_facts name of SOME (_, ths) => (Position.report (Markup.local_fact name) pos; map (Thm.transfer thy) (Facts.select thmref ths)) | NONE => PureThy.get_fact (Context.Proof ctxt) thy xthmref); in pick name thms end; in val get_fact = retrieve_thms (K I); val get_fact_single = retrieve_thms Facts.the_single; fun get_thms ctxt = get_fact ctxt o Facts.named; fun get_thm ctxt = get_fact_single ctxt o Facts.named; end; (* facts *) local fun update_thms _ (b, NONE) ctxt = ctxt |> map_facts (Facts.del (full_name ctxt b)) | update_thms do_props (b, SOME ths) ctxt = ctxt |> map_facts (Facts.add_local do_props (naming_of ctxt) (b, ths) #> snd); in fun note_thmss kind = fold_map (fn ((b, more_attrs), raw_facts) => fn ctxt => let val pos = Binding.pos_of b; val name = full_name ctxt b; val _ = Context_Position.report_visible ctxt (Markup.local_fact_decl name) pos; val facts = PureThy.name_thmss false name raw_facts; fun app (th, attrs) x = swap (Library.foldl_map (Thm.proof_attributes (surround (Thm.kind kind) (attrs @ more_attrs))) (x, th)); val (res, ctxt') = fold_map app facts ctxt; val thms = PureThy.name_thms false false name (flat res); val Mode {stmt, ...} = get_mode ctxt; in ((name, thms), ctxt' |> update_thms stmt (b, SOME thms)) end); fun put_thms do_props thms ctxt = ctxt |> map_naming (K local_naming) |> Context_Position.set_visible false |> update_thms do_props (apfst Binding.name thms) |> Context_Position.restore_visible ctxt |> restore_naming ctxt; end; (** parameters **) (* variables *) fun declare_var (x, opt_T, mx) ctxt = let val T = (case opt_T of SOME T => T | NONE => Syntax.mixfixT mx) in ((x, T, mx), ctxt |> Variable.declare_constraints (Free (x, T))) end; local fun prep_vars prep_typ internal = fold_map (fn (raw_b, raw_T, raw_mx) => fn ctxt => let val raw_x = Name.of_binding raw_b; val (x, mx) = Syntax.const_mixfix raw_x raw_mx; val _ = Syntax.is_identifier (no_skolem internal x) orelse error ("Illegal variable name: " ^ quote x); fun cond_tvars T = if internal then T else Type.no_tvars T handle TYPE (msg, _, _) => error msg; val opt_T = Option.map (cond_tvars o cert_typ ctxt o prep_typ ctxt) raw_T; val var = (Binding.map_name (K x) raw_b, opt_T, mx); in (var, ctxt |> declare_var (x, opt_T, mx) |> #2) end); in val read_vars = prep_vars Syntax.parse_typ false; val cert_vars = prep_vars (K I) true; end; (* authentic constants *) local fun const_ast_tr intern ctxt [Syntax.Variable c] = let val Const (c', _) = read_const_proper ctxt c; val d = if intern then const_syntax_name ctxt c' else c; in Syntax.Constant d end | const_ast_tr _ _ asts = raise Syntax.AST ("const_ast_tr", asts); in val _ = Context.>> (Context.map_theory (Sign.add_syntax [("_context_const", "id => logic", 
Delimfix "CONST _"), ("_context_const", "id => aprop", Delimfix "CONST _"), ("_context_const", "longid => logic", Delimfix "CONST _"), ("_context_const", "longid => aprop", Delimfix "CONST _"), ("_context_xconst", "id => logic", Delimfix "XCONST _"), ("_context_xconst", "id => aprop", Delimfix "XCONST _"), ("_context_xconst", "longid => logic", Delimfix "XCONST _"), ("_context_xconst", "longid => aprop", Delimfix "XCONST _")] #> Sign.add_advanced_trfuns ([("_context_const", const_ast_tr true), ("_context_xconst", const_ast_tr false)], [], [], []))); end; (* notation *) local fun const_syntax _ (Free (x, T), mx) = SOME (true, (x, T, mx)) | const_syntax ctxt (Const (c, _), mx) = Option.map (pair false) (try (Consts.syntax (consts_of ctxt)) (c, mx)) | const_syntax _ _ = NONE; in fun notation add mode args ctxt = ctxt |> map_syntax (Local_Syntax.update_modesyntax (theory_of ctxt) add mode (map_filter (const_syntax ctxt) args)); fun target_notation add mode args phi = let val args' = args |> map_filter (fn (t, mx) => let val t' = Morphism.term phi t in if Term.aconv_untyped (t, t') then SOME (t', mx) else NONE end); in Context.mapping (Sign.notation add mode args') (notation add mode args') end; end; (* local constants *) fun add_const_constraint (c, opt_T) ctxt = let fun prepT raw_T = let val T = cert_typ ctxt raw_T in cert_term ctxt (Const (c, T)); T end; in ctxt |> (map_consts o apfst) (Consts.constrain (c, Option.map prepT opt_T)) end; fun add_abbrev mode (b, raw_t) ctxt = let val t0 = cert_term (ctxt |> set_mode mode_abbrev) raw_t handle ERROR msg => cat_error msg ("in constant abbreviation " ^ quote (Binding.str_of b)); val [t] = Variable.exportT_terms (Variable.declare_term t0 ctxt) ctxt [t0]; val ((lhs, rhs), consts') = consts_of ctxt |> Consts.abbreviate (Syntax.pp ctxt) (tsig_of ctxt) (naming_of ctxt) mode (b, t); in ctxt |> (map_consts o apfst) (K consts') |> Variable.declare_term rhs |> pair (lhs, rhs) end; fun revert_abbrev mode c = (map_consts o apfst) (Consts.revert_abbrev mode c); (* fixes *) local fun prep_mixfix (x, T, mx) = if mx <> NoSyn andalso mx <> Structure andalso (can Name.dest_internal x orelse can Name.dest_skolem x) then error ("Illegal mixfix syntax for internal/skolem constant " ^ quote x) else (true, (x, T, mx)); in fun add_fixes raw_vars ctxt = let val (vars, _) = cert_vars raw_vars ctxt; val (xs', ctxt') = Variable.add_fixes (map (Name.of_binding o #1) vars) ctxt; val ctxt'' = ctxt' |> fold_map declare_var (map2 (fn x' => fn (_, T, mx) => (x', T, mx)) xs' vars) |-> (map_syntax o Local_Syntax.add_syntax (theory_of ctxt) o map prep_mixfix); val _ = (vars ~~ xs') |> List.app (fn ((b, _, _), x') => Context_Position.report_visible ctxt (Markup.fixed_decl x') (Binding.pos_of b)); in (xs', ctxt'') end; end; (* fixes vs. 
frees *) fun auto_fixes (ctxt, (propss, x)) = ((fold o fold) Variable.auto_fixes propss ctxt, (propss, x)); fun bind_fixes xs ctxt = let val (_, ctxt') = ctxt |> add_fixes (map (fn x => (Binding.name x, NONE, NoSyn)) xs); fun bind (t as Free (x, T)) = if member (op =) xs x then (case lookup_skolem ctxt' x of SOME x' => Free (x', T) | NONE => t) else t | bind (t $ u) = bind t $ bind u | bind (Abs (x, T, t)) = Abs (x, T, bind t) | bind a = a; in (bind, ctxt') end; (** assumptions **) local fun gen_assms prepp exp args ctxt = let val cert = Thm.cterm_of (theory_of ctxt); val (propss, ctxt1) = swap (prepp (ctxt, map snd args)); val _ = Variable.warn_extra_tfrees ctxt ctxt1; val (premss, ctxt2) = fold_burrow (Assumption.add_assms exp o map cert) propss ctxt1; in ctxt2 |> auto_bind_facts (flat propss) |> note_thmss "" (map fst args ~~ map (map (fn th => ([th], []))) premss) end; in val add_assms = gen_assms (apsnd #1 o bind_propp); val add_assms_i = gen_assms (apsnd #1 o bind_propp_i); end; (** cases **) local fun rem_case name = remove (fn (x: string, (y, _)) => x = y) name; fun add_case _ ("", _) cases = cases | add_case _ (name, NONE) cases = rem_case name cases | add_case is_proper (name, SOME c) cases = (name, (c, is_proper)) :: rem_case name cases; fun prep_case name fxs c = let fun replace (opt_x :: xs) ((y, T) :: ys) = (the_default y opt_x, T) :: replace xs ys | replace [] ys = ys | replace (_ :: _) [] = error ("Too many parameters for case " ^ quote name); val Rule_Cases.Case {fixes, assumes, binds, cases} = c; val fixes' = replace fxs fixes; val binds' = map drop_schematic binds; in if null (fold (Term.add_tvarsT o snd) fixes []) andalso null (fold (fold Term.add_vars o snd) assumes []) then Rule_Cases.Case {fixes = fixes', assumes = assumes, binds = binds', cases = cases} else error ("Illegal schematic variable(s) in case " ^ quote name) end; fun fix (x, T) ctxt = let val (bind, ctxt') = bind_fixes [x] ctxt; val t = bind (Free (x, T)); in (t, ctxt' |> Variable.declare_constraints t) end; in fun add_cases is_proper = map_cases o fold (add_case is_proper); fun case_result c ctxt = let val Rule_Cases.Case {fixes, ...} = c; val (ts, ctxt') = ctxt |> fold_map fix fixes; val Rule_Cases.Case {assumes, binds, cases, ...} = Rule_Cases.apply ts c; in ctxt' |> bind_terms (map drop_schematic binds) |> add_cases true (map (apsnd SOME) cases) |> pair (assumes, (binds, cases)) end; val apply_case = apfst fst oo case_result; fun get_case ctxt name xs = (case AList.lookup (op =) (cases_of ctxt) name of NONE => error ("Unknown case: " ^ quote name) | SOME (c, _) => prep_case name xs c); end; (** print context information **) val debug = Unsynchronized.ref false; val verbose = Unsynchronized.ref false; fun verb f x = if ! verbose then f (x ()) else []; fun setmp_verbose_CRITICAL f x = setmp_CRITICAL verbose true f x; (* local syntax *) val print_syntax = Syntax.print_syntax o syn_of; (* abbreviations *) fun pretty_abbrevs show_globals ctxt = let val ((space, consts), (_, globals)) = pairself (#constants o Consts.dest) (#consts (rep_context ctxt)); fun add_abbr (_, (_, NONE)) = I | add_abbr (c, (T, SOME t)) = if not show_globals andalso Symtab.defined globals c then I else cons (c, Logic.mk_equals (Const (c, T), t)); val abbrevs = Name_Space.extern_table (space, Symtab.make (Symtab.fold add_abbr consts [])); in if null abbrevs andalso not (! 
verbose) then [] else [Pretty.big_list "abbreviations:" (map (pretty_term_abbrev ctxt o #2) abbrevs)] end; val print_abbrevs = Pretty.writeln o Pretty.chunks o pretty_abbrevs true; (* term bindings *) fun pretty_binds ctxt = let val binds = Variable.binds_of ctxt; fun prt_bind (xi, (T, t)) = pretty_term_abbrev ctxt (Logic.mk_equals (Var (xi, T), t)); in if Vartab.is_empty binds andalso not (! verbose) then [] else [Pretty.big_list "term bindings:" (map prt_bind (Vartab.dest binds))] end; val print_binds = Pretty.writeln o Pretty.chunks o pretty_binds; (* local theorems *) fun pretty_lthms ctxt = let val local_facts = facts_of ctxt; val props = Facts.props local_facts; val facts = (if null props then [] else [("unnamed", props)]) @ Facts.dest_static [] local_facts; in if null facts andalso not (! verbose) then [] else [Pretty.big_list "facts:" (map #1 (sort_wrt (#1 o #2) (map (`(pretty_fact ctxt)) facts)))] end; val print_lthms = Pretty.writeln o Pretty.chunks o pretty_lthms; (* local contexts *) local fun pretty_case (name, (fixes, ((asms, (lets, cs)), ctxt))) = let val prt_term = Syntax.pretty_term ctxt; fun prt_let (xi, t) = Pretty.block [Pretty.quote (prt_term (Var (xi, Term.fastype_of t))), Pretty.str " =", Pretty.brk 1, Pretty.quote (prt_term t)]; fun prt_asm (a, ts) = Pretty.block (Pretty.breaks ((if a = "" then [] else [Pretty.str (a ^ ":")]) @ map (Pretty.quote o prt_term) ts)); fun prt_sect _ _ _ [] = [] | prt_sect s sep prt xs = [Pretty.block (Pretty.breaks (Pretty.str s :: flat (Library.separate sep (map (Library.single o prt) xs))))]; in Pretty.block (Pretty.fbreaks (Pretty.str (name ^ ":") :: prt_sect "fix" [] (Pretty.str o fst) fixes @ prt_sect "let" [Pretty.str "and"] prt_let (map_filter (fn (xi, SOME t) => SOME (xi, t) | _ => NONE) lets) @ (if forall (null o #2) asms then [] else prt_sect "assume" [Pretty.str "and"] prt_asm asms) @ prt_sect "subcases:" [] (Pretty.str o fst) cs)) end; in fun pretty_cases ctxt = let fun add_case (_, (_, false)) = I | add_case (name, (c as Rule_Cases.Case {fixes, ...}, true)) = cons (name, (fixes, case_result c ctxt)); val cases = fold add_case (cases_of ctxt) []; in if null cases andalso not (! verbose) then [] else [Pretty.big_list "cases:" (map pretty_case cases)] end; val print_cases = Pretty.writeln o Pretty.chunks o pretty_cases; end; (* core context *) val prems_limit = Unsynchronized.ref ~1; fun pretty_ctxt ctxt = if ! prems_limit < 0 andalso not (! debug) then [] else let val prt_term = Syntax.pretty_term ctxt; (*structures*) val structs = Local_Syntax.structs_of (syntax_of ctxt); val prt_structs = if null structs then [] else [Pretty.block (Pretty.str "structures:" :: Pretty.brk 1 :: Pretty.commas (map Pretty.str structs))]; (*fixes*) fun prt_fix (x, x') = if x = x' then Pretty.str x else Pretty.block [Pretty.str x, Pretty.str " =", Pretty.brk 1, prt_term (Syntax.free x')]; val fixes = rev (filter_out ((can Name.dest_internal orf member (op =) structs) o #1) (Variable.fixes_of ctxt)); val prt_fixes = if null fixes then [] else [Pretty.block (Pretty.str "fixed variables:" :: Pretty.brk 1 :: Pretty.commas (map prt_fix fixes))]; (*prems*) val prems = Assumption.all_prems_of ctxt; val len = length prems; val suppressed = len - ! 
prems_limit; val prt_prems = if null prems then [] else [Pretty.big_list "prems:" ((if suppressed <= 0 then [] else [Pretty.str "..."]) @ map (Display.pretty_thm ctxt) (drop suppressed prems))]; in prt_structs @ prt_fixes @ prt_prems end; (* main context *) fun pretty_context ctxt = let val prt_term = Syntax.pretty_term ctxt; val prt_typ = Syntax.pretty_typ ctxt; val prt_sort = Syntax.pretty_sort ctxt; (*theory*) val pretty_thy = Pretty.block [Pretty.str "theory:", Pretty.brk 1, Context.pretty_thy (theory_of ctxt)]; (*defaults*) fun prt_atom prt prtT (x, X) = Pretty.block [prt x, Pretty.str " ::", Pretty.brk 1, prtT X]; fun prt_var (x, ~1) = prt_term (Syntax.free x) | prt_var xi = prt_term (Syntax.var xi); fun prt_varT (x, ~1) = prt_typ (TFree (x, [])) | prt_varT xi = prt_typ (TVar (xi, [])); val prt_defT = prt_atom prt_var prt_typ; val prt_defS = prt_atom prt_varT prt_sort; val (types, sorts) = Variable.constraints_of ctxt; in verb single (K pretty_thy) @ pretty_ctxt ctxt @ verb (pretty_abbrevs false) (K ctxt) @ verb pretty_binds (K ctxt) @ verb pretty_lthms (K ctxt) @ verb pretty_cases (K ctxt) @ verb single (fn () => Pretty.big_list "type constraints:" (map prt_defT (Vartab.dest types))) @ verb single (fn () => Pretty.big_list "default sorts:" (map prt_defS (Vartab.dest sorts))) end; end;
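(* Editor's illustration, not part of the changeset: a minimal sketch of how
   client code might use two of the interface changes above, namely the
   renamed Nitpick_Util.maybe_quote (built on the new "unyxml") and the newly
   exported ProofContext.cases_of.  The helper names below are hypothetical. *)

(* quote a name unless it is a plain long identifier and not an Isar keyword *)
fun pretty_name s = Nitpick_Util.maybe_quote s

(* list the names of the proof cases currently recorded in a context;
   cases_of returns (string * (Rule_Cases.T * bool)) list, so "map fst"
   yields just the case names *)
fun case_names_of ctxt = map fst (ProofContext.cases_of ctxt)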