From bad6432ee28e3b71919a0dd59218ca639f15c255 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Thu, 5 Jan 2017 14:19:05 -0500
Subject: [PATCH] books/bookvol13 books/bookvol13 COQ details
Goal: Proving Axiom Correct
\begin{chunk}{axiom.bib}
@book{Acze13,
author = "Aczel, Peter et al.",
title = "Homotopy Type Theory: Univalent Foundations of Mathematics",
publisher = "Institute for Advanced Study",
year = "2013",
url = "https://hott.github.io/book/nightly/hottletter1075g3c53219.pdf",
paper = "Acze13.pdf"
}
\end{chunk}
\index{Huet, G\'erard}
\begin{chunk}{axiom.bib}
@inproceedings{Huet87,
author = {Huet, G\'erard},
title = "Induction Principles Formalized in the Calculus of Constructions",
booktitle = "TAPSOFT 87",
publisher = "Springer-Verlag",
series ="LNCS 249",
year = "1987",
pages = "276--286",
abstract =
"The Calculus of Constructions is a higher-order formalism for writing
constructive proofs in a natural deduction style, inspired from work
by de Bruijn, Girard, Martin-L{\"o}f, and Scott. The calculus and its
syntactic theory were presented in Coquand's thesis, and an
implementation by the author was used to mechanically verify a
substantial number of proofs demonstrating the power of expression of
the formalism. The Calculus of Constructions is proposed as a
foundation for the design of programming environments where programs
are developed consistently with formal specifications. The current
paper shows how to define inductive concepts in the calculus.
A very general induction schema is obtained by postulating all
elements of the type of interest to belong to the standard
interpretation associated with a predicate map. This is similar to the
treatment of D. Park, but the power of expression of the formalism
permits a very direct treatment, in a language that is formalized
enough to be actually implemented on a computer. Special instances of
the induction schema specialize to Noetherian induction and Structural
induction over any algebraic type. Computational Induction is treated
in an axiomatization of Domain Theory in Constructions. It is argued
that the resulting principle is more powerful than LCF's, since the
restriction on admissibility is expressible in the object language.",
paper = "Huet87.pdf"
}
\end{chunk}
\index{Frade, Maria Joao}
\begin{chunk}{axiom.bib}
@article{Frad08,
author = "Frade, Maria Joao",
title = "Calculus of Inductive Construction. Software Formal Verification",
year = "2008",
url = "http://www4.di.uminho.pt/~jno/mfes/0809/SFVCIC.pdf",
paper = "Frad08.pdf"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@misc{Wiki17,
author = "Wikipedia",
title = "Calculus of constructions",
year = "2017",
url = "https://en.wikipedia.org/wiki/Calculus\_of\_constructions"
}
\end{chunk}

 books/bookvol13.pamphlet       |  144 ++++
 books/bookvolbib.pamphlet      | 1720 +++++++++++++++++++++
 changelog                      |    3 +
 patch                          |  197 ++
 src/axiom-website/patches.html |    2 +
 5 files changed, 1096 insertions(+), 970 deletions(-)
diff --git a/books/bookvol13.pamphlet b/books/bookvol13.pamphlet
index 0ee912f..d8572f3 100644
--- a/books/bookvol13.pamphlet
+++ b/books/bookvol13.pamphlet
@@ -747,7 +747,147 @@ proof: \[ a \mid b \implies a \le b; b \mid a \implies b \le a;
\[ a \mid 1 \iff a = \pm 1\]
\chapter{COQ proof of GCD}
This is the proof of GCD\cite{COQx16a} in the COQ\cite{COQx16} sources:
+\section{Basics of the Calculus of Constructions}
+Coquand\cite{Coqu86}\cite{Wiki17}
+defines the Calculus of Constructions which can
+be considered an extension of the Curry-Howard Isomorphism. The components
+are
+\subsection{Terms}
+A {\sl term} in the calculus of constructions is constructed using the
+following rules:
+\begin{itemize}
+\item {\bf T} is a term (also called {\sl Type})
+\item {\bf P} is a term (also called {\sl Prop}, the type of all propositions)
+\item Variables ($x,y,\ldots$) are terms
+\item if {\bf A} and {\bf B} are terms, then so are
+\begin{itemize}
+\item {\bf $(A,B)$}
+\item {\bf $(\lambda x: A,B)$}
+\item {\bf $(\forall x: A,B)$}
+\end{itemize}
+\end{itemize}
+
+The calculus of constructions has five kinds of objects:
+\begin{enumerate}
+\item {\sl proofs}, which are terms whose types are {\sl propositions}
+\item {\sl propositions}, which are also known as {\sl small types}
+\item {\sl predicates}, which are functions that return propositions
+\item {\sl large types}, which are the types of predicates. {\bf P} is
+an example of a large type)
+\item {\bf T} itself, which is the type of large types.
+\end{enumerate}
+
+\subsection{Judgements}
+
+The calculus of constructions allows proving {\bf typing judgements}
+\[x_1:A_1, x_2:A_2, \ldots \vdash t:B\]
+which can be read as the implication
+\[{\rm\ if\ variables\ }x_1, x_2, \ldots,
+{\rm\ have\ types\ }A_1, A_2,\ldots,
+{\rm\ then\ term\ }t{\rm\ has\ type\ }B\]
+
+The valid judgements for the calculus of constructions are derivable
+from a set of inference rules. In the following, we use $\Gamma$
+to mean a sequence of type assignments
+$x_1:A_1$, $x_2:A_2,\ldots$,
+and we use {\bf K} to mean either {\bf P} or {\bf T}.
+We shall write {\bf $A:B:C$} to mean
+"{\bf $A$} has type {\bf $B$}, and {\bf $B$} has type {\bf $C$}".
+We shall write {\bf $B(x:=N)$} to mean the result of substituting
+the term {\bf $N$} for the variable {\bf $x$} in the term {\bf $B$}.
+
+An inference rule is written in the form
+\[\frac{{\bf \Gamma} \vdash {\bf A : B}}
+{{\bf \Gamma^\prime} \vdash {\bf C : D}}\]
+which means
+\[{\rm\ if\ }{\bf \Gamma} \vdash {\bf A : B}
+{\rm\ is\ a\ valid\ judgement,\ then\ so\ is\ }
+{\bf \Gamma^\prime} \vdash {\bf C : D}\]
+
+\subsection{Inference Rules}
+
+In Frade\cite{Frad08} we find:
+
+\[\begin{array}{lcl}
+
+({\rm axiom}) &
+() \vdash s_1 : s_2
+& {\rm\ if\ }(s_1,s_2)\in A\\
+&&\\
+
+({\rm start}) &
+\displaystyle{\frac{\Gamma \vdash A : s}{\Gamma,x : A \vdash x : A}}
+& {\rm\ if\ }x\notin {\rm\ dom}(\Gamma) \\
+&&\\
+
+({\rm weakening}) &
+\displaystyle{\frac{\Gamma \vdash M : A\quad\quad\Gamma\vdash B : s}
+{\Gamma,x : B \vdash M : A}}
+& {\rm\ if\ }x\notin {\rm\ dom}(\Gamma) \\
+&&\\
+
+({\rm product}) &
+\displaystyle{\frac{\Gamma\vdash A : s_1\quad\quad\Gamma,x : A\vdash B : s_2}
+{\Gamma\vdash(\prod x:A. B) : s_3}}
+& {\rm\ if\ }(s_1,s_2,s_3) \in \mathcal{R}\\
+&&\\
+
+({\rm application}) &
+\displaystyle{\frac{\Gamma\vdash M:(\prod x : A. B)\quad\quad\Gamma \vdash N:A}
+{\Gamma \vdash MN : B[x:=N]}} & \\
+&&\\
+
+({\rm abstraction}) &
+\displaystyle{\frac{\Gamma,x:A\vdash M:B\quad\quad\Gamma\vdash(\prod x:A. B):s}
+{\Gamma\vdash\lambda x : A.M : (\prod x:A.B)}} & \\
+&&\\
+
+({\rm conversion}) &
+\displaystyle{\frac{\Gamma\vdash M : A\quad\quad\Gamma\vdash B : s}
+{\Gamma\vdash M : B}}
+& {\rm\ if\ }A =_\beta B\\
+
+\end{array}\]
+
+\subsection{Defining Logical Operators}
+
+\[\begin{array}{cccc}
+A\Rightarrow B & \equiv & \forall x:A.B & (x\notin B)\\
+A\wedge B & \equiv & \forall C:P.(A\Rightarrow B\Rightarrow C)\Rightarrow C &\\
+A\vee B & \equiv & \forall C:P.(A\Rightarrow C)\Rightarrow(B\Rightarrow C)
+\Rightarrow C &\\
+\neg A & \equiv & \forall C : P.(A\Rightarrow C) &\\
+\exists x : A. B & \equiv & \forall C : P.(\forall x : A.(B\Rightarrow C))
+\Rightarrow C &\\
+\end{array}\]
+
+\subsection{Defining Types}
+
+The basic data types used in computer science can be defined within the
+Calculus of Constructions:
+
+{\bf Booleans}
+
+\[\forall A : P.A \Rightarrow A \Rightarrow A\]
+
+{\bf Naturals}
+
+\[\forall A : P.(A \Rightarrow A) \Rightarrow (A \Rightarrow A)\]
+
+{\bf Product} $A\times B$
+
+\[A \wedge B\]
+
+{\bf Disjoint Union} $A + B$
+
+\[A \vee B\]
+
+Note that Booleans and Naturals are defined in the same way as in
+Church encoding. However, additional problems arise from propositional
+extensionality and proof irrelevance.
+
+\section{Source code of COQ GCD Proof}
+This is the proof of GCD\cite{Coqu16a} in the COQ\cite{Coqu16} sources:
\begin{verbatim}
Library Coq.ZArith.Znumtheory
@@ -1518,6 +1658,8 @@ Spitters\cite{Spit11} Type Classes for Mathematics in Coq. Also see
Mahboubi\cite{Mahb16} Mathematical Components. This book contains a
proof of the Euclidean algorithm using COQ.
+Aczel\cite{Acze13} Homotopy Type Theory
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\cleardoublepage
\phantomsection
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 6cc5602..50b411c 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -5360,6 +5360,18 @@ Mathematics of Computation, Vol 32, No 144 Oct 1978, pp1215-1231
\section{Proving Axiom Correct} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{chunk}{axiom.bib}
+@book{Acze13,
+ author = "Aczel, Peter et al.",
+ title = "Homotopy Type Theory: Univalent Foundations of Mathematics",
+ publisher = "Institute for Advanced Study",
+ year = "2013",
+ url = "https://hott.github.io/book/nightly/hottletter1075g3c53219.pdf",
+ paper = "Acze13.pdf"
+}
+
+\end{chunk}
+
\index{Adams, Andrew A.}
\index{Gottlieben, Hanne}
\index{Linton, Steve A.}
@@ -5473,6 +5485,44 @@ Martin, U.
\end{chunk}
+\index{Ballarin, Clemens}
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@article{Ball99,
+ author = "Ballarin, Clemens and Paulson, Lawrence C.",
+ title = "A Pragmatic Approach to Extending Provers by Computer Algebra 
+ with Applications to Coding Theory",
+ journal = "Fundam. Inform.",
+ volume = "39",
+ number = "1-2",
+ pages = "1--20",
+ year = "1999",
+ url = "http://www.cl.cam.ac.uk/~lp15/papers/Isabelle/coding.pdf",
+ abstract = "
+ The use of computer algebra is usually considered beneficial for
+ mechanised reasoning in mathematical domains. We present a case study,
+ in the application domain of coding theory, that supports this claim:
+ the mechanised proofs depend on nontrivial algorithms from computer
+ algebra and increase the reasoning power of the theorem prover.
+
+ The unsoundness of computer algebra systems is a major problem in
+ interfacing them to theorem provers. Our approach to obtaining a sound
+ overall system is not blanket distrust but based on the distinction
+ between algorithms we call sound and {\sl ad hoc} respectively. This
+ distinction is blurred in most computer algebra systems. Our
+ experimental interface therefore uses a computer algebra library. It
+ is based on formal specifications for the algorithms, and links the
+ computer algebra library Sumit to the prover Isabelle.
+
+ We give details of the interface, the use of the computer algebra
+ system on the tactic-level of Isabelle and its integration into proof
+ procedures.",
+ paper = "Ball99.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Beeson, Michael}
\begin{chunk}{axiom.bib}
@misc{Beesxx,
@@ -5497,6 +5547,92 @@ Martin, U.
\end{chunk}
+\index{Bertot, Yves}
+\index{Cast\'eran, Pierre}
+\begin{chunk}{ignore}
+\bibitem[Bertot 04]{Bert04} Bertot, Yves; Cast\'eran, Pierre
+ title = "Interactive Theorem Proving and Program Development",
+ isbn = "3540208542",
+ abstract = "
+ Coq is an interactive proof assistant for the development of
+ mathematical theories and formally certified software. It is based on
+ a theory called the calculus of inductive constructions, a variant of
+ type theory.
+
+ This book provides a pragmatic introduction to the development of
+ proofs and certified programs using Coq. With its large collection of
+ examples and exercises it is an invaluable tool for researchers,
+ students, and engineers interested in formal methods and the
+ development of zero-fault software."
+
+\end{chunk}
+
+\index{Boulm\'e, S.}
+\index{Hardin, T.}
+\index{Rioboo, Renaud}
+\begin{chunk}{axiom.bib}
+@misc{Boul00,
+ author = "Boulme, S. and Hardin, T. and Rioboo, R.",
+ title = "Polymorphic Data Types, Objects, Modules and Functors:
+ is it too much?",
+ url = "ftp://ftp.lip6.fr/lip6/reports/2000/lip6.2000.014.ps.gz",
+ abstract = "
+ Abstraction is a powerful tool for developers and it is offered by
+ numerous features such as polymorphism, classes, modules, and
+ functors, $\ldots$ A working programmer may be confused by this
+ abundance. We develop a computer algebra library which is being
+ certified. Reporting this experience made with a language (Ocaml)
+ offering all these features, we argue that they are all needed
+ together. We compare several ways of using classes to represent
+ algebraic concepts, trying to follow as close as possible mathematical
+ specification. Then we show how to combine classes and modules to
+ produce code having very strong typing properties. Currently, this
+ library is made of one hundred units of functional code and behaves
+ faster than analogous ones such as Axiom.",
+ paper = "Boul00.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Boulm\'e, Sylvain}
+\index{Hardin, Therese}
+\index{Hirschkoff, Daniel}
+\index{Rioboo, Renaud}
+\index{M\'enissierMorain, Valerie}
+\begin{chunk}{axiom.bib}
+@InProceedings{Boul99,
+ author = "Boulme, S. and Hardin, T. and Hirschkoff, D. and Rioboo, Renaud",
+ title = "On the way to certify Computer Algebra Systems",
+ booktitle = "Systems for integrated computation and deduction",
+ series = "Calculemus 99",
+ year = "1999",
+ publisher = "Elsevier",
+ location = "Trento, Italy",
+ pages = "11--12",
+ abstract =
+ "The FOC project aims at supporting, within a coherent software system,
+ the entire process of mathematical computation, starting with proved
+ theories, ending with certified implementations of algorithms. In this
+ paper, we explain our design requirements for the implementation,
+ using polynomials as a running example. Indeed, proving correctness of
+ implementations depends heavily on the way this design allows
+ mathematical properties to be truly handled at the programming level.
+
+ The FOC project, started at the fall of 1997, is aimed to build a
+ programming environment for the development of certified symbolic
+ computation. The working languages are Coq and Ocaml. In this paper,
+ we present first the motivations of the project. We then explain why
+ and how our concern for proving properties of programs has led us to
+ certain implementation choices in Ocaml. This way, the sources express
+ exactly the mathematical dependencies between different structures.
+ This may ease the achievement of proofs.",
+ paper = "Boul99.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Bressoud, David}
\begin{chunk}{axiom.bib}
@article{Bres93,
@@ -5526,77 +5662,6 @@ Martin, U.
\end{chunk}
\index{Coquand, Thierry}
\index{Huet, G\'erard}
\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@misc{COQx16,
 author = {Coquand, Thierry and Huet, G\'erard and Paulin, Christine},
 title = "The COQ Proof Assistant",
 year = "2016",
 url = "https://coq.inria.fr"
}

\index{Coquand, Thierry}
\index{Huet, G\'erard}
\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@misc{COQx16a,
 author = {Coquand, Thierry and Huet, G\'erard and Paulin, Christine},
 title = "COQ Proof Assistant Library Coq.ZArith.Znumtheory",
 year = "2016",
 url = "https://coq.inria.fr/library/Coq.ZArith.Znumtheory.html"
}

\index{Mahboubi, Assia}
\index{Tassi, Enrico}
\index{Bertot, Yves}
\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@book{Mahb16,
 author = "Mahboubi, Assia and Tassi, Enrico and Bertot, Yves and
 Gonthier, Georges",
 title = "Mathematical Components",
 year = "2016",
 publisher = "mathcomp.github.io/mcb",
 url = "https://mathcomp.github.io/mcb/book.pdf",
 abstract =
 "{\sl Mathematical Components} is the name of a library of formalized
 mathematic for the COQ system. It covers a veriety of topics, from the
 theory of basic data structures (e.g. numbers, lists, finite sets) to
 advanced results in various flavors of algebra. This library
 constitutes the infrastructure for the machinechecked proofs of the
 Four Color Theorem and the Odd Order Theorem.

 The reason of existence of this books is to break down the barriers to
 entry. While there are several books around covering the usage of the
 COQ system and the theory it is based on, the Mathematical Components
 library is build in an unconventional way. As a consequence, this book
 provides a nonstandard presentation of COQ, putting upfront the
 formalization choices and the proof style that are the pillars of the
 library.

 This book targets two classes of public. On one hand, newcomers, even
 the more mathematically inclined ones, find a soft introduction to the
 programming language of COQ, Gallina, and the Ssreflect proof
 language. On the other hand accustomed COQ users find a substantial
 accound of the formalization style that made the Mathematical
 Components library possible.

 By no means does this book pretend to be a complete description of COQ
 or Ssreflect: both tools already come with a comprehensive user
 manual. In the course of the book, the reader is nevertheless invited
 to experiment with a large library of formalized concepts and she is
 given as soon as possible sufficient tools to prove nontrivial
 mathematical results by reusing parts of the library. By the end of
 the first part, the reader has learnt how to prove formally the
 infinitude of prime numbers, or the correctnes of the Euclidean's
 division algorithm, in a few lines of proof text.",
 paper = "Mahb16.pdf"
}

\end{chunk}

\index{MedinaBulo, I.}
\index{PalomoLozano, F.}
\index{AlonsoJim\'enez, J.A.}
@@ -5648,6 +5713,60 @@ Martin, U.
\end{chunk}
+\index{Coquand, Thierry}
+\index{Huet, G\'erard}
+\begin{chunk}{axiom.bib}
+@techreport{Coqu86,
+ author = {Coquand, Thierry and Huet, G\'erard},
+ title = "The Calculus of Constructions",
+ year = "1986",
+ institution = "INRIA Centre de Rocquencourt",
+ number = "530",
+ url = "https://hal.inria.fr/inria00076024/document",
+ abstract =
+ "The Calculus of Constructions is a higher-order formalism for
+ constructive proofs in natural deduction style. Every proof is a
+ $\lambda$-expression, typed with propositions of the underlying
+ logic. By removing types we get a pure $\lambda$-expression,
+ expressing its associated algorithm. Computing this
+ $\lambda$-expression corresponds roughly to cut-elimination. It is our
+ thesis that (as already advocated by Martin-L{\"o}f) the Curry-Howard
+ correspondence between propositions and types is a powerful paradigm
+ for Computer Science. In the case of Constructions, we obtain the
+ notion of a very highlevel functional programming language, with
+ complex polymorphism well-suited for modules specification. The notion
+ of type encompasses the usual notion of data type, but allows as well
+ arbitrarily complex algorithmic specifications. We develop the basic
+ theory of a Calculus of Constructions, and prove a strong
+ normalization theorem showing that all computations terminate.
+ Finally, we suggest various extensions to stronger calculi.",
+ paper = "Coqu86.pdf"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
+\index{Huet, G\'erard}
+\index{Paulin, Christine}
+\begin{chunk}{axiom.bib}
+@misc{Coqu16,
+ author = {Coquand, Thierry and Huet, G\'erard and Paulin, Christine},
+ title = "The COQ Proof Assistant",
+ year = "2016",
+ url = "https://coq.inria.fr"
+}
+
+\index{Coquand, Thierry}
+\index{Huet, G\'erard}
+\index{Paulin, Christine}
+\begin{chunk}{axiom.bib}
+@misc{Coqu16a,
+ author = {Coquand, Thierry and Huet, G\'erard and Paulin, Christine},
+ title = "COQ Proof Assistant Library Coq.ZArith.Znumtheory",
+ year = "2016",
+ url = "https://coq.inria.fr/library/Coq.ZArith.Znumtheory.html"
+}
+
\index{Chlipala, Adam}
\begin{chunk}{axiom.bib}
@book{Chli15,
@@ -5683,38 +5802,90 @@ Martin, U.
\end{chunk}
\index{Coquand, Thierry}
\index{Huet, G\'erard}
\begin{chunk}{axiom.bib}
@techreport{Coqu86,
 author = {Coquand, Thierry and Huet, G\'erard},
 title = "The Calculus of Constructions",
 year = "1986",
 institution = "INRIA Centre de Rocquencourt",
 number = "530",
 url = "https://hal.inria.fr/inria00076024/document",
+\index{Daly, Timothy}
+\begin{chunk}{ignore}
+\bibitem[Daly 10]{Daly10} Daly, Timothy
+ title = "Intel Instruction Semantics Generator",
+ url = "http://daly.axiom-developer.org/TimothyDaly_files/publications/sei/intel/intel.pdf",
+ abstract = "
+ Given an Intel x86 binary, extract the semantics of the instruction
+ stream as Conditional Concurrent Assignments (CCAs). These CCAs
+ represent the semantics of each individual instruction. They can be
+ composed to represent higher level semantics.",
+ paper = "Daly10.pdf"
+
+\end{chunk}
+
+\index{Danielsson, Nils Anders}
+\index{Hughes, John}
+\index{Jansson, Patrik}
+\index{Gibbons, Jeremy}
+\begin{chunk}{axiom.bib}
+@InProceedings{Dani06,
+ author = "Danielsson, Nils Anders and Hughes, John and Jansson, Patrik and
+ Gibbons, Jeremy",
+ title = "Fast and Loose Reasoning is Morally Correct",
+ booktitle = "Proc. of ACM POPL '06",
+ series = "POPL '06",
+ year = "2006",
+ location = "Charleston, South Carolina",
abstract =
 "The Calculus of Constructions is a higherorder formalism for
 constructive proofs in natural deduction style. Every proof is a
 $\lambda$expression, typed with propositions of the underlying
 logic. By removing types we get a pure $\lambda$expression,
 expressing its associated algorithm. Computing this
 $\lambda$expression corresponds roughly to cutelimination. It is our
 thesis that (as already advocated by MartinLof) the CurryHoward
 correspondence between propositions and types is a powerful paradigm
 for Computer Science. In the case of Constructions, we obtain the
 notion of a very highlevel functional programming language, with
 complex polymorphism wellsuited for modules specification. The notion
 of type encompasses the usual notioin of data type, but allows as well
 arbitrarily complex algorithmic specifications. We develop the basic
 theory of a Calculus of Constructions, and prove a strong
 normalization theorem showing that all computations terminate.
 Finally, we suggest various extensions to stronger calculi.",
 paper = "Coqu86.pdf"
+ "Functional programmers often reason about programs as if they were
+ written in a total language, expecting the results to carry over to
+ non-total (partial) languages. We justify such reasoning.
+
+ Two languages are defined, one total and one partial, with identical
+ syntax. The semantics of the partial language includes partial and
+ infinite values, and all types are lifted, including the function
+ spaces. A partial equivalence relation (PER) is then defined, the
+ domain of which is the total subset of the partial language. For types
+ not containing function spaces the PER relates equal values, and
+ functions are related if they map related values to related values.
+
+ It is proved that if two closed terms have the same semantics in the
+ total language, then they have related semantics in the partial
+ language. It is also shown that the PER gives rise to a bicartesian
+ closed category which can be used to reason about values in the domain
+ of the relation.",
+ paper = "Dani06.pdf",
+ keywords = "axiomref"
}
\end{chunk}
+\index{Davenport, James H.}
+\index{Bradford, Russell}
+\index{England, Matthew}
+\index{Wilson, David}
+\begin{chunk}{ignore}
+\bibitem[Davenport 12]{Davenp12} Davenport, James H.; Bradford, Russell;
+England, Matthew; Wilson, David
+ title = "Program Verification in the presence of complex numbers,
+ functions with branch cuts etc.",
+ url = "http://arxiv.org/pdf/1212.5417.pdf",
+ abstract = "
+ In considering the reliability of numerical programs, it is normal to
+ ``limit our study to the semantics dealing with numerical precision''.
+ On the other hand, there is a great deal of work on the reliability of
+ programs that essentially ignores the numerics. The thesis of this
+ paper is that there is a class of problems that fall between these
+ two, which could be described as ``does the low-level arithmetic
+ implement the high-level mathematics''. Many of these problems arise
+ because mathematics, particularly the mathematics of the complex
+ numbers, is more difficult than expected: for example the complex
+ function log is not continuous, writing down a program to compute an
+ inverse function is more complicated than just solving an equation,
+ and many algebraic simplification rules are not universally valid.
+
+ The good news is that these problems are {\sl theoretically} capable
+ of being solved, and are {\sl practically} close to being solved, but
+ not yet solved, in several realworld examples. However, there is
+ still a long way to go before implementations match the theoretical
+ possibilities.",
+ paper = "Davenp12.pdf"
+
+\end{chunk}
+
\index{DeMilo, Richard A.}
\index{Lipton, Richard J.}
\index{Perlis, Alan J.}
@@ -5741,6 +5912,145 @@ Martin, U.
\end{chunk}
+\index{Dolzmann, Andreas}
+\index{Sturm, Thomas}
+\begin{chunk}{ignore}
+\bibitem[Dolzmann 97]{Dolz97} Dolzmann, Andreas; Sturm, Thomas
+ title = "Guarded Expressions in Practice",
+ url = "http://redlog.dolzmann.de/papers/pdf/MIP9702.pdf",
+ abstract = "
+ Computer algebra systems typically drop some degenerate cases when
+ evaluating expressions, e.g. $x/x$ becomes 1 dropping the case
+ $x=0$. We claim that it is feasible in practice to compute also the
+ degenerate cases yielding {\sl guarded expressions}. We work over real
+ closed fields but our ideas about handling guarded expressions can be
+ easily transferred to other situations. Using formulas as guards
+ provides a powerful tool for heuristically reducing the combinatorial
+ explosion of cases: equivalent, redundant, tautological, and
+ contradictive cases can be detected by simplification and quantifier
+ elimination. Our approach allows to simplify the expressions on the
+ basis of simplification knowledge on the logical side. The method
+ described in this paper is implemented in the REDUCE package GUARDIAN,
+ which is freely available on the WWW.",
+ paper = "Dolz97.pdf"
+
+\end{chunk}
+
+\index{Dos Reis, Gabriel}
+\index{Matthews, David}
+\index{Li, Yue}
+\begin{chunk}{ignore}
+\bibitem[Dos Reis 11]{DR11} Dos Reis, Gabriel; Matthews, David; Li, Yue
+ title = "Retargeting OpenAxiom to Poly/ML: Towards an Integrated Proof Assistants and Computer Algebra System Framework",
+Calculemus (2011) Springer
+ url = "http://paradise.caltech.edu/~yli/paper/oapolyml.pdf",
+ abstract = "
+ This paper presents an ongoing effort to integrate the Axiom family of
+ computer algebra systems with Poly/MLbased proof assistants in the
+ same framework. A long term goal is to make a large set of efficient
+ implementations of algebraic algorithms available to popular proof
+ assistants, and also to bring the power of mechanized formal
+ verification to a family of strongly typed computer algebra systems at
+ a modest cost. Our approach is based on retargeting the code generator
+ of the OpenAxiom compiler to the Poly/ML abstract machine.",
+ paper = "DR11.pdf",
+ keywords = "axiomref"
+
+\end{chunk}
+
+\index{Dunstan, Martin}
+\begin{chunk}{ignore}
+\bibitem[Dunstan 00a]{Dun00a} Dunstan, Martin N.
+ title = "Adding Larch/Aldor Specifications to Aldor",
+ abstract = "
+ We describe a proposal to add Larchstyle annotations to the Aldor
+ programming language, based on our PhD research. The annotations
+ are intended to be machinecheckable and may be used for a variety
+ of purposes ranging from compiler optimizations to verification
+ condition (VC) generation. In this report we highlight the options
+ available and describe the changes which would need to be made to
+ the compiler to make use of this technology.",
+ paper = "Dunxx.pdf"
+
+\end{chunk}
+
+\index{Dunstan, Martin}
+\index{Kelsey, Tom}
+\index{Linton, Steve A.}
+\index{Martin, Ursula}
+\begin{chunk}{axiom.bib}
+@InProceedings{Duns98,
+ author = "Dunstan, Martin and Kelsey, Tom and Linton, Steve and
+ Martin, Ursula",
+ title = "Lightweight Formal Methods For Computer Algebra Systems",
+ publisher = "ACM Press",
+ booktitle = "Proc. ISSAC 1998",
+ year = "1998",
+ location = "Rostock, Germany",
+ pages = "80--87",
+ url = "http://www.cs.standrews.ac.uk/~tom/pub/issac98.pdf",
+ abstract =
+ "Demonstrates the use of formal methods tools to provide a semantics
+ for the type hierarchy of the Axiom computer algebra system, and a
+ methodology for Aldor program analysis and verification. There are
+ examples of abstract specifications of Axiom primitives.",
+ paper = "Duns98.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Dunstan, Martin N.}
+\begin{chunk}{axiom.bib}
+@phdthesis{Duns99a,
+ author = "Dunstan, Martin N.",
+ title = "Larch/Aldor  A Larch BISL for AXIOM and Aldor",
+ school = "University of St. Andrews",
+ year = "1999",
+ abstract = "
+ In this thesis we investigate the use of lightweight formal methods
+ and verification conditions (VCs) to help improve the reliability of
+ components constructed within a computer algebra system. We follow the
+ Larch approach to formal methods and have designed a new behavioural
+ interface specification language (BISL) for use with Aldor: the
+ compiled extension language of Axiom and a fullyfeatured programming
+ language in its own right. We describe our idea of lightweight formal
+ methods, present a design for a lightweight verification condition
+ generator and review our implementation of a prototype verification
+ condition generator for Larch/Aldor.",
+ paper = "Duns99a.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Dunstan, Martin}
+\index{Kelsey, Tom}
+\index{Martin, Ursula}
+\index{Linton, Steve A.}
+\begin{chunk}{axiom.bib}
+@InProceedings{Duns99,
+ author = "Dunstan, Martin and Kelsey, Tom and Martin, Ursula and
+ Linton, Steve A.",
+ title = "Formal Methods for Extensions to CAS",
+ booktitle = "Proc. of FME'99",
+ series = "FME'99",
+ location = "Toulouse, France",
+ year = "1999",
+ pages = "1758--1777",
+ url = "http://tom.host.cs.standrews.ac.uk/pub/fm99.ps",
+ abstract =
+ "We demonstrate the use of formal methods tools to provide a semantics
+ for the type hierarchy of the AXIOM computer algebra system, and a
+ methodology for Aldor program analysis and verification. We give a
+ case study of abstract specifications of AXIOM primitives, and provide
+ an interface between these abstractions and Aldor code.",
+ paper = "Duns99.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Fateman, Richard J.}
\begin{chunk}{axiom.bib}
@misc{Fate02a,
@@ -5808,6 +6118,18 @@ Martin, U.
\end{chunk}
+\index{Frade, Maria Joao}
+\begin{chunk}{axiom.bib}
+@article{Frad08,
+ author = "Frade, Maria Joao",
+ title = "Calculus of Inductive Construction. Software Formal Verification",
+ year = "2008",
+ url = "http://www4.di.uminho.pt/~jno/mfes/0809/SFVCIC.pdf",
+ paper = "Frad08.pdf"
+}
+
+\end{chunk}
+
\index{Geuvers, Herman}
\index{Pollack, Randy}
\index{Wiedijk, Freek}
@@ -5879,14 +6201,106 @@ Martin, U.
\end{chunk}
\index{Jackson, Paul Bernard}
+\index{Hardin, David S.}
+\index{McClurg, Jedidiah R.}
+\index{Davis, Jennifer A.}
\begin{chunk}{axiom.bib}
@phdthesis{Jack95,
 author = "Jackson, Paul Bernard",
 title = "Enhancing the NUPRL Proof Development System and Applying it to
 Computational Abstract Algebra",
 school = "Cornell University",
 year = "1995",
+@misc{Hard13,
+ author = "Hardin, David S. and McClurg, Jedidiah R. and Davis, Jennifer A.",
+ title = "Creating Formally Verified Components for Layered Assurance with an LLVM to ACL2 Translator",
+ url = "http://www.jrmcclurg.com/papers/law_2013_paper.pdf",
+ abstract = "
+ This paper describes an effort to create a library of formally
+ verified software component models from code that have been compiled
+ using the Low-Level Virtual Machine (LLVM) intermediate form. The idea
+ is to build a translator from LLVM to the applicative subset of Common
+ Lisp accepted by the ACL2 theorem prover. They perform verification of
+ the component model using ACL2's automated reasoning capabilities.",
+ paper = "Hard13.pdf"
+}
+
+\end{chunk}
+
+\index{Hardin, David S.}
+\index{Davis, Jennifer A.}
+\index{Greve, David A.}
+\index{McClurg, Jedidiah R.}
+\begin{chunk}{axiom.bib}
+@misc{Hard14,
+ author = "Hardin, David S. and Davis, Jennifer A. and Greve, David A. and
+ McClurg, Jedidiah R.",
+ title = "Development of a Translator from LLVM to ACL2",
+ url = "http://arxiv.org/pdf/1406.1566",
+ abstract = "
+ In our current work a library of formally verified software components
+ is to be created, and assembled, using the Low-Level Virtual Machine
+ (LLVM) intermediate form, into subsystems whose top-level assurance
+ relies on the assurance of the individual components. We have thus
+ undertaken a project to build a translator from LLVM to the
+ applicative subset of Common Lisp accepted by the ACL2 theorem
+ prover. Our translator produces executable ACL2 formal models,
+ allowing us to both prove theorems about the translated models as well
+ as validate those models by testing. The resulting models can be
+ translated and certified without user intervention, even for code with
+ loops, thanks to the use of the def::ung macro which allows us to
+ defer the question of termination. Initial measurements of concrete
+ execution for translated LLVM functions indicate that performance is
+ nearly 2.4 million LLVM instructions per second on a typical laptop
+ computer. In this paper we overview the translation process and
+ illustrate the translator's capabilities by way of a concrete example,
+ including both a functional correctness theorem as well as a
+ validation test for that example.",
+ paper = "Hard14.pdf"
+}
+
+\end{chunk}
+
+\index{Huet, G\'erard}
+\begin{chunk}{axiom.bib}
+@inproceedings{Huet87,
+ author = {Huet, G\'erard},
+ title = "Induction Principles Formalized in the Calculus of Constructions",
+ booktitle = "TAPSOFT 87",
+ publisher = "Springer-Verlag",
+ series = "LNCS 249",
+ year = "1987",
+ pages = "276--286",
+ abstract =
+ "The Calculus of Constructions is a higher-order formalism for writing
+ constructive proofs in a natural deduction style, inspired from work
+ by de Bruijn, Girard, Martin-Lof, and Scott. The calculus and its
+ syntactic theory were presented in Coquand's thesis, and an
+ implementation by the author was used to mechanically verify a
+ substantial number of proofs demonstrating the power of expression of
+ the formalism. The Calculus of Constructions is proposed as a
+ foundation for the design of programming environments where programs
+ are developed consistently with formal specifications. The current
+ paper shows how to define inductive concepts in the calculus.
+
+ A very general induction schema is obtained by postulating all
+ elements of the type of interest to belong to the standard
+ interpretation associated with a predicate map. This is similar to the
+ treatment of D. Park, but the power of expression of the formalism
+ permits a very direct treatment, in a language that is formalized
+ enough to be actually implemented on a computer. Special instances of
+ the induction schema specialize to Noetherian induction and Structural
+ induction over any algebraic type. Computational Induction is treated
+ in an axiomatization of Domain Theory in Constructions. It is argued
+ that the resulting principle is more powerful than LCF's, since the
+ restriction on admissibility is expressible in the object language.",
+ paper = "Huet87.pdf"
+}
+
+\end{chunk}
+
+\index{Jackson, Paul Bernard}
+\begin{chunk}{axiom.bib}
+@phdthesis{Jack95,
+ author = "Jackson, Paul Bernard",
+ title = "Enhancing the NUPRL Proof Development System and Applying it to
+ Computational Abstract Algebra",
+ school = "Cornell University",
+ year = "1995",
month = "1",
abstract = "
This thesis describes substantial enhancements that were made to the
@@ -5936,6 +6350,74 @@ Martin, U.
\end{chunk}
+\index{Lamport, Leslie}
+\begin{chunk}{axiom.bib}
+@book{Lamp02,
+ author = "Lamport, Leslie",
+ title = "Specifying Systems",
+ year = "2002",
+ url = "http://research.microsoft.com/en-us/um/people/lamport/tla/book-02-08-08.pdf",
+ publisher = "AddisonWesley",
+ isbn = "0-321-14306-X",
+ paper = "Lamp02.pdf"
+}
+
+\end{chunk}
+
+\index{Lamport, Leslie}
+\begin{chunk}{axiom.bib}
+@misc{Lamp13,
+ author = "Lamport, Leslie",
+ title = "Errata to Specifying Systems",
+ year = "2013",
+ url = "http://research.microsoft.com/en-us/um/people/lamport/tla/errata1.pdf",
+ publisher = "Microsoft",
+ abstract = "
+ These are all the errors and omissions to the first printing (July
+ 2002) of the book {\sl Specifying Systems} reported as of 29 October
+ 2013. Positions in the book are indicated by page and line number,
+ where the top line of a page is number 1 and the bottom line is number
+ $-1$. A running head and a page number are not considered to be lines,
+ but all other lines are. Please report any additional errors to the
+ author, whose email address is posted on {\tt http://lamport.org}. The
+ first person to report an error will be acknowledged in any revised
+ edition.",
+ paper = "Lamp13.pdf"
+}
+
+\end{chunk}
+
+\index{Lamport, Leslie}
+\begin{chunk}{axiom.bib}
+@misc{Lamp14,
+ author = "Lamport, Leslie",
+ title = "How to Write a $21^{st}$ Century Proof",
+ year = "2014",
+ url = "http://research.microsoft.com/en-us/um/people/lamport/pubs/paper.pdf",
+ publisher = "Microsoft",
+ abstract = "
+ A method of writing proofs is described that makes it harder to prove
+ things that are not true. The method, based on hierarchical
+ structuring, is simple and practical. The author's twenty years of
+ experience writing such proofs is discussed.",
+ paper = "Lamp14.pdf"
+}
+
+\end{chunk}
+
+\index{Lamport, Leslie}
+\begin{chunk}{axiom.bib}
+@misc{Lamp14a,
+ author = "Lamport, Leslie",
+ title = "Talk: How to Write a $21^{st}$ Century Proof",
+ year = "2014",
+ url =
+"http://hits.mediasite.com/mediasite/Play/29d825439b3c49f088d35555426fbdf81d",
+ comment = "2nd Heidelberg Laureate Forum Lecture Tuesday Sep 23, 2014"
+}
+
+\end{chunk}
+
\index{Kaufmann, Matt}
\index{Moore, J Strother}
\begin{chunk}{axiom.bib}
@@ -6033,78 +6515,208 @@ Martin, U.
\end{chunk}
\index{Parent, Catherine}
+\index{Mahboubi, Assia}
+\index{Tassi, Enrico}
+\index{Bertot, Yves}
+\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@inproceedings{Pare93,
 author = "Parent, Catherine",
 title = "Developing Certified Programs in the System Coq: The Program
 Tactic",
 booktitle = "Proc. Int. Workshop on Types for Proofs and Programs",
 publisher = "SpringerVerlag",
 isbn = "3540580859",
 pages = "291312",
 year = "1993",
+@book{Mahb16,
+ author = "Mahboubi, Assia and Tassi, Enrico and Bertot, Yves and
+ Gonthier, Georges",
+ title = "Mathematical Components",
+ year = "2016",
+ publisher = "mathcomp.github.io/mcb",
+ url = "https://mathcomp.github.io/mcb/book.pdf",
abstract =
 "The system {\sl Coq} is an environment for proof development based on
 the Calculus of Constructions extended by inductive definitions. The
 specification of a program can be represented by a logical formula and
 the program itself can be extracted from the constructive proof of the
 specification. In this paper, we look at the possibility of inverting
 the specification and a program, builds the logical condition to be
 verified in order to obtain a correctness proof of the program. We
 build a proof of the specification from the program from which the
 program can be extracted. Since some information cannot automatically
 be inferred, we show how to annotate the program by specifying some of
 its parts in order to guide the search for the proof.",
 paper = "Pare93.ps"
+ "{\sl Mathematical Components} is the name of a library of formalized
+ mathematics for the COQ system. It covers a variety of topics, from the
+ theory of basic data structures (e.g. numbers, lists, finite sets) to
+ advanced results in various flavors of algebra. This library
+ constitutes the infrastructure for the machinechecked proofs of the
+ Four Color Theorem and the Odd Order Theorem.
+
+ The reason of existence of this book is to break down the barriers to
+ entry. While there are several books around covering the usage of the
+ COQ system and the theory it is based on, the Mathematical Components
+ library is built in an unconventional way. As a consequence, this book
+ provides a nonstandard presentation of COQ, putting upfront the
+ formalization choices and the proof style that are the pillars of the
+ library.
+
+ This book targets two classes of public. On one hand, newcomers, even
+ the more mathematically inclined ones, find a soft introduction to the
+ programming language of COQ, Gallina, and the Ssreflect proof
+ language. On the other hand accustomed COQ users find a substantial
+ account of the formalization style that made the Mathematical
+ Components library possible.
+
+ By no means does this book pretend to be a complete description of COQ
+ or Ssreflect: both tools already come with a comprehensive user
+ manual. In the course of the book, the reader is nevertheless invited
+ to experiment with a large library of formalized concepts and she is
+ given as soon as possible sufficient tools to prove nontrivial
+ mathematical results by reusing parts of the library. By the end of
+ the first part, the reader has learnt how to prove formally the
+ infinitude of prime numbers, or the correctness of the Euclidean
+ division algorithm, in a few lines of proof text.",
+ paper = "Mahb16.pdf"
}
\end{chunk}
\index{Parent, Catherine}
+\index{Martin, Ursula}
+\index{Shand, D.}
\begin{chunk}{axiom.bib}
@techreport{Pare94,
 author = "Parent, Catherine",
 title = "Synthesizing proofs from programs in the Calculus of Inductive
 Constructions",
 year = "1994",
 institution = {Ecole Normale Sup\'erieure de Lyon},
 abstract =
 "In type theory, a proof can be represented as a typed $\lambda$term.
 There exist methods to mark logical parts in proofs and extract their
 algorithmic contents. The result is a correct program with respect to
 a specification. This paper focuses on the inverse problem: how to
 generate a proof from its specification. The framework is the Calculus
 of Inductive Constructions. A notion of coherence is introduced between
 a specification and a program containing types but no logical proofs.
 This notion is based on the definition of an extraction function called
 the weak extraction. Such a program can give a method to reconstruct a
 set of logical properties needed to have a proof of the initial
 specification. This can be seen either as a method of proving programs
 or as a method of synthetically describing proofs.",
 paper = "Pare94.pdf"
+@misc{Mart97,
+ author = "Martin, Ursula and Shand, D.",
+ title = "Investigating some Embedded Verification Techniques for
+ Computer Algebra Systems",
+ url = "http://www.risc.jku.at/conferences/Theorema/papers/shand.ps.gz",
+ abstract = "
+ This paper reports some preliminary ideas on a collaborative project
+ between St. Andrews University in the UK and NAG Ltd. The project aims
+ to use embedded verification techniques to improve the reliability and
+ mathematical soundness of computer algebra systems. We give some
+ history of attempts to integrate computer algebra systems and
+ automated theorem provers and discuss possible advantages and
+ disadvantages of these approaches. We also discuss some possible case
+ studies.",
+ paper = "Mart97.ps"
}
\end{chunk}
\index{ParentVigouroux, Catherine}
+\index{Mason, Ian A.}
\begin{chunk}{axiom.bib}
@misc{Pare96,
 author = "ParentVigouroux, Catherine",
 title = "Natural proofs versus programs optimization in the
 Calculus of Inductive Constructions",
 year = "1996",
 abstract =
 "This paper presents how to automatically prove that an 'optimized'
 program is correct with respect to a set of given properties that is a
 specification. Proofs of specifications contain logical and
 computational parts. Programs can be seen as computational parts of
 proofs. They can thus be extracted from proofs and be certified to be
 correct. The inverse problem can be solved: it is possible to
 reconstruct proof obligations from a program and its specification.
 The framework is a type theory where a proof can be represented as a
 typed $\lambda$term and, particularly, the Calculus of Inductive
 Constructions. This paper shows how programs can be simplified in
+@book{Maso86,
+ author = "Mason, Ian A.",
+ title = "The Semantics of Destructive Lisp",
+ publisher = "Center for the Study of Language and Information",
+ year = "1986",
+ isbn = "0937073067",
+ abstract = "
+ Our basic premise is that the ability to construct and modify programs
+ will not improve without a new and comprehensive look at the entire
+ programming process. Past theoretical research, say, in the logic of
+ programs, has tended to focus on methods for reasoning about
+ individual programs; little has been done, it seems to us, to develop
+ a sound understanding of the process of programming --- the process by
+ which programs evolve in concept and in practice. At present, we lack
+ the means to describe the techniques of program construction and
+ improvement in ways that properly link verification, documentation and
+ adaptability."
+}
+
+\end{chunk}
+
+\index{Newcombe, Chris}
+\index{Rath, Tim}
+\index{Zhang, Fan}
+\index{Munteanu, Bogdan}
+\index{Brooker, Marc}
+\index{Deardeuff, Michael}
+\begin{chunk}{ignore}
+\bibitem[Newcombe 13]{Newc13} Newcombe, Chris; Rath, Tim; Zhang, Fan;
+Munteanu, Bogdan; Brooker, Marc; Deardeuff, Michael
+ title = "Use of Formal Methods at Amazon Web Services",
+ url = "http://research.microsoft.com/en-us/um/people/lamport/tla/formal-methods-amazon.pdf",
+ abstract = "
+ In order to find subtle bugs in a system design, it is necessary to
+ have a precise description of that design. There are at least two
+ major benefits to writing a precise design; the author is forced to
+ think more clearly, which helps eliminate ``plausible handwaving'',
+ and tools can be applied to check for errors in the design, even while
+ it is being written. In contrast, conventional design documents
+ consist of prose, static diagrams, and perhaps pseudocode in an ad
+ hoc untestable language. Such descriptions are far from precise; they
+ are often ambiguous, or omit critical aspects such as partial failure
+ or the granularity of concurrency (i.e. which constructs are assumed
+ to be atomic). At the other end of the spectrum, the final executable
+ code is unambiguous, but contains an overwhelming amount of detail. We
+ needed to be able to capture the essence of a design in a few hundred
+ lines of precise description. As our designs are unavoidably complex,
+ we need a highly-expressive language, far above the level of code, but
+ with precise semantics. That expressivity must cover real-world
+ concurrency and fault-tolerance. And, as we wish to build services
+ quickly, we wanted a language that is simple to learn and apply,
+ avoiding esoteric concepts. We also very much wanted an existing
+ ecosystem of tools. We found what we were looking for in TLA+, a
+ formal specification language."
+
+\end{chunk}
+
+\index{Parent, Catherine}
+\begin{chunk}{axiom.bib}
+@inproceedings{Pare93,
+ author = "Parent, Catherine",
+ title = "Developing Certified Programs in the System Coq: The Program
+ Tactic",
+ booktitle = "Proc. Int. Workshop on Types for Proofs and Programs",
+ publisher = "SpringerVerlag",
+ isbn = "3540580859",
+ pages = "291312",
+ year = "1993",
+ abstract =
+ "The system {\sl Coq} is an environment for proof development based on
+ the Calculus of Constructions extended by inductive definitions. The
+ specification of a program can be represented by a logical formula and
+ the program itself can be extracted from the constructive proof of the
+ specification. In this paper, we look at the possibility of inverting
+ the specification and a program, builds the logical condition to be
+ verified in order to obtain a correctness proof of the program. We
+ build a proof of the specification from the program from which the
+ program can be extracted. Since some information cannot automatically
+ be inferred, we show how to annotate the program by specifying some of
+ its parts in order to guide the search for the proof.",
+ paper = "Pare93.ps"
+}
+
+\end{chunk}
+
+\index{Parent, Catherine}
+\begin{chunk}{axiom.bib}
+@techreport{Pare94,
+ author = "Parent, Catherine",
+ title = "Synthesizing proofs from programs in the Calculus of Inductive
+ Constructions",
+ year = "1994",
+ institution = {Ecole Normale Sup\'erieure de Lyon},
+ abstract =
+ "In type theory, a proof can be represented as a typed $\lambda$term.
+ There exist methods to mark logical parts in proofs and extract their
+ algorithmic contents. The result is a correct program with respect to
+ a specification. This paper focuses on the inverse problem: how to
+ generate a proof from its specification. The framework is the Calculus
+ of Inductive Constructions. A notion of coherence is introduced between
+ a specification and a program containing types but no logical proofs.
+ This notion is based on the definition of an extraction function called
+ the weak extraction. Such a program can give a method to reconstruct a
+ set of logical properties needed to have a proof of the initial
+ specification. This can be seen either as a method of proving programs
+ or as a method of synthetically describing proofs.",
+ paper = "Pare94.pdf"
+}
+
+\end{chunk}
+
+\index{ParentVigouroux, Catherine}
+\begin{chunk}{axiom.bib}
+@misc{Pare96,
+ author = "ParentVigouroux, Catherine",
+ title = "Natural proofs versus programs optimization in the
+ Calculus of Inductive Constructions",
+ year = "1996",
+ abstract =
+ "This paper presents how to automatically prove that an 'optimized'
+ program is correct with respect to a set of given properties that is a
+ specification. Proofs of specifications contain logical and
+ computational parts. Programs can be seen as computational parts of
+ proofs. They can thus be extracted from proofs and be certified to be
+ correct. The inverse problem can be solved: it is possible to
+ reconstruct proof obligations from a program and its specification.
+ The framework is a type theory where a proof can be represented as a
+ typed $\lambda$term and, particularly, the Calculus of Inductive
+ Constructions. This paper shows how programs can be simplified in
order to be written in a much closer way to the ML one's. Indeed,
proofs structures are often much more heavy than program structures.
The problem is consequently to consider natural programs (in a ML sense)
@@ -6184,6 +6796,40 @@ Martin, U.
\end{chunk}
\index{Poll, Erik}
+\begin{chunk}{axiom.bib}
+@misc{Poll99,
+ author = "Poll, Erik",
+ title = "The Type System of Axiom",
+ year = "1999",
+ url = "http://www.cs.ru.nl/E.Poll/talks/axiom.pdf",
+ abstract =
+ "This is a slide deck from a talk on the correspondence between
+ Axiom/Aldor types and Logic.",
+ paper = "Poll99.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Poll, Erik}
+\index{Thompson, Simon}
+\begin{chunk}{axiom.bib}
+@misc{Poll99a,
+ author = "Poll, Erik and Thompson, Simon",
+ title = "The Type System of Aldor",
+ url = "http://www.cs.kent.ac.uk/pubs/1999/874/content.ps",
+ abstract =
+ "This paper gives a formal description of --- at least a part of ---
+ the type system of Aldor, the extension language of the Axiom.
+ In the process of doing this a critique of the design of the system
+ emerges.",
+ paper = "Poll99a.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Poll, Erik}
\index{Thompson, Simon}
\begin{chunk}{axiom.bib}
@misc{Poll00,
@@ -6250,6 +6896,17 @@ Martin, U.
\end{chunk}
+\index{Roberts, Siobhan}
+\begin{chunk}{axiom.bib}
+@misc{Robe15,
+ author = "Roberts, Siobhan",
+ title = "In Mathematics, Mistakes Aren't What They Used To Be",
+ year = 2015,
+ url = "http://nautil.us/issue/24/error/In-mathematics-mistakes-arent-what-they-used-to-be"
+}
+
+\end{chunk}
+
\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@misc{TLA16,
@@ -6306,6 +6963,63 @@ Martin, U.
\end{chunk}
+\index{Nguyen, Phuc C.}
+\index{TobinHochstadt, Sam}
+\index{van Horn, David}
+\begin{chunk}{axiom.bib}
+@article{Nguy16,
+ author = "Nguyen, Phuc C. and TobinHochstadt, Sam and van Horn, David",
+ title = "Higher-order symbolic execution for contract verification and
+ refutation",
+ url = "http://arxiv.org/pdf/1507.04817v2.pdf",
+ year = "2016",
+ month = "February",
+ abstract =
+ "We present a new approach to automated reasoning about higher-order
+ programs by endowing symbolic execution with a notion of higher-order,
+ symbolic values.
+
+ To validate our approach, we use it to develop and evaluate a system
+ for verifying and refuting behavioral software contracts of components
+ in a functional language, which we call {\sl soft contract
+ verification}. In doing so, we discover a mutually beneficial relation
+ between behavioral contracts and higher-order symbolic
+ execution. Contracts aid symbolic execution by providing a rich
+ language of specifications that can serve as the basis of symbolic
+ higher-order values; the theory of blame enables modular verification
+ and leads to the theorem that {\sl verified components can't be
+ blamed}; and the runtime monitoring of contracts enables {\sl soft}
+ verification whereby verified and unverified components can safely
+ interact and verification is not an all-or-nothing
+ proposition. Conversely, symbolic execution aids contracts by
+ providing compile-time verification which increases assurance and
+ enables optimizations; automated test-case generation for contracts
+ with counterexamples; and engendering a virtuous cycle between
+ verification and the gradual spread of contracts.
+
+ Our system uses higher-order symbolic execution, leveraging contracts
+ as a source of symbolic values including unknown behavioral values,
+ and employs an updatable heap of contract invariants to reason about
+ flow-sensitive facts. Whenever a contract is refuted, it reports a
+ concrete {\sl counterexample} reproducing the error, which may involve
+ solving for an unknown function. The approach is able to analyze
+ first-class contracts, recursive data structures, unknown functions,
+ and control-flow-sensitive refinement of values, which are all
+ idiomatic in dynamic languages. It makes effective use of an
+ off-the-shelf solver to decide problems without heavy encodings. Our
+ counterexample search is sound and relatively complete with respect to
+ a first-order solver for base type values. Therefore, it can form the
+ basis of automated verification and bug-finding tools for higher-order
+ programs. The approach is competitive with a wide range of existing
+ tools --- including type systems, flow analyzers, and model checkers ---
+ on their own benchmarks. We have built a tool which analyzes programs
+ written in Racket, and report on its effectiveness in verifying and
+ refuting contracts.",
+ paper = "Nguy16.pdf"
+}
+
+\end{chunk}
+
\index{O'Connor, Liam}
\begin{chunk}{ignore}
@misc{OCon15,
@@ -6372,77 +7086,20 @@ Martin, U.
\end{chunk}
\index{Nguyen, Phuc C.}
\index{TobinHochstadt, Sam}
\index{van Horn, David}
+\index{Rudnicki, Piotr}
+\index{Schwarzweller, Christoph}
+\index{Trybulec, Andrzej}
\begin{chunk}{axiom.bib}
@article{Nguy16,
 author = "Nguyen, Phuc C. and TobinHochstadt, Sam and van Horn, David",
 title = "Higherorder symbolic execution for contract verification and
 refutation",
 url = "http://arxiv.org/pdf/1507.04817v2.pdf",
 year = "2016",
 month = "February",
 abstract =
 "We present a new approach to automated reasoning about higherorder
 programs by endowing symbolic execution with a notion of higherorder,
 symbolic values.

 To validate our approach, we use it to develop and evaluate a system
 for verifying and refuting behavioral software contracts of components
 in a functional language, which we call {\sl soft contract
 verification}. In doing so, we discover a mutually beneficial relation
 between behavioral contracts and higherorder symbolic
 execution. Contracts aid symbolic execution by providing a rich
 language of specifications that can serve as the basis of symbolic
 higherorder values; the theory of blame enables modular verification
 and leads to the theorem that {\sl verified compnents can't be
 blamed}; and the runtime monitoring of contracts enables {\sl soft}
 verification whereby verified and unverified components can safely
 interact and verification is not an allornothing
 proposition. Conversely, symbolic execution aids contracts by
 providing compiletime verification which increases assurance and
 enables optimizations; automated testcase generation for contracts
 with counterexamples; and engendering a virtuous cycle between
 verification and the gradual spread of contracts.

 Our system uses higherorder symbolic execution, leveraging contracts
 as a source of symbolic values including unknown behavioral values,
 and employs an updatable heap of contract invariants to reason about
 flowsensitive facts. Whenever a contract is refuted, it reports a
 concrete {\sl counterexample} reproducing the error, which may involve
 solving for an unknown function. The approach is able to analyze
 firstclass contracts, recursive data structures, unknown functions,
 and controlflowsensitive refinement of values, which are all
 idiomatic in dynamic languages. It makes effective use of an
 offtheshelf solver to decide problems without heavy encodings. Our
 counterexample search is sound and relatively complete with respect to
 a firstorder solver for base type values. Therefore, it can form the
 basis of automated verification and bugfinding tools for higherorder
 programs. The approach is competitive with a wide range of existing
 tools  including type systems, flow analyzers, and model checkers 
 on their own benchmarks. We have built a tool which analyzes programs
 written in Racket, and report on its effectiveness in verifying and
 refuting contracts.",
 paper = "Nguy16.pdf"
}

\end{chunk}

\index{Rudnicki, Piotr}
\index{Schwarzweller, Christoph}
\index{Trybulec, Andrzej}
\begin{chunk}{axiom.bib}
@article{Rudn01,
 author = "Rudnicki, Piotr and Schwarzweller, Christoph and
 Trybulec, Andrzej",
 title = "Commutative algebra in the Mizar system",
 journal = "J. Symb. Comput.",
 volume = "32",
 number = "12",
 pages = "143169",
 year = "2001",
 url = "https://inf.ug.edu.pl/~schwarzw/papers/jsc01.pdf",
+@article{Rudn01,
+ author = "Rudnicki, Piotr and Schwarzweller, Christoph and
+ Trybulec, Andrzej",
+ title = "Commutative algebra in the Mizar system",
+ journal = "J. Symb. Comput.",
+ volume = "32",
+ number = "12",
+ pages = "143169",
+ year = "2001",
+ url = "https://inf.ug.edu.pl/~schwarzw/papers/jsc01.pdf",
abstract =
"We report on the development of algebra in the Mizar system. This
includes the construction of formal multivariate power series and
@@ -6545,597 +7202,12 @@ Martin, U.
\end{chunk}
\index{Ballarin, Clemens}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@article{Ball99,
 author = "Ballarin, Clemens and Paulson, Lawrence C.",
 title = "A Pragmatic Approach to Extending Provers by Computer Algebra 
 with Applications to Coding Theory",
 journal = "Fundam. Inform.",
 volume = "39",
 number = "12",
 pages = "120",
 year = "1999",
 url = "http://www.cl.cam.ac.uk/~lp15/papers/Isabelle/coding.pdf",
 abstract = "
 The use of computer algebra is usually considered beneficial for
 mechanised reasoning in mathematical domains. We present a case study,
 in the application domain of coding theory, that supports this claim:
 the mechanised proofs depend on nontrivial algorithms from computer
 algebra and increase the reasoning power of the theorem prover.

 The unsoundness of computer algebra systems is a major problem in
 interfacing them to theorem provers. Our approach to obtaining a sound
 overall system is not blanket distrust but based on the distinction
 between algorithms we call sound and {\sl ad hoc} respectively. This
 distinction is blurred in most computer algebra systems. Our
 experimental interface therefore uses a computer algebra library. It
 is based on formal specifications for the algorithms, and links the
 computer algebra library Sumit to the prover Isabelle.

 We give details of the interface, the use of the computer algebra
 system on the tacticlevel of Isabelle and its integration into proof
 procedures.",
 paper = "Ball99.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Bertot, Yves}
\index{Cast\'eran, Pierre}
\begin{chunk}{ignore}
\bibitem[Bertot 04]{Bert04} Bertot, Yves; Cast\'eran, Pierre
 title = "Interactive Theorem Proving and Program Development",
 isbn = "3540208542",
 abstract = "
 Coq is an interactive proof assistant for the development of
 mathematical theories and formally certified software. It is based on
 a theory called the calculus of inductive constructions, a variant of
 type theory.

 This book provides a pragmatic introduction to the development of
 proofs and certified programs using Coq. With its large collection of
 examples and exercies it is an invaluable tool for researchers,
 students, and engineers interested in formal methods and the
 development of zerofault software."

\end{chunk}

\index{Boulm\'e, S.}
\index{Hardin, T.}
\index{Rioboo, Renaud}
\begin{chunk}{axiom.bib}
@misc{Boul00,
 author = "Boulme, S. and Hardin, T. and Rioboo, R.",
 title = "Polymorphic Data Types, Objects, Modules and Functors,:
 is it too much?",
 url = "ftp://ftp.lip6.fr/lip6/reports/2000/lip6.2000.014.ps.gz",
 abstract = "
 Abstraction is a powerful tool for developers and it is offered by
 numerous features such as polymorphism, classes, modules, and
 functors, $\ldots$ A working programmer may be confused by this
 abundance. We develop a computer algebra library which is being
 certificed. Reporting this experience made with a language (Ocaml)
 offering all these features, we argue that the are all needed
 together. We compare several ways of using classes to represent
 algebraic concepts, trying to follow as close as possible mathematical
 specification. Then we show how to combine classes and modules to
 produce code having very strong typing properties. Currently, this
 library is made of one hundred units of functional code and behaves
 faster than analogous ones such as Axiom.",
 paper = "Boul00.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Boulm\'e, Sylvain}
\index{Hardin, Therese}
\index{Hirschkoff, Daniel}
\index{Rioboo, Renaud}
\index{M\'enissierMorain, Valerie}
\begin{chunk}{axiom.bib}
@InProceedings{Boul99,
 author = "Boulme, S. and Hardin, T. and Hirschkoff, D. and Rioboo, Renaud",
 title = "On the way to certify Computer Algebra Systems",
 booktitle = "Systems for integrated computation and deduction",
 series = "Calculemus 99",
 year = "1999",
 publisher = "Elsevier",
 location = "Trento, Italy",
 pages = "11--12",
 abstract =
 "The FOC project aims at supporting, within a coherent software system,
 the entire process of mathematical computation, starting with proved
 theories, ending with certified implementations of algorithms. In this
 paper, we explain our design requirements for the implementation,
 using polynomials as a running example. Indeed, proving correctness of
 implementations depends heavily on the way this design allows
 mathematical properties to be truly handled at the programming level.

 The FOC project, started at the fall of 1997, is aimed to build a
 programming environment for the development of certified symbolic
 computation. The working languages are Coq and Ocaml. In this paper,
 we present first the motivations of the project. We then explain why
 and how our concern for proving properties of programs has led us to
 certain implementation choices in Ocaml. This way, the sources express
 exactly the mathematical dependencies between different structures.
 This may ease the achievement of proofs.",
 paper = "Boul99.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Daly, Timothy}
\begin{chunk}{ignore}
\bibitem[Daly 10]{Daly10} Daly, Timothy
 title = "Intel Instruction Semantics Generator",
 url = "http://daly.axiomdeveloper.org/TimothyDaly_files/publications/sei/intel/intel.pdf",
 abstract = "
 Given an Intel x86 binary, extract the semantics of the instruction
 stream as Conditional Concurrent Assignments (CCAs). These CCAs
 represent the semantics of each individual instruction. They can be
 composed to represent higher level semantics.",
 paper = "Daly10.pdf"

\end{chunk}

\index{Danielsson, Nils Anders}
\index{Hughes, John}
\index{Jansson, Patrik}
\index{Gibbons, Jeremy}
\begin{chunk}{axiom.bib}
@InProceedings{Dani06,
 author = "Danielsson, Nils Anders and Hughes, John and Jansson, Patrik and
 Gibbons, Jeremy",
 title = "Fast and Loose Reasoning is Morally Correct",
 booktitle = "Proc. of ACM POPL '06",
 series = "POPL '06",
 year = "2006",
 location = "Charleston, South Carolina",
 abstract =
 "Functional programmers often reason about programs as if they were
 written in a total language, expecting the results to carry over to
 non-total (partial) languages. We justify such reasoning.

 Two languages are defined, one total and one partial, with identical
 syntax. The semantics of the partial language includes partial and
 infinite values, and all types are lifted, including the function
 spaces. A partial equivalence relation (PER) is then defined, the
 domain of which is the total subset of the partial language. For types
 not containing function spaces the PER relates equal values, and
 functions are related if they map related values to related values.

 It is proved that if two closed terms have the same semantics in the
 total language, then they have related semantics in the partial
 language. It is also shown that the PER gives rise to a bicartesian
 closed category which can be used to reason about values in the domain
 of the relation.",
 paper = "Dani06.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Davenport, James H.}
\index{Bradford, Russell}
\index{England, Matthew}
\index{Wilson, David}
\begin{chunk}{ignore}
\bibitem[Davenport 12]{Davenp12} Davenport, James H.; Bradford, Russell;
England, Matthew; Wilson, David
 title = "Program Verification in the presence of complex numbers,
 functions with branch cuts etc.",
 url = "http://arxiv.org/pdf/1212.5417.pdf",
 abstract = "
 In considering the reliability of numerical programs, it is normal to
 ``limit our study to the semantics dealing with numerical precision''.
 On the other hand, there is a great deal of work on the reliability of
 programs that essentially ignores the numerics. The thesis of this
 paper is that there is a class of problems that fall between these
 two, which could be described as ``does the lowlevel arithmetic
 implement the highlevel mathematics''. Many of these problems arise
 because mathematics, particularly the mathematics of the complex
 numbers, is more difficult than expected: for example the complex
 function log is not continuous, writing down a program to compute an
 inverse function is more complicated than just solving an equation,
 and many algebraic simplification rules are not universally valid.

 The good news is that these problems are {\sl theoretically} capable
 of being solved, and are {\sl practically} close to being solved, but
 not yet solved, in several realworld examples. However, there is
 still a long way to go before implementations match the theoretical
 possibilities.",
 paper = "Davenp12.pdf"

\end{chunk}

\index{Dolzmann, Andreas}
\index{Sturm, Thomas}
\begin{chunk}{ignore}
\bibitem[Dolzmann 97]{Dolz97} Dolzmann, Andreas; Sturm, Thomas
 title = "Guarded Expressions in Practice",
 url = "http://redlog.dolzmann.de/papers/pdf/MIP9702.pdf",
 abstract = "
 Computer algebra systems typically drop some degenerate cases when
 evaluating expressions, e.g. $x/x$ becomes 1 dropping the case
 $x=0$. We claim that it is feasible in practice to compute also the
 degenerate cases yielding {\sl guarded expressions}. We work over real
 closed fields but our ideas about handling guarded expressions can be
 easily transferred to other situations. Using formulas as guards
 provides a powerful tool for heuristically reducing the combinatorial
 explosion of cases: equivalent, redundant, tautological, and
 contradictive cases can be detected by simplification and quantifier
 elimination. Our approach allows to simplify the expressions on the
 basis of simplification knowledge on the logical side. The method
 described in this paper is implemented in the REDUCE package GUARDIAN,
 which is freely available on the WWW.",
 paper = "Dolz97.pdf"

\end{chunk}

\index{Dos Reis, Gabriel}
\index{Matthews, David}
\index{Li, Yue}
\begin{chunk}{ignore}
\bibitem[Dos Reis 11]{DR11} Dos Reis, Gabriel; Matthews, David; Li, Yue
 title = "Retargeting OpenAxiom to Poly/ML: Towards an Integrated Proof Assistants and Computer Algebra System Framework",
Calculemus (2011) Springer
 url = "http://paradise.caltech.edu/~yli/paper/oapolyml.pdf",
 abstract = "
 This paper presents an ongoing effort to integrate the Axiom family of
 computer algebra systems with Poly/MLbased proof assistants in the
 same framework. A long term goal is to make a large set of efficient
 implementations of algebraic algorithms available to popular proof
 assistants, and also to bring the power of mechanized formal
 verification to a family of strongly typed computer algebra systems at
 a modest cost. Our approach is based on retargeting the code generator
 of the OpenAxiom compiler to the Poly/ML abstract machine.",
 paper = "DR11.pdf",
 keywords = "axiomref"

\end{chunk}

\index{Dunstan, Martin}
\begin{chunk}{ignore}
\bibitem[Dunstan 00a]{Dun00a} Dunstan, Martin N.
 title = "Adding Larch/Aldor Specifications to Aldor",
 abstract = "
 We describe a proposal to add Larchstyle annotations to the Aldor
 programming language, based on our PhD research. The annotations
 are intended to be machinecheckable and may be used for a variety
 of purposes ranging from compiler optimizations to verification
 condition (VC) generation. In this report we highlight the options
 available and describe the changes which would need to be made to
 the compiler to make use of this technology.",
 paper = "Dunxx.pdf"

\end{chunk}

\index{Dunstan, Martin}
\index{Kelsey, Tom}
\index{Linton, Steve A.}
\index{Martin, Ursula}
\begin{chunk}{axiom.bib}
@InProceedings{Duns98,
 author = "Dunstan, Martin and Kelsey, Tom and Linton, Steve and
 Martin, Ursula",
 title = "Lightweight Formal Methods For Computer Algebra Systems",
 publisher = "ACM Press",
 booktitle = "Proc. ISSAC 1998",
 year = "1998",
 location = "Rostock, Germany",
 pages = "80--87",
 url = "http://www.cs.standrews.ac.uk/~tom/pub/issac98.pdf",
 abstract =
 "Demonstrates the use of formal methods tools to provide a semantics
 for the type hierarchy of the Axiom computer algebra system, and a
 methodology for Aldor program analysis and verification. There are
 examples of abstract specifications of Axiom primitives.",
 paper = "Duns98.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Dunstan, Martin N.}
\begin{chunk}{axiom.bib}
@phdthesis{Duns99a,
 author = "Dunstan, Martin N.",
 title = "Larch/Aldor  A Larch BISL for AXIOM and Aldor",
 school = "University of St. Andrews",
 year = "1999",
 abstract = "
 In this thesis we investigate the use of lightweight formal methods
 and verification conditions (VCs) to help improve the reliability of
 components constructed within a computer algebra system. We follow the
 Larch approach to formal methods and have designed a new behavioural
 interface specification language (BISL) for use with Aldor: the
 compiled extension language of Axiom and a fullyfeatured programming
 language in its own right. We describe our idea of lightweight formal
 methods, present a design for a lightweight verification condition
 generator and review our implementation of a prototype verification
 condition generator for Larch/Aldor.",
 paper = "Duns99a.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Dunstan, Martin}
\index{Kelsey, Tom}
\index{Martin, Ursula}
\index{Linton, Steve A.}
\begin{chunk}{axiom.bib}
@InProceedings{Duns99,
 author = "Dunstan, Martin and Kelsey, Tom and Martin, Ursula and
 Linton, Steve A.",
 title = "Formal Methods for Extensions to CAS",
 booktitle = "Proc. of FME'99",
 series = "FME'99",
 location = "Toulouse, France",
 year = "1999",
 pages = "1758--1777",
 url = "http://tom.host.cs.standrews.ac.uk/pub/fm99.ps",
 abstract =
 "We demonstrate the use of formal methods tools to provide a semantics
 for the type hierarchy of the AXIOM computer algebra system, and a
 methodology for Aldor program analysis and verification. We give a
 case study of abstract specifications of AXIOM primitives, and provide
 an interface between these abstractions and Aldor code.",
 paper = "Duns99.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Hardin, David S.}
\index{McClurg, Jedidiah R.}
\index{Davis, Jennifer A.}
\begin{chunk}{axiom.bib}
@misc{Hard13,
 author = "Hardin, David S. and McClurg, Jedidiah R. and Davis, Jennifer A.",
 title = "Creating Formally Verified Components for Layered Assurance with an LLVM to ACL2 Translator",
 url = "http://www.jrmcclurg.com/papers/law_2013_paper.pdf",
 abstract = "
 This paper describes an effort to create a library of formally
 verified software component models from code that have been compiled
 using the LowLevel Virtual Machine (LLVM) intermediate form. The idea
 is to build a translator from LLVM to the applicative subset of Common
 Lisp accepted by the ACL2 theorem prover. They perform verification of
 the component model using ACL2's automated reasoning capabilities.",
 paper = "Hard13.pdf"
}

\end{chunk}

\index{Hardin, David S.}
\index{Davis, Jennifer A.}
\index{Greve, David A.}
\index{McClurg, Jedidiah R.}
\begin{chunk}{axiom.bib}
@misc{Hard14,
 author = "Hardin, David S. and Davis, Jennifer A. and Greve, David A. and
 McClurg, Jedidiah R.",
 title = "Development of a Translator from LLVM to ACL2",
 url = "http://arxiv.org/pdf/1406.1566",
 abstract = "
 In our current work a library of formally verified software components
 is to be created, and assembled, using the LowLevel Virtual Machine
 (LLVM) intermediate form, into subsystems whose toplevel assurance
 relies on the assurance of the individual components. We have thus
 undertaken a project to build a translator from LLVM to the
 applicative subset of Common Lisp accepted by the ACL2 theorem
 prover. Our translator produces executable ACL2 formal models,
 allowing us to both prove theorems about the translated models as well
 as validate those models by testing. The resulting models can be
 translated and certified without user intervention, even for code with
 loops, thanks to the use of the def::ung macro which allows us to
 defer the question of termination. Initial measurements of concrete
 execution for translated LLVM functions indicate that performance is
 nearly 2.4 million LLVM instructions per second on a typical laptop
 computer. In this paper we overview the translation process and
 illustrate the translator's capabilities by way of a concrete example,
 including both a functional correctness theorem as well as a
 validation test for that example.",
 paper = "Hard14.pdf"
}

\end{chunk}

\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@book{Lamp02,
 author = "Lamport, Leslie",
 title = "Specifying Systems",
 year = "2002",
 url = "http://research.microsoft.com/enus/um/people/lamport/tla/book020808.pdf",
 publisher = "AddisonWesley",
 isbn = "032114306X",
 paper = "Lamp02.pdf"
}

\end{chunk}

\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@misc{Lamp13,
 author = "Lamport, Leslie",
 title = "Errata to Specifying Systems",
 year = "2013",
 url = "http://research.microsoft.com/enus/um/people/lamport/tla/errata1.pdf",
 publisher = "Microsoft",
 abstract = "
 These are all the errors and omissions to the first printing (July
 2002) of the book {\sl Specifying Systems} reported as of 29 October
 2013. Positions in the book are indicated by page and line number,
 where the top line of a page is number 1 and the bottom line is number
 $-1$. A running head and a page number are not considered to be lines,
 but all other lines are. Please report any additional errors to the
 author, whose email address is posted on {\tt http://lamport.org}. The
 first person to report an error will be acknowledged in any revised
 edition.",
 paper = "Lamp13.pdf"
}

\end{chunk}

\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@misc{Lamp14,
 author = "Lamport, Leslie",
 title = "How to Write a $21^{st}$ Century Proof",
 year = "2014",
 url = "http://research.microsoft.com/enus/um/people/lamport/pubs/paper.pdf",
 publisher = "Microsoft",
 abstract = "
 A method of writing proofs is described that makes it harder to prove
 things that are not true. The method, based on hierarchical
 structuring, is simple and practical. The author's twenty years of
 experience writing such proofs is discussed.",
 paper = "Lamp14.pdf"
}

\end{chunk}

\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@misc{Lamp14a,
 author = "Lamport, Leslie",
 title = "Talk: How to Write a $21^{st}$ Century Proof",
 year = "2014",
 url =
"http://hits.mediasite.com/mediasite/Play/29d825439b3c49f088d35555426fbdf81d",
 comment = "2nd Heidelberg Laureate Forum Lecture Tuesday Sep 23, 2014"
}

\end{chunk}

\index{Martin, Ursula}
\index{Shand, D.}
\begin{chunk}{axiom.bib}
@misc{Mart97,
 author = "Martin, Ursula and Shand, D",
 title = "Investigating some Embedded Verification Techniques for
 Computer Algebra Systems",
 url = "http://www.risc.jku.at/conferences/Theorema/papers/shand.ps.gz",
 abstract = "
 This paper reports some preliminary ideas on a collaborative project
 between St. Andrews University in the UK and NAG Ltd. The project aims
 to use embedded verification techniques to improve the reliability and
 mathematical soundness of computer algebra systems. We give some
 history of attempts to integrate computer algebra systems and
 automated theorem provers and discuss possible advantages and
 disadvantages of these approaches. We also discuss some possible case
 studies.",
 paper = "Mart97.ps"
}

\end{chunk}

\index{Mason, Ian A.}
\begin{chunk}{axiom.bib}
@book{Maso86,
 author = "Mason, Ian A.",
 title = "The Semantics of Destructive Lisp",
 publisher = "Center for the Study of Language and Information",
 year = "1986",
 isbn = "0937073067",
 abstract = "
 Our basic premise is that the ability to construct and modify programs
 will not improve without a new and comprehensive look at the entire
 programming process. Past theoretical research, say, in the logic of
 programs, has tended to focus on methods for reasoning about
 individual programs; little has been done, it seems to us, to develop
 a sound understanding of the process of programming  the process by
 which programs evolve in concept and in practice. At present, we lack
 the means to describe the techniques of program construction and
 improvement in ways that properly link verification, documentation and
 adaptability."
}

\end{chunk}

\index{Newcombe, Chris}
\index{Rath, Tim}
\index{Zhang, Fan}
\index{Munteanu, Bogdan}
\index{Brooker, Marc}
\index{Deardeuff, Michael}
\begin{chunk}{ignore}
\bibitem[Newcombe 13]{Newc13} Newcombe, Chris; Rath, Tim; Zhang, Fan;
Munteanu, Bogdan; Brooker, Marc; Deardeuff, Michael
 title = "Use of Formal Methods at Amazon Web Services",
 url = "http://research.microsoft.com/enus/um/people/lamport/tla/formalmethodsamazon.pdf",
 abstract = "
 In order to find subtle bugs in a system design, it is necessary to
 have a precise description of that design. There are at least two
 major benefits to writing a precise design; the author is forced to
 think more clearly, which helps eliminate ``plausible handwaving'',
 and tools can be applied to check for errors in the design, even while
 it is being written. In contrast, conventional design documents
 consist of prose, static diagrams, and perhaps pseudocode in an ad
 hoc untestable language. Such descriptions are far from precise; they
 are often ambiguous, or omit critical aspects such as partial failure
 or the granularity of concurrency (i.e. which constructs are assumed
 to be atomic). At the other end of the spectrum, the final executable
 code is unambiguous, but contains an overwhelming amount of detail. We
 needed to be able to capture the essence of a design in a few hundred
 lines of precise description. As our designs are unavoidably complex,
 we need a highlyexpressive language, far above the level of code, but
 with precise semantics. That expressivity must cover realworld
 concurrency and faulttolerance. And, as we wish to build services
 quickly, we wanted a language that is simple to learn and apply,
 avoiding esoteric concepts. We also very much wanted an existing
 ecosystem of tools. We found what we were looking for in TLA+, a
 formal specification language."

\end{chunk}

\index{Poll, Erik}
\begin{chunk}{axiom.bib}
@misc{Poll99,
 author = "Poll, Erik",
 title = "The Type System of Axiom",
 year = "1999",
 url = "http://www.cs.ru.nl/E.Poll/talks/axiom.pdf",
 abstract =
 "This is a slide deck from a talk on the correspondence between
 Axiom/Aldor types and Logic.",
 paper = "Poll99.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Poll, Erik}
\index{Thompson, Simon}
\begin{chunk}{axiom.bib}
@misc{Poll99a,
 author = "Poll, Erik and Thompson, Simon",
 title = "The Type System of Aldor",
 url = "http://www.cs.kent.ac.uk/pubs/1999/874/content.ps",
 abstract =
 "This paper gives a formal description of  at least a part of 
 the type system of Aldor, the extension language of Axiom.
 In the process of doing this a critique of the design of the system
 emerges.",
 paper = "Poll99a.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Roberts, Siobhan}
\begin{chunk}{axiom.bib}
@misc{Robe15,
 author = "Roberts, Siobhan",
 title = "In Mathematics, Mistakes Aren't What They Used To Be",
 year = 2015,
 url = "http://nautil.us/issue/24/error/Inmathematicsmistakesarentwhattheyusedtobe"
+}
+
+@misc{Wiki17,
+ author = "Wikipedia",
+ title = "Calculus of constructions",
+ year = "2017",
+ url = "https://en.wikipedia.org/wiki/Calculus\_of\_constructions"
}
\end{chunk}
diff git a/changelog b/changelog
index fee9455..c84c509 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,6 @@
+20170104 tpd src/axiomwebsite/patches.html 20170104.01.tpd.patch
+20170104 tpd books/bookvol13 COQ details
+20170103 tpd books/bookvolbib COQ Program Construction papers
20170103 tpd src/axiomwebsite/patches.html 20170103.01.tpd.patch
20170103 tpd books/bookvol13 COQ proof of GCD
20170103 tpd books/bookvolbib COQ Program Construction papers
diff git a/patch b/patch
index f87b0c8..8e35e79 100644
 a/patch
+++ b/patch
@@ 1,169 +1,76 @@
books/bookvolbib COQ Program Construction papers
+books/bookvol13 books/bookvol13 COQ details
Goal: Proving Axiom Correct

\index{Coquand, Thierry}
\index{Huet, G\'erard}
\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@misc{COQx16,
 author = {Coquand, Thierry and Huet, G\'erard and Paulin, Christine},
 title = "The COQ Proof Assistant",
 year = "2016",
 url = "https://coq.inria.fr"
}

\index{Coquand, Thierry}
\index{Huet, G\'erard}
\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@misc{COQx16a,
 author = {Coquand, Thierry and Huet, G\'erard and Paulin, Christine},
 title = "COQ Proof Assistant Library Coq.ZArith.Znumtheory",
 year = "2016",
 url = "https://coq.inria.fr/library/Coq.ZArith.Znumtheory.html"
}

\index{Parent, Catherine}
\begin{chunk}{axiom.bib}
@techreport{Pare94,
 author = "Parent, Catherine",
 title = "Synthesizing proofs from programs in the Calculus of Inductive
 Constructions",
 year = "1994",
 institution = {Ecole Normale Sup\'erieure de Lyon},
 abstract =
 "In type theory, a proof can be represented as a typed $\lambda$term.
 There exist methods to mark logical parts in proofs and extract their
 algorithmic contents. The result is a correct program with respect to
 a specification. This paper focuses on the inverse problem: how to
 generate a proof from its specification. The framework is the Calculus
 of Inductive Constructions. A notion of coherence is introduced between
 a specification and a program containing types but no logical proofs.
 This notion is based on the definition of an extraction function called
 the weak extraction. Such a program can give a method to reconstruct a
 set of logical properties needed to have a proof of the initial
 specification. This can be seen either as a method of proving programs
 or as a method of synthetically describing proofs.",
 paper = "Pare94.pdf"
+@book{Acze13,
+ author = "Aczel, Peter et al.",
+ title = "Homotopy Type Theory: Univalent Foundations of Mathematics",
+ publisher = "Institute for Advanced Study",
+ year = "2013",
+ url = "https://hott.github.io/book/nightly/hottletter1075g3c53219.pdf",
+ paper = "Acze13.pdf"
}
\end{chunk}
\index{ParentVigouroux, Catherine}
+\index{Huet, G\'erard}
\begin{chunk}{axiom.bib}
@article{Pare97,
 author = "ParentVigouroux, Catherine",
 title = "Verifying programs in the Calculus of Inductive Constructions",
 year = "1997",
 journal = "Formal Aspects of Computing",
 volume = "9",
 number = "5",
 pages = "484517",
+@inproceedings{Huet87,
+ author = {Huet, G\'erard},
+ title = "Induction Principles Formalized in the Calculus of Constructions",
+ booktitle = "TAPSOFT 87",
+ publisher = "SpringerVerlag",
+ series ="LNCS 249",
+ year = "1987",
+ pages = "276--286",
abstract =
 "This paper deals with a particular approach to the verification of
 functional programs. A specification of a program can be represented
 by a logical formula. In a constructive framework, developing a program
 then corresponds to proving this formula. Given a specification and a
 program, we focus on reconstructing a proof of the specification whose
 algorithmic contents corresponds to the given program. The best we can
 hope is to generate proof obligations on atomic parts of the program
 corresponding to logical properties to be verified. First, this paper
 studies a weak extraction of a program from a proof that keeps track
 of intermediate specifications. From such a program, we prove the
 determinism of retrieving proof obligations. Then, heuristic methods
 are proposed for retrieving the proof from a natural program containing
 only partial annotations. Finally, the implementation of this methos as
 a tactic of the {\sl Coq} proof assistant is presented.",
 paper = "Pare97.pdf"
+ "The Calculus of Constructions is a higherorder formalism for writing
+ constructive proofs in a natural deduction style, inspired from work
+ by de Bruijn, Girard, MartinLof, and Scott. The calculus and its
+ syntactic theory were presented in Coquand's thesis, and an
+ implementation by the author was used to mechanically verify a
+ substantial number of proofs demonstrating the power of expression of
+ the formalism. The Calculus of Constructions is proposed as a
+ foundation for the design of programming environments where programs
+ are developed consistently with formal specifications. The current
+ paper shows how to define inductive concepts in the calculus.
+
+ A very general induction schema is obtained by postulating all
+ elements of the type of interest to belong to the standard
+ interpretation associated with a predicate map. This is similar to the
+ treatment of D. Park, but the power of expression of the formalism
+ permits a very direct treatment, in a language that is formalized
+ enough to be actually implemented on a computer. Special instances of
+ the induction schema specialize to Noetherian induction and Structural
+ induction over any algebraic type. Computational Induction is treated
+ in an axiomatization of Domain Theory in Constructions. It is argued
+ that the resulting principle is more powerful than LCF's, since the
+ restriction on admissibility is expressible in the object language.",
+ paper = "Huet87.pdf"
}
\end{chunk}
\index{ParentVigouroux, Catherine}
\begin{chunk}{axiom.bib}
@misc{Pare96,
 author = "ParentVigouroux, Catherine",
 title = "Natural proofs versus programs optimization in the
 Calculus of Inductive Constructions",
 year = "1996",
 abstract =
 "This paper presents how to automatically prove that an 'optimized'
 program is correct with respect to a set of given properties that is a
 specification. Proofs of specifications contain logical and
 computational parts. Programs can be seen as computational parts of
 proofs. They can thus be extracted from proofs and be certified to be
 correct. The inverse problem can be solved: it is possible to
 reconstruct proof obligations from a program and its specification.
 The framework is a type theory where a proof can be represented as a
 typed $\lambda$term and, particularly, the Calculus of Inductive
 Constructions. This paper shows how programs can be simplified in
 order to be written in a much closer way to the ML one's. Indeed,
 proofs structures are often much more heavy than program structures.
 The problem is consequently to consider natural programs (in a ML sense)
 and see how to retrieve natural structures of proofs from them.",
 paper = "Pare96.pdf"
}

\end{chunk}
\index{Parent, Catherine}
+\index{Frade, Maria Joao}
\begin{chunk}{axiom.bib}
@inproceedings{Pare93,
 author = "Parent, Catherine",
 title = "Developing Certified Programs in the System Coq: The Program
 Tactic",
 booktitle = "Proc. Int. Workshop on Types for Proofs and Programs",
 publisher = "SpringerVerlag",
 isbn = "3540580859",
 pages = "291312",
 year = "1993",
 abstract =
 "The system {\sl Coq} is an environment for proof development based on
 the Calculus of Constructions extended by inductive definitions. The
 specification of a program can be represented by a logical formula and
 the program itself can be extracted from the constructive proof of the
 specification. In this paper, we look at the possibility of inverting
 the specification and a program, builds the logical condition to be
 verified in order to obtain a correctness proof of the program. We
 build a proof of the specification from the program from which the
 program can be extracted. Since some information cannot automatically
 be inferred, we show how to annotate the program by specifying some of
 its parts in order to guide the search for the proof.",
 paper = "Pare93.ps"
+@article{Frad08,
+ author = "Frade, Maria Joao",
+ title = "Calculus of Inductive Construction. Software Formal Verification",
+ year = "2008",
+ url = "http://www4.di.uminho.pt/~jno/mfes/0809/SFVCIC.pdf",
+ paper = "Frad08.pdf"
}
\end{chunk}
\index{Coquand, Thierry}
\index{Huet, G\'erard}
\begin{chunk}{axiom.bib}
@techreport{Coqu86,
 author = {Coquand, Thierry and Huet, G\'erard},
 title = "The Calculus of Constructions",
 year = "1986",
 institution = "INRIA Centre de Rocquencourt",
 number = "530",
 url = "https://hal.inria.fr/inria00076024/document",
 abstract =
 "The Calculus of Constructions is a higherorder formalism for
 constructive proofs in natural deduction style. Every proof is a
 $\lambda$expression, typed with propositions of the underlying
 logic. By removing types we get a pure $\lambda$expression,
 expressing its associated algorithm. Computing this
 $\lambda$expression corresponds roughly to cutelimination. It is our
 thesis that (as already advocated by MartinLof) the CurryHoward
 correspondence between propositions and types is a powerful paradigm
 for Computer Science. In the case of Constructions, we obtain the
 notion of a very highlevel functional programming language, with
 complex polymorphism wellsuited for modules specification. The notion
 of type encompasses the usual notioin of data type, but allows as well
 arbitrarily complex algorithmic specifications. We develop the basic
 theory of a Calculus of Constructions, and prove a strong
 normalization theorem showing that all computations terminate.
 Finally, we suggest various extensions to stronger calculi.",
 paper = "Coqu86.pdf"
+@misc{Wiki17,
+ author = "Wikipedia",
+ title = "Calculus of constructions",
+ year = "2017",
+ url = "https://en.wikipedia.org/wiki/Calculus\_of\_constructions"
}
\end{chunk}
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index e059e98..e67e962 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5620,6 +5620,8 @@ books/bookvolbib Stephane Delliere Phd Thesis
books/bookvol13 LEAN source code and GCD proof
20170103.01.tpd.patch
books/bookvolbib COQ Program Construction papers
+20170104.01.tpd.patch
+books/bookvol13 books/bookvol13 COQ details

1.7.5.4