From 31ab16961e1bae82a83066b4b8c9c7008622d01f Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Fri, 3 Mar 2017 17:42:39 -0500
Subject: [PATCH] books/bookvolbib add additional references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Correct
\index{Sterling, Jonathan}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@misc{Ster17,
 author = "Sterling, Jonathan and Harper, Robert",
 title = "Algebraic Foundations of Proof Refinement",
 link = "\url{http://www.cs.cmu.edu/~rwh/papers/afpr/afpr.pdf}",
 year = "2017",
 abstract =
 "We contribute a general apparatus for {\sl dependent} tactic-based
 proof refinement in the LCF tradition, in which the statements of
 subgoals may express a dependency on the proofs of other subgoals;
 this form of dependency is extremely useful and can serve as an
 {\sl algorithmic} alternative to extensions of LCF based on non-local
 instantiation of schematic variables. Additionally, we introduce a
 novel behavioral distinction between {\sl refinement rules} and
 {\sl tactics} based on naturality. Our framework, called Dependent
 LCF, is already deployed in the nascent RedPRL proof assistant for
 computational cubical type theory.",
 paper = "Ster17.pdf"
}
\end{chunk}
\index{Plotkin, G.D.}
\begin{chunk}{axiom.bib}
@article{Plot77,
 author = "Plotkin, G.D.",
 title = "LCF Considered as a Programming Language",
 journal = "Theoretical Computer Science",
 volume = "5",
 year = "1977",
 pages = "223--255",
 link = "\url{http://homepages.inf.ed.ac.uk/gdp/publications/LCF.pdf}",
 abstract =
 "The paper studies connections between denotational and operational
 semantics for a simple programming language based on LCF. It begins
 with the connection between the behaviour of a program and its
 denotation. It turns out that a program denotes $\bot$ in any of
 several possible semantics iff it does not terminate. From this it
 follows that if two terms have the same denotation in one of these
 semantics, they have the same behaviour in all contexts. The converse
 fails for all the semantics. If, however, the language is extended to
 allow certain parallel facilities behavioural equivalence does coincide
 with denotational equivalence in one of the semantics considered, which
 may therefore be called ``fully abstract''. Next a connection is given
 which actually determines the semantics up to isomorphism from the
 behaviour alone. Conversely, by allowing further parallel facilities,
 every r.e. element of the fully abstract semantics becomes definable,
 thus characterising the programming language, up to interdefinability,
 from the set of r.e. elements of the domains of the semantics.",
 paper = "Plot77.pdf"
}
\end{chunk}
\index{Milner, Robin}
\begin{chunk}{axiom.bib}
@techreport{Miln72,
 author = "Milner, Robin",
 title = "Logic for Computable Functions: Description of a Machine
 Implementation",
 year = "1972",
 institution = "Stanford Artificial Intelligence Project",
 number = "STAN-CS-72-288",
 link =
 "\url{http://i.stanford.edu/pub/cstr/reports/cs/tr/72/288/CS-TR-72-288.pdf}",
 abstract =
 "LCF is based on a logic by Dana Scott, proposed by him at Oxford in the
 fall of 1969, for reasoning about computable functions.",
 paper = "Miln72.pdf"
}
\end{chunk}
\index{Gordon, Mike}
\begin{chunk}{axiom.bib}
@misc{Gord96,
 author = "Gordon, Mike",
 title = "From LCF to HOL: a short history",
 link = "\url{http://www.cl.cam.ac.uk/~mjcg/papers/HolHistory.pdf}",
 year = "1996",
 paper = "Gord96.pdf"
}
\end{chunk}
\index{Trostle, Anne}
\begin{chunk}{axiom.bib}
@misc{Tros13,
 author = "Trostle, Anne",
 title = "An Algorithm for the Greatest Common Divisor",
 year = "2013",
 link = "\url{http://www.nuprl.org/MathLibrary/gcd/}"
}
\end{chunk}
\index{Babovsky, Hans}
\index{Grabmeier, Johannes}
\begin{chunk}{axiom.bib}
@inproceedings{Babo16,
 author = "Babovsky, Hans and Grabmeier, Johannes",
 title = "Calculus and design of discrete velocity models using
 computer algebra",
 booktitle = "AIP Conference Proceedings",
 volume = "1786",
 isbn = "978-0-7354-1448-8",
 link = "\url{http://aip.scitation.org/doi/pdf/10.1063/1.4967672}",
 year = "2016",
 abstract =
 "In [2,3], a framework for a calculus with Discrete Velocity Models
 (DVM) has been derived. The rotational symmetry of the discrete
 velocities can be modelled algebraically by the action of the cyclic
 group $C_4$ -- or including reflections of the dihedral group $D_4$.
 Taking this point of view, the linearized collision operator can be
 represented in a compact form as a matrix of elements in the group
 algebra. Or in other words, by choosing a special numbering it
 exhibits a certain block structure which lets it appear as a matrix
 with entries in a certain polynomial ring. A convenient way for
 approaching such a structure is the use of a computer algebra system
 able to treat these (predefined) algebraic structures. We use the
 computer algebra system FriCAS/AXIOM [4,5] for the generation of the
 velocity and the collision sets and for the analysis of the structure
 of the collision operator. Concerning the fluid dynamic limit, the
 system provides the characterization of sets of collisions and their
 contribution to the flow parameters. It allows the design of
 rotationally invariant symmetric models for prescribed Prandtl
 numbers. The implementation in FriCAS/AXIOM is explained and its
 results for a 25-velocity model are presented.",
 paper = "Babo16.pdf",
 keywords = "axiomref"
}
\end{chunk}
\index{Hebisch, Waldemer}
\begin{chunk}{axiom.bib}
@article{Hebi15,
 author = "Hebisch, Waldemer",
 title = "Integration in terms of exponential integrals and incomplete
 gamma functions",
 year = "2015",
 journal = "ACM Communications in Computer Algebra",
 volume = "49",
 number = "3",
 pages = "98--100",
 abstract =
 "Indefinite integration means that given $f$ in some set we want to
 find $g$ from possibly larger set such that $f = g^\prime$. When $f$
 and $g$ are required to be elementary functions due to work of among
 others Risch, Rothstein, Trager, Bronstein (see [1] for references)
 integration problem is now solved at least in theory. In his thesis
 Cherry gave algorithm to integrate transcendental elementary functions
 in terms of exponential integrals. In [2] he gave algorithm to
 integrate transcendental elementary functions in so called reduced
 fields in terms of error functions. Knowles [3] and [4] extended this
 allowing also liouvillian integrands and weakened restrictions on the
 field containing integrands. We extend previous results allowing
 incomplete gamma function $\Gamma(a, x)$ with rational $a$. Also, our
 theory can handle algebraic extensions and is complete jointly (and
 not only separately for Ei and erf). In purely transcendental case our
 method should be more efficient and easier to implement than [2]. In
 fact, it seems that no system currently implements algorithm from [2],
 while partial implementation of our method in FriCAS works well enough
 to be turned on by default. With our approach non-reduced case from
 [2] can be handled easily. We hope that other classes of special
 functions can be handled in a similar way, in particular irrational
 case of incomplete gamma function and polylogarithms (however
 polylogarithms raise tricky theoretical questions)."
}
\end{chunk}
\index{Knowles, Paul}
\begin{chunk}{axiom.bib}
@article{Know93,
 author = "Knowles, Paul",
 title = "Integration of a Class of Transcendental Liouvillian Functions
 with Error-Functions, Part I",
 journal = "Journal of Symbolic Computation",
 volume = "13",
 number = "5",
 pages = "525--543",
 year = "1993",
 abstract =
 "This paper gives a decision-procedure for the symbolic integration of
 a certain class of transcendental Liouvillian functions in terms of
 elementary functions and error-functions. An example illustrating the
 use of the decision-procedure is given.",
 paper = "Know93.pdf"
}
\end{chunk}
\index{Knowles, Paul}
\begin{chunk}{axiom.bib}
@article{Know93a,
 author = "Knowles, Paul",
 title = "Integration of a Class of Transcendental Liouvillian Functions
 with Error-Functions, Part II",
 journal = "Journal of Symbolic Computation",
 volume = "16",
 number = "3",
 year = "1993",
 pages = "227--239",
 abstract =
 "This paper extends the decision procedure for the symbolic
 integration of a certain class of transcendental Liouvillian functions
 in terms of elementary functions and error-functions given in Knowles
 (1992) to allow a much larger class of integrands. Examples
 illustrating the use of the decision procedure are given.",
 paper = "Know93a.pdf"
}
\end{chunk}
\index{Cherry, G.W.}
\begin{chunk}{axiom.bib}
@article{Cher85,
 author = "Cherry, G.W.",
 title = "Integration in Finite Terms with Special Functions:
 The Error Function",
 journal = "J. Symbolic Computation",
 year = "1985",
 volume = "1",
 pages = "283--302",
 abstract =
 "A decision procedure for integrating a class of transcendental
 elementary functions in terms of elementary functions and error
 functions is described. The procedure consists of three mutually
 exclusive cases. In the first two cases a generalised procedure for
 completing squares is used to limit the error functions which can
 appear in the integral of a finite number. This reduces the problem
 to the solution of a differential equation and we use a result of
 Risch (1969) to solve it. The third case can be reduced to the
 determination of what we have termed $\sum$-decompositions. The result
 presented here is the key procedure to a more general algorithm which
 is described fully in Cherry (1983).",
 paper = "Cher85.pdf"
}
\end{chunk}
\index{Lisonek, Petr}
\index{Paule, Peter}
\index{Strehl, Volker}
\begin{chunk}{axiom.bib}
@article{Liso93,
 author = "Lisonek, Petr and Paule, Peter and Strehl, Volker",
 title = "Improvement of the Degree Setting in Gosper's Algorithm",
 journal = "J. Symbolic Computation",
 volume = "16",
 year = "1993",
 pages = "243--258",
 link =
 "\url{http://www.sciencedirect.com/science/article/pii/S0747717183710436}",
 abstract =
 "A detailed study of the degree setting for Gosper's algorithm for
 indefinite hypergeometric summation is presented. In particular, we
 discriminate between rational and proper hypergeometric input. As a
 result, the critical degree bound can be improved in the former case.",
 paper = "Liso93.pdf"
}
\end{chunk}
\index{Man, YiuKwong}
\begin{chunk}{axiom.bib}
@article{Manx93,
 author = "Man, YiuKwong",
 title = "On Computing Closed Forms for Indefinite Summations",
 journal = "J. Symbolic Computation",
 volume = "16",
 pages = "335--376",
 year = "1993",
 link =
 "\url{http://www.sciencedirect.com/science/article/pii/S0747717183710539}",
 abstract =
 "A decision procedure for finding closed forms for indefinite
 summation of polynomials, rational functions, quasipolynomials and
 quasirational functions is presented. It is also extended to deal with
 some nonhypergeometric sums with rational inputs, which are not
 summable by means of Gosper's algorithm. Discussion of its
 implementation, analysis of degree bounds and some illustrative
 examples are included.",
 paper = "Manx93.pdf"
}
\end{chunk}
\index{Paule, Peter}
\begin{chunk}{axiom.bib}
@article{Paul95,
 author = "Paule, Peter",
 title = "Greatest Factorial Factorization and Symbolic Summation",
 journal = "Journal of Symbolic Computation",
 year = "1995",
 volume = "20",
 pages = "235--268",
 link =
 "\url{http://www.sciencedirect.com/science/article/pii/S0747717185710498}",
 abstract =
 "The greatest factorial factorization (GFF) of a polynomial provides
 an analogue to squarefree factorization but with respect to integer
 shifts instead to multiplicities. We illustrate the fundamental role
 of that concept in the context of symbolic summation. Besides a
 detailed discussion of the basic GFF notions we present a new approach
 to the indefinite rational summation problem as well as to Gosper's
 algorithm for summing hypergeometric sequences.",
 paper = "Paul95.pdf"
}
\end{chunk}
\index{Petkovsek, Marko}
\begin{chunk}{axiom.bib}
@article{Petk92,
 author = "Petkovsek, Marko",
 title = "Hypergeometric solutions of linear recurrences with
 polynomial coefficients",
 journal = "J. Symbolic Computation",
 volume = "14",
 pages = "243--264",
 year = "1992",
 link =
 "\url{http://www.sciencedirect.com/science/article/pii/0747717192900386}",
 abstract =
 "We describe algorithm Hyper which can be used to find all
 hypergeometric solutions of linear recurrences with polynomial
 coefficients.",
 paper = "Petk92.pdf"
}
\end{chunk}
\index{Grabm\"uller, Martin}
\begin{chunk}{axiom.bib}
@misc{Grab17,
 author = {Grabm\"uller, Martin},
 title = "Algorithm W",
 year = "2017",
 link = "\url{https://github.com/mgrabmueller/AlgorithmW}",
 abstract =
 "In this paper we develop a complete implementation of Algorithm W for
 Hindley-Milner polymorphic type inference in Haskell.",
 paper = "Grab17.pdf"
}
\end{chunk}
\index{Heeren, Bastiaan}
\index{Hage, Jurriaan}
\index{Swierstra, Doaitse}
\begin{chunk}{axiom.bib}
@misc{Heer02,
 author = "Heeren, Bastiaan and Hage, Jurriaan and Swierstra, Doaitse",
 title = "Generalizing Hindley-Milner Type Inference Algorithms",
 year = "2002",
 link = "\url{https://pdfs.semanticscholar.org/8983/233b3dff2c5b94efb31235f62bddc22dc899.pdf}",
 abstract =
 "Type inferencing according to the standard algorithms $W$ and $M$
 often yields uninformative error messages. Many times, this is a
 consequence of a bias inherent in the algorithms. The method
 developed here is to first collect constraints from the program, and
 to solve these afterwards, possibly under the influence of a
 heuristic. We show the soundness and completeness of our algorithm.
 The algorithms $W$ and $M$ turn out to be deterministic instances of our
 method, giving the correctness for $W$ and $M$ with respect to the
 Hindley-Milner typing rules for free. We also show that our algorithm
 is more flexible, because it naturally allows the generation of
 multiple messages.",
 paper = "Heer02.pdf"
}
\end{chunk}
\index{Milner, Robin}
\begin{chunk}{axiom.bib}
@article{Miln78,
 author = "Milner, Robin",
 title = "A Theory of Type Polymorphism in Programming",
 year = "1978",
 journal = "Journal of Computer and System Sciences",
 volume = "17",
 pages = "348--375",
 link = "\url{https://courses.engr.illinois.edu/cs421/sp2013/project/milner-polymorphism.pdf}",
 abstract =
 "The aim of this work is largely a practical one. A widely employed
 style of programming, particularly in structure-processing languages
 which impose no discipline of types, entails defining procedures which
 work well on objects of a wide variety. We present a formal type
 discipline for such polymorphic procedures in the context of a simple
 programming language, and a compile-time type-checking algorithm $W$
 which enforces the discipline. A Semantic Soundness Theorem (based on
 a formal semantics for the language) states that well-typed programs
 cannot ``go wrong'' and a Syntactic Soundness Theorem states that if $W$
 accepts a program then it is well typed. We also discuss extending
 these results to richer languages; a type-checking algorithm based on
 $W$ is in fact already implemented and working, for the metalanguage ML
 in the Edinburgh LCF system.",
 paper = "Miln78.pdf"
}
\end{chunk}
\index{Wadler, Philip}
\index{Blott, Stephen}
\begin{chunk}{axiom.bib}
@inproceedings{Wadl88,
 author = "Wadler, Philip and Blott, Stephen",
 title = "How to make ad-hoc polymorphism less ad hoc",
 booktitle = "Proc 16th ACM SIGPLAN-SIGACT Symp. on Princ. of Prog. Lang",
 isbn = "0-89791-294-2",
 pages = "60--76",
 year = "1988",
 link = "\url{http://202.3.77.10/users/karkare/courses/2010/cs653/Papers/adhoc-polymorphism.pdf}",
 abstract =
 "This paper presents {\sl type classes}, a new approach to {\sl ad-hoc}
 polymorphism. Type classes permit overloading of arithmetic operators
 such as multiplication, and generalise the ``eqtype variables'' of
 Standard ML. Type classes extend the Hindley/Milner polymorphic type
 system, and provide a new approach to issues that arise in object-oriented
 programming, bounded type quantification, and abstract data types. This
 paper provides an informal introduction to type classes, and defines them
 formally by means of type inference rules.",
 paper = "Wadl88.pdf"
}
\end{chunk}

 books/bookvolbib.pamphlet | 430 +++++++++++++++++++++++++++++
 changelog | 2 +
 patch | 570 ++++++++++++++++++++++++++++
 src/axiom-website/patches.html | 2 +
 4 files changed, 811 insertions(+), 193 deletions(-)
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index bfe8a24..5c3e2d8 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -6606,6 +6606,18 @@ Calculemus (2011) Springer
\end{chunk}
+\index{Gordon, Mike}
+\begin{chunk}{axiom.bib}
+@misc{Gord96,
+ author = "Gordon, Mike",
+ title = "From LCF to HOL: a short history",
+ year = "1996",
+ link = "\url{http://www.cl.cam.ac.uk/~mjcg/papers/HolHistory.pdf}",
+ paper = "Gord96.pdf"
+}
+
+\end{chunk}
+
\index{Goguen, Healfdene}
\index{McBride, Conor}
\index{McKinna, James}
@@ -6663,6 +6675,21 @@ Calculemus (2011) Springer
\end{chunk}
+\index{Grabm\"uller, Martin}
+\begin{chunk}{axiom.bib}
+@misc{Grab17,
+ author = {Grabm\"uller, Martin},
+ title = "Algorithm W",
+ year = "2017",
+ link = "\url{https://github.com/mgrabmueller/AlgorithmW}",
+ abstract =
+ "In this paper we develop a complete implementation of Algorithm W for
+ HindleyMilner polyhmorphic type inference in Haskell",
+ paper = "Grab17.pdf"
+}
+
+\end{chunk}
+
\index{Gries, David}
\begin{chunk}{axiom.bib}
@book{Grie81,
@@ -6731,6 +6758,32 @@ Calculemus (2011) Springer
\end{chunk}
+\index{Heeren, Bastiaan}
+\index{Hage, Jurriaan},
+\index{Swierstra, Doaitse}
+\begin{chunk}{axiom.bib}
+@misc{Heer02,
+ author = "Heeren, Bastiaan and Hage, Jurriaan and Swierstra, Doaitse",
+ title = "Generalizing HindleyMilner Type Inference Algorithms",
+ year = "2002",
+ link = "\url{https://pdfs.semanticscholar.org/8983/233b3dff2c5b94efb31235f62bddc22dc899.pdf}",
+ abstract =
+ "Type inferencing according to the standard algorithms $W$ and $M$
+ often yields uninformative error messages. Many times, this is a
+ consequence of a bias inherent in the algorithms. The method
+ developed here is to first collect constraints from the program, and
+ to solve these afterwards, possibly under the influence of a
+ heuristic. We show the soundness and completeness of our algorithm.
+ The algorithms $W$ and $M$ turn out to be deterministic instances of our
+ method, giving the correctness for $W$ and $M$ with respect to the
+ HindleyMilner typing rules for free. We also show that our algorithm
+ is more flexible, because it naturally allows the generation of
+ multiple messages",
+ paper = "Heer02.pdf"
+}
+
+\end{chunk}
+
\index{Hoare, C. A. R.}
\begin{chunk}{axiom.bib}
@article{Hoar69,
@@ -7276,6 +7329,54 @@ Calculemus (2011) Springer
\end{chunk}
+\index{Milner, Robin}
+\begin{chunk}{axiom.bib}
+@techreport{Miln72,
+ author = "Milner, Robert",
+ title = "Logic for Computable Functions: Description of a Machine
+ Implementation",
+ year = "1972",
+ institution = "Stanford Artificial Intelligence Project",
+ number = "STANCS72288",
+ link =
+ "\url{http://i.standford.edu/pub/cstr/reports/cs/tr/72/288/CSTR72288.pdf}",
+ abstract =
+ "LCF is based on a logic by Dana Scott, proposed by him at Oxford in the
+ fall of 1969, for reasoning about computable functions.",
+ paper = "Miln72.pdf"
+}
+
+\end{chunk}
+
+\index{Milner, Robin}
+\begin{chunk}{axiom.bib}
+@article{Miln78,
+ author = "Milner, Robin",
+ title = "A Theory of Type Polymorphism in Programming",
+ year = "1978",
+ journal = "Journal of Computer and System Sciences",
+ volume = "17",
+ pages = "348375",
+ link = "\url{https://courses.engr.illinois.edu/cs421/sp2013/project/milnerpolymorphism.pdf}",
+ abstract =
+ "The aim of this work is largely a practical one. A widely employed
+ style of programming, particularly in structureprocessing languages
+ which impose no discipline of types, entails defining procedures which
+ work well on objects of a wide variety. We present a formal type
+ discipline for such polymorphic procedures in the context of a simple
+ programming language, and a compile time typechecking algorithm $W$
+ which enforces the discipline. A Semantic Soundness Theorem (based on
+ a formal semantics for the language) states that welltype programs
+ cannot “go wrong” and a Syntactic Soundness Theorem states that if $W$
+ accepts a program then it is well typed. We also discuss extending
+ these results to richer languages; a typechecking algorithm based on
+ $W$ is in fact already implemented and working, for the metalanguage ML
+ in the Edinburgh LCF system,",
+ paper = "Miln78.pdf"
+}
+
+\end{chunk}
+
\index{MohringPaulin, Christine}
\begin{chunk}{axiom.bib}
@misc{Mohr14,
@@ -7661,6 +7762,38 @@ Munteanu, Bogdan; Brooker, Marc; Deardeuff, Michael
\end{chunk}
+\index{Plotkin, G.D.}
+\begin{chunk}{axiom.bib}
+@article{Plot77,
+ author = "Plotkin, G.D.",
+ title = "LCF Considered as a Programming Language",
+ journal = "Theoretical Computer Science",
+ volume = "5",
+ year = "1977",
+ pages = "223255",
+ link = "\url{http://homepages.inf.ed.ac.uk/gdp/publications/LCF.pdf}",
+ abstract =
+ "The paper studies connections between denotational and operational
+ semantics for a simple programming language based on LCF. It begins
+ with the connection between the behaviour of a program and its
+ denotation. It turns out that a program denotes $\bot$ in any of
+ several possible semantics iff it does not terminate. From this it
+ follows that if two terms hae the same denotation in one of these
+ semantics, they have the same behaviour in all contexts. The converse
+ fails for all the semantics. If, however, the language is extended to
+ allow certain parallel facilities behavioural equivalence does coincide
+ with denotational equivalence in one of the semantics considered, which
+ may therefore be called ``fully abstract''. Next a connection is given
+ which actually determines the semantics up to isomorphism from the
+ behaviour alone. Conversely, by allowing further parallel facilities,
+ every r.e. element of the fully abstract semantics becomes definable,
+ thus characterising the programming language, up to interdefinability,
+ from the set of r.e. elements of the domains of the semantics.",
+ paper = "Plot77.pdf"
+}
+
+\end{chunk}
+
\index{Poll, Erik}
\index{Thompson, Simon}
\begin{chunk}{axiom.bib}
@@ -7971,6 +8104,30 @@ Munteanu, Bogdan; Brooker, Marc; Deardeuff, Michael
\end{chunk}
+\index{Sterling, Jonathan}
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@misc{Ster17,
+ author = "Sterling, Jonathan and Harper, Robert",
+ title = "Algebraic Foundations of Proof Refinement",
+ link = "\url{http://www.cs.cmu.edu/~rwh/papers/afpr/afpr.pdf}",
+ year = "2017",
+ abstract =
+ "We contribute a general apparatus for {\sl dependent} tacticbased
+ proof refinement in the LCF tradition, in which the statements of
+ subgoals may express a dependency on the proofs of other subgoals;
+ this form of dependency is extremely useful and can serve as an
+ {\sl algorithmic} alternative to extensions of LCF based on nonlocal
+ instantiation of schematic variables. Additionally, we introduce a
+ novel behavioral distinction between {\sl refinement rules} and
+ {\sl tactics} based on naturality. Our framework, called Dependent
+ LCF, is already deployed in the nascent RedPRL proof assistant for
+ computational cubical type theory.",
+ paper = "Ster17.pdf"
+}
+
+\end{chunk}
+
\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Th\'ery, Laurent}
@@ -8020,9 +8177,45 @@ Munteanu, Bogdan; Brooker, Marc; Deardeuff, Michael
\end{chunk}
+\index{Trostle, Anne}
+\begin{chunk}{axiom.bib}
+@misc{Tros13,
+ author = "Trostle, Anne",
+ title = "An Algorithm for the Greatest Common Divisor",
+ link = "\url{http://www.nuprl.org/MathLibrary/gcd/}",
+ year = "2013"
+}
+
+\end{chunk}
+
\subsection{W} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Wadler, Philip}
+\index{Blott, Stephen}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wadl88,
+ author = "Wadler, Philip and Blott, Stephen",
+ title = "How to make adhoc polymorphism less ad hoc",
+ booktitle = "Proc 16th ACM SIGPLANSIGACT Symp. on Princ. of Prog. Lang",
+ isbn = "0897912942",
+ pages = "6076",
+ year = "1988",
+ link = "\url{http://202.3.77.10/users/karkare/courses/2010/cs653/Papers/adhocpolymorphism.pdf}",
+ abstract =
+ "This paper presents {\sl type classes}, a new approach to {\sl adhoc}
+ polymorphism. Type classes permit overloading of arithmetic operators
+ such as multiplication, and generalise the ``eqtype variables'' of
+ Standard ML Type classes extend the Hindley/Milner polymorphic type
+ system, and provide a new approach to issues that arise in objectoriented
+ programming, bounded type quantification, and abstract data types. This
+ paper provides an informal introduction to type classes, and defines them
+ formally by means of type inference rules",
+ paper = "Wadl88.pdf"
+}
+
+\end{chunk}
+
+\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@misc{Wadl14,
author = "Wadler, Philip",
@@ -9125,7 +9318,7 @@ Lecture Notes in Computer Science Vol 378 pp491-497
compute the integral. We also generalize a result of Rothstein that
gives us a necessary condition for elementary integrability, and
provide examples of its use.",
 paper = "Bro90a.pdf"
+ paper = "Bron90a.pdf"
}
\end{chunk}
@@ -9342,12 +9535,17 @@ The Electronic J of Math. and Tech. Vol 2, No 3, ISSN 1933-2823
\end{chunk}
\index{Cherry, G.W.}
\begin{chunk}{ignore}
\bibitem[Cherry 84]{Che84} Cherry, G.W.
 title = "Integration in Finite Terms with Special Functions: The Error Function",
J. Symbolic Computation (1985) Vol 1 pp283302
 abstract = "
 A decision procedure for integrating a class of transcendental
+\begin{chunk}{axiom.bib}
+@article{Cher85,
+ author = "Cherry, G.W.",
+ title = "Integration in Finite Terms with Special Functions:
+ The Error Function",
+ journal = "J. Symbolic Computation",
+ year = "1985",
+ volume = "1",
+ pages = "283302",
+ abstract =
+ "A decision procedure for integrating a class of transcendental
elementary functions in terms of elementary functions and error
functions is described. The procedure consists of three mutually
exclusive cases. In the first two cases a generalised procedure for
@@ -9358,7 +9556,8 @@ J. Symbolic Computation (1985) Vol 1 pp283-302
determination of what we have termed $\sum$decompositions. The resutl
presented here is the key procuedure to a more general algorithm which
is described fully in Cherry (1983).",
 paper = "Che84.pdf"
+ paper = "Cher85.pdf"
+}
\end{chunk}
@@ -9570,6 +9769,44 @@ Cambridge Unversity Press, Cambridge, 1916
\end{chunk}
+\index{Hebisch, Waldemer}
+\begin{chunk}{axiom.bib}
+@article{Hebi15,
+ author = "Hebisch, Waldemer",
+ title = "Integration in terms of exponential integrals and incomplete
+ gamma functions",
+ year = "2015",
+ journal = "ACM Communications in Computer Algebra",
+ volume = "49",
+ Issue = "3",
+ pages = "98100",
+ abstract =
+ "Indefinite integration means that given $f$ in some set we want to
+ find $g$ from possibly larger set such that $f = g^\prime$. When $f$
+ and $g$ are required to be elementary functions due to work of among
+ others Risch, Rothstein, Trager, Bronstein (see [1] for references)
+ integration problem is now solved at least in theory. In his thesis
+ Cherry gave algorithm to integrate transcendental elementary functions
+ in terms of exponential integrals. In [2] he gave algorithm to
+ integrate transcendental elementary functions in so called reduced
+ fields in terms of error functions. Knowles [3] and [4] extended this
+ allowing also liovillian integrands and weakened restrictions on the
+ field containing integrands. We extend previous results allowing
+ incomplete gamma function $\Gamma(a, x)$ with rational $a$. Also, our
+ theory can handle algebraic extensions and is complete jointly (and
+ not only separately for Ei and erf). In purely transcendental case our
+ method should be more efficient and easier to implement than [2]. In
+ fact, it seems that no system currently implements algorithm from [2],
+ while partial implementation of our method in FriCAS works well enough
+ to be turned on by default. With our approach nonreduced case from
+ [2] can be handled easily. We hope that other classes of special
+ functions can be handled in a similar way, in particular irrational
+ case of incomplete gamma function and polylogarithms (however
+ polylogarithms raise tricky theoretical questions)."
+}
+
+\end{chunk}
+
\index{Hermite, E.}
\begin{chunk}{axiom.bib}
@misc{Herm1872,
@@ -9660,19 +9897,46 @@ Rich, A.D.
\end{chunk}
\index{Knowles, P.}
\begin{chunk}{ignore}
\bibitem[Knowles 93]{Know93} Knowles, P.
 title = "Integration of a class of transcendental liouvillian functions with errorfunctions i",
Journal of Symbolic Computation Vol 13 pp525543 (1993)
+\index{Knowles, Paul}
+\begin{chunk}{axiom.bib}
+@article{Know93,
+ author = "Knowles, Paul",
+ title = "Integration of a Class of Transcendental Liouvillian Functions
+ with ErrorFunctions, Part I",
+ journal = "Journal of Symbolic Computation",
+ volume = "13",
+ number = "5",
+ pages = "525543",
+ year = "1993",
+ abstract =
+ "This paper gives a decisionprocedure for the symbolic integration of
+ a certain class of transcendental Liouvillian functions in terms of
+ elementary functions and errorfunctions. An example illustrating the
+ use of the decisionprocedure is given.",
+ paper = "Know93.pdf"
+}
\end{chunk}
\index{Knowles, P.}
\begin{chunk}{ignore}
\bibitem[Knowles 95]{Know95} Knowles, P.
 title = "Integration of a class of transcendental liouvillian functions with errorfunctions ii",
Journal of Symbolic Computation Vol 16 pp227241 (1995)
+\index{Knowles, Paul}
+\begin{chunk}{axiom.bib}
+@article{Know93a,
+ author = "Knowles, Paul",
+ title = "Integration of a Class of Transcendental Liouvillian Functions
+ with ErrorFunctions, Part II",
+ journal = "Journal of Symbolic Computation",
+ volume = "16",
+ number = "3",
+ year = "1993",
+ pages = "227239",
+ abstract =
+ "This paper extends the decision procedure for the symbolic
+ integration of a certain class of transcendental Liouvillian functions
+ in terms of elementary functions and errorfunctions given in Knowles
+ (1992) to allow a much larger class of integrands. Examples
+ illustrating the use of the decision procedure are given.",
+ paper = "Know93a.pdf"
+}
\end{chunk}
@@ -12368,6 +12632,98 @@ Proc ISSAC 97 pp172-175 (1997)
\end{chunk}
+\index{Lisonek, Petr}
+\index{Paule, Peter},
+\index{Strehl, Volker}
+\begin{chunk}{axiom.bib}
+@article{Liso93,
+ author = "Lisonek, Petr and Paule, Peter and Strehl, Volker",
+ title = "Improvement of the Degree Setting in Gosper's Algorithm",
+ journal = "J. Symbolic Computation",
+ volume = "16",
+ year = "1993",
+ pages = "243258",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/S0747717183710436}",
+ abstract =
+ "A detailed study of the degree setting for Gosper's algorithm for
+ indefinite hypergeometric summation is presented. In particular, we
+ discriminate between rational and proper hypergeometric input. As a
+ result, the critical degree bound can be improved in the former case.",
+ paper = "Liso93.pdf"
+}
+
+\end{chunk}
+
+\index{Man, YiuKwong}
+\begin{chunk}{axiom.bib}
+@article{Manx93,
+ author = "Man, YiuKwong",
+ title = "On Computing Closed Forms for Indefinite Summations",
+ journal = "J. Symbolic Computation",
+ volume = "16",
+ pages = "335376",
+ year = "1993",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/S0747717183710539}",
+ abstract =
+ "A decision procedure for finding closed forms for indefinite
+ summation of polynomials, rational functions, quasipolynomials and
+ quasirational functions is presented. It is also extended to deal with
+ some nonhypergeometric sums with rational inputs, which are not
+ summable by means of Gosper's algorithm. Discussion of its
+ implementation, analysis of degree bounds and some illustrative
+ examples are included.",
+ paper = "Manx93.pdf"
+}
+
+\end{chunk}
+
+\index{Paule, Peter}
+\begin{chunk}{axiom.bib}
+@article{Paul95,
+ author = "Paule, Peter",
+ title = "Greatest Factorial Factorization and Symbolic Summation",
+ journal = "Journal of Symbolic Computation",
+ year = "1995",
+ volume = "20",
+ pages = "235268",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/S0747717185710498}",
+ abstract =
+ "The greatest factorial factorization (GFF) of a polynomial provides
+ an analogue to squarefree factorization but with respect to integer
+ shifts instead to multiplicities. We illustrate the fundamental role
+ of that concept in the context of symbolic summation. Besides a
+ detailed discussion of the basic GFF notions we present a new approach
+ to the indefinite rational summation problem as well as to Gosper's
+ algorithm for summing hypergeometric sequences.",
+ paper = "Paul95.pdf"
+}
+
+\end{chunk}
+
+\index{Petkovsek, Marko}
+\begin{chunk}{axiom.bib}
+@article{Petk92,
+ author = "Petkovsek, Marko",
+ title = "Hypergeometric solutions of linear recurrences with
+ polynomial coefficients",
+ journal = "J. Symbolic Computation",
+ volume = "14",
+ pages = "243264",
+ year = "1992",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/0747717192900386}",
+ abstract =
+ "We describe algorithm Hyper which can be used to find all
+ hypergeometric solutions of linear recurrences with polynomial
+ coefficients.",
+ paper = "Petk92.pdf"
+}
+
+\end{chunk}
+
\index{Schneider, Carsten}
\begin{chunk}{axiom.bib}
@InProceedings{Schn00,
@@ 17342,6 +17698,44 @@ American Mathematical Society (1994)
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Babovsky, Hans}
+\index{Grabmeier, Johannes}
+\begin{chunk}{axiom.bib}
+@inproceedings{Babo16,
+ author = "Babovsky, Hans and Grabmeier, Johannes",
+ title = "Calculus and design of discrete velocity models using
+ computer algebra",
+ booktitle = "AIP Conference Proceedings",
+ volume = "1786",
+ isbn = "9780735414488",
+ link = "\url{http://aip.scitation.org/doi/pdf/10.1063/1.4967672}",
+ year = "2016",
+ abstract =
+ "In [2,3], a framework for a calculus with Discrete Velocity Models
+ (DVM) has been derived. The rotational symmetry of the discrete
+ velocities can be modelled algebraically by the action of the cyclic
+ group $C_4$ -- or including reflections of the dihedral group $D_4$.
+ Taking this point of view, the linearized collision operator can be
+ represented in a compact form as a matrix of elements in the group
+ algebra. Or in other words, by choosing a special numbering it
+ exhibits a certain block structure which lets it appear as a matrix
+ with entries in a certain polynomial ring. A convenient way for
+ approaching such a structure is the use of a computer algebra system
+ able to treat these (predefined) algebraic structures. We use the
+ computer algebra system FriCAS/AXIOM [4,5] for the generation of the
+ velocity and the collision sets and for the analysis of the structure
+ of the collision operator. Concerning the fluid dynamic limit, the
+ system provides the characterization of sets of collisions and their
+ contribution to the flow parameters. It allows the design of
+ rotationally invariant symmetric models for prescribed Prandtl
+ numbers. The implementation in FriCAS/AXIOM is explained and its
+ results for a 25-velocity model are presented.",
+ paper = "Babo16.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Baclawski, Krystian}
\begin{chunk}{axiom.bib}
@article{Bacl14,
diff git a/changelog b/changelog
index 366ae99..32b1f93 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,5 @@
+20170303 tpd src/axiomwebsite/patches.html 20170303.01.tpd.patch
+20170303 tpd books/bookvolbib add additional references
20170220 tpd src/axiomwebsite/patches.html 20170220.01.tpd.patch
20170220 tpd books/bookvolbib add additional references
20170215 tpd src/axiomwebsite/patches.html 20170215.01.tpd.patch
diff git a/patch b/patch
index 1064b6f..0c73e78 100644
 a/patch
+++ b/patch
@@ 2,212 +2,432 @@ books/bookvolbib add additional references
Goal: Proving Axiom Correct
\index{Dybjer, Peter}
\begin{chunk}{axiom.bib}
@inproceedings{Dybj90,
 author = "Dybjer, Peter",
 title = {Inductive Sets and Families in MarinL\"of's Type Theory and
 Their SetTheoretic Semantics},
 booktitle = "Proc. First Workshop on Logical Frameworks",
 year = "1990",
 link =
 "\url{http://www.cse.chalmers.se/~peterd/papers/Setsem\_Inductive.pdf}",
+\index{Sterling, Jonathan}
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@misc{Ster17,
+ author = "Sterling, Jonathan and Harper, Robert",
+ title = "Algebraic Foundations of Proof Refinement",
+ link = "\url{http://www.cs.cmu.edu/~rwh/papers/afpr/afpr.pdf}",
+ year = "2017",
abstract =
 "{MartinL\"of}'s type theory is presented in several steps. The kernel
 is a dependently typed $\lambda$calculs. Then there are schemata for
 inductive sets and families of sets and for primitive recursive functions
 and families of functions. Finally, there are set formers (generic
 polymorphism) and universes. At each step syntax, inference rules, and
 settheoretic sematics are given",
 paper = "Dybj90.pdf"
+ "We contribute a general apparatus for {\sl dependent} tactic-based
+ proof refinement in the LCF tradition, in which the statements of
+ subgoals may express a dependency on the proofs of other subgoals;
+ this form of dependency is extremely useful and can serve as an
+ {\sl algorithmic} alternative to extensions of LCF based on nonlocal
+ instantiation of schematic variables. Additionally, we introduce a
+ novel behavioral distinction between {\sl refinement rules} and
+ {\sl tactics} based on naturality. Our framework, called Dependent
+ LCF, is already deployed in the nascent RedPRL proof assistant for
+ computational cubical type theory.",
+ paper = "Ster17.pdf"
}
\end{chunk}
\index{Goguen, Healfdene}
\index{McBride, Conor}
\index{McKinna, James}
+\index{Plotkin, G.D.}
\begin{chunk}{axiom.bib}
@article{Gogu06,
 author = "Goguen, Healfdene and McBride, Conor and McKinna, James",
 title = "Eliminating Dependent Pattern Matching",
 year = "2006",
 journal = "Lecture Notes in Computer Science",
 volume = "4060",
 pages = "521540",
 link = "\url{http://cs.ru.nl/~james/RESEARCH/goguen2006.pdf}",
+@article{Plot77,
+ author = "Plotkin, G.D.",
+ title = "LCF Considered as a Programming Language",
+ journal = "Theoretical Computer Science",
+ volume = "5",
+ year = "1977",
+ pages = "223--255",
+ link = "\url{http://homepages.inf.ed.ac.uk/gdp/publications/LCF.pdf}",
abstract =
 "This paper gives a reductionpreserving translation from Coquand's
 {\sl dependent pattern matching} into a traditional type theory
 with universes, inductive types and relations and the axiom K. This
 translation serves as a proof of termination for structurally
 recursive pattern matching programs, provides an implementable
 compilation technique in the style of functional programming languages,
 and demonstrates the equivelence with a more easily understood type
 theory.",
 paper = "Gogu06.pdf"
}

\end{chunk}

\index{McBride, Conor}
\index{Goguen, Healfdene}
\index{McKinna, James}
\begin{chunk}{axiom.bib}
@article{Mcbr06,
 author = "McBride, Conor and Goguen, Healfdene and McKinna, James",
 title = "A Few Constructions on Constructors",
 journal = "Lecture Notes in Computer Science",
 volume = "3839",
 pages = "186200",
 year = "2006",
 link = "\url{http://www.strictlypositive.org/concon.ps.gz}",
+ "The paper studies connections between denotational and operational
+ semantics for a simple programming language based on LCF. It begins
+ with the connection between the behaviour of a program and its
+ denotation. It turns out that a program denotes $\bot$ in any of
+ several possible semantics iff it does not terminate. From this it
+ follows that if two terms have the same denotation in one of these
+ semantics, they have the same behaviour in all contexts. The converse
+ fails for all the semantics. If, however, the language is extended to
+ allow certain parallel facilities behavioural equivalence does coincide
+ with denotational equivalence in one of the semantics considered, which
+ may therefore be called ``fully abstract''. Next a connection is given
+ which actually determines the semantics up to isomorphism from the
+ behaviour alone. Conversely, by allowing further parallel facilities,
+ every r.e. element of the fully abstract semantics becomes definable,
+ thus characterising the programming language, up to interdefinability,
+ from the set of r.e. elements of the domains of the semantics.",
+ paper = "Plot77.pdf"
+}
+
+\end{chunk}
+
+\index{Milner, Robin}
+\begin{chunk}{axiom.bib}
+@techreport{Miln72,
+ author = "Milner, Robin",
+ title = "Logic for Computable Functions: Description of a Machine
+ Implementation",
+ year = "1972",
+ institution = "Stanford Artificial Intelligence Project",
+ number = "STAN-CS-72-288",
+ link =
+ "\url{http://i.stanford.edu/pub/cstr/reports/cs/tr/72/288/CS-TR-72-288.pdf}",
abstract =
 "We present four constructions for standard equipment which can be
 generated for every inductive datatype: case analysis, structural
 recursion, no confusion, acyclicity. Our constructions follow a
 twolevel approach  they require less work than the standard
 techniques which inspired them. Moreover, given a suitably
 heterogeneous notion of equality, they extend without difficulty to
 inductive families of datatypes. These constructions are vital
 components of the translation from dependently typed programs in
 pattern matching style to the equivalent programs expressed in terms
 of induction principles and as such play a crucial behindthescenes
 role in Epigram.",
 paper = "Mcbr06.pdf"
}
+ "LCF is based on a logic by Dana Scott, proposed by him at Oxford in the
+ fall of 1969, for reasoning about computable functions.",
+ paper = "Miln72.pdf"
+}
\end{chunk}
\index{Bove, Ana}
\index{Dybjer, Peter}
+\index{Gordon, Mike}
\begin{chunk}{axiom.bib}
@misc{Bove08,
 author = "Bove, Ana and Dybjer, Peter",
 title = "Dependent Types at Work",
 year = "2008",
 comment = "Lecture notes from LerNET Summer School, Piriapolis",
 link =
 "\url{http://www.cse.chalmers.se/~peterd/papers/DependentTypesAtWork.pdf}",
+@misc{Gord96,
+ author = "Gordon, Mike",
+ title = "From LCF to HOL: a short history",
+ year = "1996",
+ link = "\url{http://www.cl.cam.ac.uk/~mjcg/papers/HolHistory.pdf}",
+ paper = "Gord96.pdf"
+}
+
+\end{chunk}
+
+\index{Trostle, Anne}
+\begin{chunk}{axiom.bib}
+@misc{Tros13,
+ author = "Trostle, Anne",
+ title = "An Algorithm for the Greatest Common Divisor",
+ link = "\url{http://www.nuprl.org/MathLibrary/gcd/}",
+ year = "2013"
+}
+
+\end{chunk}
+
+\index{Babovsky, Hans}
+\index{Grabmeier, Johannes}
+\begin{chunk}{axiom.bib}
+@inproceedings{Babo16,
+ author = "Babovsky, Hans and Grabmeier, Johannes",
+ title = "Calculus and design of discrete velocity models using
+ computer algebra",
+ booktitle = "AIP Conference Proceedings",
+ volume = "1786",
+ isbn = "9780735414488",
+ link = "\url{http://aip.scitation.org/doi/pdf/10.1063/1.4967672}",
+ year = "2016",
abstract =
 "In these lecture notes we give an introduction to functional
 programming with dependent types. We use the dependently typed
 programming language Agda which is an extension of {MartinL\"of} type
 theory. First we show how to do simply typed functional programming in
 the style of Haskell and ML. Some differences between Agda's type
 system and the HindleyMilner type system of Haskell and ML are also
 discussed. Then we show how to use dependent types for programming and
 we explain the basic ideas behind typechecking dependent types. We go
 on to explain the CurryHoward identification of propositions and
 types. This is what makes Agda a programming logic and not only a
 programming language. According to CurryHoward, we identify programs
 and proofs, something which is possible only by requiring that all
 programs terminate. However, at the end of these notes we present a
 method for encoding partial and general recursive functions as total
 functions using dependent types.",
 paper = "Bove08.pdf"
}

\end{chunk}

\index{Benke, Marcin}
\index{Dybjer, Peter}
\index{Jansson, Patrik}
\begin{chunk}{axiom.bib}
@article{Benk03,
 author = "Benke, Marcin and Dybjer, Peter and Jansson, Patrik",
 title = "Universes for generic programs and proofs in dependent type
 theory",
 journal = "Nordic Journal of Computing",
 volume = "10",
 year = "2003",
 pages = "265269",
 link = "\url{http://www.cse.chalmers.se/~peterd/papers/generic.html}",
 abstract =
 "We show how to write generic programs and proofs in {Martin L\"of}
 type theory. To this end we considier several extensions of
 {MartinL\"of}'s logical framework for dependent types. Each extension
 has a universe of codes (signatures) for inductively defined sets with
 generic formation, introduction, elimination, and equality
 rules. These extensions are modeled on Dybjer and Setzer's finitely
 axiomatized theories of inductiverecursive definitions, which also
 have universese of codes for sets, and generic formation,
 introduction, elimination, and equality rules. Here we consider
 several smaller universes of interest for generic programming and
 universal algebra. We formalize onesorted and manysorted term
 algebras, as well as iterated, generalized, parameterized, and indexed
 inductive definitions. We also show how to extend the techniques of
 generic programming to these universes. Furthermore, we give generic
 proofs of reflexivity and substitutivity of a generic equality
 test. Most of the definitions in the paper have been implemented using
 the proof assistant Alfa for dependent type theory.",
 paper = "Benk03.pdf"
+ "In [2,3], a framework for a calculus with Discrete Velocity Models
+ (DVM) has been derived. The rotational symmetry of the discrete
+ velocities can be modelled algebraically by the action of the cyclic
+ group $C_4$ -- or including reflections of the dihedral group $D_4$.
+ Taking this point of view, the linearized collision operator can be
+ represented in a compact form as a matrix of elements in the group
+ algebra. Or in other words, by choosing a special numbering it
+ exhibits a certain block structure which lets it appear as a matrix
+ with entries in a certain polynomial ring. A convenient way for
+ approaching such a structure is the use of a computer algebra system
+ able to treat these (predefined) algebraic structures. We use the
+ computer algebra system FriCAS/AXIOM [4,5] for the generation of the
+ velocity and the collision sets and for the analysis of the structure
+ of the collision operator. Concerning the fluid dynamic limit, the
+ system provides the characterization of sets of collisions and their
+ contribution to the flow parameters. It allows the design of
+ rotationally invariant symmetric models for prescribed Prandtl
+ numbers. The implementation in FriCAS/AXIOM is explained and its
+ results for a 25-velocity model are presented.",
+ paper = "Babo16.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Hebisch, Waldemer}
+\begin{chunk}{axiom.bib}
+@article{Hebi15,
+ author = "Hebisch, Waldemer",
+ title = "Integration in terms of exponential integrals and incomplete
+ gamma functions",
+ year = "2015",
+ journal = "ACM Communications in Computer Algebra",
+ volume = "49",
+ number = "3",
+ pages = "98--100",
+ abstract =
+ "Indefinite integration means that given $f$ in some set we want to
+ find $g$ from possibly larger set such that $f = g^\prime$. When $f$
+ and $g$ are required to be elementary functions due to work of among
+ others Risch, Rothstein, Trager, Bronstein (see [1] for references)
+ integration problem is now solved at least in theory. In his thesis
+ Cherry gave algorithm to integrate transcendental elementary functions
+ in terms of exponential integrals. In [2] he gave algorithm to
+ integrate transcendental elementary functions in so called reduced
+ fields in terms of error functions. Knowles [3] and [4] extended this
+ allowing also Liouvillian integrands and weakened restrictions on the
+ field containing integrands. We extend previous results allowing
+ incomplete gamma function $\Gamma(a, x)$ with rational $a$. Also, our
+ theory can handle algebraic extensions and is complete jointly (and
+ not only separately for Ei and erf). In purely transcendental case our
+ method should be more efficient and easier to implement than [2]. In
+ fact, it seems that no system currently implements algorithm from [2],
+ while partial implementation of our method in FriCAS works well enough
+ to be turned on by default. With our approach nonreduced case from
+ [2] can be handled easily. We hope that other classes of special
+ functions can be handled in a similar way, in particular irrational
+ case of incomplete gamma function and polylogarithms (however
+ polylogarithms raise tricky theoretical questions)."
}
\end{chunk}
\index{MartinL\"of, Per}
+\index{Knowles, Paul}
\begin{chunk}{axiom.bib}
@misc{Mart80,
 author = {MartinL\"of, Per},
 title = "Intuitionistic Type Theory",
 link = "\url{http://archivepml.github.io/martinlof/pdfs/BibliopolisBookretypeset1984.pdf}",
 year = "1980",
 paper = "Mart80.pdf"
+@article{Know93,
+ author = "Knowles, Paul",
+ title = "Integration of a Class of Transcendental Liouvillian Functions
+ with ErrorFunctions, Part I",
+ journal = "Journal of Symbolic Computation",
+ volume = "13",
+ number = "5",
+ pages = "525--543",
+ year = "1993",
+ abstract =
+ "This paper gives a decisionprocedure for the symbolic integration of
+ a certain class of transcendental Liouvillian functions in terms of
+ elementary functions and errorfunctions. An example illustrating the
+ use of the decisionprocedure is given.",
+ paper = "Know93.pdf"
}
\end{chunk}
\index{MartinL\"of, Per}
+\index{Knowles, Paul}
\begin{chunk}{axiom.bib}
@inproceedings{Mart85,
 author = {MartinL\"of, Per},
 title = "Costructive Mathematics and Computer Programming",
 booktitle = "Proc Royal Soc. of London on Math. Logic and Programming Lang.",
 link = "\url{http://www.cs.tufts.edu/~nr/cs257/archive/permartinlof/constructivemath.pdf}",
+@article{Know93a,
+ author = "Knowles, Paul",
+ title = "Integration of a Class of Transcendental Liouvillian Functions
+ with ErrorFunctions, Part II",
+ journal = "Journal of Symbolic Computation",
+ volume = "16",
+ number = "3",
+ year = "1993",
+ pages = "227--239",
+ abstract =
+ "This paper extends the decision procedure for the symbolic
+ integration of a certain class of transcendental Liouvillian functions
+ in terms of elementary functions and errorfunctions given in Knowles
+ (1992) to allow a much larger class of integrands. Examples
+ illustrating the use of the decision procedure are given.",
+ paper = "Know93a.pdf"
+}
+
+\end{chunk}
+
+\index{Cherry, G.W.}
+\begin{chunk}{axiom.bib}
+@article{Cher85,
+ author = "Cherry, G.W.",
+ title = "Integration in Finite Terms with Special Functions:
+ The Error Function",
+ journal = "J. Symbolic Computation",
year = "1985",
 isbn = "0135614651",
 pages = "168184",
 publisher = "PrenticeHall",
 paper = "Mart85.pdf"
+ volume = "1",
+ pages = "283--302",
+ abstract =
+ "A decision procedure for integrating a class of transcendental
+ elementary functions in terms of elementary functions and error
+ functions is described. The procedure consists of three mutually
+ exclusive cases. In the first two cases a generalised procedure for
+ completing squares is used to limit the error functions which can
+ appear in the integral of a finite number. This reduces the problem
+ to the solution of a differential equation and we use a result of
+ Risch (1969) to solve it. The third case can be reduced to the
+ determination of what we have termed $\sum$-decompositions. The result
+ presented here is the key procedure to a more general algorithm which
+ is described fully in Cherry (1983).",
+ paper = "Cher85.pdf"
+}
+
+\end{chunk}
+
+\index{Lisonek, Petr}
+\index{Paule, Peter}
+\index{Strehl, Volker}
+\begin{chunk}{axiom.bib}
+@article{Liso93,
+ author = "Lisonek, Petr and Paule, Peter and Strehl, Volker",
+ title = "Improvement of the Degree Setting in Gosper's Algorithm",
+ journal = "J. Symbolic Computation",
+ volume = "16",
+ year = "1993",
+ pages = "243--258",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/S0747717183710436}",
+ abstract =
+ "A detailed study of the degree setting for Gosper's algorithm for
+ indefinite hypergeometric summation is presented. In particular, we
+ discriminate between rational and proper hypergeometric input. As a
+ result, the critical degree bound can be improved in the former case.",
+ paper = "Liso93.pdf"
+}
+
+\end{chunk}
+
+\index{Man, YiuKwong}
+\begin{chunk}{axiom.bib}
+@article{Manx93,
+ author = "Man, YiuKwong",
+ title = "On Computing Closed Forms for Indefinite Summations",
+ journal = "J. Symbolic Computation",
+ volume = "16",
+ pages = "335--376",
+ year = "1993",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/S0747717183710539}",
+ abstract =
+ "A decision procedure for finding closed forms for indefinite
+ summation of polynomials, rational functions, quasipolynomials and
+ quasirational functions is presented. It is also extended to deal with
+ some nonhypergeometric sums with rational inputs, which are not
+ summable by means of Gosper's algorithm. Discussion of its
+ implementation, analysis of degree bounds and some illustrative
+ examples are included.",
+ paper = "Manx93.pdf"
}
\end{chunk}
\index{Dybjer, Peter}
\index{Setzer, Anton}
+\index{Paule, Peter}
\begin{chunk}{axiom.bib}
@article{Dybj03,
 author = "Dybjer, Peter and Setzer, Anton",
 title = "Inductionrecursion and initial algebras",
 journal = "Annals of Pure and Applied Logic",
 volume = "124",
 year = "2003",
 pages = "147",
+@article{Paul95,
+ author = "Paule, Peter",
+ title = "Greatest Factorial Factorization and Symbolic Summation",
+ journal = "Journal of Symbolic Computation",
+ year = "1995",
+ volume = "20",
+ pages = "235--268",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/S0747717185710498}",
abstract =
 "Inductionrecursion is a powerful definition method in intuitionistic
 type theory. It extends (generalized) inductive definitions and allows us
 to define all standard sets of Martin{L\"of} type theory as well as a
 large collection of commonly occuring inductive data structures. It also
 includes a variety of universes which are constructive analogues of
 inaccessibles and other large cardinals below the first Mahlo cardinal.
 In this article we give a new compact formalization of inductiverecursive
 definnitions by modeling them as initial algebras in slice categories. We
 give generic formation, introduction, elimination, and equality rules
 generalizing the usual rules of type theory. Moreover, we prove that the
 elimination and equality rules are equivalent to the principle of the
 existence of initial algebras for certain endofunctors. We also show the
 equivalence of the current formulation with the formulation of
 inductionrecursion as a reflection principle given in Dybjer and
 Setzer (Lecture Notes in Comput. Sci. 2183 (2001) 93). Finally we discuss
 two typetheoretic analogues of Mahlo cardinals in set theory: an external
 Mahlo universe which is defined by inductionrecursion and captured by our
 formalization, and an internal Mahlo universe, which goes beyond induction
 recursion. We show that the external Mahlo universe, and therefore also
 the theory of inductiverecursive definitions, have prooftheoretical
 strength of at least Rathjen's theory KPM.",
 paper = "Dybj03.pdf"
+ "The greatest factorial factorization (GFF) of a polynomial provides
+ an analogue to squarefree factorization but with respect to integer
+ shifts instead to multiplicities. We illustrate the fundamental role
+ of that concept in the context of symbolic summation. Besides a
+ detailed discussion of the basic GFF notions we present a new approach
+ to the indefinite rational summation problem as well as to Gosper's
+ algorithm for summing hypergeometric sequences.",
+ paper = "Paul95.pdf"
}
\end{chunk}
+\index{Petkovsek, Marko}
+\begin{chunk}{axiom.bib}
+@article{Petk92,
+ author = "Petkovsek, Marko",
+ title = "Hypergeometric solutions of linear recurrences with
+ polynomial coefficients",
+ journal = "J. Symbolic Computation",
+ volume = "14",
+ pages = "243--264",
+ year = "1992",
+ link =
+ "\url{http://www.sciencedirect.com/science/article/pii/0747717192900386}",
+ abstract =
+ "We describe algorithm Hyper which can be used to find all
+ hypergeometric solutions of linear recurrences with polynomial
+ coefficients.",
+ paper = "Petk92.pdf"
+}
+
+\end{chunk}
+
+\index{Grabm\"uller, Martin}
+\begin{chunk}{axiom.bib}
+@misc{Grab17,
+ author = {Grabm\"uller, Martin},
+ title = "Algorithm W",
+ year = "2017",
+ link = "\url{https://github.com/mgrabmueller/AlgorithmW}",
+ abstract =
+ "In this paper we develop a complete implementation of Algorithm W for
+ Hindley-Milner polymorphic type inference in Haskell",
+ paper = "Grab17.pdf"
+}
+\end{chunk}
+
+\index{Heeren, Bastiaan}
+\index{Hage, Jurriaan}
+\index{Swierstra, Doaitse}
+\begin{chunk}{axiom.bib}
+@misc{Heer02,
+ author = "Heeren, Bastiaan and Hage, Jurriaan and Swierstra, Doaitse",
+ title = "Generalizing HindleyMilner Type Inference Algorithms",
+ year = "2002",
+ link = "\url{https://pdfs.semanticscholar.org/8983/233b3dff2c5b94efb31235f62bddc22dc899.pdf}",
+ abstract =
+ "Type inferencing according to the standard algorithms $W$ and $M$
+ often yields uninformative error messages. Many times, this is a
+ consequence of a bias inherent in the algorithms. The method
+ developed here is to first collect constraints from the program, and
+ to solve these afterwards, possibly under the influence of a
+ heuristic. We show the soundness and completeness of our algorithm.
+ The algorithms $W$ and $M$ turn out to be deterministic instances of our
+ method, giving the correctness for $W$ and $M$ with respect to the
+ HindleyMilner typing rules for free. We also show that our algorithm
+ is more flexible, because it naturally allows the generation of
+ multiple messages",
+ paper = "Heer02.pdf"
+}
+
+\end{chunk}
+
+\index{Milner, Robin}
+\begin{chunk}{axiom.bib}
+@article{Miln78,
+ author = "Milner, Robin",
+ title = "A Theory of Type Polymorphism in Programming",
+ year = "1978",
+ journal = "Journal of Computer and System Sciences",
+ volume = "17",
+ pages = "348--375",
+ link = "\url{https://courses.engr.illinois.edu/cs421/sp2013/project/milnerpolymorphism.pdf}",
+ abstract =
+ "The aim of this work is largely a practical one. A widely employed
+ style of programming, particularly in structureprocessing languages
+ which impose no discipline of types, entails defining procedures which
+ work well on objects of a wide variety. We present a formal type
+ discipline for such polymorphic procedures in the context of a simple
+ programming language, and a compile time typechecking algorithm $W$
+ which enforces the discipline. A Semantic Soundness Theorem (based on
+ a formal semantics for the language) states that well-typed programs
+ cannot “go wrong” and a Syntactic Soundness Theorem states that if $W$
+ accepts a program then it is well typed. We also discuss extending
+ these results to richer languages; a typechecking algorithm based on
+ $W$ is in fact already implemented and working, for the metalanguage ML
+ in the Edinburgh LCF system.",
+ paper = "Miln78.pdf"
+}
+
+\end{chunk}
+
+\index{Wadler, Philip}
+\index{Blott, Stephen}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wadl88,
+ author = "Wadler, Philip and Blott, Stephen",
+ title = "How to make adhoc polymorphism less ad hoc",
+ booktitle = "Proc 16th ACM SIGPLANSIGACT Symp. on Princ. of Prog. Lang",
+ isbn = "0897912942",
+ pages = "60--76",
+ year = "1989",
+ link = "\url{http://202.3.77.10/users/karkare/courses/2010/cs653/Papers/adhocpolymorphism.pdf}",
+ abstract =
+ "This paper presents {\sl type classes}, a new approach to {\sl adhoc}
+ polymorphism. Type classes permit overloading of arithmetic operators
+ such as multiplication, and generalise the ``eqtype variables'' of
+ Standard ML. Type classes extend the Hindley/Milner polymorphic type
+ system, and provide a new approach to issues that arise in objectoriented
+ programming, bounded type quantification, and abstract data types. This
+ paper provides an informal introduction to type classes, and defines them
+ formally by means of type inference rules",
+ paper = "Wadl88.pdf"
+}
+
+\end{chunk}
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index 9f0488d..0709331 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5662,6 +5662,8 @@ readme add Jacob Nyffeler Smith to credits list
books/bookvol13 add Binary Power in COQ chapter
20170220.01.tpd.patch
books/bookvolbib add additional references
+20170303.01.tpd.patch
+books/bookvolbib add additional references

1.7.5.4