From d8c8c2723594c43534f85405329cef687b93a443 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Sun, 10 Nov 2019 21:30:00 -0500
Subject: [PATCH] books/bookvol15 new book on Axiom Sane Compiler
Goal: Proving Axiom Sane
\index{Bronstein, Manuel}
\begin{chunk}
@article{Bron88a,
author = "Bronstein, Manuel",
title = {{Fast Reduction of the Risch Differential Equation}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "64-72",
year = "1988",
abstract =
"Since Risch (1969) reduced the problem of integrating
exponentials to solving a first order linear differential equation
in a differential field, there has been considerable interest in
solving \[y^\prime + fy = g\] for $y$ in a given differential
field $K$, where $f,g\in K$. Risch (1969) gave an algorithm for a
more general equation. His algorithm, however, required factoring
the denominators of $f$ and $g$, which is an obstacle to efficient
implementations.
Later, Rothstein (1976) and Davenport (1986) presented algorithms
for equation $(R)$ that both rely on a squarefree factorization of
the denominator for $y$. The algorithm in (Davenport 1986) has
been implemented in the Scratchpad II (see Jenks et al.) and Maple
(see Char et al, 1985) computer algebra systems. This algorithm,
however, requires $f$ to be in a certain form (called {\sl weakly
normalized}), but no computer algorithm that makes $f$ weakly
normalized has been published.
We present here a weaker definition of weak-normality, which
\begin{enumerate}
\item can always be obtained in a tower of transcendental
elementary extensions,
\item gives us an explicit formula for the denominator of $y$, so
equation $(R)$ can be reduced to a polynomial equation in one
reduction step
\end{enumerate}
As a consequence, we obtain a new algorithm for solving equation
$(R)$. Our algorithm is very similar to the one described in
Rothstein (1976), except that we use weak normality to prevent
finite cancellation, rather than having to find integer roots of
polynomials over the constant field of $K$ in order to detect it.",
paper = "Bron88a.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Galligo, Andre}
\index{Pottier, Loic}
\index{Traverso, Carlo}
\begin{chunk}
@article{Gall88,
author = "Galligo, Andre and Pottier, Loic and Traverso, Carlo",
title = {{Greatest Easy Common Divisor and Standard Basis Completion
Algorithms}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "162-176",
year = "1988",
abstract =
"The computation of a standard basis (also called Groebner basis)
of a multivariate polynomial ideal over a field $K$ is crucial in
many applications. The problem is intrinsically time and space
consuming, and many researchers aim to improve the basic algorithm
due to B. Buchberger [Bu1]. One has investigated the problem of
the optimal choice of the term ordering depending from the use
that has to be made of the results, [BS], and the systematic
elimination of unnecessary reductions [Bu2],[GM],[Po]. We can call
all these problems ``combinatorial complexity problems''.
The present paper considers arithmetic complexity problems; our
main problem is how to limit the growth of the coefficients in the
algorithms and the complexity of the field operations
involved. The problem is important with every ground field, with
the obvious exception of finite fields.
The ground field is often represented as the field of fractions of
some explicit domain, which is usually a quotient of a finite
extension of $Z$, and the computations are hence reduced to these
domains.
The problem of coefficient growth, and the complexity already
appearing in the calculation of the GCD of two univariate
polynomials, which is indeed a very special case of standard basis
computation; the PRS algorithms of Brown and Collins operate the
partial coefficient simplifications predicted by a theorem, hence
succeeding in controlling this complexity.
Our approach looks for analogies with these algorithms, but a
general structure theorem is missing, hence our approach relies on
a limited search of coefficient simplifications. The basic idea is
the following: since the GCD is usually costly, we can use in its
place the ``greatest between the common divisors that are easy to
compute'' (the GECD), this suggestion allowing different
instances. A set of such instances is based on the remark that if
you have elements in factorized form, then many common divisors are
immediately evident. Since irreducible factorization, even
assuming that it exists in our domain, is costly, we use a partial
factorization basically obtained using a ``lazy multiplication''
technique, i.e. performing coefficient multiplications only if
they are unavoidable. The resulting algorithms were tested with a
``simulated'' implementation on the integers, and the results
suggest that a complete implementation should be very efficient,
at least when the coefficient domain is a multivariate rational
function field.",
paper = "Gall88.pdf",
keywords = "GCD"
}
\end{chunk}
\index{Bradford, R.J.}
\index{Davenport, J.H.}
\begin{chunk}
@article{Brad88,
author = "Bradford, R.J. and Davenport, J.H.",
title = {{Effective Tests for Cyclotomic Polynomials}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "244-251",
year = "1988",
abstract =
"We present two efficient tests that determine if a given
polynomial is cyclotomic, or is a product of cyclotomics. The
first method uses the fact that all the roots of a cyclotomic
polynomial are roots of unity, and the second the fact that the
degree of a cyclotomic polynomial is a value of $\phi(n)$, for
some $n$. We can also find the cyclotomic factors of any
polynomial.",
paper = "Brad88.pdf"
}
\end{chunk}
\index{Gianni, Patrizia}
\index{Miller, Victor}
\index{Trager, Barry}
\begin{chunk}
@article{Gian88a,
author = "Gianni, Patrizia and Miller, Victor and Trager, Barry",
title = {{Decomposition of Algebras}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "300-308",
year = "1988",
abstract =
"In this paper we deal with the problem of decomposing finite
commutative $Q$-algebras as a direct product of local
$Q$-algebras. We solve this problem by reducing it to the problem
of finding a decomposition of finite algebras over finite
field. We will show that it is possible to define a lifting
process that allows to reconstruct the answer over the rational
numbers. This lifting appears to be very efficient since it is a
quadratic lifting that doesn't require stepwise inversions. It is
easy to see that the Berlekamp-Hensel algorithm for the
factorization of polynomials is a special case of this argument.",
paper = "Gian88a.pdf"
}
\end{chunk}
\index{Canny, John}
\begin{chunk}
@article{Cann88a,
author = "Canny, John",
title = {{Generalized Characteristic Polynomials}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "293-299",
year = "1988",
abstract =
"We generalize the notion of characteristic polynomial for a
system of linear equations to systems of multivariate polynomial
equations. The generalization is natural in the sense that it
reduces to the usual definition when all the polynomials are
linear. Whereas the constant coefficient of the characteristic
polynomial is the resultant of the system. This construction is
applied to solve a traditional problem with efficient methods for
solving systems of polynomial equations: the presence of
infinitely many solutions ``at infinity''. We give a
single-exponential time method for finding all the isolated
solution points of a system of polynomials, even in the presence
of infinitely many solutions at infinity or elsewhere.",
paper = "Cann88a.pdf"
}
\end{chunk}
\index{Boy de la Tour, Thierry}
\index{Caferra, Ricardo}
\begin{chunk}
@article{Tour88,
author = "Boy de la Tour, Thierry and Caferra, Ricardo",
title = {{A Formal Approach to some Usually Informal Techniques used
in Mathematical Reasoning}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "402-408",
year = "1988",
abstract =
"One of the striking characteristics of mathematical reasoning is
the contrast between the formal aspects of mathematical truth and
the informal character of the ways to that truth. Among the many
important and usually informal mathematical activities we are
interested by proof analogy (i.e. common pattern between proofs of
different theorems) in the context of interactive theorem
proving. In some sense we propose a partial contribution of one of
the Polya's wishes [Polya 73]:
\begin{quote}
Analogy pervades all our thinking, our everyday speech and our
trivial conclusions as well as artistic way of expression and the
highest scientific achievements... but analogy may reach the level
of mathematical precision...
\end{quote}
It is a work in philosophy of mathematics [Resnik 75], in which
mathematics is viewed as studying {\sl patterns} or
{\sl structures}, which encouraged us to pursue our aim of
partially formalizing {\sl analogy}. We naturally arrived at the
need of considering other activities strongly tied to the notion
of analogy and very well known of the working mathematician:
{\sl generalization}, {\sl abstraction} and {\sl analysis of
proofs}.
Let us assume that the user has to prove a conjecture C. Given a
proof P for a known theorem T, he describes in the language L a
scheme of P. Then he expresses the syntactic analogy he views as
relevant between the scheme of P and what ``should be'' a proof
scheme for C, as a {\sl transformation rule}. This process needs
analysis of proofs, generalization and abstraction.
A second order pattern matching algorithm constructs matchers of P
and its scheme, and applies them to the intended proof scheme of
the conjecture C. In the best case one obtains a potential proof
of C, but in general one obtains a more instantiated proof-scheme
with some ``holes'' to be filled by a theorem prover. In both
cases a proof-checking process is necessary (analogy is in general
a heuristic way of thinking).
A question arises naturally: ``What to do when the assumed analogy
fails?''. We study in this paper one of the possible answers:
``Information may be extracted from the failure in order to
{\sl suggest lemmas} that try to {\sl semantically} restore
analogy''. Of course these lemmas can also serve for detecting wrong
analogies (for example, if required lemmas clearly cannot be
theorems.). Two kinds of failure are possible:
\begin{enumerate}
\item The given proof P and its proposed scheme are not matchable
\item A type error is detected in the instantiated proof scheme
for C.
\end{enumerate}
We give in the following a method to suggest lemmas in the first
case, and sketch some ideas of how to use such a possibility in
the second case.",
paper = "Tour88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Geddes, Keith O.}
\index{Gonnet, Gaston H.}
\index{Smedley, Trevor J.}
\begin{chunk}
@article{Gedd88,
author = "Geddes, Keith O. and Gonnet, Gaston H. and Smedley, Trevor J.",
title = {{Heuristic Methods for Operations With Algebraic Numbers}},
journal = "Lecture Notes in Computer Science",
volume = "358",
pages = "475-480",
year = "1988",
abstract =
"Algorithms for doing computations involving algebraic numbers
have been known for quite some time [6,9,12] and implementations
now exist in many computer algebra systems [1,4,11]. Many of these
algorithms have been analysed and shown to run in polynomial time
and space [7,8], but in spite of this many real problems take
large amounts of time and space to solve. In this paper we
describe a heuristic method which can be used for many operations
involving algebraic numbers. We give specifics for doing algebraic
number inverses, and algebraic number polynomial exact division
and greatest common divisor calculation. The heuristic will not
solve all instances of these problems, but it returns either the
correct result or with failure very quickly, and succeeds for a
very large number of problems. The heuristic method is similar to,
and based on concepts in [3].",
paper = "Gedd88.pdf",
keywords = "GCD"
}
\end{chunk}
\index{Knuth, Donald E.}
\index{Larrabee, Tracy}
\index{Roberts, Paul M.}
\begin{chunk}{axiom.bib}
@misc{Knut87,
author = "Knuth, Donald E. and Larrabee, Tracy and Roberts, Paul M.",
title = {{Mathematical Writing}},
year = "1987",
link = "\url{jmlr.csail.mit.edu/reviewing-papers/knuth_mathematical_writing.pdf}"
}
\end{chunk}
\index{Zeilberger, Doron}
\begin{chunk}{axiom.bib}
@misc{Zeil02,
author = "Zeilberger, Doron",
title = {{Topology: The Slum of Combinatorics}},
year = "2002",
link = "\url{http://sites.math.rutgers.edu/~zeilberg/Opinion1.html}"
}
\end{chunk}
\index{Schopenhauer, Arthur}
\begin{chunk}{axiom.bib}
@misc{Scho16,
author = "Schopenhauer, Arthur",
title = {{Essays of Schopenhauer: On Reading and Books}},
link =
"\url{https://ebooks.adelaide.edu.au/s/schopenhauer/arthur/essays/chapter3.html}",
year = "2016"
}
\end{chunk}
\index{Bhat, Siddharth}
\begin{chunk}{axiom.bib}
@misc{Bhat19,
author = "Bhat, Siddharth",
title = {{Computing Equivalent Gate Sets Using Grobner Bases}},
link = "\url{https://bollu.github.io/#computing-equivalent-gat-sets-using-grobner-bases}",
year = "2019"
}
\end{chunk}
\index{Carneiro, Mario}
\begin{chunk}{axiom.bib}
@misc{Carn19,
author = "Carneiro, Mario",
title = {{Metamath Zero: The Cartesian Theorem Prover}},
link = "\url{https://arxiv.org/pdf/1910.10703.pdf}",
year = "2019",
abstract =
"As the usage of theorem prover technology expands, so too does
the reliance on correctness of the tools. Metamath Zero is a
verification system that aims for simplicity of logic and
implementation, without compromising on efficiency of
verification. It is formally specified in its own language, and
supports a number of translations to and from other proof
languages. This paper describes the abstract logic of Metamath
Zero, essentially a multi-sorted first order logic, as well as the
binary proof format and the way in which it can ensure essentially
linear time verification while still being concise and efficient
at scale. Metamath Zero currently holds the record for fastest
verification of the {\tt set.mm} Metamath library of proofs in ZFC
(including 71 of Wiedijk's 100 formalization targets), at less
than 200 ms. Ultimately, we intend to use it to verify the
correctness of the implementation of the verifier down to binary
executable, so it can be used as a root of trust for more complex
proof systems.",
paper = "Carn19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Roanes-Lozano, Eugenio}
\index{Galan-Garcia, Jose Luis}
\index{Solano-Macias, Carmen}
\begin{chunk}{axiom.bib}
@article{Roan19,
author = "Roanes-Lozano, Eugenio and Galan-Garcia, Jose Luis and
Solano-Macias, Carmen",
title = {{Some Reflections About the Success and Impact of the
Computer Algebra System DERIVE with a 10-Year Time
Perspective}},
journal = "Mathematics in Computer Science",
volume = "13",
number = "3",
pages = "417-431",
year = "2019",
abstract =
"The computer algebra system DERIVE had a very important impact in
teaching mathematics, mainly in the 1990's. The authors analyze
the possible reasons for its success and impact and give personal
conclusions based on the facts collected. More than 10 years after
it was discontinued it is still used for teaching and several
scientific papers (mostly devoted to educational issues) still
refer to it. A summary of the history, journals and conferences
together with a brief bibliographic analysis are included.",
paper = "Roan19.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Abraham, Erika}
\index{Abbott, John}
\index{Becker, Bernd}
\index{Bigatti, Anna M.}
\index{Brain, Martin}
\index{Buchberger, Bruno}
\index{Cimatti, Alessandro}
\index{Davenport, James H.}
\index{England, Matthew}
\index{Fontaine, Pascal}
\index{Forrest, Stephen}
\index{Griggio, Alberto}
\index{Kroening, Daniel}
\index{Seiler, Werner M.}
\index{Sturm, Thomas}
\begin{chunk}{axiom.bib}
@article{Abra16b,
author = "Abraham, Erika and Abbott, John and Becker, Bernd and
Bigatti, Anna M. and Brain, Martin and Buchberger, Bruno
and Cimatti, Alessandro and Davenport, James H. and
England, Matthew and Fontaine, Pascal and Forrest, Stephen
and Griggio, Alberto and Kroening, Daniel and
Seiler, Werner M. and Sturm, Thomas",
title = {{$SC^2$: Satisfiability Checking Meets Symbolic Computation}},
journal = "Lecture Notes in Computer Science",
volume = "9791",
year = "2016",
abstract =
"Symbolic Computation and Satisfiability Checking are two research
areas, both having their individual scientific focus but sharing
also common interests in the development, implementation and
application of decision procedures for arithmetic
theories. Despite their commonalities, the two communities are
weakly connected. The aim of our newly accepted $SC^2$ project
(H2020-FETOPEN-CSA) is to strengthen the connection between these
communities by creating common platforms, initiating interaction
and exchange, identifying common challenges, and developing a
common roadmap from theory along the way to tools and (industrial)
applications. In this paper we report on the aims and on the first
activities of this project, and formalise some relevant challenges
for the unified $SC^2$ community.",
paper = "Abra16b.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Lambe, Larry A.}
\begin{chunk}{axiom.bib}
@article{Lamb16,
author = "Lambe, Larry A.",
title = {{An Algebraic Study of the Klein Bottle}},
journal = "Journal of Homotopy and Related Structures",
volume = "11",
number = "4",
pages = "885-891",
year = "2016",
abstract =
"We use symbolic computation (SC) and homological perturbation
(HPT) to compute a resolution of the integers $\mathbb{Z}$ over
the integer group ring of $G$, the fundamental group of the Klein
bottle. From this it is easy to read off the homology of the Klein
bottle as well as other information.",
paper = "Lamb16.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Gerdt, Vladimir P.}
\index{Koepf, Wolfram}
\index{Mayr, Ernst W.}
\index{Vorozhtsov, Evgenii V.}
\begin{chunk}{axiom.bib}
@book{Gerd13,
author = "Gerdt, Vladimir P. and Koepf, Wolfram and Mayr, Ernst W.
and Vorozhtsov, Evgenii V.",
title = {{Computer Algebra in Scientific Computing}},
publisher = "Springer",
year = "2013",
comment = "LNCS 8136",
paper = "Gerd13.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Ardizzoni, Alessandro}
\index{Stumbo, Fabio}
\begin{chunk}{axiom.bib}
@article{Ardi09,
author = "Ardizzoni, Alessandro and Stumbo, Fabio",
title = {{Quadratic Lie Algebras}},
journal = "Commun. Algebra",
volume = "39",
number = "8",
pages = "2723-2751",
year = "2011",
link = "\url{https://arxiv.org/pdf/0906.4617.pdf}",
abstract =
"In this paper, the notion of universal enveloping algebra
introduced in [A. Ardizzoni, A First Sight Towards Primitively
Generated Connected Braided Bialgebras] is specialized to the case
of braided vector spaces whose Nichols algebra is quadratic as an
algebra. In this setting a classification of universal enveloping
algebras for braided vector spaces of dimension not greater than
2 is handled. As an application, we investigate the structure of
primitively generated connected braided bialgebras whose braided
vector space of primitive elements forms a Nichols algebra which
is a quadratic algebra.",
paper = "Ardi09.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Aharonovich, I.}
\index{Horwitz, L.P.}
\begin{chunk}{axiom.bib}
@article{Ahar12,
author = "Aharonovich, I. and Horwitz, L.P.",
title = {{Radiation-reaction in classical off-shell electrodynamics,
I: The above mass-shell case}},
journal = "J. Math. Phys.",
volume = "53",
number = "3",
year = "2012",
abstract =
"Offshell electrodynamics based on a manifestly covariant
off-shell relativistic dynamics of Stueckelberg, Horwitz, and
Piron, is five-dimensional. In this paper, we study the problem of
radiation reaction of a particle in motion in this framework. In
particular, the case of the above-mass-shell is studied in detail,
where the renormalization of the Lorentz force leads to a system
of non-linear differential equations for 3 Lorentz scalars. The
system is then solved numerically, where it is shown that the
mass-shell deviation scalar either smoothly falls down to 0 (this
result provides a mechanism for the mass stability of the
off-shell theory), or strongly diverges under more extreme
conditions. In both cases, no runaway motion is
observed. Stability analysis indicates that the system seems to
have chaotic behavior. It is also shown that, although a motion
under which the mass-shell deviation $\epsilon$ is constant but
not-zero, is indeed possible, but, it is unstable, and eventually
it either decays to 0 or diverges.",
keywords = "axiomref"
}
\end{chunk}
\index{Ioakimidis, Nikolaos I.}
\begin{chunk}{axiom.bib}
@article{Ioak00,
author = "Ioakimidis, Nikolaos I.",
title = {{Derivation of Feasibility Conditions in Engineering
Problems under Parametric Inequality Constraints with
Classical Fourier Elimination}},
journal = "Int. J. Numerical Methods Eng.",
volume = "48",
number = "11",
pages = "1583-1599",
year = "2000",
abstract =
"Fourier (or Motzkin or even Fourier-Motzkin) elimination is the
classical and equally old analogue of Gaussian elimination for the
solution of linear equations in the case of linear
inequalities. Here this approach and two standard improvements are
applied to two engineering problems (involving numerical
integration in fracture mechanics as well as finite differences in
heat transfer in the parametric case) with linear inequality
constraints. The results obtained by a solver of systems of
inequalities concern the feasibility conditions (quantifier-free
formulae), so that the satisfaction of the original system of
linear inequality constraints can be possible. We use the computer
algebra system Maple V and a related elementary procedure for
Fourier elimination. The results constitute an extension of
already available applications of computer algebra software to the
classical approximate-numerical methods traditionally employed in
engineering, and are also related to computational elimination
techniques in computer algebra and applied logic.",
paper = "Ioak00.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Fournie, Michel}
\begin{chunk}{axiom.bib}
@article{Four99,
author = "Fournie, Michel",
title = {{High-order compact schemes: Application to bidimensional unsteady
diffusion-convection problems. II}},
journal = "C. R. Acad. Sci.",
volume = "328",
number = "6",
pages = "539-542",
year = "1999",
abstract =
"This Note generalizes the construction and the analysis of high
order compact difference schemes for unsteady 2D
diffusion-convection problems cf. Part I by A. Rigal. The accuracy
(of order 2 in time and 4 in space) and the stability analysis
yield different choices of the weighting parameters. This work
realized with symbolic computation systems allow to obtain
analytical results for a wide class of schemes and to increase the
stability domain in some cases.",
keywords = "axiomref"
}
\end{chunk}
\index{Lamagna, Edmund A.}
\begin{chunk}{axiom.bib}
@book{Lama19,
author = "Lamagna, Edmund A.",
title = {{Computer Algebra: Concepts and Techniques}},
publisher = "CRC Press",
year = "2019",
isbn = "978-1-138-09314-0",
keywords = "axiomref"
}
\end{chunk}
\index{Benker, Hans}
\begin{chunk}{axiom.bib}
@book{Benk99,
author = "Benker, Hans",
title = {{Practical Use of MATHCAD: Solving Mathematical Problems
with a Computer Algebra System}},
publisher = "Springer-Verlag",
year = "1999",
isbn = "978-1-85233-166-5",
keywords = "axiomref"
}
\end{chunk}
\index{Dooley, Samuel S.}
\begin{chunk}{axiom.bib}
@InProceedings{Dool02,
author = "Dooley, Samuel S.",
title = {{Editing mathematical content and presentation markup in
interactive mathematical documents}},
booktitle = "Proc. ISSAC 2002",
series = "ISSAC 02",
year = "2002",
publisher = "ACM Press",
pages = "55-62",
abstract =
"The IBM MathML Expression Editor is a two-dimensional
mathematical editor for expressions encoded using MathML content
and presentation elements. It allows a user to interact with the
visual presentation of an expression, while simultaneously
creating the underlying structure of the expression. This paper
describes the internal expression framework used by the editor to
represent the content and presentation structures, the layout
mechanisms used to transform content into presentation, the
structural navigation conventions used to select subexpressions,
the editing templates used to support visual input of MathML
content expressions, and the customization framework that allows
for a fully extensible set of content operators, including
complete support for MathML 2.0 content elements as well as
user-defined function symbols and operators."
}
\end{chunk}
\index{Bradford, Russell}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@inproceedings{Brad02a,
author = "Bradford, Russell and Davenport, James H.",
title = {{Towards Better Simplification of Elementary Functions}},
booktitle = "ISSAC 02",
publisher = "ACM",
year = "2002",
isbn = "1-58113-484-3",
abstract =
"We present an algorithm for simplifying a large class of
elementary functions in the presence of branch cuts. This
algorithm works by:
\begin{itemize}
\item verifying that the proposed simplification is correct as a
simplification of multi-valued functions
\item decomposing $\mathbb{C}$ (or $\mathbb{C}^n$ in the case of
multivariate simplifications) according to the branch cuts of the
relevant functions
\item checking that the proposed identity is valid on each
component of that decomposition
\end{itemize}
This process can be interfaced to an assume facility, and, if
required, can verify that simplifications are valid ``almost
everywhere''.",
paper = "Brad02a.pdf"
}
\end{chunk}
\index{Zucker, Philip}
\begin{chunk}{axiom.bib}
@misc{Zuck19,
author = "Zucker, Philip",
title = {{Grobner Bases and Optics}},
link = "\url{http://www.philipzucker.com/grobner-bases-and-optics}",
year = "2019"
}
\end{chunk}
---
books/bookvolbib.pamphlet | 769 ++++++++++++++++++++++++++++++++++++++++-
changelog | 2 +
patch | 712 ++++++++++++++++++++++++++++++++++++++
src/axiom-website/patches.html | 2 +
4 files changed, 1476 insertions(+), 9 deletions(-)
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 747462f..6396c99 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -6660,7 +6660,8 @@ when shown in factored form.
distribution mechanism that allows for parallel and distributed
execution of FOXBOX programs. Finally, FOXBOX plugs into a
server/client-style Maple application interface.",
- paper = "Diaz98.pdf"
+ paper = "Diaz98.pdf",
+ keywords = "axiomref"
}
\end{chunk}
@@ -12832,6 +12833,53 @@ when shown in factored form.
\end{chunk}
+\index{Abraham, Erika}
+\index{Abbott, John}
+\index{Becker, Bernd}
+\index{Bigatti, Anna M.}
+\index{Brain, Martin}
+\index{Buchberger, Bruno}
+\index{Cimatti, Alessandro}
+\index{Davenport, James H.}
+\index{England, Matthew}
+\index{Fontaine, Pascal}
+\index{Forrest, Stephen}
+\index{Griggio, Alberto}
+\index{Kroening, Daniel}
+\index{Seiler, Werner M.}
+\index{Sturm, Thomas}
+\begin{chunk}{axiom.bib}
+@article{Abra16b,
+ author = "Abraham, Erika and Abbott, John and Becker, Bernd and
+ Bigatti, Anna M. and Brain, Martin and Buchberger, Bruno
+ and Cimatti, Alessandro and Davenport, James H. and
+ England, Matthew and Fontaine, Pascal and Forrest, Stephen
+ and Griggio, Alberto and Kroening, Daniel and
+ Seiler, Werner M. and Sturm, Thomas",
+ title = {{$SC^2$: Satisfiability Checking Meets Symbolic Computation}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "9791",
+ year = "2016",
+ abstract =
+ "Symbolic Computation and Satisfiability Checking are two research
+ areas, both having their individual scientific focus but sharing
+ also common interests in the development, implementation and
+ application of decision procedures for arithmetic
+ theories. Despite their commonalities, the two communities are
+ weakly connected. The aim of our newly accepted $SC^2$ project
+ (H2020-FETOPEN-CSA) is to strengthen the connection between these
+ communities by creating common platforms, initiating interaction
+ and exchange, identifying common challenges, and developing a
+ common roadmap from theory along the way to tools and (industrial)
+ applications. In this paper we report on the aims and on the first
+ activities of this project, and formalise some relevant challenges
+ for the unified $SC^2$ community.",
+ paper = "Abra16b.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Abrahams, Paul W.}
\index{Barnett, Jeffrey A.}
\index{Book, Erwin}
@@ -12874,6 +12922,40 @@ when shown in factored form.
\end{chunk}
+\index{Aharonovich, I.}
+\index{Horwitz, L.P.}
+\begin{chunk}{axiom.bib}
+@article{Ahar12,
+ author = "Aharonovich, I. and Horwitz, L.P.",
+ title = {{Radiation-reaction in classical off-shell electrodynamics,
+ I: The above mass-shell case}},
+ journal = "J. Math. Phys.",
+ volume = "53",
+ number = "3",
+ year = "2012",
+ abstract =
+ "Offshell electrodynamics based on a manifestly covariant
+ off-shell relativistic dynamics of Stueckelberg, Horwitz, and
+ Piron, is five-dimensional. In this paper, we study the problem of
+ radiation reaction of a particle in motion in this framework. In
+ particular, the case of the above-mass-shell is studied in detail,
+ where the renormalization of the Lorentz force leads to a system
+ of non-linear differential equations for 3 Lorentz scalars. The
+ system is then solved numerically, where it is shown that the
+ mass-shell deviation scalar either smoothly falls down to 0 (this
+ result provides a mechanism for the mass stability of the
+ off-shell theory), or strongly diverges under more extreme
+ conditions. In both cases, no runaway motion is
+ observed. Stability analysis indicates that the system seems to
+ have chaotic behavior. It is also shown that, although a motion
+ under which the mass-shell deviation $\epsilon$ is constant but
+ not-zero, is indeed possible, but, it is unstable, and eventually
+ it either decays to 0 or diverges.",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Ait-Kaci, Hassan}
\begin{chunk}{axiom.bib}
@book{Aitk99,
@@ -12953,6 +13035,35 @@ when shown in factored form.
\end{chunk}
+\index{Ardizzoni, Alessandro}
+\index{Stumbo, Fabio}
+\begin{chunk}{axiom.bib}
+@article{Ardi09,
+ author = "Ardizzoni, Alessandro and Stumbo, Fabio",
+ title = {{Quadratic Lie Algebras}},
+ journal = "Commun. Algebra",
+ volume = "39",
+ number = "8",
+ pages = "2723-2751",
+ year = "2011",
+ link = "\url{https://arxiv.org/pdf/0906.4617.pdf}",
+ abstract =
+ "In this paper, the notion of universal enveloping algebra
+ introduced in [A. Ardizzoni, A First Sight Towards Primitively
+ Generated Connected Braided Bialgebras] is specialized to the case
+ of braided vector spaces whose Nichols algebra is quadratic as an
+ algebra. In this setting a classification of universal enveloping
+ algebras for braided vector spaces of dimension not greater than
+ 2 is handled. As an application, we investigate the structure of
+ primitively generated connected braided bialgebras whose braided
+ vector space of primitive elements forms a Nichols algebra which
+ is a quadratic algebra.",
+ paper = "Ardi09.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Altenkirch, Thorsten}
\index{McBride, Conor}
\index{Swierstra, Wouter}
@@ -13949,6 +14060,20 @@ when shown in factored form.
\end{chunk}
+\index{Benker, Hans}
+\begin{chunk}{axiom.bib}
+@book{Benk99,
+ author = "Benker, Hans",
+ title = {{Practical Use of MATHCAD: Solving Mathematical Problems
+ with a Computer Algebra System}},
+ publisher = "Springer-Verlag",
+ year = "1999",
+ isbn = "978-1-85233-166-5",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Beer, Randall D.}
\begin{chunk}{axiom.bib}
@article{Beer87,
@@ -14022,6 +14147,17 @@ when shown in factored form.
\end{chunk}
+\index{Bhat, Siddharth}
+\begin{chunk}{axiom.bib}
+@misc{Bhat19,
+ author = "Bhat, Siddharth",
+ title = {{Computing Equivalent Gate Sets Using Grobner Bases}},
+ link = "\url{https://bollu.github.io/#computing-equivalent-gat-sets-using-grobner-bases}",
+ year = "2019"
+}
+
+\end{chunk}
+
\index{Biha, Sidi Ould}
\begin{chunk}{axiom.bib}
@article{Biha09,
@@ -14364,6 +14500,38 @@ when shown in factored form.
\end{chunk}
+\index{Bradford, Russell}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Brad02a,
+ author = "Bradford, Russell and Davenport, James H.",
+ title = {{Towards Better Simplification of Elementary Functions}},
+ booktitle = "ISSAC 02",
+ publisher = "ACM",
+ year = "2002",
+ isbn = "1-58113-484-3",
+ abstract =
+ "We present an algorithm for simplifying a large class of
+ elementary functions in the presence of branch cuts. This
+ algorithm works by:
+ \begin{itemize}
+ \item verifying that the proposed simplification is correct as a
+ simplification of multi-valued functions
+ \item decomposing $\mathbb{C}$ (or $\mathbb{C}^n$ in the case of
+ multivariate simplifications) according to the branch cuts of the
+ relevant functions
+ \item checking that the proposed identity is valid on each
+ component of that decomposition
+ \end{itemize}
+
+ This process can be interfaced to an assume facility, and, if
+ required, can verify that simplifications are valid ``almost
+ everywhere''.",
+ paper = "Brad02a.pdf"
+}
+
+\end{chunk}
+
\index{Brady, Edwin C.}
\begin{chunk}{axiom.bib}
@phdthesis{Brad05,
@@ -14412,6 +14580,29 @@ when shown in factored form.
\end{chunk}
+\index{Bradford, R.J.}
+\index{Davenport, J.H.}
+\begin{chunk}{axiom.bib}
+@article{Brad88,
+ author = "Bradford, R.J. and Davenport, J.H.",
+ title = {{Effective Tests for Cyclotomic Polynomials}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "244-251",
+ year = "1988",
+ abstract =
+ "We present two efficient tests that determine if a given
+ polynomial is cyclotomic, or is a product of cyclotomics. The
+ first method uses the fact that all the roots of a cyclotomic
+ polynomial are roots of unity, and the second the fact that the
+ degree of a cyclotomic polynomial is a value of $\phi(n)$, for
+ some $n$. We can also find the cyclotomic factors of any
+ polynomial.",
+ paper = "Brad88.pdf"
+}
+
+\end{chunk}
+
\index{Bradford, Russell}
\index{Davenport, James H.}
\index{England, Matthew}
@@ -14457,6 +14648,54 @@ when shown in factored form.
\end{chunk}
+\index{Bronstein, Manuel}
+\begin{chunk}{axiom.bib}
+@article{Bron88a,
+ author = "Bronstein, Manuel",
+ title = {{Fast Reduction of the Risch Differential Equation}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "64-72",
+ year = "1988",
+ abstract =
+ "Since Risch (1969) reduced the problem of integrating
+ exponentials to solving a first order linear differential equation
+ in a differential field, there has been considerable interest in
+ solving \[y^\prime + fy = g\] for $y$ in a given differential
+ field $K$, where $f,g\in K$. Risch (1969) gave an algorithm for a
+ more general equation. His algorithm, however, required factoring
+ the denominators of $f$ and $g$, which is an obstacle to efficient
+ implementations.
+
+ Later, Rothstein (1976) and Davenport (1986) presented algorithms
+ for equation $(R)$ that both rely on a squarefree factorization of
+ the denominator for $y$. The algorithm in (Davenport 1986) has
+ been implemented in the Scratchpad II (see Jenks et al.) and Maple
+ (see Char et al, 1985) computer algebra systems. This algorithm,
+ however, requires $f$ to be in a certain form (called {\sl weakly
+ normalized}), but no computer algorithm that makes $f$ weakly
+ normalized has been published.
+
+ We present here a weaker definition of weak-normality, which
+ \begin{enumerate}
+ \item can always be obtained in a tower of transcendental
+ elementary extensions,
+ \item gives us an explicit formula for the denominator of $y$, so
+ equation $(R)$ can be reduced to a polynomial equation in one
+ reduction step
+ \end{enumerate}
+
+ As a consequence, we obtain a new algorithm for solving equation
+ $(R)$. Our algorithm is very similar to the one described in
+ Rothstein (1976), except that we use weak normality to prevent
+ finite cancellation, rather than having to find integer roots of
+ polynomials over the constant field of $K$ in order to detect it.",
+ paper = "Bron88a.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
\index{Brooks, Frederick P.}
\begin{chunk}{axiom.bib}
@misc{Broo86a,
@@ -14827,6 +15066,33 @@ when shown in factored form.
\end{chunk}
+\index{Canny, John}
+\begin{chunk}{axiom.bib}
+@article{Cann88a,
+ author = "Canny, John",
+ title = {{Generalized Characteristic Polynomials}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "293-299",
+ year = "1988",
+ abstract =
+ "We generalize the notion of characteristic polynomial for a
+ system of linear equations to systems of multivariate polynomial
+ equations. The generalization is natural in the sense that it
+ reduces to the usual definition when all the polynomials are
+ linear, whereas the constant coefficient of the characteristic
+ polynomial is the resultant of the system. This construction is
+ applied to solve a traditional problem with efficient methods for
+ solving systems of polynomial equations: the presence of
+ infinitely many solutions ``at infinity''. We give a
+ single-exponential time method for finding all the isolated
+ solution points of a system of polynomials, even in the presence
+ of infinitely many solutions at infinity or elsewhere.",
+ paper = "Cann88a.pdf"
+}
+
+\end{chunk}
+
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Card88b,
@@ -15026,6 +15292,37 @@ when shown in factored form.
\end{chunk}
+\index{Carneiro, Mario}
+\begin{chunk}{axiom.bib}
+@misc{Carn19,
+ author = "Carneiro, Mario",
+ title = {{Metamath Zero: The Cartesian Theorem Prover}},
+ link = "\url{https://arxiv.org/pdf/1910.10703.pdf}",
+ year = "2019",
+ abstract =
+ "As the usage of theorem prover technology expands, so too does
+ the reliance on correctness of the tools. Metamath Zero is a
+ verification system that aims for simplicity of logic and
+ implementation, without compromising on efficiency of
+ verification. It is formally specified in its own language, and
+ supports a number of translations to and from other proof
+ languages. This paper describes the abstract logic of Metamath
+ Zero, essentially a multi-sorted first order logic, as well as the
+ binary proof format and the way in which it can ensure essentially
+ linear time verification while still being concise and efficient
+ at scale. Metamath Zero currently holds the record for fastest
+ verification of the {\tt set.mm} Metamath library of proofs in ZFC
+ (including 71 of Wiedijk's 100 formalization targets), at less
+ than 200 ms. Ultimately, we intend to use it to verify the
+ correctness of the implementation of the verifier down to binary
+ executable, so it can be used as a root of trust for more complex
+ proof systems.",
+ paper = "Carn19.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Castagna, Giuseppe}
\index{Lanvin, Victor}
\index{Petrucciani, Tommaso}
@@ -15283,7 +15580,8 @@ when shown in factored form.
number = "3/4",
pages = "197-201",
year = "2014",
- paper = "Chen14.pdf"
+ paper = "Chen14.pdf",
+ keywords = "axiomref"
}
\end{chunk}
@@ -16948,7 +17246,8 @@ when shown in factored form.
popular logics such as first order logic, simple type theory, and
set theory. Many of the ideas and mechanisms used in the framework
are inspired by the IMPS Interactive Mathematical Proof System.",
- paper = "Farm01.pdf"
+ paper = "Farm01.pdf",
+ keywords = "axiomref"
}
\end{chunk}
@@ -17157,6 +17456,31 @@ when shown in factored form.
\end{chunk}
+\index{Fournie, Michel}
+\begin{chunk}{axiom.bib}
+@article{Four99,
+ author = "Fournie, Michel",
+ title = {{High-order compact schemes: Application to bidimensional unsteady
+ diffusion-convection problems. II}},
+ journal = "C. R. Acad. Sci.",
+ volume = "328",
+ number = "6",
+ pages = "539-542",
+ year = "1999",
+ abstract =
+ "This Note generalizes the construction and the analysis of high
+ order compact difference schemes for unsteady 2D
+ diffusion-convection problems cf. Part I by A. Rigal. The accuracy
+ (of order 2 in time and 4 in space) and the stability analysis
+ yield different choices of the weighting parameters. This work,
+ realized with symbolic computation systems, allows us to obtain
+ analytical results for a wide class of schemes and to increase the
+ stability domain in some cases.",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Fredrikson, Matt}
\begin{chunk}{axiom.bib}
@misc{Fred16,
@@ -17182,6 +17506,70 @@ when shown in factored form.
\end{chunk}
+\index{Galligo, Andre}
+\index{Pottier, Loic}
+\index{Traverso, Carlo}
+\begin{chunk}{axiom.bib}
+@article{Gall88,
+ author = "Galligo, Andre and Pottier, Loic and Traverso, Carlo",
+ title = {{Greater Easy Common Divisor and Standard Basis Completion
+ Algorithms}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "162-176",
+ year = "1988",
+ abstract =
+ "The computation of a standard basis (also called Groebner basis)
+ of a multivariate polynomial ideal over a field $K$ is crucial in
+ many applications. The problem is intrinsically time and space
+ consuming, and many researchers aim to improve the basic algorithm
+ due to B. Buchberger [Bu1]. One has investigated the problem of
+ the optimal choice of the term ordering depending from the use
+ that has to be made of the results, [BS], and the systematic
+ elimination of unnecessary reductions [Bu2],[GM],[Po]. We can call
+ all these problems ``combinatorial complexity problems''.
+
+ The present paper considers arithmetic complexity problems; our
+ main problem is how to limit the growth of the coefficients in the
+ algorithms and the complexity of the field operations
+ involved. The problem is important with every ground field, with
+ the obvious exception of finite fields.
+
+ The ground field is often represented as the field of fractions of
+ some explict domain, which is usually a quotient of a finite
+ extension of $Z$, and the computations are hence reduced to these
+ domains.
+
+ The problem of coefficient growth, and the complexity already
+ appearing in the calculation of the GCD of two univariate
+ polynomials, which is indeed a very special case of standard basis
+ computation; the PRS algorithms of Brown and Collins operate the
+ partial coefficient simplifications predicted by a theorem, hence
+ succeeding in controlling this complexity.
+
+ Our approach looks for analogies with these algorithms, but a
+ general structure theorem is missing, hence our approach relies on
+ a limited search of coefficient simplifications. The basic idea is
+ the following: since the GCD is usually costly, we can use in its
+ place the ``greatest between the common divisors that are easy to
+ compute'' (the GECD), this suggestion allowing different
+ instances. A set of such instances is based on the remark that if
+ you have elements in factorized form, then many common divisors are
+ immediately evident. Since irreducible factorization, even
+ assuming that it exists in our domain, is costly, we use a partial
+ factorization basically obtained using a ``lazy multiplication''
+ technique, i.e. performing coefficient multiplications only if
+ they are unavoidable. The resulting algorithms were tested with a
+ ``simulated'' implementation on the integers, and the results
+ suggest that a complete implementation should be very efficient,
+ at least when the coefficient domain is a multivariate rational
+ function field.",
+ paper = "Gall88.pdf",
+ keywords = "GCD"
+}
+
+\end{chunk}
+
\index{Ganesalingam, Mohan}
\begin{chunk}{axiom.bib}
@phdthesis{Gane09,
@@ -17268,6 +17656,56 @@ when shown in factored form.
\end{chunk}
+\index{Geddes, Keith O.}
+\index{Gonnet, Gaston H.}
+\index{Smedley, Trevor J.}
+\begin{chunk}{axiom.bib}
+@article{Gedd88,
+ author = "Geddes, Keith O. and Gonnet, Gaston H. and Smedley, Trevor J.",
+ title = {{Heuristic Methods for Operations With Algebraic Numbers}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "475-480",
+ year = "1988",
+ abstract =
+ "Algorithms for doing computations involving algebraic numbers
+ have been known for quite some time [6,9,12] and implementations
+ now exist in many computer algebra systems [1,4,11]. Many of these
+ algorithms have been analysed and shown to run in polynomial time
+ and space [7,8], but in spite of this many real problems take
+ large amounts of time and space to solve. In this paper we
+ describe a heuristic method which can be used for many operations
+ involving algebraic numbers. We give specifics for doing algebraic
+ number inverses, and algebraic number polynomial exact division
+ and greatest common divisor calculation. The heuristic will not
+ solve all instances of these problems, but it returns either the
+ correct result or with failure very quickly, and succeeds for a
+ very large number of problems. The heuristic method is similar to,
+ and based on concepts in [3].",
+ paper = "Gedd88.pdf",
+ keywords = "GCD"
+}
+
+\end{chunk}
+
+\index{Gerdt, Vladimir P.}
+\index{Koepf, Wolfram}
+\index{Mayr, Ernst W.}
+\index{Vorozhtsov, Evgenii V.}
+\begin{chunk}{axiom.bib}
+@book{Gerd13,
+ author = "Gerdt, Vladimir P. and Koepf, Wolfram and Mayr, Ernst W.
+ and Vorozhtsov, Evgenii V.",
+ title = {{Computer Algebra in Scientific Computing}},
+ publisher = "Springer",
+ year = "2013",
+ comment = "LNCS 8136",
+ paper = "Gerd13.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@article{Geuv00,
@@ -17331,6 +17769,33 @@ when shown in factored form.
\end{chunk}
+\index{Gianni, Patrizia}
+\index{Miller, Victor}
+\index{Trager, Barry}
+\begin{chunk}{axiom.bib}
+@article{Gian88a,
+ author = "Gianni, Patrizia and Miller, Victor and Trager, Barry",
+ title = {{Decomposition of Algebras}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "300-308",
+ year = "1988",
+ abstract =
+ "In this paper we deal with the problem of decomposing finite
+ commutative $Q$-algebras as a direct product of local
+ $Q$-algebras. We solve this problem by reducing it to the problem
+ of finding a decomposition of finite algebras over a finite
+ field. We will show that it is possible to define a lifting
+ process that allows us to reconstruct the answer over the rational
+ numbers. This lifting appears to be very efficient since it is a
+ quadratic lifting that doesn't require stepwise inversions. It is
+ easy to see that the Berlekamp-Hensel algorithm for the
+ factorization of polynomials is a special case of this argument.",
+ paper = "Gian88a.pdf"
+}
+
+\end{chunk}
+
\index{Giannini, Paola}
\begin{chunk}{axiom.bib}
@techreport{Gian85,
@@ -18677,7 +19142,8 @@ when shown in factored form.
correctness of Computer Algebra algorithms. In particular, we use
ML4PG to help us in the formalisation of an efficient algorithm to
computer the inverse of triangular matrices.",
- paper = "Hera13.pdf"
+ paper = "Hera13.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ -19039,6 +19505,42 @@ when shown in factored form.
\end{chunk}
+\index{Ioakimidis, Nikolaos I.}
+\begin{chunk}{axiom.bib}
+@article{Ioak00,
+ author = "Ioakimidis, Nikolaos I.",
+ title = {{Derivation of Feasibility Conditions in Engineering
+ Problems under Parametric Inequality Constraints with
+ Classical Fourier Elimination}},
+ journal = "Int. J. Numerical Methods Eng.",
+ volume = "48",
+ number = "11",
+ pages = "1583-1599",
+ year = "2000",
+ abstract =
+ "Fourier (or Motzkin or even Fourier-Motzkin) elimination is the
+ classical and equally old analogue of Gaussian elimination for the
+ solution of linear equations in the case of linear
+ inequalities. Here this approach and two standard improvements are
+ applied to two engineering problems (involving numerical
+ integration in fracture mechanics as well as finite differences in
+ heat transfer in the parametric case) with linear inequality
+ constraints. The results obtained by a solver of systems of
+ inequalities concern the feasibility conditions (quantifier-free
+ formulae), so that the satisfaction of the original system of
+ linear inequality constraints can be possible. We use the computer
+ algebra system Maple V and a related elementary procedure for
+ Fourier elimination. The results constitute an extension of
+ already available applications of computer algebra software to the
+ classical approximate-numerical methods traditionally employed in
+ engineering, and are also related to computational elimination
+ techniques in computer algebra and applied logic.",
+ paper = "Ioak00.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Innes, Sean}
\index{Wu, Nicolas}
\begin{chunk}{axiom.bib}
@@ -19591,6 +20093,19 @@ when shown in factored form.
\end{chunk}
+\index{Knuth, Donald E.}
+\index{Larrabee, Tracy}
+\index{Roberts, Paul M.}
+\begin{chunk}{axiom.bib}
+@misc{Knut87,
+ author = "Knuth, Donald E. and Larrabee, Tracy and Roberts, Paul M.",
+ title = {{Mathematical Writing}},
+ year = "1987",
+ link = "\url{jmlr.csail.mit.edu/reviewing-papers/knuth_mathematical_writing.pdf}"
+}
+
+\end{chunk}
+
\index{Kohlhase, Michael}
\index{Muller, Christine}
\index{Rabe, Florian}
@@ -19891,6 +20406,41 @@ when shown in factored form.
\subsection{L} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Lamagna, Edmund A.}
+\begin{chunk}{axiom.bib}
+@book{Lama19,
+ author = "Lamagna, Edmund A.",
+ title = {{Computer Algebra: Concepts and Techniques}},
+ publisher = "CRC Press",
+ year = "2019",
+ isbn = "978-1-138-09314-0",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Lambe, Larry A.}
+\begin{chunk}{axiom.bib}
+@article{Lamb16,
+ author = "Lambe, Larry A.",
+ title = {{An Algebraic Study of the Klein Bottle}},
+ journal = "Homotopy and Related Structures",
+ volume = "11",
+ number = "4",
+ pages = "885-891",
+ year = "2016",
+ abstract =
+ "We use symbolic computation (SC) and homological perturbation
+ (HPT) to compute a resolution of the integers $\mathbb{Z}$ over
+ the integer group ring of $G$, the fundamental group of the Klein
+ bottle. From this it is easy to read off the homology of the Klein
+ bottle as well as other information.",
+ paper = "Lamb16.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Lamport, Leslie}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@@ -22182,6 +22732,30 @@ when shown in factored form.
\end{chunk}
+\index{Rennert, Nicolas}
+\index{Valibouze, Annick}
+\begin{chunk}{axiom.bib}
+@article{Renn99,
+ author = "Rennert, Nicolas and Valibouze, Annick",
+ title = {{Computation of Resolvents using Cauchy Modules}},
+ journal = "Experimental Mathematics",
+ volume = "8",
+ number = "4",
+ pages = "351-366",
+ year = "1999",
+ comment = "French",
+ abstract =
+ "We give a new and efficient algorithm to compute some
+ characteristic polynomials using Cauchy modules. This algorithm is
+ used for the computation of absolute resolvents and
+ multiresolvents, which are essential tools in constructive Galois
+ theory.",
+ paper = "Renn99.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@inproceedings{Reyn83,
@@ -22312,6 +22886,36 @@ when shown in factored form.
\end{chunk}
+\index{Roanes-Lozano, Eugenio}
+\index{Galan-Garcia, Jose Luis}
+\index{Solano-Macias, Carmen}
+\begin{chunk}{axiom.bib}
+@article{Roan19,
+ author = "Roanes-Lozano, Eugenio and Galan-Garcia, Jose Luis and
+ Solano-Macias, Carmen",
+ title = {{Some Reflections About the Success and Impact of the
+ Computer Algebra System DERIVE with a 10-Year Time
+ Perspective}},
+ journal = "Mathematics in Computer Science",
+ volume = "13",
+ number = "3",
+ pages = "417-431",
+ year = "2019",
+ abstract =
+ "The computer algebra system DERIVE had a very important impact in
+ teaching mathematics, mainly in the 1990's. The authors analyze
+ the possible reasons for its success and impact and give personal
+ conclusions based on the facts collected. More than 10 years after
+ it was discontinued it is still used for teaching and several
+ scientific papers (mostly devoted to educational issues) still
+ refer to it. A summary of the history, journals and conferences
+ together with a brief bibliographic analysis are included.",
+ paper = "Roan19.pdf",
+ keywords = "axiomref, printed, DONE"
+}
+
+\end{chunk}
+
\index{Robinson, J.A.}
\index{Sibert, E.E.}
\begin{chunk}{axiom.bib}
@@ -22807,6 +23411,18 @@ when shown in factored form.
\end{chunk}
+\index{Schopenhauer, Arthur}
+\begin{chunk}{axiom.bib}
+@misc{Scho16,
+ author = "Schopenhauer, Arthur",
+ title = {{Essays of Schopenhauer: On Reading and Books}},
+ link =
+ "\url{https://ebooks.adelaide.edu.au/s/schopenhauer/arthur/essays/chapter3.html}",
+ year = "2016"
+}
+
+\end{chunk}
+
\index{Schorre, D.V.}
\begin{chunk}{axiom.bib}
@inproceedings{Scho64,
@@ -23732,6 +24348,79 @@ when shown in factored form.
\end{chunk}
+\index{de la Tour, Thierry Boy}
+\index{Caferra, Ricardo}
+\begin{chunk}{axiom.bib}
+@article{Tour88,
+ author = "de la Tour, Thierry Boy and Caferra, Ricardo",
+ title = {{A Formal Approach to some Usually Informal Techniques used
+ in Mathematical Reasoning}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "402-408",
+ year = "1988",
+ abstract =
+ "One of the striking characteristics of mathematical reasoning is
+ the contrast between the formal aspects of mathematical truth and
+ the informal character of the ways to that truth. Among the many
+ important and usually informal mathematical activities we are
+ interested by proof analogy (i.e. common pattern between proofs of
+ different theorems) in the context of interactive theorem
+ proving. In some sense we propose a partial contribution of one of
+ the Polya's wishes [Polya 73]:
+ \begin{quote}
+ Analogy pervades all our thinking, our everyday speech and our
+ trivial conclusions as well as artistic way of expression and the
+ highest scientific achievements... but analogy may reach the level
+ of mathematical precision...
+ \end{quote}
+
+ It is a work in philosophy of mathematics [Resnik 75], in which
+ mathematics is viewed as studying {\sl patterns} or
+ {\sl structures}, which encouraged us to pursue our aim of
+ partially formalizing {\sl analogy}. We naturally arrived at the
+ need of considering other activities strongly tied to the notion
+ of analogy and very well known of the working mathematician:
+ {\sl generalization}, {\sl abstraction} and {\sl analysis of
+ proofs}.
+
+ Let us assume that the user has to prove a conjecture C. Given a
+ proof P for a known theorem T, he describes in the language L a
+ scheme of P. Then he expresses the syntactic analogy he views as
+ relevant between the scheme of P and what ``should be'' a proof
+ scheme for C, as a {\sl transformation rule}. This process needs
+ analysis of proofs, generalization and abstraction.
+
+ A second order pattern matching algorithm constructs matchers of P
+ and its scheme, and applies them to the intended proof scheme of
+ the conjecture C. In the best case one obtains a potential proof
+ of C, but in general one obtains a more instantiated proof-scheme
+ with some ``holes'' to be filled by a theorem prover. In both
+ cases a proof-checking process is necessary (analogy is in general
+ a heuristic way of thinking).
+
+ A question arises naturally: ``What to do when the assumed analogy
+ fails?''. We study in this paper one of the possible answers:
+ ``Information may be extracted from the failure in order to
+ {\sl suggest lemmas} that try to {\sl semantically} restore
+ analogy''. Of course these lemmas can also serve for detecting wrong
+ analogies (for example, if required lemmas clearly cannot be
+ theorems). Two kinds of failure are possible:
+ \begin{enumerate}
+ \item The given proof P and its proposed scheme are not matchable
+ \item A type error is detected in the instantiated proof scheme
+ for C.
+ \end{enumerate}
+
+ We give in the following a method to suggest lemmas in the first
+ case, and sketch some ideas of how to use such a possibility in
+ the second case.",
+ paper = "Tour88.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Trager, Barry}
\begin{chunk}{axiom.bib}
@article{Trag79,
@@ -24490,6 +25179,17 @@ when shown in factored form.
\end{chunk}
+\index{Zeilberger, Doron}
+\begin{chunk}{axiom.bib}
+@misc{Zeil02,
+ author = "Zeilberger, Doron",
+ title = {{Topology: The Slum of Combinatorics}},
+ year = "2002",
+ link = "\url{http://sites.math.rutgers.edu/~zeilberg/Opinion1.html}"
+}
+
+\end{chunk}
+
\index{Zhao, Jinxu}
\index{Oliveira, Bruno C.D.S}
\index{Schrijvers, Tom}
@@ -24537,6 +25237,17 @@ when shown in factored form.
\end{chunk}
+\index{Zucker, Philip}
+\begin{chunk}{axiom.bib}
+@misc{Zuck19,
+ author = "Zucker, Philip",
+ title = {{Grobner Bases and Optics}},
+ link = "\url{http://www.philipzucker.com/grobner-bases-and-optics}",
+ year = "2019"
+}
+
+\end{chunk}
+
\section{Proving Axiom Correct -- Spring 2018}
\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -53097,7 +53808,7 @@ American Mathematical Society (1994)
title = {{Engineering mathematics with computer algebra systems}},
publisher = "Unknown",
year = "1998",
- comment = "german",
+ comment = "German",
keywords = "axiomref",
beebe = "Benker:1998:ICS"
}
@@ -56919,6 +57630,7 @@ May 1984
(constructible and polynomial) on the subject of square-free
conditions, it allows us to effect dramatic improvements in the
dynamic constructible closure programs.",
+ paper = "Dell01.pdf",
keywords = "axiomref"
}
@@ -57210,6 +57922,7 @@ and Laine, M. and Valkeila, E. pp1-12 University of Helsinki, Finland (1994)
closure domain constructor in the language Scratchpad II, simply
called Scratchpad below. In the first part we analyze the problem, and
in the second part we describe a solution based on the D5 system.",
+ paper = "Dicr88.pdf",
keywords = "axiomref",
beebe = "Dicrescenzo:1989:AEA"
}
@@ -57343,6 +58056,36 @@ and Laine, M. and Valkeila, E. pp1-12 University of Helsinki, Finland (1994)
\end{chunk}
+\index{Dooley, Samuel S.}
+\begin{chunk}{axiom.bib}
+@InProceedings{Dool02,
+ author = "Dooley, Samuel S.",
+ title = {{Editing mathematical content and presentation markup in
+ interactive mathematical documents}},
+ booktitle = "Proc. ISSAC 2002",
+ series = "ISSAC 02",
+ year = "2002",
+ publisher = "ACM Press",
+ pages = "55-62",
+ abstract =
+ "The IBM MathML Expression Editor is a two-dimensional
+ mathematical editor for expressions encoded using MathML content
+ and presentation elements. It allows a user to interact with the
+ visual presentation of an expression, while simultaneously
+ creating the underlying structure of the expression. This paper
+ describes the internal expression framework used by the editor to
+ represent the content and presentation structures, the layout
+ mechanisms used to transform content into presentation, the
+ structural navigation conventions used to select subexpressions,
+ the editing templates used to support visual input of MathML
+ content expressions, and the customization framework that allows
+ for a fully extensible set of content operators, including
+ complete support for MathML 2.0 content elements as well as
+ user-defined function symbols and operators."
+}
+
+\end{chunk}
+
\index{Dooley, Sam}
\begin{chunk}{ignore}
\bibitem[Dooley 99]{Doo99} Dooley, Sam editor.
@@ -60812,6 +61555,7 @@ Vol. 8 No. 3 pp195-210 (2001)
in Algebraic Topology. This work shows that ACL2 can be a suitable
tool for formalising algebraic concepts coming, for instance, from
computer algebra systems.",
+ paper = "Hera15.pdf",
keywords = "axiomref"
}
@@ -61189,6 +61933,7 @@ Springer-Verlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
paradigms for building PSEs. The discussion of the future is given in
three parts: future trends, scenarios for 2010/2025, and research
issues to be addressed.",
+ paper = "Hous00.pdf",
keywords = "axiomref"
}
@@ -63451,6 +64196,7 @@ SIGSAM Communications in Computer Algebra, 157 2006
process is inextensional, but a detailed study of the simulation
results shows that in more general cases, it is likely that stretching
or wrinkling will occur.",
+ paper = "Kuma00.pdf",
keywords = "axiomref"
}
@@ -66514,7 +67260,7 @@ Computers and Mathematics November 1993, Vol 40, Number 9 pp1203-1210
@article{Riga99,
author = "Rigal, Alain",
title = {{High-order compact schemes: Application to bidimensional unsteady
- diffusion-convection problems.}},
+ diffusion-convection problems. I}},
journal = "C. R. Acad. Sci.",
volume = "328",
number = "6",
@@ -66723,6 +67469,7 @@ J. of Symbolic Computation 36 pp 513-533 (2003)
the $A$ satisfies the identity $((yx)x)x=y((xx)x)$, a generalization
of right-alternativity. Finally we prove that $Ass[A]$ and $N(A)$ are
ideals in these algebras.",
+ paper = "Roja13.pdf",
keywords = "axiomref"
}
@@ -66875,6 +67622,7 @@ J. of Symbolic Computation 36 pp 513-533 (2003)
the new methods are analyzed. The numerical results section shows
the high predetermined accuracy and the substantial gain in the
calculation times obtained using the new methods.",
+ paper = "Safo00.pdf",
keywords = "axiomref"
}
@@ -68587,6 +69335,7 @@ Kognitive Systeme, Universit\"t Karlsruhe 1992
mathematicians, engineers and econometricians}},
year = "1999",
publisher = "Birkhauser",
+ isbn = "0-8176-6091-7",
abstract =
"During the past decade, the mathematical computer software packages
such as Mathematica, Maple, MATLAB (Axiom, Derive, Macsyma, MuPad are
@@ -69600,7 +70349,8 @@ in [Wit87], pp13-17
exp($u$) for a dense, infinite series $u$ is reduced from $O(n^4)$ to
$O(n^2)$ coefficient operations, the same as required by the standard
on-line algorithms.",
- keywords = "axiomref",
+ paper = "Watt88.pdf",
+ keywords = "axiomref, printed",
beebe = "Watt:1989:FPM"
}
@@ -75360,7 +76110,8 @@ J. Inst. Math. Appl. 14 89--103. (1974)
program Rate, the parts concerned with guessing of Bruno Salvy and
Paul Zimmermann's GFUN, the univariate case of Manuel Kauers' Guess.m
and Manuel Kauers' and Christoph Koutschan's qGeneratingFunctions.m.",
- paper = "Hebi10.pdf"
+ paper = "Hebi10.pdf",
+ keywords = "axiomref"
}
\end{chunk}
@@ -76512,7 +77263,7 @@ Prentice-Hall. (1974)
\begin{chunk}{axiom.bib}
@article{Laza91,
author = "Lazard, Daniel",
- title = {{A new method for solving algebraic systems of positive dimension}},
+ title = {{A New Method for Solving Algebraic Systems of Positive Dimension}},
journal = "Discrete. Applied. Mathematics",
volume = "33",
year = "1991",
diff --git a/changelog b/changelog
index febc26d..eaba8fb 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20191109 tpd src/axiom-website/patches.html 20191109.03.tpd.patch
+20191109 tpd books/bookvolbib add references
20191109 tpd src/axiom-website/patches.html 20191109.02.tpd.patch
20191109 tpd books/bookvol10.1 add Groebner basis examples
20191109 tpd src/axiom-website/community.html add names to credit list
diff --git a/patch b/patch
index 12e4026..051235e 100644
--- a/patch
+++ b/patch
@@ -2,3 +2,715 @@ books/bookvol15 new book on Axiom Sane Compiler
Goal: Proving Axiom Sane
+\index{Bronstein, Manuel}
+\begin{chunk}
+@article{Bron88a,
+ author = "Bronstein, Manuel",
+ title = {{Fast Reduction of the Risch Differential Equation}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "64-72",
+ year = "1988",
+ abstract =
+ "Since Risch (1969) reduced the problem of integrating
+ exponentials to solving a first order linear differential equation
+ in a differential field, there has been considerable interest in
+ solving \[y^\prime + fy = g\] for $y$ in a given differential
+ field $K$, where $f,g\in K$. Risch (1969) gave an algorithm for a
+ more general equation. His algorithm, however, required factoring
+ the denominators of $f$ and $g$, which is an obstacle to efficient
+ implementations.
+
+ Later, Rothstein (1976) and Davenport (1986) presented algorithms
+ for equation $(R)$ that both rely on a squarefree factorization of
+ the denominator for $y$. The algorithm in (Davenport 1986) has
+ been implemented in the Scratchpad II (see Jenks et al.) and Maple
+ (see Char et al, 1985) computer algebra systems. This algorithm,
+ however, requires $f$ to be in a certain form (called {\sl weakly
+ normalized}), but no computer algorithm that makes $f$ weakly
+ normalized has been published.
+
+ We present here a weaker definition of weak-normality, which
+ \begin{enumerate}
+ \item can always be obtained in a tower of transcendental
+ elementary extensions,
+ \item gives us an explicit formula for the denominator of $y$, so
+ equation $(R)$ can be reduced to a polynomial equation in one
+ reduction step
+ \end{enumerate}
+
+ As a consequence, we obtain a new algorithm for solving equation
+ $(R)$. Our algorithm is very similar to the one described in
+ Rothstein (1976), except that we use weak normality to prevent
+ finite cancellation, rather than having to find integer roots of
+ polynomials over the constant field of $K$ in order to detect it.",
+ paper = "Bron88a.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
+\index{Galligo, Andre}
+\index{Pottier, Loic}
+\index{Traverso, Carlo}
+\begin{chunk}
+@article{Gall88,
+ author = "Galligo, Andre and Pottier, Loic and Traverso, Carlo",
+ title = {{Greater Easy Common Divisor and Standard Basis Completion
+ Algorithms}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "162-176",
+ year = "1988",
+ abstract =
+ "The computation of a standard basis (also called Groebner basis)
+ of a multivariate polynomial ideal over a field $K$ is crucial in
+ many applications. The problem is intrinsically time and space
+ consuming, and many researchers aim to improve the basic algorithm
+ due to B. Buchberger [Bu1]. One has investigated the problem of
+ the optimal choice of the term ordering depending from the use
+ that has to be made of the results, [BS], and the systematic
+ elimination of unnecessary reductions [Bu2],[GM],[Po]. We can call
+ all these problems ``combinatorial complexity problems''.
+
+ The present paper considers arithmetic complexity problems; our
+ main problem is how to limit the growth of the coefficients in the
+ algorithms and the complexity of the field operations
+ involved. The problem is important with every ground field, with
+ the obvious exception of finite fields.
+
+ The ground field is often represented as the field of fractions of
+ some explict domain, which is usually a quotient of a finite
+ extension of $Z$, and the computations are hence reduced to these
+ domains.
+
+ The problem of coefficient growth, and the complexity already
+ appearing in the calculation of the GCD of two univariate
+ polynomials, which is indeed a very special case of standard basis
+ computation; the PRS algorithms of Brown and Collins operate the
+ partial coefficient simplifications predicted by a theorem, hence
+ succeeding in controlling this complexity.
+
+ Our approach looks for analogies with these algorithms, but a
+ general structure theorem is missing, hence our approach relies on
+ a limited search of coefficient simplifications. The basic idea is
+ the following: since the GCD is usually costly, we can use in its
+ place the ``greatest between the common divisors that are easy to
+ compute'' (the GECD), this suggestion allowing different
+ instances. A set of such instances is based on the remark that if
+ you have elements in factorized form, then many common divisors are
+ immediately evident. Since irreducible factorization, even
+ assuming that it exists in our domain, is costly, we use a partial
+ factorization basically obtained using a ``lazy multiplication''
+ technique, i.e. performing coefficient multiplications only if
+ they are unavoidable. The resulting algorithms were tested with a
+ ``simulated'' implementation on the integers, and the results
+ suggest that a complete implementation should be very efficient,
+ at least when the coefficient domain is a multivariate rational
+ function field.",
+ paper = "Gall88.pdf",
+ keywords = "GCD"
+}
+
+\end{chunk}
+
+\index{Bradford, R.J.}
+\index{Davenport, J.H.}
+\begin{chunk}
+@article{Brad88,
+ author = "Bradford, R.J. and Davenport, J.H.",
+ title = {{Effective Tests for Cyclotomic Polynomials}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "244-251",
+ year = "1988",
+ abstract =
+ "We present two efficient tests that determine if a given
+ polynomial is cyclotomic, or is a product of cyclotomics. The
+ first method uses the fact that all the roots of a cyclotomic
+ polynomial are roots of unity, and the second the fact that the
+ degree of a cyclotomic polynomial is a value of $\phi(n)$, for
+ some $n$. We can also find the cyclotomic factors of any
+ polynomial.",
+ paper = "Brad88.pdf"
+}
+
+\end{chunk}
+
+\index{Gianni, Patrizia}
+\index{Miller, Victor}
+\index{Trager, Barry}
+\begin{chunk}
+@article{Gian88a,
+ author = "Gianni, Patrizia and Miller, Victor and Trager, Barry",
+ title = {{Decomposition of Algebras}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "300-308",
+ year = "1988",
+ abstract =
+ "In this paper we deal with the problem of decomposing finite
+ commutative $Q$-algebras as a direct product of local
+ $Q$-algebras. We solve this problem by reducing it to the problem
+ of finding a decomposition of finite algebras over finite
+ field. We will show that it is possible to define a lifting
+ process that allows to reconstruct the answer over the rational
+ numbers. This lifting appears to be very efficient since it is a
+ quadratic lifting that doesn't require stepwise inversions. It is
+ easy to see that the Berlekamp-Hensel algorithm for the
+ factorization of polynomials is a special case of this argument.",
+ paper = "Gian88a.pdf"
+}
+
+\end{chunk}
+
+\index{Canny, John}
+\begin{chunk}
+@article{Cann88a,
+ author = "Canny, John",
+ title = {{Generalized Characteristic Polynomials}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "293-299",
+ year = "1988",
+ abstract =
+ "We generalize the notion of characteristic polynomial for a
+ system of linear equations to systems of multivariate polynomial
+ equations. The generalization is natural in the sense that it
+ reduces to the usual definition when all the polynomials are
+ linear. Whereas the constant coefficient of the characteristic
+ polynomial is the resultant of the system. This construction is
+ applied to solve a traditional problem with efficient methods for
+ solving systems of polynomial equations: the presence of
+ infinitely many solutions ``at infinity''. We give a
+ single-exponential time method for finding all the isolated
+ solution points of a system of polynomials, even in the presence
+ of infinitely many solutions at infinity or elsewhere.",
+ paper = "Cann88a.pdf"
+}
+
+\end{chunk}
+
+\index{de la Tour, Thierry Boy}
+\index{Caferra, Richardo}
+\begin{chunk}
+@article{Tour88,
+ author = "de la Tour, Thierry Boy and Caferra, Ricardo",
+ title = {{A Formal Approach to some Usually Informal Techniques used
+ in Mathematical Reasoning}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "402-408",
+ year = "1988",
+ abstract =
+ "One of the striking characteristics of mathematical reasoning is
+ the contrast between the formal aspects of mathematical truth and
+ the informal character of the ways to that truth. Among the many
+ important and usually informal mathematical activities we are
+ interested by proof analogy (i.e. common pattern between proofs of
+ different theorems) in the context of interactive theorem
+ proving. In some sense we propose a partial contribution of one of
+ the Polya's wishes [Polya 73]:
+ \begin{quote}
+ Analogy pervades all our thinking, our everyday speech and our
+ trivial conclusions as well as artistic way of expression and the
+ highest scientific achievements... but analogy may reach the level
+ of mathematical precision...
+ \end{quote}
+
+ It is a work in philosophy of mathematics [Resnik 75], in which
+ mathematics is viewed as studying {\sl patterns} or
+ {\sl structures}, which encouraged us to pursue our aim of
+ partially formalizing {\sl analogy}. We naturally arrived at the
+ need of considering other activities strongly tied to the notion
+ of analogy and very well known of the working mathematician:
+ {\sl generalization}, {\sl abstraction} and {\sl analysis of
+ proofs}.
+
+ Let us assume that the user has to prove a conjecture C. Given a
+ proof P for a known theorem T, he describes in the language L a
+ scheme of P. Then he expresses the syntactic analogy he views as
+ relevant between the scheme of P and what ``should be'' a proof
+ scheme for C, as a {\sl transformation rule}. This process needs
+ analysis of proofs, generalization and abstraction.
+
+ A second order pattern matching algorithm constructs matchers of P
+ and its scheme, and applies them to the intended proof scheme of
+ the conjecture C. In the best case one obtains a potential proof
+ of C, but in general one obtains a more instantiated proof-scheme
+ with some ``holes'' to be filled by a theorem prover. In both
+ cases a proof-checking process is necessary (analogy is in general
+ a heuristic way of thinking).
+
+ A question arises naturally: ``What to do when the assumed analogy
+ fails?''. We study in this paper one of the possible answers:
+ ``Information may be extracted from the failure in order to
+ {\sl suggest lemmas} that try to {\sl semantically} restore
+ analogy''. Of course these lemmas can also serve for detecting wrong
+ analogies (for example, if required lemmas clearly cannot be
+ theorems.). Two kinds of failure are possible:
+ \begin{enumerate}
+ \item The given proof P and its proposed scheme are not matchable
+ \item A type error is detected in the instantiated proof scheme
+ for C.
+ \end{enumerate}
+
+ We give in the following a method to suggest lemmas in the first
+ case, and sketch some ideas of how to use such a possibility in
+ the second case.",
+ paper = "Tour88.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Geddes, Keith O.}
+\index{Gonnet, Gaston H.}
+\index{Smedley, Trevor J.}
+\begin{chunk}
+@article{Gedd88,
+ author = "Geddes, Keith O. and Gonnet, Gaston H. and Smedley, Trevor J.",
+ title = {{Heuristic Methods for Operations With Algebraic Numbers}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "358",
+ pages = "475-480",
+ year = "1988",
+ abstract =
+ "Algorithms for doing computations involving algebraic numbers
+ have been known for quite some time [6,9,12] and implementations
+ now exist in many computer algebra systems [1,4,11]. Many of these
+ algorithms have been analysed and shown to run in polynomial time
+ and space [7,8], but in spite of this many real problems take
+ large amounts of time and space to solve. In this paper we
+ describe a heuristic method which can be used for many operations
+ involving algebraic numbers. We give specifics for doing algebraic
+ number inverses, and algebraic number polynomial exact division
+ and greatest common divisor calculation. The heuristic will not
+ solve all instances of these problems, but it returns either the
+ correct result or with failure very quickly, and succeeds for a
+ very large number of problems. The heuristic method is similar to,
+ and based on concepts in [3].",
+ paper = "Gedd88.pdf",
+ keywords = "GCD"
+}
+
+\end{chunk}
+
+\index{Knuth, Donald E.}
+\index{Larrabee, Tracy}
+\index{Roberts, Paul M.}
+\begin{chunk}{axiom.bib}
+@misc{Knut87,
+ author = "Knuth, Donald E. and Larrabee, Tracy and Roberts, Paul M.",
+ title = {{Mathematical Writing}},
+ year = "1987",
+ link = "\url{jmlr.csail.mit.edu/reviewing-papers/knuth_mathematical_writing.pdf}"
+}
+
+\end{chunk}
+
+\index{Zeilberger, Doron}
+\begin{chunk}{axiom.bib}
+@misc{Zeil02,
+ author = "Zeilberger, Doron",
+ title = {{Topology: The Slum of Combinatorics}},
+ year = "2002",
+ link = "\url{http://sites.math.rutgers.edu/~zeilberg/Opinion1.html}"
+}
+
+\end{chunk}
+
+\index{Schopenhauer, Arthur}
+\begin{chunk}{axiom.bib}
+@misc{Scho16,
+ author = "Schopenhauer, Arthur",
+ title = {{Essays of Schopenhauer: On Reading and Books}},
+ link =
+ "\url{https://ebooks.adelaide.edu.au/s/schopenhauer/arthur/essays/chapter3.html}",
+ year = "2016"
+}
+
+\end{chunk}
+
+\index{Bhat, Siddharth}
+\begin{chunk}{axiom.bib}
+@misc{Bhat19,
+ author = "Bhat, Siddharth",
+ title = {{Computing Equivalent Gate Sets Using Grobner Bases}},
+ link = "\url{https://bollu.github.io/#computing-equivalent-gat-sets-using-grobner-bases}",
+ year = "2019"
+}
+
+\end{chunk}
+
+\index{Carneiro, Mario}
+\begin{chunk}{axiom.bib}
+@misc{Carn19,
+ author = "Carneiro, Mario",
+ title = {{Metamath Zero: The Cartesian Theorem Prover}},
+ link = "\url{https://arxiv.org/pdf/1910.10703.pdf}",
+ year = "2019",
+ abstract =
+ "As the usage of theorem prover technology expands, so too does
+ the reliance on correctness of the tools. Metamath Zero is a
+ verification system that aims for simplicity of logic and
+ implementation, without compromising on efficiency of
+ verification. It is formally specified in its own language, and
+ supports a number of translations to and from other proof
+ languages. This paper describes the abstract logic of Metamath
+ Zero, essentially a multi-sorted first order logic, as well as the
+ binary proof format and the way in which it can ensure essentially
+ linear time verification while still being concise and efficient
+ at scale. Metamath Zero currently holds the record for fastest
+ verification of the {\tt set.mm} Metamath library of proofs in ZFC
+ (including 71 of Wiedijk's 100 formalization targets), at less
+ than 200 ms. Ultimately, we intend to use it to verify the
+ correctness of the implementation of the verifier down to binary
+ executable, so it can be used as a root of trust for more complex
+ proof systems.",
+ paper = "Carn19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Roanes-Lozano, Eugenio}
+\index{Galan-Garcia, Jose Luis}
+\index{Solano-Macias, Carmen}
+\begin{chunk}{axiom.bib}
+@article{Roan19,
+ author = "Roanes-Lozano, Eugenio and Galan-Garcia, Jose Luis and
+ Solano-Macias, Carmen",
+ title = {{Some Reflections About the Success and Impact of the
+ Computer Algebra System DERIVE with a 10-Year Time
+ Perspective}},
+ journal = "Mathematics in Computer Science",
+ volume = "13",
+ number = "3",
+ pages = "417-431",
+ year = "2019",
+ abstract =
+ "The computer algebra system DERIVE had a very important impact in
+ teaching mathematics, mainly in the 1990's. The authors analyze
+ the possible reasons for its success and impact and give personal
+ conclusions based on the facts collected. More than 10 years after
+ it was discontinued it is still used for teaching and several
+ scientific papers (mostly devoted to educational issues) still
+ refer to it. A summary of the history, journals and conferences
+ together with a brief bibliographic analysis are included.",
+ paper = "Roan19.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
+\index{Abraham, Erika}
+\index{Abbott, John}
+\index{Becker, Bernd}
+\index{Bigatti, Anna M.}
+\index{Brain, Martin}
+\index{Buchberger, Bruno}
+\index{Cimatti, Alessandro}
+\index{Davenport, James H.}
+\index{England, Matthew}
+\index{Fontaine, Pascal}
+\index{Forrest, Stephen}
+\index{Griggio, Alberto}
+\index{Kroening, Daniel}
+\index{Seiler, Werner M.}
+\index{Sturm, Thomas}
+\begin{chunk}{axiom.bib}
+@article{Abra16b,
+ author = "Abraham, Erika and Abbott, John and Becker, Bernd and
+ Bigatti, Anna M. and Brain, Martin and Buchberger, Bruno
+ and Cimatti, Alessandro and Davenport, James H. and
+ England, Matthew and Fontaine, Pascal and Forrest, Stephen
+ and Griggio, Alberto and Kroening, Daniel and
+ Seiler, Werner M. and Sturm, Thomas",
+ title = {{SC$^2$: Satisfiability Checking Meets Symbolic Computation}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "9791",
+ year = "2016",
+ abstract =
+ "Symbolic Computation and Satisfiability Checking are two research
+ areas, both having their individual scientific focus but sharing
+ also common interests in the development, implementation and
+ application of decision procedures for arithmetic
+ theories. Despite their commonalities, the two communities are
+ weakly connected. The aim of our newly accepted $SC^2$ project
+ (H2020-FETOPEN-CSA) is to strengthen the connection between these
+ communities by creating common platforms, initiating interaction
+ and exchange, identifying common challenges, and developing a
+ common roadmap from theory along the way to tools and (industrial)
+ applications. In this paper we report on the aims and on the first
+ activities of this project, and formalise some relevant challenges
+ for the unified $SC^2$ community.",
+ paper = "Abra16b.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Lambe, Larry A.}
+\begin{chunk}{axiom.bib}
+@article{Lamb16,
+ author = "Lambe, Larry A.",
+ title = {{An Algebraic Study of the Klein Bottle}},
+ journal = "Homotopy and Related Structures",
+ volume = "11",
+ number = "4",
+ pages = "885-891",
+ year = "2016",
+ abstract =
+ "We use symbolic computation (SC) and homological perturbation
+ (HPT) to compute a resolution of the integers $\mathbb{Z}$ over
+ the integer group ring of $G$, the fundamental group of the Klein
+ bottle. From this it is easy to read off the homology of the Klein
+ bottle as well as other information.",
+ paper = "Lamb16.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Gerdt, Vladimir P.}
+\index{Koepf, Wolfram}
+\index{Mayr, Ernst W.}
+\index{Vorozhtsov, Evgenii V.}
+\begin{chunk}{axiom.bib}
+@book{Gerd13,
+ author = "Gerdt, Vladimir P. and Koepf, Wolfram and Mayr, Ernst W.
+ and Vorozhtsov, Evgenii V.",
+ title = {{Computer Algebra in Scientific Computing}},
+ publisher = "Springer",
+ year = "2013",
+ comment = "LNCS 8136",
+ paper = "Gerd13.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Ardizzoni, Alessandro}
+\index{Stumbo, Fabio}
+\begin{chunk}{axiom.bib}
+@article{Ardi09,
+ author = "Ardizzoni, Alessandro and Stumbo, Fabio",
+ title = {{Quadratic Lie Algebras}},
+ journal = "Commun. Algebra",
+ volume = "39",
+ number = "8",
+ pages = "2723-2751",
+ year = "2011",
+ link = "\url{https://arxiv.org/pdf/0906.4617.pdf}",
+ abstract =
+ "In this paper, the notion of universal enveloping algebra
+ introduced in [A. Ardizzoni, A First Sight Towards Primitively
+ Generated Connected Braided Bialgebras] is specialized to the case
+ of braided vector spaces whose Nichols algebra is quadratic as an
+ algebra. In this setting a classification of universal enveloping
+ algebras for braided vector spaces of dimension not greater than
+ 2 is handled. As an application, we investigate the structure of
+ primitively generated connected braided bialgebras whose braided
+ vector space of primitive elements forms a Nichols algebra which
+ is a quadratic algebra.",
+ paper = "Ardi09.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Aharonovich, I.}
+\index{Horwitz, L.P.}
+\begin{chunk}{axiom.bib}
+@article{Ahar12,
+ author = "Aharonovich, I. and Horwitz, L.P.",
+ title = {{Radiation-reaction in classical off-shell electrodynamics,
+ I: The above mass-shell case}},
+ journal = "J. Math. Phys.",
+ volume = "53",
+ number = "3",
+ year = "2012",
+ abstract =
+ "Off-shell electrodynamics based on a manifestly covariant
+ off-shell relativistic dynamics of Stueckelberg, Horwitz, and
+ Piron, is five-dimensional. In this paper, we study the problem of
+ radiation reaction of a particle in motion in this framework. In
+ particular, the case of the above-mass-shell is studied in detail,
+ where the renormalization of the Lorentz force leads to a system
+ of non-linear differential equations for 3 Lorentz scalars. The
+ system is then solved numerically, where it is shown that the
+ mass-shell deviation scalar either smoothly falls down to 0 (this
+ result provides a mechanism for the mass stability of the
+ off-shell theory), or strongly diverges under more extreme
+ conditions. In both cases, no runaway motion is
+ observed. Stability analysis indicates that the system seems to
+ have chaotic behavior. It is also shown that, although a motion
+ under which the mass-shell deviation $\epsilon$ is constant but
+ not-zero, is indeed possible, but, it is unstable, and eventually
+ it either decays to 0 or diverges.",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Ioakimidis, Nikolaos I.}
+\begin{chunk}{axiom.bib}
+@article{Ioak00,
+ author = "Ioakimidis, Nikolaos I.",
+ title = {{Derivation of Feasibility Conditions in Engineering
+ Problems under Parametric Inequality Constraints with
+ Classical Fourier Elimination}},
+ journal = "Int. J. Numerical Methods Eng.",
+ volume = "48",
+ number = "11",
+ pages = "1583-1599",
+ year = "2000",
+ abstract =
+ "Fourier (or Motzkin or even Fourier-Motzkin) elimination is the
+ classical and equally old analogue of Gaussian elimination for the
+ solution of linear equations in the case of linear
+ inequalities. Here this approach and two standard improvements are
+ applied to two engineering problems (involving numerical
+ integration in fracture mechanics as well as finite differences in
+ heat transfer in the parametric case) with linear inequality
+ constraints. The results obtained by a solver of systems of
+ inequalities concern the feasibility conditions (quantifier-free
+ formulae), so that the satisfaction of the original system of
+ linear inequality constraints can be possible. We use the computer
+ algebra system Maple V and a related elementary procedure for
+ Fourier elimination. The results constitute an extension of
+ already available applications of computer algebra software to the
+ classical approximate-numerical methods traditionally employed in
+ engineering, and are also related to computational elimination
+ techniques in computer algebra and applied logic.",
+ paper = "Ioak00.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Fournie, Michel}
+\begin{chunk}{axiom.bib}
+@article{Four99,
+ author = "Fournie, Michel",
+ title = {{High-order compact schemes: Application to bidimensional unsteady
+ diffusion-convection problems. II}},
+ journal = "C. R. Acad. Sci.",
+ volume = "328",
+ number = "6",
+ pages = "539-542",
+ year = "1999",
+ abstract =
+ "This Note generalizes the construction and the analysis of high
+ order compact difference schemes for unsteady 2D
+ diffusion-convection problems cf. Part I by A. Rigal. The accuracy
+ (of order 2 in time and 4 in space) and the stability analysis
+ yield different choices of the weighting parameters. This work
+ realized with symbolic computation systems allow to obtain
+ analytical results for a wide class of schemes and to increase the
+ stability domain in some cases.",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Lamagna, Edmund A.}
+\begin{chunk}{axiom.bib}
+@book{Lama19,
+ author = "Lamagna, Edmund A.",
+ title = {{Computer Algebra: Concepts and Techniques}},
+ publisher = "CRC Press",
+ year = "2019",
+ isbn = "978-1-138-09314-0",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Benker, Hans}
+\begin{chunk}{axiom.bib}
+@book{Benk99,
+ author = "Benker, Hans",
+ title = {{Practical Use of MATHCAD: Solving Mathematical Problems
+ with a Computer Algebra System}},
+ publisher = "Springer-Verlag",
+ year = "1999",
+ isbn = "978-1-85233-166-5",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Dooley, Samuel S.}
+\begin{chunk}{axiom.bib}
+@InProceedings{Dool02,
+ author = "Dooley, Samuel S.",
+ title = {{Editing mathematical content and presentation markup in
+ interactive mathematical documents}},
+ booktitle = "Proc. ISSAC 2002",
+ series = "ISSAC 02",
+ year = "2002",
+ publisher = "ACM Press",
+ pages = "55-62",
+ abstract =
+ "The IBM MathML Expression Editor is a two-dimensional
+ mathematical editor for expressions encoded using MathML content
+ and presentation elements. It allows a user to interact with the
+ visual presentation of an expression, while simultaneously
+ creating the underlying structure of the expression. This paper
+ describes the internal expression framework used by the editor to
+ represent the content and presentation structures, the layout
+ mechanisms used to transform content into presentation, the
+ structural navigation conventions used to select subexpressions,
+ the editing templates used to support visual input of MathML
+ content expressions, and the customization framework that allows
+ for a fully extensible set of content operators, including
+ complete support for MathML 2.0 content elements as well as
+ user-defined function symbols and operators."
+}
+
+\end{chunk}
+
+\index{Bradford, Russell}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Brad02a,
+ author = "Bradford, Russell and Davenport, James H.",
+ title = {{Towards Better Simplification of Elementary Functions}},
+ booktitle = "ISSAC 02",
+ publisher = "ACM",
+ year = "2002",
+ isbn = "1-58113-484-3",
+ abstract =
+ "We present an algorithm for simplifying a large class of
+ elementary functions in the presence of branch cuts. This
+ algorithm works by:
+ \begin{itemize}
+ \item verifying that the proposed simplification is correct as a
+ simplification of multi-valued functions
+ \item decomposing $\mathbb{C}$ (or $\mathbb{C}^n$ in the case of
+ multivariate simplifications) according to the branch cuts of the
+ relevant functions
+ \item checking that the proposed identity is valid on each
+ component of that decomposition
+ \end{itemize}
+
+ This process can be interfaced to an assume facility, and, if
+ required, can verify that simplifications are valid ``almost
+ everywhere''.",
+ paper = "Brad02a.pdf"
+}
+
+\end{chunk}
+
+\index{Zucker, Philip}
+\begin{chunk}{axiom.bib}
+@misc{Zuck19,
+ author = "Zucker, Philip",
+ title = {{Grobner Bases and Optics}},
+ link = "\url{http://www.philipzucker.com/grobner-bases-and-optics}",
+ year = "2019"
+}
+
+\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index eb2a1d8..8691d5d 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -6010,6 +6010,8 @@ books/bookvol15 The Axiom Sane Compiler

add names to credits
20191109.02.tpd.patch
books/bookvol10.1 add Groebner basis examples

+20191109.03.tpd.patch
+books/bookvolbib add references

--
1.9.1