-
Committer:
Tim Daly
-
Date:
2018-05-16 07:09:17 UTC
-
Revision ID:
git-v1:0a626d280d6a54ffcfa7c4871d618382227c92f0
books/bookvolbib add references
Goal: Proving Axiom Correct
\index{Walther, J.S.}
\begin{chunk}{axiom.bib}
@misc{Walt71,
author = "Walther, J.S.",
title = {{A Unified Algorithm for Elementary Functions}},
link = "\url{}",
year = "1971",
abstract =
"This paper describes a single unified algorithm for the calculation
of elementary functions including multipli- cation, division, sin,
cos, tan, arctan, sinh, cosh, tanh, arctanh, In, exp and square-root.
The basis for the algorithm is coordinate rotation in a linear,
circular, or hyperbolic coordinate system depending on which function
is to be calculated. The only operations re- quired are shifting,
adding, subtracting and the recall of prestored constants. The
limited domain of con- vergence of the algorithm is calculated,
leading to a discussion of the modifications required to extend the
domain for floating point calculations.
A hardware floating point processor using the algo- rithm was built at
Hewlett-Packard Laboratories. The block diagram of the processor, the
microprogram control used for the algorithm, and measures of actual
performance are shown.",
paper = "Walt71.pdf"
}
\end{chunk}
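The unified algorithm described above is the generalized CORDIC
iteration. As a rough illustration, a minimal sketch of the circular
rotation mode (computing sin and cos from shifts, adds, and prestored
arctangent constants) might look like the following Haskell; the names
and the Double-based arithmetic are illustrative assumptions, not
Walther's fixed-point hardware formulation.

\begin{verbatim}
-- Minimal CORDIC sketch (circular coordinates, rotation mode).
-- Double arithmetic for clarity; the paper uses fixed-point
-- shift-and-add hardware. Names are illustrative.
cordicSinCos :: Int -> Double -> (Double, Double)
cordicSinCos n theta = go 0 1 0 theta
  where
    -- gain K = prod 1/sqrt(1+2^(-2i)), divided out at the end
    k = product [1 / sqrt (1 + 2 ** (-2 * fromIntegral i))
                | i <- [0 .. n - 1]]
    go i x y z
      | i == n    = (y * k, x * k)            -- (sin theta, cos theta)
      | otherwise =
          let d  = if z < 0 then -1 else 1    -- rotation direction
              p  = 2 ** negate (fromIntegral i)  -- the "shift" 2^(-i)
              x' = x - d * y * p
              y' = y + d * x * p
              z' = z - d * atan p             -- prestored arctan constant
          in go (i + 1) x' y' z'
\end{verbatim}

For example, cordicSinCos 30 0.5 approximates (sin 0.5, cos 0.5); the
limited domain of convergence the abstract mentions shows up as failure
for angles beyond roughly 1.74 radians.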
\index{Friedman, Daniel P.}
\index{Wise, David S.}
\begin{chunk}{axiom.bib}
@techreport{Frie76,
author = "Friedman, Daniel P. and Wise, David S.",
title = {{CONS should not Evaluate its Arguments}},
institution = "Indiana University",
number = "TR44",
year = "1976",
abstract =
"The constructor function which allocates and fills records in
recursive, side-effect-free procedural languages is redefined to be a
non-strict (Vuillemin 1974) elementary operation. Instead of
evaluating its arguments, it builds suspensions of them which are not
coerced until the suspension is accessed by a strict elementary
function. The resulting evaluation procedures are strictly more
powerful than existing schemes for languages such as LISP. The main
results are that Landin's streams are subsumed into McCarthy's LISP
merely by the redefinition of elementary functions, that invocations of
LISP's evaluator can be minimized by redefining the elementary
functions without redefining the interpreter, and as a strong
conjecture, that redefining the elementary functions yields the least
fixed-point semantics for McCarthy's evaluation scheme. This new
insight into the role of construction functions will do much to ease
the interface between recursive programmers and iterative programmers,
as well as the interface between programmers and data structure
designers.",
paper = "Frie16.pdf",
keywords = "printed"
}
\end{chunk}
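Friedman and Wise's redefinition is easy to mimic in a modern
functional language: cons builds suspensions of its arguments, and only
strict elementary functions such as car and cdr coerce them. A small
sketch with explicit thunks (the names Susp, delay, and force are
illustrative, not the paper's notation; in Haskell the thunks are
redundant, since the language is non-strict already):

\begin{verbatim}
-- Suspensions made explicit: cons does not evaluate its arguments.
newtype Susp a = Susp (() -> a)     -- a delayed computation
data List a = Nil | Cons (Susp a) (Susp (List a))

delay :: a -> Susp a
delay x = Susp (\() -> x)

force :: Susp a -> a
force (Susp f) = f ()

-- Strict elementary functions coerce suspensions on access.
car :: List a -> a
car (Cons h _) = force h

cdr :: List a -> List a
cdr (Cons _ t) = force t

-- Landin-style streams fall out for free: all naturals, built lazily.
from :: Integer -> List Integer
from n = Cons (delay n) (delay (from (n + 1)))
\end{verbatim}

Here car (cdr (cdr (from 0))) yields 2 after coercing only three
suspensions, which is the sense in which streams are subsumed into LISP
merely by redefining the elementary functions.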
\index{Sarma, Gopal}
\index{Hay, Nick J.}
\begin{chunk}{axiom.bib}
@article{Sarm17,
author = "Sarma, Gopal and Hay, Nick J.",
title = {{Robust Computer Algebra, Theorem Proving, and Oracle AI}},
journal = "Informatica",
volume = "41",
number = "3",
link = "\url{https://arxiv.org/pdf/1708.02553.pdf}",
year = "2017",
abstract =
"In the context of superintelligent AI systems, the term 'oracle' has
two meanings. One refers to modular systems queried for
domain-specific tasks. Another usage, referring to a class of systems
which may be useful for addressing the value alignment and AI control
problems, is a superintelligent AI system that only answers questions.
The aim of this manuscript is to survey contemporary research problems
related to oracles which align with long-term research goals of AI
safety. We examine existing question answering systems and argue that
their high degree of architectural heterogeneity makes them poor
candidates for rigorous analysis as oracles. On the other hand, we
identify computer algebra systems (CASs) as being primitive examples
of domain-specific oracles for mathematics and argue that efforts to
integrate computer algebra systems with theorem provers, systems which
have largely been developed independent of one another, provide a
concrete set of problems related to the notion of provable safety that
has emerged in the AI safety community. We review approaches to
interfacing CASs with theorem provers, describe well-defined
architectural deficiencies that have been identified with CASs, and
suggest possible lines of research and practical software projects for
scientists interested in AI safety.",
paper = "Sarm17.pdf",
keywords = "printed, axiomref"
}
\end{chunk}
\index{Keller, Chantal}
\begin{chunk}{axiom.bib}
@phdthesis{Kell13,
author = "Keller, C.",
title = {{A Matter of Trust: Skeptical Commuication Between Coq and
External Provers}},
school = "Ecole Polytechnique",
year = "2013",
link =
"\url{https://www.lri.fr/~keller/Documents-recherche/Publications/thesis13.pdf}",
abstract =
"This thesis studies the cooperation between the Coq proof assistant
and external provers through proof witnesses. We concentrate on two
different kinds of provers that can return certificates: first, answers
coming from SAT and SMT solvers can be checked in Coq to increase both
the confidence in these solvers and Coq's automation; second,
theorems established in interactive provers based on Higher-Order
Logic can be exported to Coq and checked again, in order to offer the
possibility to produce formal developments which mix these two
different logical paradigms. This resulted in two software tools: SMTCoq, a
bi-directional cooperation between Coq and SAT/SMT solvers, and
HOLLIGHTCOQ, a tool importing HOL Light theorems into Coq.
For both tools, we took great care to define a modular and efficient
architecture, based on three clearly separated ingredients: an
embedding of the formalism of the external tool inside Coq which is
carefully translated into Coq terms, a certified checker to establish
the proofs using the certificates, and an OCaml preprocessor to transform
proof witnesses coming from different provers into a generic
certificate. This division allows that a change in the format of proof
witnesses only affects the preprocessor, but no proved Coq code.
Another fundamental component for efficiency and modularity is
computational reflection, which exploits the computational power of
Coq to establish generic and small proofs based on the certificates.",
paper = "Kell13.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm07,
author = "Farmer, William M.",
title = {{Biform Theories in Chiron}},
journal = "LNCS",
volume = "4573",
pages = "66-79",
year = "2007",
abstract =
"An axiomatic theory represents mathematical knowledge declaratively
as a set of axioms. An algorithmic theory represents mathematical
knowledge procedurally as a set of algorithms. A biform theory is
simultaneously an axiomatic theory and an algorithmic theory. It
represents mathematical knowledge both declaratively and procedurally.
Since the algorithms of algorithmic theories manipulate the syntax of
expressions, biform theories --- as well as algorithmic theories --- are
difficult to formalize in a traditional logic without the means to
reason about syntax. Chiron is a derivative of
von-Neumann-Bernays-G\"odel (NBG) set theory that is intended to be a
practical, general-purpose logic for mechanizing mathematics. It
includes elements of type theory, a scheme for handling undefinedness,
and a facility for reasoning about the syntax of expressions. It is an
exceptionally well-suited logic for formalizing biform theories. This
paper defines the notion of a biform theory, gives an overview of
Chiron, and illustrates how biform theories can be formalized in Chiron.",
paper = "Farm07.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@inproceedings{Care07,
author = "Carette, Jacques and Farmer, William M. and Sorge, Volker",
title = {{A Rational Reconstruction of a System for Experimental
Mathematics}},
booktitle = "14th Workshop on Automated Reasoning",
publisher = "unknown",
year = "2007",
abstract =
"In previous papers we described the implementation of a system
which combines mathematical object generation, transformation and
filtering, conjecture generation, proving and disproving for
mathematical discovery in non-associative algebra. While the system
has generated novel, fully verified theorems, their construction
involved a lot of ad hoc communication between disparate systems. In
this paper we carefully reconstruct a specification of a sub-process
of the original system in a framework for trustable communication
between mathematics systems put forth by us. It employs the concept
of biform theories that enables the combined formalisation of the
axiomatic and algorithmic theories behind the generation
process. This allows us to gain a much better understanding of the
original system, and exposes clear generalisation opportunities.",
paper = "Care07.pdf",
keywords = "printed"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@inproceedings{Farm00,
author = "Farmer, William M.",
title = {{A Proposal for the Development of an Interactive
Mathematics Laboratory for Mathematics Education}},
booktitle = "Workshop on Deduction Systems for Mathematics Education",
pages = "20-25",
year = "2000",
paper = "Farm00.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm04,
author = "Farmer, William M.",
title = {{MKM: A New Interdisciplinary Field of Research}},
journal = "SIGSAM",
volume = "38",
pages = "47-52",
year = "2004",
abstract =
"Mathematical Knowledge Management (MKM) is a new interdisciplinary
field of research in the intersection of mathematics, computer
science, library science, and scientific publishing. Its objective is
to develop new and better ways of managing mathematical knowledge
using sophisticated software tools. Its grand challenge is to create
a universal digital mathematics library (UDML), accessible via the
World-Wide Web, that contains essentially all mathematical knowledge
(intended for the public). The challenges facing MKM are daunting,
but a UDML, even just partially constructed, would transform how
mathematics is learned and practiced.",
paper = "Farm04.pdf",
keywords = "printed, axiomref"
}
\end{chunk}
\index{Farmer, William M.}
\index{Mohrenschildt, Martin v.}
\begin{chunk}{axiom.bib}
@inproceedings{Farm00a,
author = "Farmer, William M. and Mohrenschildt, Martin v.",
title = {{Transformers for Symbolic Computation and Formal Deduction}},
booktitle = "CADE-17",
pages = "36-45",
year = "2000",
abstract =
"A transformer is a function that maps expressions to expressions.
Many transformational operators --- such as expression evaluators and
simplifiers, rewrite rules, rules of inference, and decision
procedures --- can be represented by transformers. Computations and
deductions can be formed by applying sound transformers in
sequence. This paper introduces machinery for defining sound
transformers in the context of an axiomatic theory in a formal
logic. The paper is intended to be a first step in a development of an
integrated framework for symbolic computation and formal deduction.",
paper = "Farm00a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Farmer, William M.}
\index{Larjani, Pouya}
\begin{chunk}{axiom.bib}
@misc{Farm14,
author = "Farmer, William M. and Larjani, Pouya",
title = {{Frameworks for Reasoning about Syntax that Utilize
Quotation and Evaluation}},
links = "\url{http://imps.mcmaster.ca/doc/syntax.pdf}",
year = "2014",
abstract =
"It is often useful, if not necessary, to reason about the syntactic
structure of an expression in an interpreted language (i.e., a
language with a semantics). This paper introduces a mathematical
structure called a syntax framework that is intended to be an abstract
model of a system for reasoning about the syntax of an interpreted
language. Like many concrete systems for reasoning about syntax, a
syntax framework contains a mapping of expressions in the
interpreted language to syntactic values that represent the syntactic
structures of the expressions; a language for reasoning about the
syntactic values; a mechanism called quotation to refer to the
syntactic value of an expression; and a mechanism called evaluation to
refer to the value of the expression represented by a syntactic value.
A syntax framework provides a basis for integrating reasoning about
the syntax of the expressions with reasoning about what the
expressions mean. The notion of a syntax framework is used to discuss
how quotation and evaluation can be built into a language and to
define what quasiquotation is. Several examples of syntax frameworks
are presented.",
paper = "Farm14.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@misc{Care11c,
author = "Carette, Jacques and Farmer, William M. and O'Connor, Russell",
title = {{MathScheme: Project Description}},
year = "2011",
link = "\url{http://imps.mcmaster.ca/doc/cicm-2011-proj-desc.pdf}",
paper = "Care11c.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Care08,
author = "Carette, Jacques and Farmer, William M.",
title = {{High-Level Theories}},
journal = "LNCS",
volume = "5144",
pages = "232-245"
year = "2008",
abstract =
"We introduce high-level theories in analogy with high-level
programming languages. The basic point is that even though one can
define many theories via simple, low-level axiomatizations, that is
neither an effective nor a comfortable way to work with such theories.
We present an approach which is closer to what users of mathematics
employ, while still being based on formal structures.",
paper = "Care08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@article{Care12,
author = "Carette, Jacques and O'Connor, Russell",
title = {{Theory Presentation Combinators}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"We motivate and give semantics to theory presentation combinators
as the foundational building blocks for a scalable library of
theories. The key observation is that the category of contexts and
fibered categories are the ideal theoretical tools for this
purpose.",
paper = "Care12.pdf",
keywords = "printed"
}
\end{chunk}
\index{Musser, David R.}
\index{Kapur, Deepak}
\begin{chunk}{axiom.bib}
@article{Muss82,
author = "Musser, David R. and Kapur, Deepak",
title = {{Rewrite Rule Theory and Abstract Data Type Analysis}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "77-90",
year = "1982",
paper = "Muss82.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Lazard, Daniel}
\begin{chunk}{axiom.bib}
@article{Laza82,
author = "Lazard, Daniel",
title = {{Commutative Algebra and Computer Algebra}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "40-48",
year = "1982",
abstract =
"It is well known that computer algebra deals with commutative rings
such as the integers, the rationals, the modular integers and
polynomials over such rings.
On the other hand, commutative algebra is that part of mathematics
which studies commutative rings.
Our aim is to make this link more precise. It will appear that most of
the constructions which appear in classical commutative algebra can be
done explicitly in finite time. However, there are
exceptions. Moreover, most of the known algorithms are intractable:
the problems are generally exponential by themselves, but many
algorithms are over-over-exponential. Much work is needed to find
better methods, and to implement them, with the final hope of opening
a computation domain larger than the one which is possible by hand.
To illustrate the limits and the possibilities of computerizing
commutative algebra, we describe an algorithm which tests the
primality of a polynomial ideal and we give an example of a single
algebraic extension of fields $K\subset L$ such that there exists an
algorithm of factorization for the polynomials over $K$, but not for
the polynomials over $L$.",
paper = "Laza82.pdf"
}
\end{chunk}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Hear82,
author = "Hearn, Anthony C.",
title = {{REDUCE - A Case Study in Algebra System Development}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "263-272",
year = "1982",
paper = "Hear82.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Padget, J.A.}
\begin{chunk}{axiom.bib}
@article{Padg82,
author = "Padget, J.A.",
title = {{Escaping from Intermediate Expression Swell: A Continuing Saga}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "256-262",
year = "1982",
abstract =
"The notion of a closed continuation is introduced, and a functional
dialect of LISP based on this concept is presented. Coroutines are
shown to be merely a more general form of function call and return.
Potential applications for coroutines in algebraic simplification and
expression evaluation are suggested, and a specific example of their
use is given in a novel attack on the phenomenon of intermediate
expression swell in polynomial multiplication.",
paper = "Padg82.pdf"
}
\end{chunk}
\index{Norman, Arthur}
\begin{chunk}{axiom.bib}
@article{Norm82,
author = "Norman, Arthur",
title = {{The Development of a Vector-Based Algebra System}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "237-248",
year = "1982",
paper = "Norm82.pdf"
}
\end{chunk}
\index{Bordoni, L.}
\index{Colagrossi, A.}
\index{Miola, A.}
\begin{chunk}{axiom.bib}
@article{Bord82,
author = "Bordoni, L. and Colagrossi, A. and Miola, A.",
title = {{Linear Algebraic Approach for Computing Polynomial Resultant}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "231-236",
year = "1982",
abstract =
"This paper presents a linear algebraic method for computing the
resultant of two polynomials. This method is based on the
computation of a determinant of order equal to the minimum of the
degrees of the two given polynomials. This method turns out to be
preferable to other known linear algebraic methods both from a
computational point of view and for its total generality with respect to
the class of the given polynomials. Some relationships of this
method with the polynomial pseudo-remainder operation are discussed.",
paper = "Bord82.pdf"
}
\end{chunk}
\index{Arnon, Dennis S.}
\index{McCallum, Scott}
\begin{chunk}{axiom.bib}
@article{Arno82a,
author = "Arnon, Dennis S. and McCallum, Scott",
title = {{Cylindrical Algebraic Decomposition by Quantifier Elimination}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "215-222",
year = "1982",
abstract =
"Cylindrical algebraic decompositions were introduced as a major
component of a new quantifier elimination algorithm for elementary
algebra and geometry (G. Collins, ~973). In the present paper we turn
the tables and show that one can use quantifier elimination for ele-
mentary algebra and geometry to obtain a new version of the
cylindrical algebraic decomposi- tion algorithm. A key part of our
result is a theorem, of interest in its own right, that relates the
multiplicities of the roots of a polynomial to their continuity.",
paper = "Arno82a.pdf"
}
\end{chunk}
\index{Collins, George E.}
\begin{chunk}{axiom.bib}
@article{Coll82a,
author = "Collins, George E.",
title = {{Factorization in Cylindrical Algebraic Decomposition - Abstract}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "212-214",
year = "1982",
paper = "Coll82a.pdf"
}
\end{chunk}
\index{Lazard, Daniel}
\begin{chunk}{axiom.bib}
@article{Laza82a,
author = "Lazard, Daniel",
title = {{On Polynomial Factorization}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "144-157",
year = "1982",
abstract =
"We present new algorithms for factoring univariate polynomials
over finite fields. They are variants of the algorithms of Camion
and Cantor-Zassenhaus and differ from them essentially by
computing the primitive idempotents of the algebra $K[X]/f$ before
factoring $f$.
These algorithms are probabilistic in the following sense. The
time of computation depends on random choices, but the validity of
the result does not depend on them. So, worst case complexity,
being infinite, is meaningless and we compute average
complexity. It appears that our algorithms and the Cantor-Zassenhaus
algorithm have the same asymptotic complexity and they are the best
algorithms actually known; with elementary multiplication and GCD
computation, the Cantor-Zassenhaus algorithm is always better than
ours; with fast multiplication and GCD, it seems that ours is
better, but this fact is not yet proven.
Finally, for factoring polynomials over the integers, it seems
that the best strategy consists in choosing prime moduli as big as
possible in the range where the time of the multiplication does
not depend on the size of the factors (machine word size). An
accurate computation of the involved constants would be needed for
proving this estimation.",
paper = "Laza82a.pdf"
}
\end{chunk}
\index{Strachey, Christopher}
\index{Wadsworth, Christopher}
\begin{chunk}{axiom.bib}
@article{Stra00a,
author = "Strachey, Christopher and Wadsworth, Christopher",
title = {{Continuations: A Mathematical Semantics for Handling Full Jumps}},
journal = "Higher-Order and Symbolic Computation",
volume = "13",
pages = "135-152",
year = "2000",
abstract =
"This paper describes a method of giving the mathematical
semantics of programming languages which include the most general
form of jumps.",
paper = "Stra00a.pdf",
keywords = "printed"
}
\end{chunk}
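The paper's technique renders naturally in continuation-passing style:
the meaning of each construct takes the rest of the program as an
explicit function, and a full jump simply invokes a different
continuation. A minimal sketch in Haskell (the names and the
error-continuation design are illustrative, not the paper's semantic
equations):

\begin{verbatim}
-- Continuation-passing style: a computation of type a takes the rest
-- of the program (a -> r) and produces the final answer r.
type Cont r a = (a -> r) -> r

-- Normal sequencing threads the continuation through.
add :: Int -> Int -> Cont r Int
add x y k = k (x + y)

-- A full jump: discard the current continuation and invoke another.
divC :: Int -> Int -> (String -> r) -> Cont r Int
divC _ 0 err _ = err "division by zero"   -- jump past everything pending
divC x y _   k = k (x `div` y)

example :: Either String Int
example = divC 10 0 Left (\q -> add q 1 Right)
-- ==> Left "division by zero"; the pending add is never run
\end{verbatim}

The point of the semantics is exactly this: because the rest of the
computation is a value, jumping out of it is just not applying it.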
\index{Kaes, Stefan}
\begin{chunk}{axiom.bib}
@article{Kaes88,
author = "Kaes, Stefan",
title = {{Parametric Overloading in Polymorphic Programming Languages}},
journal = "LNCS",
volume = "300",
pages = "131-144",
year = "1988",
abstract =
"The introduction of unrestricted overloading in languagues with type
systems based on implicit parametric potymorphism generally destroys
the principal type property: namely that the type of every expression
can uniformly be represented by a single type expression over some set
of type variables. As a consequence, type inference in the presence
of unrestricted overloading can become a NP-complete problem. In
this paper we define the concept of parametric overloading as a
restricted form of overloading which is easily combined with
parametric polymorphism. Parametric overloading preserves the
principal type property, thereby allowing the design of efficient type
inference algorithms. We present sound type deduction systems, both
for predefined and programmer defined overloading. Finally we state
that parametric overloading can be resolved either statically, at
compile time, or dynamically, during program execution.",
paper = "Kaes88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kaes, Stefan}
\begin{chunk}{axiom.bib}
@article{Kaes92,
author = "Kaes, Stefan",
title = {{Type Inference in the Presence of Overloading, Subtyping and
Recursive Types}},
journal = "ACM Lisp Pointers",
volume = "5",
number = "1",
year = "1992",
pages = "193-204",
abstract =
"We present a unified approach to type inference in the presence of
overloading and coercions based on the concept of {\sl constrained
types}. We define a generic inference system, show that subtyping and
overloading can be treated as a special instance of this system and
develop a simple algorithm to compute principal types. We prove the
decidability of type inference for the class of {\sl decomposable
predicates} and develop a canonical representation for principal types
based on {\sl most accurate simplifications} of constraint
sets. Finally, we investigate the extension of our techniques to
{\sl recursive types}.",
paper = "Kaes92.pdf",
keywords = "printed"
}
\end{chunk}
\index{Hall, Cordelia V.}
\index{Hammond, Kevin}
\index{Jones, Simon L. Peyton}
\index{Wadler, Philip L.}
\begin{chunk}{axiom.bib}
@article{Hall96,
author = "Hall, Cordelia V. and Hammond, Kevin and Jones, Simon L. Peyton
and Wadler, Philip L.",
title = {{Type Classes in Haskell}},
journal = "Trans. on Programming Langues and Systems",
volume = "18",
number = "2",
pages = "109-138",
year = "1996",
abstract =
"This article de nes a set of type inference rules for resolving
overloading introduced by type classes, as used in the functional
programming language Haskell. Programs including type classes are
transformed into ones which may be typed by standard Hindley-Milner
inference rules. In contrast to other work on type classes, the rules
presented here relate directly to Haskell programs. An innovative
aspect of this work is the use of second-order lambda calculus to
record type information in the transformed program.",
paper = "Hall96.pdf",
keywords = "printed"
}
\end{chunk}
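The transformation the article defines is commonly explained as
dictionary passing: each class becomes a record of methods and each
constraint becomes an extra argument, leaving an ordinary
Hindley-Milner program. A hedged sketch of that idea (the names EqDict,
eqInt, and member are illustrative):

\begin{verbatim}
-- class Eq a where eq :: a -> a -> Bool, as a record of methods.
newtype EqDict a = EqDict { eq :: a -> a -> Bool }

-- instance Eq Int: a plain value.
eqInt :: EqDict Int
eqInt = EqDict (==)

-- instance Eq a => Eq [a]: an instance with a context becomes a
-- function from dictionaries to dictionaries.
eqList :: EqDict a -> EqDict [a]
eqList d = EqDict go
  where
    go []     []     = True
    go (x:xs) (y:ys) = eq d x y && go xs ys
    go _      _      = False

-- member :: Eq a => a -> [a] -> Bool, after the translation:
member :: EqDict a -> a -> [a] -> Bool
member d x = any (eq d x)
\end{verbatim}

Type inference then amounts to inserting eqInt, eqList eqInt, and so on
at each use site; the article's second-order lambda calculus records
exactly this evidence in the transformed program.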
\index{Dreyer, Derek}
\index{Harper, Robert}
\index{Chakravarty, Manuel M.T.}
\index{Keller, Gabriele}
\begin{chunk}{axiom.bib}
@inproceedings{Drey07,
author = "Dreyer, Derek and Harper, Robert and Chakravarty, Manuel M.T.
and Keller, Gabriele",
title = {{Modular Type Classes}},
booktitle = "Proc. POPL'07",
pages = "63-70",
year = "2007",
abstract =
"ML modules and Haskell type classes have proven to be highly
effective tools for program structuring. Modules emphasize explicit
configuration of program components and the use of data abstraction.
Type classes emphasize implicit program construction and ad hoc
polymorphism. In this paper, we show how the implicitly-typed
style of type class programming may be supported within the framework
of an explicitly-typed module language by viewing type classes as a
particular mode of use of modules. This view offers a harmonious
integration of modules and type classes, where type class features,
such as class hierarchies and associated types, arise naturally as
uses of existing module-language constructs, such as module
hierarchies and type components. In addition, programmers have
explicit control over which type class instances are available for
use by type inference in a given scope. We formalize our approach as
a Harper-Stone-style elaboration relation, and provide a sound type
inference algorithm as a guide to implementation.",
paper = "Drey07.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wehr, Stefan}
\index{Chakravarty, Manuel M.T.}
\begin{chunk}{axiom.bib}
@article{Wehr08,
author = "Wehr, Stefan and Chakravarty, Maneul M.T.",
title = {{ML Modules and Haskell Type Classes: A Constructive
Comparison}},
journal = "LNCS",
volume = "5356",
pages = "188-204",
year = "2008",
abstract =
"Researchers repeatedly observed that the module system of ML and the
type class mechanism of Haskell are related. So far, this relationship
has received little formal investigation. The work at hand fills this
gap: It introduces type-preserving translations from modules to type
classes and vice versa, which enable a thorough comparison of the two
concepts.",
paper = "Wehr08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Stuckey, Peter J.}
\index{Sulzmann, Martin}
\begin{chunk}{axiom.bib}
@article{Stuc05,
author = "Stuckey, Peter J. and Sulzmann, Martin",
title = {{A Theory of Overloading}},
journal = "ACM Trans. on Programming Languages and Systems",
volume = "27",
number = "6",
pages = "1-54",
year = "2005",
abstract =
"We present a novel approach to allow for overloading of
identifiers in the spirit of type classes. Our approach relies on
the combination of the HM(X) type system framework with Constraint
Handling Rules (CHRs). CHRs are a declarative language for writing
incremental constraint solvers, that provide our scheme with a
form of programmable type language. CHRs allow us to precisely
describe the relationships among overloaded identifiers. Under
some sufficient conditions on the CHRs we achieve decidable type
inference and the semantic meaning of programs is unambiguous. Our
approach provides a common formal basis for many type class
extensions such as multi-parameter type classes and functional
dependencies.",
paper = "Stuc05.pdf",
keywords = "printed"
}
\end{chunk}
\index{Reynolds, J.C.}
\begin{chunk}{axiom.bib}
@article{Reyn85,
author = "Reynolds, J.C.",
title = {{Three Approaches to Type Structure}},
journal = "LNCS",
volume = "185",
year = "1985",
abstract =
"We examine three disparate views of the type structure of
programming languages: Milner's type deduction system and polymorphic
let construct, the theory of subtypes and generic operators, and
the polymorphic or second-order typed lambda calculus. These
approaches are illustrated with a functional language including
product, sum and list constructors. The syntactic behavior of types
is formalized with type inference rules, but their semantics is
treated intuitively.",
paper = "Reyn85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Balsters, Herman}
\index{Fokkinga, Maarten M.}
\begin{chunk}{axiom.bib}
@article{Bals91,
author = "Balsters, Herman and Fokkinga, Maarten M.",
title = {{Subtyping can have a simple semantics}},
journal = "Theoretical Computer Science",
volume = "87",
pages = "81-96",
year = "1991",
abstract =
"Consider a first order typed language, with semantics
$\llbracket~\rrbracket$ for expressions and types. Adding
subtyping means that a partial order $\le$ on types is defined
and that the typing rules are extended to the effect that
expression $e$ has type $\tau$ whenever $e$ has type $\sigma$ and
$\sigma \le \tau$. We show how to adapt the semantics
$\llbracket~\rrbracket$ in a {\sl simple set theoretic way},
obtaining a semantics $\llbracket~\rrbracket$ that satisfies, in
addition to some obvious requirements, also the property
$\llbracket\sigma\rrbracket \subseteq \llbracket\tau\rrbracket$,
whenever $\sigma \le \tau$.",
paper = "Bals91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Card84,
author = "Cardelli, Luca",
title = {{A Semantics of Multiple Inheritance}},
journal = "LNCS",
volume = "173",
year = "1984",
paper = "Card84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Mosses, Peter}
\begin{chunk}{axiom.bib}
@article{Moss84,
author = "Mosses, Peter",
title = {{A Basic Abstract Semantic Algebra}},
journal = "LNCS",
volume = "173",
year = "1984",
abstract =
"It seems that there are some pragmatic advantages in using Abstract
Semantic Algebras (ASAs) instead of $\lambda$-notation in denotational
semantics. The values of ASAs correspond to 'actions' (or
'processes'), and the operators correspond to primitive ways of
combining actions. There are simple ASAs for the various independent
'facets' of actions: a functional ASA for data-flow, an imperative ASA
for assignments, a declarative ASA for bindings, etc. The aim is to
obtain general ASAs by systematic combination of these simple ASAs.
Here we specify a basic ASA that captures the common features of the
functional, imperative and declarative ASAs -- and highlights their
differences. We discuss the correctness of ASA specifications, and
sketch the proof of the consistency and (limiting) completeness of the
functional ASA, relative to a simple model.
Some familiarity with denotational semantics and algebraic
specifications is assumed.",
paper = "Moss84.pdf"
}
\end{chunk}
\index{Gross, Jason}
\index{Chlipala, Adam}
\begin{chunk}{axiom.bib}
@misc{Gros15,
author = "Gross, Jason and Chlipala, Adam",
title = {{Parsing Parses}},
link = "\url{https://people.csail.mit.edu/jgross/personal-website/papers/2015-parsing-parse-trees.pdf}",
year = "2015",
abstract =
"We present a functional parser for arbitrary context-free grammars,
together with soundness and completeness proofs, all inside Coq. By
exposing the parser in the right way with parametric polymorphism and
dependent types, we are able to use the parser to prove its own
soundness, and, with a little help from relational parametricity,
prove its own completeness, too. Of particular interest is one strange
instantiation of the type and value parameters: by parsing parse trees
instead of strings, we convince the parser to generate its own
completeness proof. We conclude with highlights of our experiences
iterating through several versions of the Coq development, and some
general lessons about dependently typed programming.",
paper = "Gros15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wehr, Stefan}
\begin{chunk}{axiom.bib}
@phdthesis{Wehr05,
author = "Wehr, Stefan",
title = {{ML Modules and Haskell Type Classes: A Constructive
Comparison}},
school = "Albert Ludwigs Universitat",
year = "2005",
abstract =
"Researchers repeatedly observed that the module system of ML and the
type class mechanism of Haskell are related. So far, this relationship
has received little formal investigation. The work at hand fills this
gap: It introduces type-preserving translations from modules to type
classes and vice versa, which enable a thorough comparison of the two
concepts.
The source language of the first translation is a subset of
Standard ML. The target language is Haskell with common extensions
and one new feature, which was developed as part of this work. The
second translation maps a subset of Haskell 98 to ML, with
well-established extensions. I prove that the translations
preserve type correctness and provide implementations for both.
Building on the insights obtained from the translations, I present
a thorough comparison between ML modules and Haskell type
classes. Moreover, I evaluate to what extent the techniques used
in the translations can be exploited for modular programming in
Haskell and for programming with ad-hoc polymorphism in ML.",
paper = "Wehr05.pdf"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Crary, Karl}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@article{Drey03,
author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
title = {{A Type System for Higher-Order Modules}},
journal = "ACM SIGPLAN Notices",
volume = "38",
number = "1",
year = "2003",
link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms.pdf}",
abstract =
"We present a type theory for higher-order modules that accounts for
many central issues in module system design, including translucency,
applicativity, generativity, and modules as first-class values.
Our type system harmonizes design elements from previous work,
resulting in a simple, economical account of modular programming. The
main unifying principle is the treatment of abstraction mechanisms
as computational effects. Our language is the first to provide a
complete and practical formalization of all of these critical issues
in module system design.",
paper = "Drey03.pdf",
keywords = "printed"
}
\end{chunk}
\index{Crary, Karl}
\index{Harper, Robert}
\index{Puri, Sidd}
\begin{chunk}{axiom.bib}
@inproceedings{Crar99,
author = "Crary, Karl and Harper, Robert and Puri, Sidd",
title = {{What is a Recursive Module?}},
booktitle = "Conf. on Programming Language Design and Implementation",
year = "1999",
link = "\url{https://www.cs.cmu.edu/~crary/papers/1999/recmod/recmod.dvi}",
abstract =
"A hierarchical module system is an effective tool for structuring
large programs. Strictly hierarchical module systems impose an
acyclic ordering on import dependencies among program units. This
can impede modular programming by forcing mutually-dependent
components to be consolidated into a single module. Recently there
have been several proposals for module systems that admit cyclic
dependencies, but it is not clear how these proposals relate to
one another, nor how one might integrate them into an expressive
module system such as that of ML.
To address this question we provide a type-theoretic analysis of
the notion of a recursive module in the context of a
``phase-distinction'' formalism for higher-order module
systems. We extend this calculus with a recursive module mechanism
and a new form of signature, called a {\sl recursively dependent
signature}, to support the definition of recursive modules. These
extensions are justified by an interpretation in terms of more
primitive language constructs. This interpretation may also serve
as a guide for implementation.",
paper = "Crar99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Crary, Karl}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@techreport{Drey02,
author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
title = {{A Type System for Higher-Order Modules (Expanded Version)}},
type = "technical report",
institution = "Carnegie Mellon University",
number = "CMU-CS-02-122R",
year = "2002",
link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms-tr.pdf}",
abstract =
"We present a type theory for higher-order modules that accounts for
many central issues in module system design, including translucency,
applicativity, generativity, and modules as first-class values.
Our type system harmonizes design elements from previous work,
resulting in a simple, economical account of modular programming. The
main unifying principle is the treatment of abstraction mechanisms
as computational effects. Our language is the first to provide a
complete and practical formalization of all of these critical issues
in module system design.",
paper = "Drey02.pdf"
}
\end{chunk}
\index{Crary, Karl}
\begin{chunk}{axiom.bib}
@techreport{Crar02,
author = "Crary, Karl",
title = {{Toward a Foundational Typed Assembly Language}},
institution = "Carnegie Mellon University",
number = "CMU-CS-02-196",
year = "2002,
link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/talt/talt-tr.pdf}",
abstract =
"We present the design of a typed assembly language called TALT that
supports heterogeneous tuples, disjoint sums, arrays, and a general
account of addressing modes. TALT also implements the von Neumann
model in which programs are stored in memory, and supports relative
addressing. Type safety for execution and for garbage collection are
shown by machine-checkable proofs. TALT is the first formalized typed
assembly language to provide any of these features.",
paper = "Crar02.pdf"
}
\end{chunk}
\index{Mili, Ali}
\index{Aharon, Shir}
\index{Nadkarni, Chaitanya}
\begin{chunk}{axiom.bib}
@article{Mili09,
author = "Mili, Ali and Aharon, Shir and Nadkarni, Chaitanya",
title = {{Mathematics for Reasoning about Loop Functions}},
journal = "Science of Computer Programming",
volume = "79",
year = "2009",
pages = "989-1020",
abstract =
"The criticality of modern software applications, the pervasiveness of
malicious code concerns, the emergence of third-party software
development, and the preponderance of program inspection as a quality
assurance method all place a great premium on the ability to analyze
programs and derive their function in all circumstances of use and all
its functional detail. For C-like programming languages, one of the
most challenging tasks in this endeavor is the derivation of loop
functions. In this paper, we outline the premises of our approach to
this problem, present some mathematical results, and discuss how these
results can be used as a basis for building an automated tool that
derives the function of while loops under some conditions.",
paper = "Mili09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Piroi, Florina}
\index{Buchberger, Bruno}
\index{Rosenkranz, Camelia}
\begin{chunk}{axiom.bib}
@misc{Piro08,
author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
year = "2008",
link = "\urlhttp://www.risc.jku.at/publications/download/risc_3442/Math-Agents-for-SFB-2008-03-10-12h00.pdf{}",
abstract =
"This report reviews the literature relevant for the research project
'Math-Agents: Mathematical Journals as Reasoning Agents' proposed by
Bruno Buchberger as a technology transfer project based on the results
of the SFB Project 'Scientific Computing', in particular the project
SFB 1302, 'Theorema'. The project aims at computer-supporting the
refereeing process of mathematical journals by tools that are mainly
based on automated reasoning and also at building up the knowledge
archived in mathematical journals in such a way that they can act as
interactive and active reasoning agents later on. In this report,
we review current mathematical software systems with a focus on the
availability of tools that can contribute to the goals of the
Math-Agents project.",
paper = "Piro08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sokolowski, Stefan}
\begin{chunk}{axiom.bib}
@article{Soko87,
author = "Sokolowski, Stefan",
title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
journal = "Trans. on Programming Languages and Systems",
volume = "9",
number = "1",
pages = "100-120",
year = "1987",
abstract =
"This paper presents a natural deduction proof of Hoare's logic
carried out by the Edinburgh LCF theorem prover. The emphasis is
on the way Hoare's theory is presented to the LCF, which looks
very much like an exposition of syntax and semantics to human
readers; and on the programmable heuristics (tactics). We also
discuss some problems and possible improvements to the LCF.",
paper = "Soko87.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl95,
author = "Wadler, Philip",
title = {{Monads for Functional Programming}},
journal = "LNCS",
volume = "925",
abstract =
"The use of monads to structure functional programs is
described. Monads provide a convenient framework for simulating
effects found in other languages, such as global state, exception
handling, output, or non-determinism. Three case studies are
looked at in detail: how monads ease the modification of a simple
evaluator; how monads act as the basis of a datatype of arrays
subject to in-place update; and how monads can be used to build
parsers.",
paper = "Wadl95.pdf",
keywords = "printed"
}
\end{chunk}
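Wadler's first case study, easing the modification of a simple
evaluator, can be suggested in a few lines: write the evaluator against
a monad, and the effect is chosen by the monad alone. A minimal version
specialized to the exception monad Either String (the Term type and
error text are illustrative):

\begin{verbatim}
-- A monadic evaluator in the style of the paper's first case study,
-- here instantiated at the exception monad Either String.
data Term = Con Int | Div Term Term

eval :: Term -> Either String Int
eval (Con n)   = Right n
eval (Div t u) = do
  a <- eval t                    -- sequencing via the monad's bind
  b <- eval u
  if b == 0
    then Left "divide by zero"   -- the effect: raise an exception
    else Right (a `div` b)

-- eval (Div (Con 6) (Con 2))  ==>  Right 3
-- eval (Div (Con 1) (Con 0))  ==>  Left "divide by zero"
\end{verbatim}

Abstracting the signature to Monad m => Term -> m Int (with the failure
case factored out) lets the same code run unchanged in a state monad
that counts divisions or a writer monad that traces them, which is the
kind of modification the abstract refers to.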
\index{Freeman, Tim}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Free91,
author = "Freeman, Tim and Pfenning, Frank",
title = {{Refinement Types for ML}},
booktitle = "ACM SIGPLAN PLDI'91",
year = "1991",
link = "\url{http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}",
abstract =
"We describe a refinement of ML's type system allowing the
specification of recursively defined subtypes of user-defined
datatypes. The resulting system of {\sl refinement types}
preserves desirable properties of ML such as decidability of type
inference, while at the same time allowing more errors to be
detected at compile-time. The type system combines abstract
interpretation with ideas from the intersection type discipline,
but remains closely tied to ML in that refinement types are given
only to programs which are already well-typed in ML.",
paper = "Free91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@misc{Zeil16,
author = "Zeilberger, Noam",
title = {{Towards a Mathematical Science of Programming}},
year = "2016"
}
\end{chunk}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@inproceedings{Zeil16a,
author = "Zeilberger, Noam",
title = {{Principles of Type Refinement}},
booktitle = "OPLSS 2106",
link = "\url{http://noamz.org/oplss16/refinements-notes.pdf}",
year = "2016",
paper = "Zeil16a.pdf"
}
\end{chunk}
\index{McCarthy, John}
\begin{chunk}{axiom.bib}
@incollection{Mcca63,
author = "McCarthy, John",
title = {{A Basis for a Mathematical Theory of Computation}},
booktitle = "Computer Programming and Formal Systems",
year = "1963",
paper = "Mcca63.pdf"
}
\end{chunk}
\index{Scott, Dana S.}
\index{Strachey, Christopher}
\begin{chunk}{axiom.bib}
@article{Scot71,
author = "Scott, Dana S. and Strachey, C.",
title = {{Towards a Mathematical Semantics for Computer Languages}},
journal = "Proc. Symp. on Computers and Automata",
volume = "21",
year = "1971",
abstract =
"Compilers for high-level languages are generally constructed to
give a complete translation of the programs into machine
language. As machines merely juggle bit patterns, the concepts of
the original language may be lost or at least obscured during this
passage. The purpose of a mathematical semantics is to give a
correct and meaningful correspondence between programs and
mathematical entities in a way that is entirely independent of an
implementation. This plan is illustrated in a very elementary
method with the usual idea of state transformations. The next
section shows why the mathematics of functions has to be modified
to accommodate recursive commands. Section 3 explains the
modification. Section 4 introduces the environments for handling
variables and identifiers and shows how the semantical equations
define equivalence of programs. Section 5 gives an exposition of
the new type of mathematical function spaces that are required for
the semantics of procedures when these are allowed in assignment
statements. The conclusion traces some of the background of the
project and points the way to future work.",
paper = "Scot71.pdf"
}
\end{chunk}
\index{Mellies, Paul-Andre}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@inproceedings{Mell15,
author = "Mellies, Paul-Andre and Zeilberger, Noam",
title = {{Functors are Type Refinement Systems}},
booktitle = "POPL'15",
publisher = "ACM",
year = "2015",
abstract =
"The standard reading of type theory through the lens of category
theory is based on the idea of viewing a type system as a category
of well-typed terms. We propose a basic revision of this reading;
rather than interpreting type systems as categories, we describe
them as {\sl functors} from a category of typing derivations to a
category of underlying terms. Then, turning this around, we
explain how in fact {\sl any} functor gives rise to a generalized
type system, with an abstract notion of type judgment, typing
derivations and typing rules. This leads to a purely categorical
reformulation of various natural classes of type systems as
natural classes of functors.
The main purpose of this paper is to describe the general
framework (which can also be seen as providing a categorical
analysis of {\sl refinement types}), and to present a few
applications. As a larger case study, we revisit Reynolds' paper
on ``The Meaning of Types'' (2000), showing how the paper's main
results may be reconstructed along these lines.",
paper = "Mell15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Shields, Mark}
\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@inproceedings{Shie02,
author = "Shields, Mark and Jones, Simon Peyton",
title = {{First-Class Modules for Haskell}},
booktitle = "9th Int. Conf. on Foundations of Object-Oriented Languages",
pages = "28-40",
year = "2002",
link = "\url{http://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/first_class_modules.pdf}",
abstract =
"Though Haskell's module language is quite weak, its core language
is highly expressive. Indeed, it is tantalisingly close to being
able to express much of the structure traditionally delegated to a
separate module language. However, the encodings are awkward, and
some situations can't be encoded at all.
In this paper we refine Haskell's core language to support
{\sl first-class modules} with many of the features of ML-style
modules. Our proposal cleanly encodes signatures, structures and
functors with the appropriate type abstraction and type sharing,
and supports recursive modules. All of these features work across
compilation units, and interact harmoniously with Haskell's class
system. Coupled with support for staged computation, we believe
our proposal would be an elegant approach to run-time dynamic
linking of structured code.
Our work builds directly upon Jones' work on parameterised
signatures, Odersky and L\"aufer's system of higher-ranked type
annotations, Russo's semantics of ML modules using ordinary
existential and universal quantifications, and Odersky and
Zenger's work on nested types. We motivate the system by examples,
and include a more formal presentation in the appendix.",
paper = "Shie02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dijkstra, E.W.}
\begin{chunk}{axiom.bib}
@misc{Dijk71,
author = "Dijkstra, E.W.",
title = {{A Short Introduction to the Art of Programming}},
comment = "EWD316",
year = "1971",
paper = "Dijk71.pdf"
}
\end{chunk}
\index{Steele, Guy Lewis}
\index{Sussman, Gerald Jay}
\begin{chunk}{axiom.bib}
@techreport{Stee78a,
author = "Steele, Guy Lewis and Sussman, Gerald Jay",
title = {{The Art of the Interpreter or, The Modularity Complex (Parts
Zero, One, and Two)}},
type = "technical report",
institution = "MIT AI Lab",
number = "AIM-453",
year = "1978",
abstract =
"We examine the effects of various language design decisions on
the programming styles available to a user of the language, with
particular emphasis on the ability to incrementally construct
modular systems. At each step we exhibit an interactive
meta-circular interpreter for the language under consideration.
We explore the consequences of various variable binding
disciplines and the introduction of side effects. We find that
dynamic scoping is unsuitable for constructing procedural
abstractions, but has another role as an agent of modularity,
being a structured form of side effect. More general side effects
are also found to be necessary to promote modular style. We find
that the notion of side effect and the notion of equality (object
identity) are mutually constraining: to define one is to define
the other.
The interpreters we exhibit are all written in a simple dialect of
LISP, and all implement LISP-like languages. A subset of these
interpreters constitute a partial historical reconstruction of the
actual evolution of LISP.",
paper = "Stee78a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
\index{Findler, Robert Bruce}
\begin{chunk}{axiom.bib}
@inproceedings{Wadl07a,
author = "Wadler, Philip and Findler, Robert Bruce",
title = {{Well-Typed Programs Can't Be Blamed}},
booktitle = "Workshop on Scheme and Functional Programming",
year = "2007",,
pages = "15-26",
abstract =
"We show how {\sl contracts} with blame fit naturally with recent
work on {\sl hybrid types} and {\sl gradual types}. Unlike hybrid
types or gradual types, we require casts in the source code, in
order to indicate where type errors may occur. Two (perhaps
surprising) aspects of our approach are that refined types can
provide useful static guarantees even in the absence of a theorem
prover, and that type {\sl dynamic} should not be regarded as a
supertype of all other types. We factor the well-known notion of
subtyping into new notions of positive and negative subtyping, and
use these to characterise where positive and negative blame may
arise. Our approach sharpens and clarifies some recent results in
the literature.",
paper = "Wadl07a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Michaelson, Greg}
\begin{chunk}{axiom.bib}
@book{Mich11,
author = "Michaelson, Greg",
title = {{An Introduction to Functional Programming Through Lambda Calculus}},
year = "2011",
publisher = "Dover",
isbn = "978-0-486-47883-8"
}
\end{chunk}