@comment{cw.bib}
@inproceedings{WZ:2024:metamath:aitp,
author = {Christoph Wernhard and Zsolt Zombori},
title = {Exploring {M}etamath Proof Structures (Extended Abstract)},
year = {2024},
booktitle = {9th Conference on Artificial Intelligence and Theorem
Proving, AITP 2024 (Informal Book of Abstracts)},
editor = {Michael R. Douglas and Thomas C. Hales and Cezary Kaliszyk and
Stephan Schulz and Josef Urban}
}
@inproceedings{RZDW:2024:aitp,
author = {Michael Rawson and Zsolt Zombori and Maximilian Dor{\'e} and
Christoph Wernhard},
title = {Forward Reasoning in Hindsight (Project Proposal)},
year = {2024},
booktitle = {9th Conference on Artificial Intelligence and Theorem
Proving, AITP 2024 (Informal Book of Abstracts)},
editor = {Michael R. Douglas and Thomas C. Hales and Cezary Kaliszyk and
Stephan Schulz and Josef Urban}
}
@article{Benedikt:Pradic:Wernhard:LMCS:2024,
author = {Michael Benedikt and C{\'e}cilia Pradic and Christoph Wernhard},
title = {Synthesizing Nested Relational Queries from Implicit
Specifications: Via Model Theory and via Proof Theory},
journal = {Logical Methods in Computer Science},
year = {2024},
doi = {10.46298/lmcs-20(3:7)2024},
volume = {20},
number = {3},
abstract = {Derived datasets can be defined implicitly or explicitly. An
\emph{implicit definition} (of dataset $O$ in terms of
datasets $\vec I$) is a logical specification involving two
distinguished sets of relational symbols. One set of
relations is for the ``source data'' $\vec I$, and the other
is for the ``interface data'' $O$. Such a specification is a
valid definition of $O$ in terms of $\vec I$, if any two
models of the specification agreeing on $\vec I$ agree on
$O$. In contrast, an \emph{explicit definition} is a
transformation (or ``query'' below) that produces $O$ from
$\vec I$. Variants of \emph{Beth's theorem} state that one
can convert implicit definitions to explicit ones. Further,
this conversion can be done effectively given a proof
witnessing implicit definability in a suitable proof system.
We prove the analogous implicit-to-explicit result for
nested relations: implicit definitions, given in the natural
logic for nested relations, can be converted to explicit
definitions in the nested relational calculus (NRC). We
first provide a model-theoretic argument for this result,
which makes some additional connections that may be of
independent interest between NRC queries,
\emph{interpretations}, a standard mechanism for defining
structure-to-structure translation in logic, and between
interpretations and implicit definability ``up to unique
isomorphism''. The latter connection uses a variation of a
result of Gaifman concerning ``relatively categorical''
theories. We also provide a proof-theoretic result that
provides an effective argument: from a proof witnessing
implicit definability, we can efficiently produce an NRC
definition. This will involve introducing the appropriate
proof system for reasoning with nested sets, along with some
auxiliary Beth-type results for this system. As a
consequence, we can effectively extract rewritings of NRC
queries in terms of NRC views, given a proof witnessing that
the query is determined by the views.}
}
@inproceedings{HeuerWernhard:2024:synthesis,
author = {Jan Heuer and Christoph Wernhard},
title = {Synthesizing Strongly Equivalent Logic Programs: Beth Definability
for Answer Set Programs via Craig Interpolation in
First-Order Logic},
editor = {Chris Benzm{\"u}ller and Marijn Heule and Renate Schmidt},
year = {2024},
booktitle = {International Joint Conference on Automated Reasoning, IJCAR 2024},
publisher = {Springer},
series = {LNCS (LNAI)},
volume = {14739},
pages = {172--193},
doi = {10.1007/978-3-031-63498-7_11},
note = {[ Preprint: \url{https://arxiv.org/abs/2402.07696} |
\href{http://cs.christophwernhard.com/papers/synasp_handout.pdf}{Presentation
slides} ]},
abstract = {We show a projective Beth definability theorem for logic
programs under the stable model semantics: For given
programs $P$ and $Q$ and vocabulary $V$ (set of predicates)
the existence of a program $R$ in $V$ such that $P \cup R$
and $P \cup Q$ are strongly equivalent can be expressed as a
first-order entailment. Moreover, our result is effective: A
program $R$ can be constructed from a Craig interpolant for
this entailment, using a known first-order encoding for
testing strong equivalence, which we apply in reverse to
extract programs from formulas. As a further perspective,
this allows transforming logic programs via transforming
their first-order encodings. In a prototypical
implementation, the Craig interpolation is performed by
first-order provers based on clausal tableaux or resolution
calculi. Our work shows how definability and interpolation,
which underlie modern logic-based approaches to advanced
tasks in knowledge representation, transfer to answer set
programming.}
}
@inproceedings{Wernhard:2024:sgcd,
author = {Christoph Wernhard},
title = {Structure-Generating First-Order Theorem Proving},
year = {2024},
booktitle = {AReCCa 2023 -- Automated Reasoning with Connection Calculi,
International Workshop},
editor = {Jens Otten and Wolfgang Bibel},
series = {CEUR Workshop Proceedings},
publisher = {CEUR-WS.org},
volume = {3613},
pages = {64--83},
url = {https://ceur-ws.org/Vol-3613/AReCCa2023_paper5.pdf},
note = {[ \href{http://cs.christophwernhard.com/papers/sgcd_handout.pdf}{Presentation slides} ]},
abstract = {Provers based on the connection method can become much more
powerful than currently believed. We substantiate this
thesis with certain generalizations of known techniques. In
particular, we generalize proof structure enumeration
interwoven with unification -- the proceeding of goal-driven
connection and clausal tableaux provers -- to an interplay
of goal- and axiom-driven processing. It permits lemma
re-use and heuristic restrictions known from saturating
provers. Proof structure terms, proof objects that allow to
specify and implement various ways of building proofs, are
central for our approach. Meredith's condensed detachment
represents the prototypical base case of such proof
structure terms. Hence, we focus on condensed detachment
problems, first-order Horn problems of a specific form.
Experiments show that for this problem class the approach
keeps up with state-of-the-art first-order provers, leads to
remarkably short proofs, solves an ATP challenge problem,
and is useful in machine learning for ATP. A general aim is
to make ATP more accessible to systematic investigations in
the space between calculi and implementation aspects.}
}
@inproceedings{wernhard:range:2023,
author = {Christoph Wernhard},
title = {Range-Restricted and Horn Interpolation through Clausal Tableaux},
year = {2023},
booktitle = {Automated Reasoning with Analytic Tableaux and
Related Methods: 32nd International Conference,
TABLEAUX 2023},
publisher = {Springer},
series = {LNCS (LNAI)},
pages = {3--23},
volume = {14278},
doi = {10.1007/978-3-031-43513-3_1},
editor = {Revantha Ramanayake and Josef Urban},
note = {[ Preprint (extended version): \url{https://arxiv.org/abs/2306.03572} | \href{http://cs.christophwernhard.com/papers/range_handout.pdf}{Presentation slides} ]},
abstract = {We show how variations of range-restriction and also the Horn
property can be passed from inputs to outputs of Craig
interpolation in first-order logic. The proof system is
clausal tableaux, which stems from first-order ATP. Our
results are induced by a restriction of the clausal tableau
structure, which can be achieved in general by a proof
transformation, also if the source proof is by
resolution/paramodulation. Primarily addressed applications
are query synthesis and reformulation with interpolation.
Our methodical approach combines operations on proof
structures with the immediate perspective of feasible
implementation through incorporating highly optimized
first-order provers.}
}
@inproceedings{mrcwzzwb:lemmas:2023,
author = {Michael Rawson and Christoph Wernhard
and Zsolt Zombori and Wolfgang Bibel},
title = {Lemmas: Generation, Selection, Application},
year = {2023},
booktitle = {Automated Reasoning with Analytic Tableaux and
Related Methods: 32nd International Conference,
TABLEAUX 2023},
publisher = {Springer},
series = {LNCS (LNAI)},
pages = {153--174},
volume = {14278},
doi = {10.1007/978-3-031-43513-3_9},
editor = {Revantha Ramanayake and Josef Urban},
note = {[ Preprint (extended version): \url{https://arxiv.org/abs/2303.05854} | \href{http://cs.christophwernhard.com/papers/lemmas_handout.pdf}{Presentation slides} ]},
abstract = {Noting that lemmas are a key feature of mathematics, we engage
in an investigation of the role of lemmas in automated
theorem proving. The paper describes experiments with a
combined system involving learning technology that generates
useful lemmas for automated theorem provers, demonstrating
improvement for several representative systems and solving a
hard problem not solved by any system for twenty years. By
focusing on condensed detachment problems we simplify the
setting considerably, allowing us to get at the essence of
lemmas and their role in proof search.}
}
@inproceedings{RWZ:2023:aitp,
author = {Michael Rawson and Christoph Wernhard and Zsolt Zombori},
title = {Learning to Identify Useful Lemmas from Failure},
year = {2023},
booktitle = {8th Conference on Artificial Intelligence and Theorem
Proving, AITP 2023 (Informal Book of Abstracts)},
editor = {Michael R. Douglas and Thomas C. Hales and Cezary Kaliszyk and
Stephan Schulz and Josef Urban},
note = {[
\href{http://aitp-conference.org/2023/abstract/AITP_2023_paper_18.pdf}{Extended abstract} | \href{http://aitp-conference.org/2023/slides/ZZ.pdf}{Presentation slides} ]}
}
@article{wernhard:bibel:investigations:2023,
author = {Christoph Wernhard and Wolfgang Bibel},
title = {Investigations into Proof Structures},
year = {2023},
publisher = {arXiv},
note = {[ Preprint (submitted): \url{https://arxiv.org/abs/2304.12827} ]},
abstract = {We introduce and elaborate a novel formalism for the
manipulation and analysis of proofs as objects in a global manner. In this
first approach the formalism is restricted to first-order problems
characterized by condensed detachment. It is applied in an exemplary manner
to a coherent and comprehensive formal reconstruction and analysis of
historical proofs of a widely-studied problem due to {\L}ukasiewicz. The
underlying approach opens the door towards new systematic ways of generating
lemmas in the course of proof search to the effects of reducing the search
effort and finding shorter proofs. Among the numerous reported experiments
along this line, a proof of {\L}ukasiewicz's problem was automatically
discovered that is much shorter than any proof found before by man or
machine.}
}
@inproceedings{Benedikt:Pradic:Wernhard:2023:pods,
author = {Michael Benedikt and C{\'e}cilia Pradic and Christoph Wernhard},
title = {Synthesizing Nested Relational Queries from Implicit Specifications},
year = {2023},
booktitle = {Principles of Database Systems, PODS 23},
doi = {10.1145/3584372.3588653},
pages = {33--45},
abstract = {Derived datasets can be defined implicitly or explicitly. An
\emph{implicit definition} (of dataset $O$ in terms of
datasets $\vec I$) is a logical specification involving the
source data $\vec I$ and the interface data $O$. It is a
valid definition of $O$ in terms of $\vec I$, if two models
of the specification agreeing on $\vec I$ agree on $O$. In
contrast, an \emph{explicit definition} is a query that
produces $O$ from $\vec I$. Variants of \emph{Beth's
theorem} state that one can convert implicit definitions to
explicit ones. Further, this conversion can be done
effectively given a proof witnessing implicit definability
in a suitable proof system. We prove the analogous effective
implicit-to-explicit result for nested relations: implicit
definitions, given in the natural logic for nested
relations, can be effectively converted to explicit
definitions in the nested relational calculus (NRC). As a
consequence, we can effectively extract rewritings of NRC
queries in terms of NRC views, given a proof witnessing that
the query is determined by the views.}
}
@inproceedings{Wernhard:2022:aitp,
author = {Christoph Wernhard},
title = {Compressed Combinatory Proof Structures and Blending Goal- with
Axiom-Driven Reasoning: Perspectives for First-Order ATP
with Condensed Detachment and Clausal Tableaux},
year = {2022},
booktitle = {7th Conference on Artificial Intelligence and Theorem
Proving, AITP 2022 (Informal Book of Abstracts)},
editor = {Michael R. Douglas and Thomas C. Hales and Cezary Kaliszyk and
Stephan Schulz and Josef Urban},
note = {[
\href{http://cs.christophwernhard.com/papers/aitp22-abstract.pdf}{Extended abstract} | \href{http://cs.christophwernhard.com/papers/aitp22-slides-handout-version.pdf}{Presentation slides} ]}
}
@inproceedings{Wernhard:2022:ccs,
author = {Christoph Wernhard},
title = {Generating Compressed Combinatory Proof Structures -- An Approach
to Automated First-Order Theorem Proving},
year = {2022},
booktitle = {8th Workshop on Practical Aspects of Automated Reasoning, PAAR~2022},
series = {CEUR Workshop Proceedings},
publisher = {CEUR-WS.org},
volume = {3201},
editor = {Boris Konev and Claudia Schon and Alexander Steen},
note = {[ Preprint: \url{https://arxiv.org/abs/2209.12592} ]},
abstract = {Representing a proof tree by a combinator term that reduces to
the tree lets subtle forms of duplication within the tree
materialize as duplicated subterms of the combinator term.
In a DAG representation of the combinator term these
straightforwardly factor into shared subgraphs. To search
for proofs, combinator terms can be enumerated, like clausal
tableaux, interwoven with unification of formulas that are
associated with nodes of the enumerated structures. To
restrict the search space, the enumeration can be based on
proof schemas defined as parameterized combinator terms. We
introduce here this ``combinator term as proof structure''
approach to automated first-order proving, present an
implementation and first experimental results. The approach
builds on a term view of proof structures rooted in
condensed detachment and the connection method. It realizes
features known from the connection structure calculus, which
has not been implemented so far.}
}
@misc{Wernhard:2022:cdtools,
author = {Christoph Wernhard},
title = {{CD Tools} -- {C}ondensed Detachment and Structure Generating
Theorem Proving (System Description)},
year = {2022},
doi = {10.48550/ARXIV.2207.08453},
publisher = {arXiv},
note = {\url{https://arxiv.org/abs/2207.08453}},
abstract = {CD Tools is a Prolog library for experimenting with condensed
detachment in first-order ATP, which puts a recent formal
view centered around proof structures into practice. From
the viewpoint of first-order ATP, condensed detachment
offers a setting that is relatively simple but with
essential features and serious applications, making it
attractive as a basis for developing and evaluating novel
techniques. CD Tools includes specialized provers based on
the enumeration of proof structures. We focus here on one of
these, SGCD, which permits to blend goal- and axiom-driven
proof search in particularly flexible ways. In purely
goal-driven configurations it acts similarly to a prover of
the clausal tableaux or connection method family. In blended
configurations its performance is much stronger, close to
state-of-the-art provers, while emitting relatively short
proofs. Experiments show characteristics and application
possibilities of the structure generating approach realized
by that prover. For a historic problem often studied in ATP
it produced a new proof that is much shorter than any known
one.}
}
@proceedings{soqe2021,
title = {Proceedings of the Second Workshop on Second-Order
Quantifier Elimination and Related Topics (SOQE 2021)},
year = {2021},
editor = {Renate A. Schmidt and Christoph Wernhard and Yizheng Zhao},
volume = {3009},
series = {CEUR Workshop Proceedings},
publisher = {CEUR-WS.org},
address = {Aachen},
issn = {1613-0073},
url = {http://ceur-ws.org/Vol-3009},
venue = {Online Event, associated with KR 2021, the 18th
International Conference on Principles of Knowledge
Representation and Reasoning},
eventdate = {2021-11-04}
}
@inproceedings{Wernhard:2021:goedel:soqe,
author = {Christoph Wernhard},
title = {Applying Second-Order Quantifier Elimination
in Inspecting {G}{\"o}del's Ontological Proof},
booktitle = {Proceedings of the Second Workshop on Second-Order
Quantifier Elimination and Related Topics (SOQE 2021)},
editor = {Renate A. Schmidt and Christoph Wernhard and Yizheng Zhao},
year = {2021},
series = {CEUR Workshop Proceedings},
volume = {3009},
pages = {98--111},
url = {http://ceur-ws.org/Vol-3009/paper4.pdf},
note = {[ Preprint (extended version): \url{https://arxiv.org/abs/2110.11108} ]},
abstract = {In recent years, G{\"o}del's ontological proof and variations
of it were formalized and analyzed with automated tools in
various ways. We supplement these analyses with a modeling
in an automated environment based on first-order logic
extended by predicate quantification. Formula macros are
used to structure complex formulas and tasks. The analysis
is presented as a generated type-set document where informal
explanations are interspersed with pretty-printed formulas
and outputs of reasoners for first-order theorem proving and
second-order quantifier elimination. Previously unnoticed or
obscured aspects and details of G{\"o}del's proof become
apparent. Practical application possibilities of
second-order quantifier elimination are shown and the
encountered elimination tasks may serve as benchmarks.}
}
@inproceedings{WernhardBibel:2021:ProofStructures,
author = {Christoph Wernhard and Wolfgang Bibel},
title = {Learning from {\L}ukasiewicz and {M}eredith:
Investigations into Proof Structures},
booktitle = {Automated Deduction: \mbox{CADE 2021}},
editor = {Andr{\'e} Platzer and Geoff Sutcliffe},
year = {2021},
publisher = {Springer},
series = {LNCS (LNAI)},
pages = {58--75},
volume = {12699},
note = {[ Preprint (extended version):
\url{https://arxiv.org/abs/2104.13645} |
\href{http://cs.christophwernhard.com/papers/cade_2021_slides.pdf}{Presentation slides} ]},
doi = {10.1007/978-3-030-79876-5_4},
abstract = {The material presented in this paper contributes to establishing a
basis deemed essential for substantial progress in Automated
Deduction. It identifies and studies global features in
selected problems and their proofs which offer the potential
of guiding proof search in a more direct way. The studied
problems are of the wide-spread form of ``axiom(s) and
rule(s) imply goal(s)''. The features include the well-known
concept of lemmas. For their elaboration both human and
automated proofs of selected theorems are taken into a close
comparative consideration. The study at the same time
accounts for a coherent and comprehensive formal
reconstruction of historical work by {\L}ukasiewicz, Meredith
and others. First experiments resulting from the study
indicate novel ways of lemma generation to supplement
automated first-order provers of various families,
strengthening in particular their ability to find short
proofs.}
}
@article{Wernhard:2021:Interpolation,
author = {Christoph Wernhard},
title = {Craig Interpolation with Clausal First-Order Tableaux},
journal = {Journal of Automated Reasoning},
year = {2021},
pages = {647--690},
volume = {65},
number = {5},
doi = {10.1007/s10817-021-09590-3},
note = {[ Preprint: \url{https://arxiv.org/abs/2008.03489} ]},
abstract = {We develop foundations for computing Craig-Lyndon interpolants of
two given formulas with first-order theorem provers that
construct clausal tableaux. Provers that can be understood
in this way include efficient machine-oriented systems based
on calculi of two families: goal-oriented such as model
elimination and the connection method, and bottom-up such as
the hypertableau calculus. We present the first
interpolation method for first-order proofs represented by
closed tableaux that proceeds in two stages, similar to
known interpolation methods for resolution proofs. The
first stage is an induction on the tableau structure, which
is sufficient to compute propositional interpolants. We
show that this can linearly simulate different prominent
propositional interpolation methods that operate by an
induction on a resolution deduction tree. In the second
stage, interpolant lifting, quantified variables that
replace certain terms (constants and compound terms) by
variables are introduced. We justify the correctness of
interpolant lifting (for the case without built-in equality)
abstractly on the basis of Herbrand's theorem and for a
different characterization of the formulas to be lifted than
in the literature. In addition, we discuss various subtle
aspects that are relevant for the investigation and
practical realization of first-order interpolation based on
clausal tableaux.}
}
@inproceedings{Wernhard:2020:PIE,
author = {Christoph Wernhard},
title = {Facets of the {PIE} Environment for Proving, Interpolating and
Eliminating on the Basis of First-Order Logic},
booktitle = {Declarative Programming and Knowledge Management (DECLARE
2019), Revised Selected Papers},
editor = {Petra Hofstedt and Salvador Abreu and Ulrich John and Herbert
Kuchen and Dietmar Seipel},
year = {2020},
series = {LNCS (LNAI)},
doi = {10.1007/978-3-030-46714-2_11},
volume = {12057},
pages = {160--177},
publisher = {Springer},
note = {[ Preprint: \url{https://arxiv.org/abs/2002.10892} ]},
abstract = {PIE is a Prolog-embedded environment for automated reasoning
on the basis of first-order logic. Its main focus is on
formulas, as constituents of complex formalizations that are
structured through formula macros, and as outputs of
reasoning tasks such as second-order quantifier elimination
and Craig interpolation. It supports a workflow based on
documents that intersperse macro definitions, invocations of
reasoners, and LaTeX-formatted natural language
text. Starting from various examples, the paper discusses
features and application possibilities of PIE along with
current limitations and issues for future research.}
}
@inproceedings{Kittelmann:Wernhard:2020:KBSET,
author = {Jana Kittelmann and Christoph Wernhard},
title = {{KBSET} -- Knowledge-Based Support for Scholarly Editing and Text
Processing with Declarative {LaTeX} Markup and a Core
Written in {SWI}-{P}rolog},
booktitle = {Declarative Programming and Knowledge Management (DECLARE
2019), Revised Selected Papers},
editor = {Petra Hofstedt and Salvador Abreu and Ulrich John and Herbert
Kuchen and Dietmar Seipel},
year = {2020},
series = {LNCS (LNAI)},
publisher = {Springer},
pages = {178--196},
volume = {12057},
doi = {10.1007/978-3-030-46714-2_12},
note = {[ Preprint: \url{https://arxiv.org/abs/2002.10329} ]},
abstract = {KBSET is an environment that provides support for scholarly
editing in two flavors: First, as a practical tool
KBSET/Letters that accompanies the development of editions
of correspondences (in particular from the 18th and 19th
century), completely from source documents to PDF and HTML
presentations. Second, as a prototypical tool KBSET/NER for
experimentally investigating novel forms of working on
editions that are centered around automated named entity
recognition. KBSET can process declarative
application-specific markup that is expressed in LaTeX
notation and incorporate large external fact bases that are
typically provided in RDF. KBSET includes specially
developed LaTeX styles and a core system that is written in
SWI-Prolog, which is used there in many roles, utilizing
that it realizes the potential of Prolog as a unifying
language.}
}
@incollection{Kittelmann:Wernhard:halle:2019,
author = {Jana Kittelmann and Christoph Wernhard},
title = {Von der Transkription zur Wissensbasis. Zum Zusammenspiel von
digitalen Editionstechniken und Formen der
Wissensrepr{\"a}sentation am Beispiel von Korrespondenzen Johann
Georg Sulzers},
editor = {Jana Kittelmann and Anne Purschwitz},
booktitle = {Aufkl{\"a}rungsforschung digital. Konzepte, Methoden, Perspektiven},
series = {IZEA -- Kleine Schriften},
volume = {10/2019},
publisher = {Mitteldeutscher Verlag},
year = {2019},
pages = {84--114},
note = {[ Preprint: \url{http://www.sulzer-digital.de/papiere/transkription_wissensbasis.pdf} ]}
}
@inproceedings{Wernhard:2019:PIE,
author = {Christoph Wernhard},
title = {{PIE} -- {P}roving, Interpolating and Eliminating
on the Basis of First-Order Logic},
booktitle = {Pre-proceedings of the DECLARE 2019
Conference},
editor = {Salvador Abreu and Petra Hofstedt and Ulrich John and Herbert
Kuchen and Dietmar Seipel},
year = {2019},
series = {CoRR},
volume = {abs/1909.04870},
archiveprefix = {arXiv},
eprint = {1908.11137},
abstract = {PIE is a Prolog-embedded environment for automated reasoning
on the basis of first-order logic. It includes a versatile
formula macro system and supports the creation of documents
that intersperse macro definitions, reasoner invocations and
LaTeX-formatted natural language text. Invocation of
various reasoners is supported: External provers as well as
sub-systems of PIE, which include preprocessors, a
Prolog-based first-order prover, methods for Craig
interpolation and methods for second-order quantifier
elimination.}
}
@inproceedings{Kittelmann:Wernhard:2019:KBSET,
author = {Jana Kittelmann and Christoph Wernhard},
title = {{KBSET} -- {K}nowledge-Based Support for Scholarly Editing and Text
Processing},
booktitle = {Pre-proceedings of the DECLARE 2019
Conference},
editor = {Salvador Abreu and Petra Hofstedt and Ulrich John and Herbert
Kuchen and Dietmar Seipel},
year = {2019},
series = {CoRR},
volume = {abs/1909.04870},
archiveprefix = {arXiv},
eprint = {1908.11135},
abstract = {KBSET supports a practical workflow for scholarly editing,
based on using LaTeX with dedicated commands for
semantics-oriented markup and a Prolog-implemented core
system. Prolog plays there various roles: as query language
and access mechanism for large Semantic Web fact bases, as
data representation of structured documents and as a
workflow model for advanced application tasks. The core
system includes a LaTeX parser and a facility for the
identification of named entities. We also sketch future
perspectives of this approach to scholarly editing based on
techniques of computational logic.}
}
@techreport{Wernhard:2018:Interpolation,
author = {Christoph Wernhard},
title = {Craig Interpolation and Access Interpolation
with Clausal First-Order Tableaux},
institution = {Technische Universit{\"a}t Dresden},
number = {Knowledge Representation and Reasoning 18-01},
year = {2018},
journal = {ArXiv e-prints},
archiveprefix = {arXiv},
eprint = {1802.04982},
abstract = {We develop foundations for computing Craig interpolants and
similar intermediates of two given formulas with first-order
theorem provers that construct clausal tableaux. Provers
that can be understood in this way include efficient
machine-oriented systems based on calculi of two families:
goal-oriented like model elimination and the connection
method, and bottom-up like the hyper tableau calculus. The
presented method for Craig-Lyndon interpolation involves a
lifting step where terms are replaced by quantified
variables, similar as known for resolution-based
interpolation, but applied to a differently characterized
ground formula and proven correct more abstractly on the
basis of Herbrand's theorem, independently of a particular
calculus. Access interpolation is a recent form of
interpolation for database query reformulation that applies
to first-order formulas with relativized quantifiers and
constrains the quantification patterns of predicate
occurrences. It has been previously investigated in the
framework of Smullyan's non-clausal tableaux. Here, in
essence, we simulate these with the more machine-oriented
clausal tableaux through structural constraints that can be
ensured either directly by bottom-up tableau construction
methods or, for closed clausal tableaux constructed with
arbitrary calculi, by postprocessing with restructuring
transformations.}
}
@proceedings{soqe2017,
title = {Proceedings of the Workshop on Second-Order Quantifier
Elimination and Related Topics, SOQE~2017},
year = {2017},
editor = {Patrick Koopmann and Sebastian Rudolph and
Renate A. Schmidt and Christoph Wernhard},
volume = {2013},
series = {CEUR Workshop Proceedings},
publisher = {CEUR-WS.org},
issn = {1613-0073},
url = {http://ceur-ws.org/Vol-2013/},
venue = {Dresden, Germany},
eventdate = {2017-12-06/2017-12-08}
}
@inproceedings{Wernhard:2017:Approximating,
author = {Christoph Wernhard},
title = {Approximating Resultants of Existential Second-Order Quantifier
Elimination upon Universal Relational First-Order Formulas},
booktitle = {Proceedings of the Workshop on Second-Order Quantifier
Elimination and Related Topics, SOQE~2017},
editor = {Patrick Koopmann and Sebastian Rudolph and
Renate A. Schmidt and Christoph Wernhard},
series = {CEUR Workshop Proceedings},
volume = {2013},
pages = {82--98},
publisher = {CEUR-WS.org},
year = {2017},
url = {http://ceur-ws.org/Vol-2013/paper17.pdf},
abstract = {We investigate second-order quantifier elimination for a class
of formulas characterized by a restriction on the quantifier
prefix: existential predicate quantifiers followed by
universal individual quantifiers and a relational
matrix. For a given second-order formula of this class a
possibly infinite sequence of universal first-order formulas
that have increasing strength and are all entailed by the
second-order formula can be constructed. Any first-order
consequence of the second-order formula is a consequence of
some member of the sequence. The sequence provides a
recursive base for the first-order theory of the
second-order formula, in the sense investigated by
Craig. The restricted formula class allows to derive further
properties, for example that the set of those members of
the sequence that are equivalent to the second-order
formula, or, more generally, have the same first-order
consequences, is co-recursively enumerable. Also the set of
first-order formulas that entails the second-order formula
is co-recursively enumerable. These properties are proven
with formula-based tools used in automated deduction, such
as domain closure axioms, eliminating individual quantifiers
by ground expansion, predicate quantifier elimination with
Ackermann's Lemma, Craig interpolation and decidability of
the Bernays-Sch{\"o}nfinkel-Ramsey class.}
}
@techreport{Wernhard:2017:SolutionProblem:report,
author = {Christoph Wernhard},
title = {The {B}oolean Solution Problem from the Perspective
of Predicate Logic -- Extended Version},
institution = {Technische Universit{\"a}t Dresden},
journal = {ArXiv e-prints},
archiveprefix = {arXiv},
eprint = {1706.08329},
number = {Knowledge Representation and Reasoning 17-01},
year = {2017},
abstract = {Finding solution values for unknowns in Boolean equations was a
principal reasoning mode in the \textit{Algebra of Logic} of
the 19th century. Schr{\"o}der investigated it as
\textit{Aufl{\"o}sungsproblem} (\textit{solution problem}). It is
closely related to the modern notion of Boolean
unification. Today it is commonly presented in an algebraic
setting, but seems potentially useful also in knowledge
representation based on predicate logic. We show that it
can be modeled on the basis of first-order logic extended by
second-order quantification. A wealth of classical results
transfers, foundations for algorithms unfold, and
connections with second-order quantifier elimination and
Craig interpolation show up. Although for first-order
inputs the set of solutions is recursively enumerable, the
development of constructive methods remains a challenge. We
identify some cases that allow constructions, most of them
based on Craig interpolation, and show a method to take
vocabulary restrictions on solution components into
account.}
}
@inproceedings{Wernhard:2017:SolutionProblem,
author = {Christoph Wernhard},
title = {The {B}oolean Solution Problem from the Perspective of
Predicate Logic},
booktitle = {11th International Symposium on Frontiers
of Combining Systems, FroCoS 2017},
editor = {Clare Dixon and Marcelo Finger},
series = {LNCS (LNAI)},
volume = {10483},
pages = {333--350},
publisher = {Springer},
doi = {10.1007/978-3-319-66167-4_19},
year = {2017},
note = {[ \href{http://cs.christophwernhard.com/papers/solution.pdf}{Preprint} | \href{http://cs.christophwernhard.com/papers/frocos_2017_slides.pdf}{Presentation slides} ]},
abstract = {Finding solution values for unknowns in Boolean equations was
a principal reasoning mode in the \textit{Algebra of Logic} of
the 19th century. Schr{\"o}der investigated it as
\textit{Aufl{\"o}sungsproblem} (\textit{solution problem}). It is
closely related to the modern notion of Boolean
unification. Today it is commonly presented in an algebraic
setting, but seems potentially useful also in knowledge
representation based on predicate logic. We show that it
can be modeled on the basis of first-order logic extended by
second-order quantification. A wealth of classical results
transfers, foundations for algorithms unfold, and
connections with second-order quantifier elimination and
Craig interpolation show up.}
}
@misc{Wernhard:2017:tableauxposter,
author = {Christoph Wernhard},
title = {Craig Interpolation and Query Reformulation with Clausal
First-Order Tableaux},
year = {2017},
note = {Poster presentation at Automated Reasoning with Analytic
Tableaux and Related Methods: 26th International Conference, TABLEAUX 2017},
url = {http://cs.christophwernhard.com/papers/tableaux_2017_poster.pdf}
}
@inproceedings{Wernhard:PAAR:2016,
author = {Christoph Wernhard},
title = {The {PIE} system for Proving, Interpolating
and Eliminating},
booktitle = {5th Workshop on Practical Aspects of Automated Reasoning, PAAR 2016},
editor = {Pascal Fontaine and Stephan Schulz and Josef Urban},
year = {2016},
abstract = {The PIE system aims at providing an environment for creating
complex applications of automated first-order theorem
proving techniques. It is embedded in Prolog. Beyond actual
proving tasks, also interpolation and second-order
quantifier elimination are supported. A macro feature and a
\LaTeX\ formula pretty-printer facilitate the construction
of elaborate formalizations from small, understandable and
documented units. For use with interpolation and
elimination, preprocessing operations allow to preserve the
semantics of chosen predicates. The system comes with a
built-in default prover that can compute interpolants.},
volume = {1635},
pages = {125--138},
url = {http://ceur-ws.org/Vol-1635/paper-11.pdf},
series = {CEUR Workshop Proceedings},
publisher = {CEUR-WS.org}
}
@inproceedings{Kittelmann:Wernhard:AITP:2016,
author = {Jana Kittelmann and Christoph Wernhard},
title = {Towards Knowledge-Based Assistance for Scholarly Editing},
booktitle = {1st Conference on Artificial Intelligence and Theorem Proving,
{AITP} 2016 (Book of Abstracts)},
editor = {Thomas C. Hales and Cezary Kaliszyk and Stephan Schulz
and Josef Urban},
year = {2016},
url = {http://aitp-conference.org/aitp_proceedings.pdf#page=29},
pages = {29--31},
note = {[ \href{http://cs.christophwernhard.com/kbset/papers/slides_aitp_2016.pdf}{Presentation slides} ]},
abstract = {We investigate possibilities to utilize techniques of
computational logic for scholarly editing. Semantic Web
technology already makes relevant large knowledge bases
available in form of logic formulas. There are several
further roles of logic-based reasoning in machine supported
scholarly editing. KBSET, a free prototype system, provides
a platform for experiments.}
}
@inproceedings{Kittelmann:Wernhard:DHd:2016,
author = {Jana Kittelmann and Christoph Wernhard},
title = {Knowledge-Based Support for Scholarly Editing and Text Processing},
booktitle = {DHd 2016 -- Digital Humanities im
deutschsprachigen Raum: Modellierung -- Vernetzung --
Visualisierung. Die Digital Humanities als
f{\"a}cher{\"u}bergreifendes Forschungsparadigma. Konferenzabstracts},
year = {2016},
doi = {10.5281/zenodo.4645317},
pages = {178--181},
publisher = {nisaba verlag},
address = {Duisburg},
note = {[ \href{http://cs.christophwernhard.com/kbset/papers/slides_dhd_2016.pdf}{Presentation slides (in German)} ]}
}
@techreport{Wernhard:2015:behmann:report,
author = {Christoph Wernhard},
title = {Heinrich {B}ehmann's Contributions to
Second-Order Quantifier Elimination from the
View of Computational Logic},
institution = {Technische Universit{\"a}t Dresden},
number = {Knowledge Representation and Reasoning 15-05},
year = {2015},
note = {Revised 2017},
archiveprefix = {arXiv},
eprint = {1712.06868},
abstract = {For relational monadic formulas (the L{\"o}wenheim class)
second-order quantifier elimination, which is closely
related to computation of uniform interpolants, projection
and forgetting - operations that currently receive much
attention in knowledge processing - always succeeds. The
decidability proof for this class by Heinrich Behmann from
1922 explicitly proceeds by elimination with equivalence
preserving formula rewriting. Here we reconstruct the
results from Behmann's publication in detail and discuss
related issues that are relevant in the context of modern
approaches to second-order quantifier elimination in
computational logic. In addition, an extensive documentation
of the letters and manuscripts in Behmann's bequest that
concern second-order quantifier elimination is given,
including a commented register and English abstracts of the
German sources with focus on technical material. In the late
1920s Behmann attempted to develop an elimination-based
decision method for formulas with predicates whose arity is
larger than one. His manuscripts and the correspondence with
Wilhelm Ackermann show technical aspects that are still of
interest today and give insight into the genesis of
Ackermann's landmark paper ``Untersuchungen {\"u}ber das
Eliminationsproblem der mathematischen Logik'' from 1935, which
laid the foundation of the two prevailing modern approaches to second-order
quantifier elimination.}
}
@misc{Wernhard:2015:Deduktionstreffen,
author = {Christoph Wernhard},
title = {Some Fragments Towards Establishing Completeness Properties of
Second-Order Quantifier Elimination Methods},
year = {2015},
note = {Poster presentation at Jahrestreffen der GI Fachgruppe
Deduktionssysteme, associated with CADE-25},
url = {http://cs.christophwernhard.com/papers/poster_deduktionstreffen_2015.pdf}
}
@inproceedings{Wernhard:2015:relmon,
author = {Christoph Wernhard},
title = {Second-Order Quantifier Elimination on Relational Monadic
Formulas -- {A} Basic Method and Some Less Expected
Applications},
booktitle = {Automated Reasoning with Analytic Tableaux and
Related Methods: 24th International Conference,
TABLEAUX 2015},
year = {2015},
publisher = {Springer},
series = {LNCS (LNAI)},
volume = {9323},
pages = {249--265},
doi = {10.1007/978-3-319-24312-2_18},
editor = {Hans de Nivelle},
note = {[ \href{http://cs.christophwernhard.com/papers/tableaux_2015_slides.pdf}{Presentation slides} ]},
abstract = {For relational monadic formulas (the L{\"o}wenheim class)
second-order quantifier elimination, which is closely
related to computation of uniform interpolants, forgetting
and projection, always succeeds. The decidability proof for
this class by Behmann from 1922 explicitly proceeds by
elimination with equivalence preserving formula rewriting.
We reconstruct Behmann's method, relate it to the modern DLS
elimination algorithm and show some applications where the
essential monadicity becomes apparent only at second
sight. In particular, deciding $\mathcal{ALCOQH}$ knowledge
bases, elimination in DL-Lite knowledge bases, and the
justification of the success of elimination methods for
Sahlqvist formulas.}
}
@techreport{Wernhard:2015:relmon:report,
author = {Christoph Wernhard},
title = {Second-Order Quantifier Elimination on Relational Monadic
Formulas -- {A} Basic Method and Some Less Expected
Applications (Extended Version)},
institution = {Technische Universit{\"a}t Dresden},
url = {http://www.wv.inf.tu-dresden.de/Publications/2015/report-15-04.pdf},
number = {Knowledge Representation and Reasoning 15-04},
year = {2015},
abstract = {For relational monadic formulas (the L{\"o}wenheim class)
second-order quantifier elimination, which is closely
related to computation of uniform interpolants, forgetting
and projection, always succeeds. The decidability proof for
this class by Behmann from 1922 explicitly proceeds by
elimination with equivalence preserving formula rewriting.
We reconstruct Behmann's method, relate it to the modern DLS
elimination algorithm and show some applications where the
essential monadicity becomes apparent only at second
sight. In particular, deciding $\mathcal{ALCOQH}$ knowledge
bases, elimination in DL-Lite knowledge bases, and the
justification of the success of elimination methods for
Sahlqvist formulas.}
}
@inproceedings{Wernhard:2014:DefinientiaARWDT,
author = {Christoph Wernhard},
title = {Second-Order Characterizations of Definientia in
Formula Classes},
editor = {Alexander Bolotov and Manfred Kerber},
booktitle = {Joint Automated Reasoning Workshop and Deduktionstreffen,
ARW-DT 2014},
pages = {36--37},
year = {2014},
url = {http://www.cs.bham.ac.uk/~mmk/events/ARW-DT2014/proceedings.pdf},
note = {Workshop presentation
[ \href{http://cs.christophwernhard.com/papers/arwdt2014.pdf}{Slides} ].}
}
@inproceedings{Wernhard:2014:Patterns,
author = {Christoph Wernhard},
title = {Application Patterns of Projection/Forgetting},
year = {2014},
booktitle = {Workshop on Interpolation: From Proofs to Applications, iPRA 2014},
editor = {Laura Kov{\'a}cs and Georg Weissenbacher},
url = {http://cs.christophwernhard.com/papers/ipra14abstract.pdf},
note = {Workshop presentation
        [ \href{http://cs.christophwernhard.com/papers/ipra14.pdf}{Slides} |
          \href{http://vsl2014.at/meetings/iPRA-index.html}{Workshop page} ].}
}
@techreport{Wernhard:2014:Definientia,
author = {Christoph Wernhard},
title = {Second-Order Characterizations of Definientia in
Formula Classes},
institution = {Technische Universit{\"a}t Dresden},
url = {http://www.wv.inf.tu-dresden.de/Publications/2014/report-2014-03.pdf},
number = {Knowledge Representation and Reasoning 14-03},
year = {2014},
abstract = {Predicate quantification can be applied to characterize
definientia of a given formula that are in terms of a given
set of predicates. Methods for second-order quantifier
elimination and the closely related computation of
forgetting, projection and uniform interpolants can then be
applied to compute such definientia. Here we address the
question, whether this principle can be transferred to
definientia in given classes that allow efficient
processing, such as Horn or Krom formulas. Indeed, if
propositional logic is taken as basis, for the class of all
formulas that are equivalent to a conjunction of atoms and
the class of all formulas that are equivalent to a Krom
formula, the existence of definientia as well as
representative definientia themselves can be characterized
in terms of predicate quantification. For the class of
formulas that are equivalent to a Horn formula, this is
possible with a special further operator. For first-order
logic as basis, we indicate guidelines and open issues.}
}
@inproceedings{KittelmannWernhard:2014:Pueckler,
author = {Jana Kittelmann and Christoph Wernhard},
title = {{S}emantik,
{L}inked {D}ata, {W}eb-{P}r{\"a}sentation:
{G}rundlagen der {N}achlasserschlie{\ss}ung im {P}ortal
www.pueckler-digital.de},
editor = {Anne Baillot and Anna Busch},
booktitle = {Workshop Datenmodellierung in digitalen Briefeditionen
und ihre interpretatorische Leistung},
publisher = {Humboldt-Universit{\"a}t zu Berlin},
note = {Poster presentation,
[ \href{http://cs.christophwernhard.com/papers/abstract_pueckler_digital.pdf}{Abstract} | \href{http://cs.christophwernhard.com/papers/poster_pueckler_digital.pdf}{Poster} ].},
year = {2014}
}
@techreport{Wernhard:2014:VBQ,
author = {Christoph Wernhard},
title = {Expressing View-Based Query Processing and
Related Approaches with Second-Order Operators},
institution = {Technische Universit{\"a}t Dresden},
url = {http://www.wv.inf.tu-dresden.de/Publications/2014/report-2014-02.pdf},
number = {Knowledge Representation and Reasoning 14-02},
year = {2014}
}
@inproceedings{Wernhard:2013:Toyelim,
author = {Christoph Wernhard},
title = {Computing with Logic as Operator Elimination: The {ToyElim}
System},
booktitle = {Applications of Declarative Programming and Knowledge
Management, 19th International Conference (INAP 2011) and
25th Workshop on Logic Programming (WLP 2011), Revised
Selected Papers},
editor = {Hans Tompits and Salvador Abreu and Johannes Oetsch and J{\"o}rg
P{\"u}hrer and Dietmar Seipel and Masanobu Umeda and Armin
Wolf},
series = {LNCS (LNAI)},
volume = {7773},
publisher = {Springer},
year = {2013},
pages = {289--296},
doi = {10.1007/978-3-642-41524-1_17},
note = {[ \href{http://cs.christophwernhard.com/papers/toyelim.pdf}{Preprint} ]},
abstract = {A prototype system is described whose core functionality is,
based on propositional logic, the elimination of
second-order operators, such as Boolean quantifiers and
operators for projection, forgetting and
circumscription. This approach allows to express many
representational and computational tasks in knowledge
representation -- for example computation of abductive
explanations and models with respect to logic programming
semantics -- in a uniform operational system, backed by a
uniform classical semantic framework.}
}
@article{DHW:2014:Suppression,
title = {Modeling the Suppression Task under Weak
Completion and Well-Founded Semantics},
author = {Emmanuelle-Anna Dietz and Steffen H{\"o}lldobler and Christoph
Wernhard},
journal = {Journal of Applied Non-Classical Logics},
volume = {24},
number = {1--2},
pages = {61--85},
year = {2014}
}
@incollection{KittelmannWernhard:2013:Pueckler,
author = {Jana Kittelmann and Christoph Wernhard},
title = {{S}emantik, {W}eb, {M}etadaten und digitale {E}dition:
{G}rundlagen und {Z}iele der {E}rschlie{\ss}ung neuer {Q}uellen des
{B}ranitzer {P}{\"u}ckler-{A}rchivs},
editor = {Irene Krebs and others},
booktitle = {Resonanzen. P{\"u}cklerforschung im Spannungsfeld zwischen
Wissenschaft und Kunst. Ein Konferenzbericht.},
year = {2013},
publisher = {trafo Verlag},
address = {Berlin},
pages = {179--202}
}
@inproceedings{KittelmannWernhard:2013:Nachlass,
author = {Jana Kittelmann and Christoph Wernhard},
title = {{M}{\"o}glichkeiten der {R}epr{\"a}sentation
von {N}achlassarchiven im {W}eb},
booktitle = {Fachtagung Semantische Technologien --
Verwertungsstrategien und Konvergenz,
Humboldt-Universit{\"a}t zu Berlin, Position Papers},
year = {2013},
url = {http://semantic-media-web.de/referenten/?detail=57}
}
@inproceedings{Wernhard:2013:Abduction,
author = {Christoph Wernhard},
title = {Abduction in Logic Programming as Second-Order Quantifier
Elimination},
booktitle = {9th International Symposium on Frontiers
of Combining Systems, FroCoS 2013},
editor = {Pascal Fontaine and Christophe Ringeissen and Renate A. Schmidt},
series = {LNCS (LNAI)},
volume = {8152},
publisher = {Springer},
pages = {103--119},
year = {2013},
doi = {10.1007/978-3-642-40885-4_8},
abstract = {It is known that skeptical abductive explanations with respect
to classical logic can be characterized semantically in a
natural way as formulas with second-order quantifiers.
Computing explanations is then just elimination of the
second-order quantifiers. By using application patterns and
generalizations of second-order quantification, like literal
projection, the globally weakest sufficient condition and
circumscription, we transfer these principles in a unifying
framework to abduction with three non-classical semantics of
logic programming: stable model, partial stable model and
well-founded semantics. New insights are revealed about
abduction with the partial stable model semantics.},
note = {[ \href{http://cs.christophwernhard.com/papers/abduction-lp-soqe.pdf}{Preprint (extended version)} ]}
}
@inproceedings{MPW:2013:SAT,
author = {Norbert Manthey and Tobias Philipp and Christoph Wernhard},
title = {Soundness of Inprocessing in Clause Sharing {SAT} Solvers},
booktitle = {Theory and Applications of Satisfiability Testing,
16th International Conference, SAT 2013},
editor = {Matti J{\"a}rvisalo and Allen Van Gelder},
series = {LNCS},
volume = {7962},
publisher = {Springer},
pages = {22--39},
year = {2013},
doi = {10.1007/978-3-642-39071-5_4},
note = {Received the SAT 2013 Best Paper Award
[ \href{http://cs.christophwernhard.com/papers/inprocessing.pdf}{Preprint} ]},
abstract = {We present a formalism that models the computation of clause
sharing portfolio solvers with inprocessing. The soundness
of these solvers is not a straightforward property since
shared clauses can make a formula unsatisfiable. Therefore,
we develop characterizations of simplification techniques
and suggest various settings how clause sharing and
inprocessing can be combined. Our formalization models most
of the recent implemented portfolio systems and we indicate
possibilities to improve these. A particular improvement is
a novel way to combine clause addition techniques -- like
blocked clause addition -- with clause deletion techniques --
like blocked clause elimination or variable elimination.}
}
@inproceedings{Wernhard:2012:HR,
author = {Christoph Wernhard},
title = {Towards a Declarative Approach to Model Human Reasoning with
Nonmonotonic Logics},
booktitle = {Human Reasoning and Automated Deduction: KI 2012 Workshop
Proceedings},
pages = {41--48},
editor = {Thomas Barkowsky and Marco Ragni and Frieder Stolzenburg},
series = {Report Series of the Transregional Collaborative Research Center SFB/TR 8 Spatial Cognition},
volume = {SFB/TR 8 Report 032-09/2012},
year = {2012},
publisher = {Universit{\"a}t Bremen / Universit{\"a}t Freiburg, Germany},
note = {(\url{http://cs.christophwernhard.com/papers/hrad2012.pdf})},
abstract = {Stenning and van Lambalgen introduced an approach to model
empirically studied human reasoning with nonmonotonic
logics. Some of the research questions that have been
brought up in this context concern the interplay of the
open- and closed-world assumption, the suitability of
particular logic programming semantics for the modeling of
human reasoning, and the role of three-valued logic
programming semantics and three-valued logics. We look into
these questions from the view of a framework where logic
programs that model human reasoning are represented
declaratively and are mechanizable by classical formulas
extended with certain second-order operators.}
}
@article{Wernhard:2012:ProjCirc,
author = {Christoph Wernhard},
title = {Projection and Scope-Determined Circumscription},
journal = {Journal of Symbolic Computation},
year = {2012},
volume = {47},
number = {9},
pages = {1089--1108},
note = {[ \href{http://cs.christophwernhard.com/papers/projcirc.pdf}{Preprint} ]},
doi = {10.1016/j.jsc.2011.12.034},
abstract = {We develop a semantic framework that extends first-order
logic by literal projection and a novel second semantically
defined operator, ``raising'', which is only slightly
different from literal projection and can be used to define
a generalization of parallel circumscription with varied
predicates in a straightforward and compact way. We call
this variant of circumscription ``scope-determined'', since
like literal projection and raising its effects are
controlled by a so-called ``scope'', that is, a set of
literals, as parameter. We work out formally a toolkit of
propositions about projection, raising and circumscription
and their interaction. It reveals some refinements of and
new views on previously known properties. In particular, we
apply it to show that well-foundedness with respect to
circumscription can be expressed in terms of projection, and
that a characterization of the consequences of circumscribed
propositional formulas in terms of literal projection can be
generalized to first-order logic and expressed compactly in
terms of new variants of the strongest necessary and weakest
sufficient condition.}
}
@techreport{Wernhard:2011:HRLPCL,
author = {Christoph Wernhard},
title = {Forward Human Reasoning Modeled by Logic Programming Modeled by
Classical Logic with Circumscription and Projection},
institution = {Technische Universit{\"a}t Dresden},
year = {2011},
url = {http://www.wv.inf.tu-dresden.de/Publications/2011/report11-07.pdf},
number = {Knowledge Representation and Reasoning 11-07},
abstract = {Recently an approach to model human reasoning as studied in
cognitive science by logic programming, has been introduced
by Stenning and van Lambalgen and exemplified with the
suppression task. We investigate this approach from the
view of a framework where different logic programming
semantics correspond to different translations of logic
programs into formulas of classical two-valued logic
extended by two second-order operators, circumscription and
literal projection. Based on combining and extending
previously known such renderings of logic programming
semantics, we take semantics into account that have not yet
been considered in the context of human reasoning, such as
stable models and partial stable models. To model human
reasoning, it is essential that only some predicates can be
subjected to closed world reasoning, while others are
handled by open world reasoning. In our framework, variants
               of familiar logic programming semantics that are extended
with this feature are derived from a generic circumscription
based representation. Further, we develop a two-valued
representation of a three-valued logic that renders
semantics considered for human reasoning based on the
Fitting operator.}
}
@inproceedings{Wernhard:2011:ToyElim,
author = {Christoph Wernhard},
title = {Computing with Logic as Operator Elimination: The {ToyElim}
System},
booktitle = {Proceedings of the 25th Workshop on Logic Programming,
WLP 2011},
organization = {Technische Universit{\"a}t Wien},
series = {Infsys Research Report 1843-11-06},
pages = {94--98},
year = {2011},
archiveprefix = {arXiv},
eprint = {1108.4891},
abstract = {A prototype system is described whose core functionality is,
based on propositional logic, the elimination of
second-order operators, such as Boolean quantifiers and
operators for projection, forgetting and circumscription.
This approach allows to express many representational and
computational tasks in knowledge representation -- for
example computation of abductive explanations and models
with respect to logic programming semantics -- in a uniform
operational system, backed by a uniform classical semantic
framework.}
}
@inproceedings{HPW:2011:AMHR,
author = {Steffen H{\"o}lldobler and Tobias Philipp and Christoph Wernhard},
title = {An Abductive Model for Human Reasoning},
booktitle = {Logical Formalizations of Commonsense Reasoning, Papers
from the AAAI 2011 Spring Symposium},
series = {AAAI Spring Symposium Series Technical Reports},
publisher = {AAAI Press},
year = {2011},
pages = {135--138},
note = {[ Preprint (extended version):
\url{http://www.wv.inf.tu-dresden.de/Publications/2010/HoelldoblerPhilippWernhard.pdf} ]},
abstract = {In this paper we contribute to bridging the gap between human
reasoning as studied in Cognitive Science and commonsense
reasoning based on formal logics and formal theories. In
particular, the suppression task studied in Cognitive
Science provides an interesting challenge problem for human
reasoning based on logic. The work presented in the paper
is founded on the recent approach by Stenning and van
Lambalgen to model human reasoning by means of logic
programs with a specific three-valued completion semantics
and a semantic fixpoint operator that yields a least model,
as well as abduction. Their approach has been subsequently
made more precise and technically accurate by switching to
three-valued {\L}ukasiewicz logic. In this paper, we extend
this refined approach by abduction. We show that the
inclusion of abduction permits to adequately model
additional empiric results reported from Cognitive Science.
For the arising abductive reasoning tasks we give complexity
results. Finally, we outline several open research issues
that emerge from the application of logic to model human
reasoning.}
}
@inproceedings{Wernhard:2010:LP,
author = {Christoph Wernhard},
title = {Circumscription and Projection as Primitives of Logic Programming},
booktitle = {Technical Communications of the 26th International
Conference on Logic Programming, ICLP'10},
year = {2010},
volume = {7},
pages = {202--211},
editor = {Hermenegildo, Manuel and Schaub, Torsten},
publisher = {Schloss Dagstuhl--Leibniz-Zentrum f{\"u}r Informatik},
series = {Leibniz International Proceedings in
Informatics (LIPIcs)},
address = {Dagstuhl, Germany},
isbn = {978-3-939897-17-0},
issn = {1868-8969},
url = {http://drops.dagstuhl.de/opus/volltexte/2010/2598/},
doi = {10.4230/LIPIcs.ICLP.2010.281},
note = {[ \href{http://cs.christophwernhard.com/papers/logprog2010extended.pdf}{Preprint (extended version)} ]},
abstract = {We pursue a representation of logic programs as classical
first-order sentences. Different semantics for logic
programs can then be expressed by the way in which they are
wrapped into - semantically defined - operators for
circumscription and projection. (Projection is a
generalization of second-order quantification.) We
demonstrate this for the stable model semantics, Clark's
completion and a three-valued semantics based on the Fitting
operator. To represent the latter, we utilize the polarity
sensitiveness of projection, in contrast to second-order
quantification, and a variant of circumscription that allows
to express predicate minimization in parallel with
maximization. In accord with the aim of an integrated view
on different logic-based representation techniques, the
material is worked out on the basis of first-order logic
with a Herbrand semantics.}
}
@inproceedings{Wernhard:2009:ProjectionCircumscription,
author = {Christoph Wernhard},
title = {Literal Projection and Circumscription},
booktitle = {Proceedings of the 7th
International Workshop on First-Order Theorem Proving, FTP'09},
publisher = {CEUR-WS.org},
series = {CEUR Workshop Proceedings},
volume = {556},
pages = {60--74},
year = {2010},
url = {http://ceur-ws.org/Vol-556},
editor = {Nicolas Peltier and Viorica Sofronie-Stokkermans},
note = {[
\href{http://cs.christophwernhard.com/papers/projection-circumscription.pdf}{Preprint (extended version)} ]},
abstract = {We develop a formal framework intended as a preliminary step for
a single knowledge representation system that provides
different representation techniques in a unified way. In
particular we consider first-order logic extended by
techniques for second-order quantifier elimination and
non-monotonic reasoning. In this paper two independent
results are developed. The background for the first result
is literal projection, a generalization of second-order
quantification which permits, so to speak, to quantify upon
               an arbitrary set of ground literals, instead of just (all
ground literals with) a given predicate symbol. We introduce
an operator raise that is only slightly different from
literal projection and can be used to define a
generalization of predicate circumscription in a
straightforward and compact way. We call this variant of
circumscription scope-determined. Some properties of raise
and scope-determined circumscription, also in combination
with literal projection, are then shown. A previously known
characterization of consequences of circumscribed formulas
in terms of literal projection is generalized from
propositional to first-order logic and proven on the basis
of the introduced concepts. The second result developed in
               this paper is a characterization of stable models in terms of
circumscription. Unlike traditional characterizations, it
does not recur onto syntactic notions like reduct and
fixed-point construction. It essentially renders a recently
proposed ``circumscription-like'' characterization in a
compact way, without involvement of a non-classically
interpreted connective.}
}
@inproceedings{Wernhard:2009:Tableaux,
title = {Tableaux for Projection Computation and Knowledge Compilation},
booktitle = {Automated Reasoning with Analytic Tableaux and Related Methods:
18th International Conference, TABLEAUX 2009},
editor = {Martin Giese and Arild Waaler},
pages = {325--340},
year = {2009},
author = {Christoph Wernhard},
publisher = {Springer},
series = {LNCS (LNAI)},
volume = {5607},
url = {http://cs.christophwernhard.com/papers/tableaux-projection.pdf},
doi = {10.1007/978-3-642-02716-1_24},
abstract = {Projection computation is a generalization of second-order
quantifier elimination, which in turn is closely related to
the computation of forgetting and of uniform
interpolants. On the basis of a unified view on projection
computation and knowledge compilation, we develop a
framework for applying tableau methods to these tasks. It
takes refinements from performance oriented systems into
account. Formula simplifications are incorporated at the
level of tableau structure modification, and at the level of
simplifying encountered subformulas that are not yet fully
compiled. In particular, such simplifications can involve
projection computation, where this is possible with low
cost. We represent tableau construction by means of rewrite
rules on formulas, extended with some auxiliary functors,
which is particularly convenient for formula transformation
tasks. As instantiations of the framework, we discuss
approaches to propositional knowledge compilation from the
literature, including adaptions of DPLL, and the hyper
tableau calculus for first-order clauses.}
}
@book{Wernhard:2009:ProjectionElimination,
author = {Christoph Wernhard},
title = {Automated Deduction for Projection Elimination},
number = {324},
series = {Dissertations in Artificial Intelligence},
publisher = {AKA Verlag/IOS Press},
address = {Heidelberg, Amsterdam},
year = {2009},
url = {http://www.iospress.nl/loadtop/load.php?isbn=9781586039837},
abstract = {Projection is a logic operation which allows to express tasks in
knowledge representation. These tasks involve extraction or
removal of knowledge concerning a given sub-vocabulary. It
is a generalization of second-order quantification,
permitting, so to speak, to `quantify' upon an arbitrary set
of ground literals instead of just (all ground literals
with) a given predicate symbol. In \textit{Automated
Deduction for Projection Elimination}, a semantic
characterization of projection for first-order logic is
presented. On this basis, properties underlying applications
and processing methods are derived. The computational
processing of projection, called projection elimination in
analogy to quantifier elimination, can be performed by
adapted theorem proving methods. This is shown for resolvent
generation and, more in depth, tableau construction. An
abstract framework relates projection elimination with
knowledge compilation and shows the adaption of key features
of high performance tableau systems. As a prototypical
instance, an adaption of a modern DPLL method, such as
underlying state-of-the-art SAT solvers, is worked out. It
generalizes various recent knowledge compilation methods and
utilizes the interplay with projection elimination for
efficiency improvements.}
}
@inproceedings{Wernhard:2008:LiteralProjection,
  author    = {Christoph Wernhard},
  title     = {Literal Projection for First-Order Logic},
  booktitle = {Logics in Artificial Intelligence: 11th
               European Conference, JELIA 08},
  editor    = {Steffen H{\"o}lldobler and Carsten Lutz and Heinrich Wansing},
  publisher = {Springer},
  series    = {LNCS (LNAI)},
  volume    = {5293},
  pages     = {389--402},
  year      = {2008},
  doi       = {10.1007/978-3-540-87803-2_32},
  note      = {[ \href{http://cs.christophwernhard.com/papers/literal-projection.pdf}{Preprint (extended version)} ]},
  abstract  = {The computation of literal projection generalizes predicate
               quantifier elimination by permitting, so to speak,
               quantifying upon an arbitrary set of ground literals,
               instead of just (all ground literals with) a given predicate
               symbol. Literal projection allows, for example, to express
               predicate quantification upon a predicate just in positive
               or negative polarity. Occurrences of the predicate in
               literals with the complementary polarity are then considered
               as unquantified predicate symbols. We present a
               formalization of literal projection and related concepts,
               such as literal forgetting, for first-order logic with a
               Herbrand semantics, which makes these notions easy to
               access, since they are expressed there by means of
               straightforward relationships between sets of literals. With
               this formalization, we show properties of literal projection
               which hold for formulas that are free of certain links,
               pairs of literals with complementary instances, each in a
               different conjunct of a conjunction, both in the scope of a
               universal first-order quantifier, or one in a subformula and
               the other in its context formula. These properties can
               justify the application of methods that construct formulas
               without such links to the computation of literal
               projection. Some tableau methods and direct methods for
               second-order quantifier elimination can be understood in
               this way.}
}
@inproceedings{PelzerWernhard:2007:EKrhyper,
  author    = {Bj{\"o}rn Pelzer and Christoph Wernhard},
  title     = {System Description: \mbox{E-KRHyper}},
  booktitle = {Automated Deduction: \mbox{CADE-21}},
  editor    = {Frank Pfenning},
  publisher = {Springer},
  series    = {LNCS (LNAI)},
  volume    = {4603},
  pages     = {503--513},
  year      = {2007},
  doi       = {10.1007/978-3-540-73595-3_37},
  url       = {http://cs.christophwernhard.com/papers/ekrhyper.pdf},
  abstract  = {The E-KRHyper system is a model generator and theorem prover for
               first-order logic with equality. It implements the new
               E-hyper tableau calculus, which integrates a
               superposition-based handling of equality into the hyper
               tableau calculus. E-KRHyper extends our previous KRHyper
               system, which has been used in a number of applications in
               the field of knowledge representation. In contrast to most
               first order theorem provers, it supports features
               important for such applications, for example queries with
               predicate extensions as answers, handling of large sets of
               uniformly structured input facts, arithmetic evaluation and
               stratified negation as failure. It is our goal to extend
               the range of application possibilities of KRHyper by adding
               equality reasoning.}
}
@techreport{Wernhard:2007:Tableaux,
  author      = {Christoph Wernhard},
  title       = {Tableaux Between Proving, Projection and Compilation},
  institution = {Universit{\"a}t Koblenz-Landau},
  number      = {Arbeitsberichte aus dem Fachbereich Informatik 18/2007},
  address     = {Institut f{\"u}r Informatik,
                 Universit{\"a}tsstr. 1, 56070 Koblenz, Germany},
  year        = {2007},
  abstract    = {Generalized methods for automated theorem proving can be
                 used to compute formula transformations such as projection
                 elimination and knowledge compilation. We present a
                 framework based on clausal tableaux suited for such
                 tasks. These tableaux are characterized independently of
                 particular construction methods, but important features of
                 empirically successful methods are taken into account,
                 especially dependency directed backjumping and branch local
                 operation. As an instance of that framework an adaption of
                 DPLL is described. We show that knowledge compilation
                 methods can be essentially improved by weaving projection
                 elimination partially into the compilation phase.}
}
@inproceedings{Wernhard:2004:SKP,
  author    = {Christoph Wernhard},
  title     = {Semantic Knowledge Partitioning},
  booktitle = {Logics in Artificial Intelligence:
               9th European Conference, JELIA 04},
  editor    = {Jos{\'e} J{\'u}lio Alferes and Jo{\~a}o Leite},
  publisher = {Springer},
  series    = {LNCS (LNAI)},
  volume    = {3229},
  pages     = {552--564},
  year      = {2004},
  doi       = {10.1007/978-3-540-30227-8_46},
  url       = {http://cs.christophwernhard.com/papers/skp-jelia.ps},
  abstract  = {Some operations to decompose a knowledge base (considered as a
               first order logic formula) in ways so that only its
               semantics determines the results are investigated. Intended
               uses include the extraction of ``parts'' relevant to an
               application, the exploration and utilizing of implicit
               possibilities of structuring a knowledge base and the
               formulation of query answers in terms of a signature
               demanded by an application. A semantic framework based on
               Herbrand interpretations is outlined. The notion of ``model
               relative to a scope'' is introduced. It underlies the
               partitioning operations ``projection'' and ``forgetting'' and
               also provides a semantic account for certain formula
               simplification operations. An algorithmic approach which is
               based on resolution and may be regarded as a variation of
               the SCAN algorithm is discussed.}
}
@inproceedings{Wernhard:2004:SKPShort,
  author    = {Christoph Wernhard},
  title     = {Semantic Knowledge Partitioning (Extended Abstract)},
  booktitle = {Contributions to the
               Doctoral Programme of the Second International
               Joint Conference on Automated Reasoning,
               IJCAR 2004},
  editor    = {Ulrike Sattler},
  publisher = {CEUR-WS.org},
  series    = {CEUR Workshop Proceedings},
  volume    = {106},
  year      = {2004},
  url       = {http://ceur-ws.org/Vol-106/}
}
@inproceedings{BaumgartnerEtAl:2003:KrhyperInside,
  author    = {Peter Baumgartner and Ulrich Furbach and Margret Gross-Hardt
               and Thomas Kleemann and Christoph Wernhard},
  title     = {{KRH}yper {I}nside -- {M}odel Based Deduction in Applications},
  booktitle = {Proceedings of the \mbox{CADE-19} workshop
               Challenges and Novel Applications for Automated Reasoning},
  year      = {2003},
  pages     = {55--72},
  url       = {http://cs.christophwernhard.com/papers/novel.ps},
  note      = {(Workshop notes:
               \url{http://www.uclic.ucl.ac.uk/usr/jgow/cnaar.pdf})},
  abstract  = {Three real world applications are depicted which all have a full
               first order theorem prover based on the hyper tableau
               calculus as their core component. These applications concern
               information retrieval in electronic publishing, the
               integration of description logics with other knowledge
               representation techniques and XML query processing.}
}
@techreport{Wernhard:2003:KRHyper,
  author      = {Christoph Wernhard},
  title       = {System Description: {KRHyper}},
  institution = {Universit{\"a}t Koblenz-Landau},
  number      = {Fachberichte Informatik 14--2003},
  address     = {Institut f{\"u}r Informatik,
                 Universit{\"a}tsstr. 1, 56070 Koblenz, Germany},
  year        = {2003},
  note        = {Presented at the \mbox{CADE-19} workshop
                 \textit{Model Computation: Principles, Algorithms, Applications}.},
  url         = {http://cs.christophwernhard.com/papers/krhsys.ps},
  abstract    = {KRHyper is a first order logic theorem proving and model
                 generation system based on the hyper tableau calculus. It is
                 targeted for use as an embedded system within knowledge
                 based applications. In contrast to most first order theorem
                 provers, it supports features important for those
                 applications, for example queries with predicate extensions
                 as answers, handling of large sets of uniformly structured
                 input facts, arithmetic evaluation and stratified negation
                 as failure.}
}
@unpublished{Wernhard:2002:InfraEngine,
  author   = {Christoph Wernhard},
  title    = {{InfraEngine}: {I}nferencing in the {S}emantic {W}eb by Planning},
  year     = {2002},
  note     = {System description (edited 2007). (A prototype implementation can be
              downloaded from \url{http://www.infraengine.com/})},
  url      = {http://www.infraengine.com/papers/infra.pdf},
  abstract = {The idea of InfraEngine is to help establishing a Semantic Web
              infrastructure by a specially adapted AI planning
              engine. Inputs are distributed Web documents in Semantic Web
              formats proposed by the World Wide Web Consortium. Outputs
              are also delivered in such formats. The user interface
              allows to browse Semantic Web data and to control the
              planning services from any Web browser. Also other programs
              can make use of these services. A small and understandable,
              but general, mechanism that can be used for different kinds
              of applications should be provided.}
}
@unpublished{Wernhard:2001:SemWebProofs,
  author   = {Christoph Wernhard},
  title    = {Representing Proofs in the {S}emantic {W}eb},
  year     = {2001},
  note     = {Working paper for Persist AG, Teltow, Germany},
  url      = {http://cs.christophwernhard.com/papers/persist/swproofs.pdf},
  abstract = {First steps towards an RDF format for exchanging proofs in the
              Semantic Web.}
}
@comment{NOTE(review): the key says 2000 but the year field is 2001; the
  sibling Persist AG papers use year 2000 -- confirm which is correct. The
  key is left untouched because citations may depend on it.}
@unpublished{Wernhard:2000:SemWebApplications,
  author   = {Christoph Wernhard},
  title    = {Two Short Term Applications of the {S}emantic {W}eb},
  year     = {2001},
  note     = {Working paper for Persist AG, Teltow, Germany},
  url      = {http://cs.christophwernhard.com/papers/persist/applic.pdf},
  abstract = {We outline two application scenarios which might be in the realm
              of short term applications of the Semantic Web: a software
              packaging system and the organization of a business
              trip. Both of them can be solved with today's technology to
              some degree, so they do not show the novel potential of the
              Semantic Web in full. However considering Semantic Web
              solutions for them is useful to get a picture of the
              characteristics of the Semantic Web and become aware of some
              concrete technical issues.}
}
@unpublished{Wernhard:2000:PlanningWeb,
  author   = {Christoph Wernhard},
  title    = {The {P}lanning {W}eb in Action},
  year     = {2000},
  note     = {Working paper for Persist AG, Teltow, Germany},
  url      = {http://cs.christophwernhard.com/papers/persist/outline.pdf},
  abstract = {We propose resource oriented inference as one way of bringing the
              Semantic Web into action. It provides a framework for
              expressing and processing a variety of tasks from areas such
              as planning, scheduling, manufacturing resource planning,
              product data management, configuration management, workflow
              management and simulation. Resource oriented inference as a
              part of the Semantic Web should allow such tasks to be
              performed within the scope of the World Wide Web. A
              prototypical application is the purchase of a complex
              product with the help of the Web. The product consists of
              multiple parts, some of them complex products by
              themselves. Several services are required to compose the
              product. Subparts and services can be provided by different
              companies all over the world. Delivery time and cost of the
              product should be optimized.}
}
@unpublished{Wernhard:2000:SemanticWebModeling,
  author   = {Christoph Wernhard},
  title    = {Towards a {S}emantic {W}eb Modeling Language},
  year     = {2000},
  note     = {Working paper for Persist AG, Teltow, Germany. Presented
              at \textit{KnowTech 2000}, Leipzig},
  url      = {http://cs.christophwernhard.com/papers/persist/swmod.pdf},
  abstract = {Outline of a small Web embedded language for specifying types and
              reasoning about them. Its main features are: (1.) From
              primitive types, that require from their instances just that
              they implement a single method, complex types can be
              constructed by a few operators, such as a type
              intersection. This composition of type definitions maps to
              the composition of fragments of type definitions in
              different Web documents. (2.) Types are compared by
              structural equivalence and not based on their names. This
              facilitates combination of independently developed models
              and implicit association of types with given instances. (3.)
              The hyperlinking of expressions of the modeling language can
              be understood in terms of defining equations of a rewriting
              system. The left side of such a rewrite rule is an URI, the
              right side an expression. Hyperlink dereferencing
              corresponds to a rewrite step with such a rule.}
}
@unpublished{Wernhard:1999:LBCP,
  author   = {Christoph Wernhard},
  title    = {Experiments with a Linear Backward Chaining Planner},
  year     = {1999},
  note     = {(Edited May 2003)},
  url      = {http://cs.christophwernhard.com/papers/exp.ps},
  abstract = {The performance of an implementation of the linear backward
              chaining planning algorithm is compared to other planning
              systems by means of the problem set of the first AI planning
              systems competition (AIPS-98).}
}
@inproceedings{DahnEtAl:1998:MathlibMathematica,
  author    = {Ingo Dahn and Andreas Haida and Thomas Honigmann and
               Christoph Wernhard},
  title     = {Using {M}athematica and Automated Theorem Provers to Access a
               Mathematical Library},
  booktitle = {Proceedings of the \mbox{CADE-15} Workshop on
               Integration of Deductive Systems},
  year      = {1998},
  pages     = {36--43},
  note      = {(Revised and extended version:
               \url{http://cs.christophwernhard.com/papers/integ.ps})},
  url       = {http://i12www.ira.uka.de/Workshop/notes.ps.gz},
  abstract  = {We describe a concept for the cooperation of a computer
               algebra system, several automated theorem provers and a
               mathematical library. The purpose of this cooperation is to
               enable intelligent retrieval of theorems and definitions
               from the remote mathematical library. Automated theorem
               provers compete on remote machines to verify conjectures
               provided by the user in a local copy of Mathematica. They
               make use of a remote knowledge base which contains parts of
               the Mizar Mathematical Library.}
}
@inproceedings{DahnWernhard:1997:Mathlib,
  author    = {Ingo Dahn and Christoph Wernhard},
  title     = {First Order Proof Problems Extracted from an Article in the
               {Mizar} Mathematical Library},
  booktitle = {International
               Workshop on First-Order Theorem Proving, FTP'97},
  editor    = {Maria Paola Bonacina and Ulrich Furbach},
  series    = {RISC-Linz Report Series No.\ 97--50},
  publisher = {Johannes Kepler Universit{\"a}t, Linz},
  pages     = {58--62},
  year      = {1997},
  url       = {http://cs.christophwernhard.com/papers/ftp97.ps},
  abstract  = {Over the years, interactive theorem provers have built a large
               body of verified computer mathematics. The ILF Mathematical
               Library aims to make this knowledge available to other
               systems. One of the reasons for such a project is
               economy. Verification of software and hardware frequently
               requires the proof of purely mathematical theorems. It is
               obviously inefficient, to use the time of experts in the
               design of software or hardware systems to prove such
               theorems. Another reason for presenting a collection of
               mathematical theorems in a unified framework is safety. It
               should facilitate the verification of theorems in the
               library of one system by other systems. A third reason is
               dynamics of research. New interactive theorem provers should
               obtain the possibility to show their usability for
               real-world problems without having to reprove elementary
               mathematical facts. Last but not least, it is hoped that
               reproving theorems in a uniform mathematical library will be
               considered as a challenge to the development of automated
               theorem provers.}
}
@mastersthesis{Wernhard:92:CLOSDB,
  author  = {Christoph Wernhard},
  title   = {Entwurf und {I}mplementierung einer
             {D}atenbank\-schnitt\-stelle
             f{\"u}r das {C}ommon {L}isp {O}bject {S}ystem},
  school  = {Freie Universit{\"a}t Berlin},
  address = {Berlin, Germany},
  type    = {Magisterarbeit},
  year    = {1992},
  note    = {(The system documentation is available from
             \url{http://cs.christophwernhard.com/closdb/}.)}
}
@inproceedings{SchweppeEtAl:90:DBCLOS,
  author    = {Heinz Schweppe and Christoph Wernhard and Jutta Estenfeld},
  title     = {{DB-CLOS}: {E}ine {D}atenbankschnittstelle f{\"u}r das
               {C}ommon {L}isp {O}bject {S}ystem},
  booktitle = {K{\"u}nstliche Intelligenz in der Praxis},
  editor    = {W. Remmele},
  publisher = {Siemens AG},
  year      = {1990}
}
@inproceedings{Hydrasat:2009,
  author    = {Christoph Baldow and Friedrich Gr{\"a}ter and
               Steffen H{\"o}lldobler and Norbert Manthey and Max Seelemann
               and Peter Steinke and Christoph Wernhard and Konrad Winkler
               and Erik Zenker},
  title     = {{HydraSAT} 2009.3 Solver Description},
  booktitle = {SAT 2009 Competitive Event Booklet},
  editor    = {Le Berre, Daniel and others},
  pages     = {15--16},
  year      = {2009},
  url       = {http://www.cril.univ-artois.fr/SAT09/solvers/booklet.pdf}
}