@inproceedings{baelde12itp,
  address = {Princeton, New~Jersey, USA},
  year = 2012,
  month = aug,
  volume = 7406,
  series = {Lecture Notes in Computer Science},
  publisher = {Springer},
  editor = {Beringer, Lennart and Felty, Amy P.},
  acronym = {{ITP}'12},
  booktitle = {{P}roceedings of the {T}hird {I}nternational {C}onference on
               {I}nteractive {T}heorem {P}roving ({ITP}'12)},
  author = {Baelde, David and
               Courtieu, Pierre and
               Gross{-}Amblard, David and
               Paulin{-}Mohring, {\relax Ch}ristine},
  title = {Towards Provably Robust Watermarking},
  pages = {201--216},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde12itp.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde12itp.pdf},
  doi = {10.1007/978-3-642-32347-8_14},
  abstract = {Watermarking techniques are used to help identifying
                  copies of publicly released information. They
                  consist in applying a slight and secret modification
                  to the data before its release, in a way that should
                  be \emph{robust}, i.e., remain recognizable even in
                  (reasonably) modified copies of the data. In~this
                  paper, we present new results about the robustness
                  of watermarking schemes against arbitrary attackers,
                  and the formalization of those results in \textsc{Coq}. We
                  used the \textsc{Alea} library, which formalizes
                  probability theory and models probabilistic programs
                  using a simple monadic translation. This work
                  illustrates the strengths and particularities of the
                  induced style of reasoning about probabilistic
                  programs. Our technique for proving robustness is
                  adapted from methods commonly used for cryptographic
                  protocols, and we discuss its relevance to the field
                  of watermarking.}
}
@inproceedings{baelde12lics,
  address = {Dubrovnik, Croatia},
  month = jun,
  year = 2012,
  publisher = {{IEEE} Computer Society Press},
  acronym = {{LICS}'12},
  booktitle = {{P}roceedings of the 27th
               {A}nnual {IEEE} {S}ymposium on
               {L}ogic in {C}omputer {S}cience
               ({LICS}'12)},
  author = {Baelde, David and
               Nadathur, Gopalan},
  title = {Combining Deduction Modulo and Logics of Fixed-Point Definitions},
  pages = {105--114},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde12lics.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde12lics.pdf},
  doi = {10.1109/LICS.2012.22},
  abstract = {Inductive and coinductive specifications are widely used
                  in formalizing computational systems. Such
                  specifications have a natural rendition in logics
                  that support fixed-point definitions. Another useful
                  formalization device is that of recursive
                  specifications. These specifications are not
                  directly complemented by fixed-point reasoning
                  techniques and, correspondingly, do not have to
                  satisfy strong monotonicity restrictions. We show
                  how to incorporate a rewriting capability into
                  logics of fixed-point definitions towards
                  additionally supporting recursive specifications. In
                  particular, we describe a natural deduction calculus
                  that adds a form of {"}closed-world{"} equality---a
                  key ingredient to supporting fixed-point
                  definitions---to \emph{deduction modulo}, a framework for
                  extending a logic with a rewriting layer operating
                  on formulas.  We show that our calculus enjoys
                  strong normalizability when the rewrite system
                  satisfies general properties and we demonstrate its
                  usefulness in specifying and reasoning about
                  syntax-based descriptions. The integration of
                  closed-world equality into deduction modulo leads us
                  to reconfigure the elimination principle for this
                  form of equality in a way that resolves
                  long-standing issues concerning the stability of
                  finite proofs under proof reduction.}
}
@article{baelde12tocl,
  publisher = {ACM Press},
  journal = {ACM Transactions on Computational Logic},
  author = {Baelde, David},
  title = {Least and greatest fixed points in linear logic},
  year = 2012,
  month = jan,
  volume = 13,
  number = 1,
  nopages = {},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde12tocl.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde12tocl.pdf},
  doi = {10.1145/2071368.2071370},
  abstract = {The first-order theory of MALL (multiplicative, additive
                  linear logic) over only equalities is a well-structured
                  but weak logic since it cannot capture unbounded
                  (infinite) behavior. Instead of accounting for
                  unbounded behavior via the addition of the
                  exponentials (! and ?), we add least and greatest
                  fixed point operators. The resulting logic, which we
                  call \(\mu\)MALL, satisfies two fundamental proof
                  theoretic properties: we establish weak normalization
                  for it, and we design a focused proof system that we
                  prove complete with respect to the initial system.
                  That second result provides a strong normal form for
                  cut-free proof structures that can be used, for
                  example, to help automate proof search. We show how
                  these foundations can be applied to intuitionistic
                  logic.}
}
@inproceedings{snow10ppdp,
  address = {Hagenberg, Austria},
  month = jul,
  year = 2010,
  publisher = {ACM Press},
  editor = {Kutsia, Temur and Schreiner, Wolfgang and Fern{\'a}ndez, Maribel},
  acronym = {{PPDP}'10},
  booktitle = {{P}roceedings of the 12th {I}nternational {ACM} {SIGPLAN}
               {C}onference on {P}rinciples and {P}ractice of {D}eclarative
               {P}rogramming ({PPDP}'10)},
  author = {Snow, Zachary and Baelde, David and Nadathur, Gopalan},
  title = {A Meta-Programming Approach to Realizing Dependently
                 Typed Logic Programming},
  pages = {187--198},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/snow10ppdp.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/snow10ppdp.pdf},
  doi = {10.1145/1836089.1836113},
  abstract = {Dependently typed lambda calculi such as the Logical
                  Framework (LF) can encode relationships between
                  terms in types and can naturally capture
                  correspondences between formulas and their
                  proofs. Such calculi can also be given a logic
                  programming interpretation: the Twelf system is
                  based on such an interpretation of LF. We consider
                  here whether a conventional logic programming
                  language can provide the benefits of a Twelf-like
                  system for encoding type and proof-and-formula
                  dependencies. In particular, we present a simple
                  mapping from LF specifications to a set of formulas
                  in the higher-order hereditary Harrop (\emph{hohh})
                  language, that relates derivations and proof-search
                  between the two frameworks. We then show that this
                  encoding can be improved by exploiting knowledge of
                  the well-formedness of the original LF
                  specifications to elide much redundant type-checking
                  information. The resulting logic program has a
                  structure that closely resembles the original
                  specification, thereby allowing LF specifications to
                  be viewed as \emph{hohh} meta-programs.  Using the Teyjus
                  implementation of \(\lambda\)Prolog, we show that our
                  translation provides an efficient means for
                  executing LF specifications, complementing the
                  ability that the Twelf system provides for reasoning
                  about them.}
}
@inproceedings{baelde07cade,
  address = {Bremen, Germany},
  month = jul,
  year = 2007,
  volume = 4603,
  series = {Lecture Notes in Artificial Intelligence},
  publisher = {Springer},
  editor = {Pfenning, Frank},
  acronym = {{CADE}'07},
  booktitle = {{P}roceedings of the 21st {I}nternational 
               {C}onference on {A}utomated {D}eduction
               ({CADE}'07)},
  author = {Baelde, David and Gacek, Andrew and Miller, Dale and
                  Nadathur, Gopalan and Tiu, Alwen},
  title = {The {B}edwyr system for model checking over syntactic
                 expressions},
  pages = {391--397},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde07cade.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde07cade.pdf},
  doi = {10.1007/978-3-540-73595-3_28},
  abstract = {Bedwyr is a generalization of logic programming that
                  allows model checking directly on syntactic
                  expressions possibly containing bindings. This
                  system, written in OCaml, is a direct implementation
                  of two recent advances in the theory of proof
                  search. The first is centered on the fact that both
                  finite success and finite failure can be captured in
                  the sequent calculus by incorporating inference
                  rules for \emph{definitions} that allow \emph{fixed
                  points} to be explored. As a result, proof search in
                  such a sequent calculus can capture simple model
                  checking problems as well as may and must behavior
                  in operational semantics. The second is that
                  higher-order abstract syntax is directly supported
                  using term-level \(\lambda\)-binders and the
                  quantifier known as~\(\nabla\). These features allow
                  reasoning directly on expressions containing bound
                  variables.}
}
@inproceedings{baelde07lpar,
  address = {Yerevan, Armenia},
  month = oct,
  year = 2007,
  volume = 4790,
  series = {Lecture Notes in Artificial Intelligence},
  publisher = {Springer},
  editor = {Dershowitz, Nachum and Voronkov, Andrei},
  acronym = {{LPAR}'07},
  booktitle = {{P}roceedings of the 14th {I}nternational
               {C}onference on {L}ogic for {P}rogramming,
               {A}rtificial {I}ntelligence, and {R}easoning
               ({LPAR}'07)},
  author = {Baelde, David and Miller, Dale},
  title = {Least and greatest fixed points in linear logic},
  pages = {92--106},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde07lpar.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde07lpar.pdf},
  doi = {10.1007/978-3-540-75560-9_9},
  abstract = {The first-order theory of MALL (multiplicative, additive
                  linear logic) over only equalities is an interesting
                  but weak logic since it cannot capture unbounded
                  (infinite) behavior. Instead of accounting for
                  unbounded behavior via the addition of the
                  exponentials (! and ?), we add least and greatest
                  fixed point operators. The resulting logic, which we
                  call \(\mu\)MALL\textsuperscript{=}, satisfies two
                  fundamental proof theoretic properties. In
                  particular, \(\mu\)MALL\textsuperscript{=} satisfies
                  cut-elimination, which implies consistency, and has
                  a complete focused proof system. This second result
                  about focused proofs provides a strong normal form
                  for cut-free proof structures that can be used, for
                  example, to help automate proof search. We then
                  consider applying these two results about \(\mu\)MALL\textsuperscript{=}
                  to derive a focused proof system for an
                  intuitionistic logic extended with induction and
                  co-induction. The traditional approach to encoding
                  intuitionistic logic into linear logic relies
                  heavily on using the exponentials, which
                  unfortunately weaken the focusing discipline. We get
                  a better focused proof system by observing that
                  certain fixed points satisfy the structural rules of
                  weakening and contraction (without using
                  exponentials). The resulting focused proof system
                  for intuitionistic logic is closely related to the
                  one implemented in Bedwyr, a recent model checker
                  based on logic programming.  We discuss how our
                  proof theory might be used to build a computational
                  system that can partially automate induction and
                  co-induction.}
}
@inproceedings{baelde08jfla,
  address = {{\'E}tretat, France},
  month = jan,
  year = 2008,
  publisher = {INRIA},
  editor = {Blazy, Sandrine},
  acronym = {{JFLA}'08},
  booktitle = {{A}ctes des 19{\`e}mes {J}ourn{\'e}es
               {F}rancophones sur les {L}angages
               {A}pplicatifs
               ({JFLA}'08)},
  author = {Baelde, David and Mimram, Samuel},
  title = {{De la webradio lambda \`a la {{\(\lambda\)}}-webradio}},
  pages = {47--61},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde08jfla.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde08jfla.pdf}
}
@inproceedings{baelde08lfmtp,
  address = {Pittsburgh, Pennsylvania, USA},
  month = jan,
  year = 2009,
  volume = 228,
  series = {Electronic Notes in Theoretical Computer Science},
  publisher = {Elsevier Science Publishers},
  editor = {Abel, Andreas and Urban, Christian},
  acronym = {{LFMTP}'08},
  booktitle = {{I}nternational {W}orkshop on {L}ogical {F}rameworks and 
           {M}eta-{L}anguages: {T}heory and {P}ractice ({LFMTP}'08)},
  author = {Baelde, David},
  title = {On the Expressivity of Minimal Generic
                 Quantification},
  pages = {3--19},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde08lfmtp.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde08lfmtp.pdf},
  doi = {10.1016/j.entcs.2008.12.113},
  abstract = {We come back to the initial design of the \(\nabla\)
                  quantifier by Miller and Tiu, which we call minimal
                  generic quantification. In the absence of fixed
                  points, it is equivalent to seemingly stronger
                  designs. However, several expected theorems about
                  (co)inductive specifications can not be derived in
                  that setting. We present a refinement of minimal
                  generic quantification that brings the expected
                  expressivity while keeping the minimal semantic,
                  which we claim is useful to get natural adequate
                  specifications. We build on the idea that generic
                  quantification is not a logical connective but one
                  that is defined, like negation in classical
                  logics. This allows us to use the standard
                  (co)induction rule, but obtain much more
                  expressivity than before. We show classes of
                  theorems that can now be derived in the logic, and
                  present a few practical examples.}
}
@inproceedings{baelde09tableaux,
  address = {Oslo, Norway},
  month = jul,
  year = 2009,
  volume = 5607,
  series = {Lecture Notes in Artificial Intelligence},
  publisher = {Springer},
  editor = {Giese, Martin and Waaler, Arild},
  acronym = {{TABLEAUX}'09},
  booktitle = {{P}roceedings of the 18th {I}nternational
               {W}orkshop on {T}heorem {P}roving with 
               {A}nalytic {T}ableaux and {R}elated {M}ethods
               ({TABLEAUX}'09)},
  author = {Baelde, David},
  title = {On the proof theory of regular fixed points},
  pages = {93--107},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde09tableaux.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde09tableaux.pdf},
  doi = {10.1007/978-3-642-02716-1_8},
  abstract = {We consider encoding finite automata as least fixed
                  points in a proof-theoretical framework equipped
                  with a general induction scheme, and study automata
                  inclusion in that setting. We provide a coinductive
                  characterization of inclusion that yields a natural
                  bridge to proof-theory. This leads us to generalize
                  these observations to \emph{regular formulas},
                  obtaining new insights about inductive theorem
                  proving and cyclic proofs in particular.}
}
@inproceedings{baelde10ijcar,
  address = {Edinburgh, Scotland, UK},
  month = jul,
  year = 2010,
  volume = {6173},
  series = {Lecture Notes in Artificial Intelligence},
  publisher = {Springer},
  editor = {Giesl, J{\"u}rgen and H{\"a}hnle, Reiner},
  acronym = {{IJCAR}'10},
  booktitle = {{P}roceedings of the 5th {I}nternational {J}oint
           {C}onference on {A}utomated {R}easoning
           ({IJCAR}'10)},
  author = {Baelde, David and Miller, Dale and Snow, Zachary},
  title = {Focused Inductive Theorem Proving},
  pages = {278--292},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde10ijcar.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde10ijcar.pdf},
  doi = {10.1007/978-3-642-14203-1_24},
  abstract = {\emph{Focused proof systems} provide means for reducing
                  and structuring the non-determinism involved in
                  searching for sequent calculus proofs. We present a
                  focused proof system for a first-order logic with
                  inductive and co-inductive definitions in which the
                  introduction rules are partitioned into an
                  asynchronous phase and a synchronous phase. These
                  focused proofs allow us to naturally see proof
                  search as being organized around interleaving
                  intervals of computation and more general
                  deduction. For example, entire Prolog-like
                  computations can be captured using a single
                  synchronous phase and many model-checking queries
                  can be captured using an asynchronous phase followed
                  by a synchronous phase.  Leveraging these ideas, we
                  have developed an interactive proof assistant,
                  called Tac, for this logic. We describe its
                  high-level design and illustrate how it is capable
                  of automatically proving many theorems using
                  induction and coinduction. Since the automatic proof
                  procedure is structured using focused proofs, its
                  behavior is often rather easy to anticipate and
                  modify. We illustrate the strength of Tac with
                  several examples of proved theorems, some achieved
                  entirely automatically and others achieved with user
                  guidance.}
}
@inproceedings{baelde11sofsem,
  address = {Nov{\'y} Smokovec, Slovakia},
  month = jan,
  year = 2011,
  volume = 6543,
  series = {Lecture Notes in Computer Science},
  publisher = {Springer},
  editor = {{\v{C}}ern{\'a}, Ivana and Gyim{\'o}thy, Tibor and Hromkovi{\v{c}}, Juraj and 
                 Jeffery, Keith G. and Kr{\'a}lovic, Rastislav and Vukoli{\'c}, Marko and
                 Wolf, Stefan},
  acronym = {{SOFSEM}'11},
  booktitle = {{P}roceedings of the 37th International Conference on 
               Current Trends in Theory and Practice of 
               Computer Science ({SOFSEM}'11)},
  author = {Baelde, David and Beauxis, Romain and Mimram, Samuel},
  title = {Liquidsoap: a~High-Level Programming Language for Multimedia Streaming},
  pages = {99--110},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde11sofsem.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/baelde11sofsem.pdf},
  doi = {10.1007/978-3-642-18381-2_8},
  abstract = {Generating multimedia streams, such as in a netradio, is
                  a task which is complex and difficult to adapt to
                  every users' needs.  We introduce a novel approach
                  in order to achieve it, based on a dedicated
                  high-level functional programming language, called
                  \emph{Liquidsoap}, for generating, manipulating and
                  broadcasting multimedia streams. Unlike traditional
                  approaches, which are based on configuration files
                  or static graphical interfaces, it also allows the
                  user to build complex and highly customized
                  systems. This language is based on a model for
                  streams and contains operators and constructions,
                  which make it adapted to the generation of
                  streams. The interpreter of the language also
                  ensures many properties concerning the good
                  execution of the stream generation.}
}
@inproceedings{BDH-post14,
  address = {Grenoble, France},
  month = apr,
  year = 2014,
  volume = {8414},
  series = {Lecture Notes in Computer Science},
  publisher = {Springer},
  editor = {Abadi, Mart{\'\i}n and Kremer, Steve},
  acronym = {{POST}'14},
  booktitle = {{P}roceedings of the 3rd {I}nternational {C}onference on
           {P}rinciples of {S}ecurity and {T}rust 
           ({POST}'14)},
  author = {Baelde, David and Delaune, St{\'e}phanie and Hirschi, Lucca},
  title = {A~reduced semantics for deciding trace equivalence using constraint systems},
  pages = {1--21},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/BDH-post14.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/BDH-post14.pdf},
  doi = {10.1007/978-3-642-54792-8_1},
  abstract = {Many privacy-type properties of security protocols can be
    modelled using trace equivalence properties in suitable process algebras.
    It has been shown that such properties can be decided for interesting
    classes of finite processes (i.e.,~without replication) by means of symbolic
    execution and constraint solving. However, this does not suffice to obtain
    practical tools. Current prototypes suffer from a classical combinatorial
    explosion problem caused by the exploration of many interleavings in the
    behaviour of processes. M{\"o}dersheim et~al. have tackled this problem for
    reachability properties using partial order reduction techniques. We
    revisit their work, generalize it and adapt it for equivalence checking.
    We obtain an optimization in the form of a reduced symbolic semantics that
    eliminates redundant interleavings on the fly.}
}
@article{BCGMNTW-jfr14,
  publisher = {University of Bologna},
  journal = {Journal of Formalized Reasoning},
  author = {Baelde, David and Chaudhuri, Kaustuv and Gacek, Andrew and
                  Miller, Dale and Nadathur, Gopalan and Tiu, Alwen and Wang,
                  Yuting},
  title = {Abella: A~System for Reasoning about Relational Specifications},
  volume = {7},
  number = {2},
  year = {2014},
  pages = {1--89},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/BCGMNTW-jfr14.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/BCGMNTW-jfr14.pdf},
  doi = {10.6092/issn.1972-5787/4650},
  abstract = {The Abella interactive theorem prover is based on an
    intuitionistic logic that allows for inductive and co-inductive reasoning
    over relations. Abella supports the \(\lambda\)-tree approach to treating
    syntax containing binders: it~allows simply typed \(\lambda\)-terms to be
    used to represent such syntax and it provides higher-order (pattern)
    unification, the \(\nabla\) quantifier, and nominal constants for
    reasoning about these representations. As such, it is a suitable vehicle
    for formalizing the meta-theory of formal systems such as logics and
    programming languages. This tutorial exposes Abella incrementally,
    starting with its capabilities at a first-order logic level and gradually
    presenting more sophisticated features, ending with the support it offers
    to the \emph{two-level logic approach} to meta-theoretic reasoning. Along
    the way, we show how Abella can be used to prove theorems involving natural
    numbers, lists, and automata, as well as involving typed and untyped
    \(\lambda\)-calculi and the \(\pi\)-calculus.}
}
@inproceedings{BDS-csl15,
  address = {Berlin, Germany},
  month = sep,
  year = 2015,
  volume = {41},
  series = {Leibniz International Proceedings in Informatics},
  publisher = {Leibniz-Zentrum f{\"u}r Informatik},
  editor = {Kreutzer, Stephan},
  acronym = {{CSL}'15},
  booktitle = {{P}roceedings of the 24th {A}nnual {EACSL} {C}onference on
                  {C}omputer {S}cience {L}ogic ({CSL}'15)},
  author = {Baelde, David and Doumane, Amina and Saurin, Alexis},
  title = {Least and Greatest Fixed Points in Ludics},
  pages = {549--566},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/BDS-csl15.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/BDS-csl15.pdf},
  doi = {10.4230/LIPIcs.CSL.2015.549},
  abstract = {Various logics have been introduced in order to reason over
   (co)inductive specifications and, through the Curry-Howard correspondence,
   to study computation over inductive and coinductive data. The logic mu-MALL
   is one of those logics, extending multiplicative and additive linear logic
   with least and greatest fixed point operators.\par
   In this paper, we investigate the semantics of mu-MALL proofs in
   (computational) ludics. This framework is built around the notion of
   design, which can be seen as an analogue of the strategies of game
   semantics. The infinitary nature of designs makes them particularly well
   suited for representing computations over infinite data.\par
   We provide mu-MALL with a denotational semantics, interpreting proofs by
   designs and formulas by particular sets of designs called behaviours. Then
   we prove a completeness result for the class of {"}essentially finite
   designs{"}, which are those designs performing a finite computation followed
   by a copycat. On the way to completeness, we investigate semantic
   inclusion, proving its decidability (given two formulas, we can decide
   whether the semantics of one is included in the other's) and completeness
   (if semantic inclusion holds, the corresponding implication is provable in
   mu-MALL).}
}
@inproceedings{BDH-concur15,
  address = {Madrid, Spain},
  month = sep,
  year = 2015,
  volume = {42},
  series = {Leibniz International Proceedings in Informatics},
  publisher = {Leibniz-Zentrum f{\"u}r Informatik},
  editor = {Aceto, Luca and de Frutos-Escrig, David},
  acronym = {{CONCUR}'15},
  booktitle = {{P}roceedings of the 26th
               {I}nternational {C}onference on
               {C}oncurrency {T}heory
               ({CONCUR}'15)},
  author = {Baelde, David and Delaune, St{\'e}phanie and Hirschi,
                  Lucca},
  title = {Partial Order Reduction for Security Protocols},
  pages = {497--510},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/BDH-concur15.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/BDH-concur15.pdf},
  doi = {10.4230/LIPIcs.CONCUR.2015.497},
  abstract = {Security protocols are concurrent processes that communicate
    using cryptography with the aim of achieving various security properties.
    Recent work on their formal verification has brought procedures and tools
    for deciding trace equivalence properties (\textit{e.g.},~anonymity,
    unlinkability, vote secrecy) for a bounded number of sessions. However,
    these procedures are based on a naive symbolic exploration of all traces
    of the considered processes which, unsurprisingly, greatly limits the
    scalability and practical impact of the verification tools.\par
    In this paper, we mitigate this difficulty by developing partial order
    reduction techniques for the verification of security protocols. We
    provide reduced transition systems that optimally eliminate redundant
    traces, and which are adequate for model-checking trace equivalence
    properties of protocols by means of symbolic execution. We have
    implemented our reductions in the tool \textsf{Apte}, and demonstrated
    that it achieves the expected speedup on various protocols.}
}
@inproceedings{DBS-csl16,
  address = {Marseille, France},
  month = sep,
  year = 2016,
  volume = {62},
  series = {Leibniz International Proceedings in Informatics},
  publisher = {Leibniz-Zentrum f{\"u}r Informatik},
  editor = {Regnier, Laurent and Talbot, Jean-Marc},
  acronym = {{CSL}'16},
  booktitle = {{P}roceedings of the 25th {A}nnual {EACSL} {C}onference on
                  {C}omputer {S}cience {L}ogic ({CSL}'16)},
  author = {Doumane, Amina and Baelde, David and Saurin, Alexis},
  title = {Infinitary proof theory: the multiplicative additive case},
  pages = {42:1--42:17},
  doi = {10.4230/LIPIcs.CSL.2016.42},
  abstract = {Infinitary and regular proofs are commonly used in fixed point logics. Being natural intermediate devices between semantics and traditional finitary proof systems, they are commonly found in completeness arguments, automated deduction, verification, etc. However, their proof theory is surprisingly underdeveloped. In particular, very little is known about the computational behavior of such proofs through cut elimination. Taking such aspects into account has unlocked rich developments at the intersection of proof theory and programming language theory. One would hope that extending this to infinitary calculi would lead, e.g., to a better understanding of recursion and corecursion in programming languages. Structural proof theory is notably based on two fundamental properties of a proof system: cut elimination and focalization. The first one is only known to hold for restricted (purely additive) infinitary calculi, thanks to the work of Santocanale and Fortier; the second one has never been studied in infinitary systems. In this paper, we consider the infinitary proof system muMALLi for multiplicative and additive linear logic extended with least and greatest fixed points, and prove these two key results. We thus establish muMALLi as a satisfying computational proof system in itself, rather than just an intermediate device in the study of finitary proof systems.}
}
@inproceedings{BLS-hal15,
  address = {Marseille, France},
  month = sep,
  year = 2016,
  volume = {62},
  series = {Leibniz International Proceedings in Informatics},
  publisher = {Leibniz-Zentrum f{\"u}r Informatik},
  editor = {Regnier, Laurent and Talbot, Jean-Marc},
  acronym = {{CSL}'16},
  booktitle = {{P}roceedings of the 25th {A}nnual {EACSL} {C}onference on
                  {C}omputer {S}cience {L}ogic ({CSL}'16)},
  author = {Baelde, David and Lunel, Simon and Schmitz, Sylvain},
  title = {A~Sequent Calculus for a Modal Logic on Finite Data
                  Trees},
  pages = {32:1--32:16},
  url = {https://hal.inria.fr/hal-01191172},
  doi = {10.4230/LIPIcs.CSL.2016.32},
  abstract = {We investigate the proof theory of a modal fragment of XPath
                  equipped with data (in)equality tests over finite data
                  trees, i.e. over finite unranked trees where nodes are
                  labelled with both a symbol from a finite alphabet and a
                  single data value from an infinite domain.  We present a
                  sound and complete sequent calculus for this logic, which
                  yields the optimal PSPACE complexity bound for its validity
                  problem.}
}
@inproceedings{DBHS-lics16,
  address = {New York City, USA},
  month = jul,
  year = 2016,
  publisher = {ACM Press},
  editor = {Grohe, Martin and Koskinen, Eric and Shankar, Natarajan},
  acronym = {{LICS}'16},
  booktitle = {{P}roceedings of the 31st {A}nnual {ACM\slash
            IEEE} {S}ymposium on {L}ogic {I}n {C}omputer {S}cience ({LICS}'16)},
  author = {Amina Doumane and David Baelde and Lucca Hirschi
                  and Alexis Saurin},
  title = {Towards Completeness via Proof Search in the Linear
                  Time {{\(\mu\)}}-calculus},
  pages = {377--386},
  url = {https://hal.archives-ouvertes.fr/hal-01275289/},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/DBHS-lics16.pdf},
  doi = {10.1145/2933575.2933598},
  abstract = {Modal \(\mu\)-calculus is one of the central
                  languages of logic and verification, whose study
                  involves notoriously complex objects: automata over
                  infinite structures on the model-theoretical side;
                  infinite proofs and proofs by (co)induction on the
                  proof-theoretical side.  Nevertheless,
                  axiomatizations have been given for both linear and
                  branching time \(\mu\)-calculi, with quite involved
                  completeness arguments.  We come back to this
                  central problem, considering it from a proof search
                  viewpoint, and provide some new completeness
                  arguments in the linear time \(\mu\)-calculus.  Our
                  results only deal with restricted classes of
                  formulas that closely correspond to
                  (non-alternating) \(\omega\)-automata but, compared
                  to earlier proofs, our completeness arguments are
                  direct and constructive.  We first consider a
                  natural circular proof system based on sequent
                  calculus, and show that it is complete for
                  inclusions of parity automata directly expressed as
                  formulas, making use of Safra's construction
                  directly in proof search.  We then consider the
                  corresponding finitary proof system, featuring
                  (co)induction rules, and provide a partial
                  translation result from circular to finitary
                  proofs. This yields completeness of the finitary
                  proof system for inclusions of sufficiently
                  deterministic parity automata, and finally for
                  arbitrary B{\"u}chi automata.}
}
@inproceedings{HBD-sp16,
  address = {San Jose, California, USA},
  month = may,
  year = 2016,
  publisher = {{IEEE} Computer Society Press},
  editor = {Locasto, Michael and Shmatikov, Vitaly and Erlingsson, {\'U}lfar},
  acronym = {{S\&P}'16},
  booktitle = {{P}roceedings of the 37th {IEEE} {S}ymposium
           on {S}ecurity and {P}rivacy ({S\&P}'16)},
  author = {Hirschi, Lucca and Baelde, David and Delaune, St{\'e}phanie},
  title = {A~method for verifying privacy-type properties:
                  the~unbounded case},
  pages = {564--581},
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/HBD-sp16.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/HBD-sp16.pdf},
  doi = {10.1109/SP.2016.40},
  abstract = {In~this paper, we~consider the problem of verifying
    anonymity and unlinkability in the symbolic model, where protocols
    are represented as processes in a variant of the applied pi calculus
    notably used in the Proverif tool. Existing tools and techniques do
    not allow one to verify directly these properties, expressed as
    behavioral equivalences. We propose a different approach: we design
    two conditions on protocols which are sufficient to ensure anonymity
    and unlinkability, and which can then be effectively checked
    automatically using Proverif. Our two conditions correspond to two
    broad classes of attacks on unlinkability, corresponding to data and
    control-flow leaks.\par
    This theoretical result is general enough to apply to a wide class
    of protocols. In particular, we apply our techniques to provide the
    first formal security proof of the BAC protocol (e-passport). Our
    work has also lead to the discovery of new attacks, including one on
    the LAK protocol (RFID authentication) which was previously claimed
    to be unlinkable (in~a weak sense) and one on the PACE protocol
    (e-passport).}
}
@comment{{B-arxiv16,
  author =		Bollig, Benedikt, 
  affiliation = 	aff-LSVmexico,
  title =    		One-Counter Automata with Counter Visibility, 
  institution = 	Computing Research Repository, 
  number =    		1602.05940, 
  month = 		feb, 
  nmonth =     		2,
  year = 		2016, 
  type = 		RR, 
  axeLSV = 		mexico,
  NOcontrat = 		"",
  
  url =			http://arxiv.org/abs/1602.05940, 
  PDF =			"http://www.lsv.fr/Publis/PAPERS/PDF/B-arxiv16.pdf",
  lsvdate-new =  	20160222,
  lsvdate-upd =  	20160222,
  lsvdate-pub =  	20160222,
  lsv-category = 	"rapl",
  wwwpublic =    	"public and ccsb",
  note = 		18~pages, 

  abstract = "In a one-counter automaton (OCA), one can read a letter
    from some finite alphabet, increment and decrement the counter by
    one, or test it for zero. It is well-known that universality and
    language inclusion for OCAs are undecidable. We consider here OCAs
    with counter visibility: Whenever the automaton produces a letter,
    it outputs the current counter value along with~it. Hence, its
    language is now a set of words over an infinite alphabet. We show
    that universality and inclusion for that model are in PSPACE, thus
    no harder than the corresponding problems for finite automata, which
    can actually be considered as a special case. In fact, we show that
    OCAs with counter visibility are effectively determinizable and
    closed under all boolean operations. As~a~strict generalization, we
    subsequently extend our model by registers. The general nonemptiness
    problem being undecidable, we impose a bound on the number of
    register comparisons and show that the corresponding nonemptiness
    problem is NP-complete.",
}}
@misc{vip-D32,
  author = {Baelde, David and Delaune, St{\'e}phanie and Kremer, Steve},
  title = {Decision procedures for equivalence based properties (part~{II})},
  howpublished = {Deliverable VIP~3.2 (ANR-11-JS02-0006)},
  type = {Contract Report},
  note = {9~pages},
  month = sep,
  year = 2015,
  url = {http://www.lsv.fr/Publis/PAPERS/PDF/vip-d32.pdf},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/vip-d32.pdf}
}
@article{BCMW-fi17,
  publisher = {{IOS} Press},
  journal = {Fundamenta Informaticae},
  author = {David Baelde and Arnaud Carayol and Ralph Matthes and Igor Walukiewicz},
  title = {Preface: Special Issue of {Fixed Points in Computer Science} ({FICS}'13)},
  volume = {150},
  number = {3-4},
  pages = {i--ii},
  year = {2017},
  url = {https://doi.org/10.3233/FI-2017-1468},
  doi = {10.3233/FI-2017-1468}
}
@inproceedings{BDGK-csf17,
  address = {Santa Barbara, California, USA},
  month = aug,
  publisher = {{IEEE} Computer Society Press},
  editor = {K{\"o}pf, Boris and Chong, Steve},
  acronym = {{CSF}'17},
  booktitle = {{P}roceedings of the 
               30th {IEEE} {C}omputer {S}ecurity {F}oundations
               {S}ymposium ({CSF}'17)},
  author = {Baelde, David and Delaune, St{\'e}phanie and Gazeau, Ivan and Kremer, Steve},
  title = {Symbolic Verification of Privacy-Type Properties for Security Protocols with {XOR}},
  pages = {234--248},
  year = {2017},
  doi = {10.1109/CSF.2017.22},
  pdf = {https://hal.inria.fr/hal-01533694/document},
  url = {https://hal.inria.fr/hal-01533694},
  abstract = {In symbolic verification of security protocols, process equivalences have recently been used extensively to model strong secrecy, anonymity and unlinkability properties. However, tool support for automated analysis of equivalence properties is limited compared to trace properties, e.g., modeling authentication and weak notions of secrecy. In this paper, we present a novel procedure for verifying equivalences on finite processes, i.e., without replication, for protocols that rely on various cryptographic primitives including exclusive or (xor). We have implemented our procedure in the tool AKISS, and successfully used it on several case studies that are outside the scope of existing tools, e.g., unlinkability on various RFID protocols, and resistance against guessing attacks on protocols that use xor.}
}
@article{BDH-lmcs17,
  journal = {Logical Methods in Computer Science},
  author = {Baelde, David and Delaune, St{\'e}phanie and Hirschi, Lucca},
  title = {A Reduced Semantics for Deciding Trace Equivalence},
  volume = {13},
  number = {2:8},
  year = {2017},
  pages = {1--48},
  doi = {10.23638/LMCS-13(2:8)2017},
  pdf = {http://www.lsv.fr/Publis/PAPERS/PDF/BDH-lmcs17.pdf},
  url = {https://lmcs.episciences.org/3703},
  abstract = {Many privacy-type properties of security protocols can be modelled using trace equivalence properties in suitable process algebras. It has been shown that such properties can be decided for interesting classes of finite processes (i.e. without replication) by means of symbolic execution and constraint solving. However, this does not suffice to obtain practical tools. Current prototypes suffer from a classical combinatorial explosion problem caused by the exploration of many interleavings in the behaviour of processes. M{\"o}dersheim et al. [40] have tackled this problem for reachability properties using partial order reduction techniques. We revisit their work, generalize it and adapt it for equivalence checking. We obtain an optimisation in the form of a reduced symbolic semantics that eliminates redundant interleavings on the fly. The obtained partial order reduction technique has been integrated in a tool called Apte. We conducted complete benchmarks showing dramatic improvements.}
}
@inproceedings{BDH-esorics18,
  address = {Barcelona, Spain},
  month = sep,
  year = 2018,
  volume = {11098},
  series = {Lecture Notes in Computer Science},
  publisher = {Springer},
  editor = {Javier L{\'{o}}pez and
               Jianying Zhou and
               Miguel Soriano},
  acronym = {{ESORICS}'18},
  booktitle = {{P}roceedings of the 23rd {E}uropean {S}ymposium on
		 {R}esearch in {C}omputer {S}ecurity ({ESORICS}'18)},
  author = {David Baelde and St{\'e}phanie Delaune and Lucca Hirschi},
  title = {{POR} for Security Protocol Equivalences - Beyond Action-Determinism},
  pages = {385--405},
  url = {https://arxiv.org/abs/1804.03650},
  doi = {10.1007/978-3-319-99073-6_19},
  abstract = {Formal methods have proved effective to automatically analyse protocols. Recently, much research has focused on verifying trace equivalence on protocols, which is notably used to model interesting privacy properties such as anonymity or unlinkability. Several tools for checking trace equivalence rely on a naive and expensive exploration of all interleavings of concurrent actions, which calls for partial-order reduction (POR) techniques. In this paper, we present the first POR technique for protocol equivalences that does not rely on an action-determinism assumption: we recast trace equivalence as a reachability problem, to which persistent and sleep set techniques can be applied, and we show how to effectively apply these results in the context of symbolic execution. We report on a prototype implementation, improving the tool DeepSec.}
}
@inproceedings{BLS-fsttcs18,
  address = {Ahmedabad, India},
  month = dec,
  year = 2018,
  volume = {122},
  series = {Leibniz International Proceedings in Informatics},
  publisher = {Leibniz-Zentrum f{\"u}r Informatik},
  editor = {Sumit Ganguly and Paritosh Pandya},
  acronym = {{FSTTCS}'18},
  booktitle = {{P}roceedings of the 38th {C}onference on
               {F}oundations of {S}oftware {T}echnology and
               {T}heoretical {C}omputer {S}cience
               ({FSTTCS}'18)},
  author = {Baelde, David and Lick, Anthony and Schmitz, Sylvain},
  title = {A Hypersequent Calculus with Clusters for Tense Logic over Ordinals},
  pages = {15:1--15:19},
  url = {http://drops.dagstuhl.de/opus/frontdoor.php?source_opus=9914},
  pdf = {http://drops.dagstuhl.de/opus/volltexte/2018/9914/pdf/LIPIcs-FSTTCS-2018-15.pdf},
  doi = {10.4230/LIPIcs.FSTTCS.2018.15},
  abstract = {Prior's tense logic forms the core of linear temporal logic, with both past-and future-looking modalities.  We present a sound and complete proof system for tense logic over ordinals.  Technically, this is a hypersequent system, enriched with an ordering, clusters, and annotations. The system is designed with proof search algorithms in mind, and yields an optimal coNP complexity for the validity problem.  It entails a small model property for tense logic over ordinals: every satisfiable formula has a model of order type at most \(\omega^2\).  It also allows to answer the validity problem for ordinals below or exactly equal to a given one.}
}
@inproceedings{BLS-pods19,
  address = {Amsterdam, Netherlands},
  month = jun # {--} # jul,
  publisher = {ACM Press},
  editor = {Christoph Koch},
  acronym = {{PODS}'19},
  booktitle = {{P}roceedings of the 38th {A}nnual 
	  {ACM} {SIGACT}-{SIGMOD}-{SIGART} {S}ymposium 
	  on {P}rinciples of {D}atabase {S}ystems
	  ({PODS}'19)},
  author = {Baelde, David and Lick, Anthony and Schmitz, Sylvain},
  title = {Decidable {XP}ath Fragments in the Real World},
  pages = {285--302},
  year = 2019,
  doi = {10.1145/3294052.3319685},
  url = {https://hal.inria.fr/hal-01852475},
  abstract = {XPath is arguably the most popular query language for selecting elements in XML documents.  Besides query evaluation, query satisfiability and containment are the main computational problems for XPath; they are useful, for instance, to detect dead code or validate query optimisations.  These problems are undecidable in general, but several fragments have been identified over time for which satisfiability (or query containment) is decidable: CoreXPath 1.0 and 2.0 without so-called data joins, fragments with data joins but limited navigation, etc.  However, these fragments are often given in a simplified syntax, and sometimes wrt. a simplified XPath semantics.  Moreover, they have been studied mostly with theoretical motivations, with little consideration for the practically relevant features of XPath.  To investigate the practical impact of these theoretical fragments, we design a benchmark compiling thousands of real-world XPath queries extracted from open-source projects.  These queries are then matched against syntactic fragments from the literature.  We investigate how to extend these fragments with seldom-considered features such as free variables, data tests, data joins, and the last() and id() functions, for which we provide both undecidability and decidability results.  We analyse the coverage of the original and extended fragments, and further provide a glimpse at which other practically-motivated features might be worth investigating in the future.}
}
@inproceedings{BLS-aiml18,
  address = {Bern, Switzerland},
  month = aug,
  year = 2018,
  publisher = {College Publications},
  editor = {Guram Bezhanishvili and Giovanna D'Agostino and
                  George Metcalfe and Thomas Studer},
  acronym = {{AiML}'18},
  booktitle = {{P}roceedings of the 10th
           {C}onference on {A}dvances in {M}odal {L}ogics
           ({AiML}'18)},
  author = {Baelde, David and Lick, Anthony and Schmitz, Sylvain},
  title = {A Hypersequent Calculus with Clusters for Linear Frames},
  pages = {36--55},
  url = {https://hal.inria.fr/hal-01756126},
  abstract = {The logic Kt4.3 is the basic modal logic of linear frames. Along with its extensions, it is found at the core of linear-time temporal logics and logics on words.  In this paper, we consider the problem of designing proof systems for these logics, in such a way that proof search yields decision procedures for validity with an optimal complexity---coNP in this case.  In earlier work, Indrzejczak has proposed an ordered hypersequent calculus that is sound and complete for Kt4.3 but does not yield any decision procedure.  We refine his approach, using a hypersequent structure that corresponds to weak rather than strict total orders, and using annotations that reflect the model-theoretic insights given by small models for Kt4.3.  We obtain a sound and complete calculus with an associated coNP proof search algorithm.  These results extend naturally to the cases of unbounded and dense frames, and to the complexity of the two-variable fragment of first-order logic over total orders.}
}
@article{HBD-jcs19,
  publisher = {{IOS} Press},
  journal = {Journal of Computer Security},
  author = {Hirschi, Lucca and Baelde, David and Delaune, St{\'e}phanie},
  title = {A method for unbounded verification of privacy-type properties},
  volume = {27},
  number = {3},
  pages = {277--342},
  year = 2019,
  pdf = {http://www.lsv.ens-cachan.fr/Publis/PAPERS/PDF/HBD-jcs19.pdf},
  doi = {10.3233/JCS-171070},
  url = {https://content.iospress.com/articles/journal-of-computer-security/jcs171070}
}
@inproceedings{BDJKM-csl21,
  address = {online},
  month = may,
  publisher = {{IEEE} Press},
  editor = {Alina Oprea and Thorsten Holz},
  acronym = {{S\&P}'21},
  booktitle = {{P}roceedings of the 42nd {IEEE} {S}ymposium on {S}ecurity and {P}rivacy 
           ({S\&P}'21)},
  author = {Baelde, David and Delaune, St{\'e}phanie and Jacomme, Charlie and 
        Koutsos, Adrien and Moreau, Sol{\`e}ne},
  title = {An {I}nteractive {P}rover for {P}rotocol {V}erification in the {C}omputational {M}odel},
  year = {2021},
  pdf = {https://hal.archives-ouvertes.fr/hal-03172119},
  url = {https://hal.archives-ouvertes.fr/hal-03172119},
  note = {To appear}
}
@phdthesis{baelde-hdr2021,
  author = {Baelde, David},
  title = {Contributions to the {V}erification of {C}ryptographic  {P}rotocols},
  school = {{\'E}cole Normale Sup{\'e}rieure Paris-Saclay, France},
  type = {M{\'e}moire d'habilitation},
  month = feb,
  year = 2021,
  url = {http://www.lsv.fr/~baelde/hdr/index.html},
  pdf = {http://www.lsv.ens-cachan.fr/Publis/PAPERS/PDF/Baelde-Hab2021.pdf}
}
@inproceedings{BDM-csf20,
  address = {Boston, MA, USA},
  month = jul,
  publisher = {{IEEE} Computer Society Press},
  editor = {Jia, Limin and K{\"u}sters, Ralf},
  acronym = {{CSF}'20},
  booktitle = {{P}roceedings of the 
               33rd {IEEE} {C}omputer {S}ecurity {F}oundations
               {S}ymposium ({CSF}'20)},
  author = {David Baelde and St{\'e}phanie Delaune and Sol{\`e}ne Moreau},
  title = {A Method for Proving Unlinkability of Stateful Protocols},
  pages = {169--183},
  year = 2020,
  url = {https://hal.archives-ouvertes.fr/hal-02459984/},
  abstract = {The rise of contactless and wireless devices such as mobile phones and RFID chips justifies significant concerns over privacy, and calls for communication protocols that ensure some form of unlinkability. Formally specifying this property is difficult and context-dependent, and analysing it is very complex; as is common with security protocols, several incorrect unlinkability claims can be found in the literature. Formal verification is therefore desirable, but current techniques are not sufficient to directly analyse unlinkability. In [Hirschi et al., SP'19], two conditions have been identified that imply unlinkability and can be automatically verified. This work, however, only considers a restricted class of protocols. We adapt their formal definition as well as their proof method to the common setting of RFID authentication protocols, where readers access a central database of authorised users. Moreover, we also consider protocols where readers may update their database, and tags may also carry a mutable state. We propose sufficient conditions to ensure unlinkability, find new attacks, and obtain new proofs of unlinkability using Tamarin to establish our sufficient conditions.}
}
@article{BFNS-mscs20,
  publisher = {Cambridge University Press},
  journal = {Mathematical Structures in Computer Science},
  author = {David Baelde and Amy P. Felty and Gopalan Nadathur and Alexis Saurin},
  title = {A special issue on structural proof theory, automated reasoning and
               computation in celebration of {Dale Miller}'s 60th birthday},
  volume = {29},
  number = {8},
  pages = {1007--1008},
  year = 2020,
  doi = {10.1017/S0960129519000136},
  abstract = {The genesis of this special issue was in a meeting that took place at Universit{\'e} Paris Diderot on December 15 and 16, 2016. Dale Miller, Professor at {\'E}cole polytechnique, had turned 60 a few days earlier. In a career spanning over three decades and in work conducted in collaboration with several students and colleagues, Dale had had a significant influence in an area that can be described as structural proof theory and its application to computation and reasoning. In recognition of this fact, several of his collaborators thought it appropriate to celebrate the occasion by organizing a symposium on topics broadly connected to his areas of interest and achievements. The meeting was a success in several senses: it was attended by over 35 people, there were 15 technical presentations describing new results, and, quite gratifyingly, we managed to spring the event as a complete surprise to Dale.}
}

This file was generated by bibtex2html 1.98.