diff --git a/metadata/authors.toml b/metadata/authors.toml
--- a/metadata/authors.toml
+++ b/metadata/authors.toml
@@ -1,3551 +1,3488 @@
[lange]
name = "Christoph Lange"
[lange.emails]
lange_email = "math.semantic.web@gmail.com"
[lange.homepages]
[petrovic]
name = "Danijela Petrovic"
[petrovic.emails]
[petrovic.homepages]
petrovic_homepage = "http://www.matf.bg.ac.rs/~danijela"
[schimpf]
name = "Alexander Schimpf"
[schimpf.emails]
schimpf_email = "schimpfa@informatik.uni-freiburg.de"
[schimpf.homepages]
[murao]
name = "H. Murao"
[murao.emails]
[murao.homepages]
[bourke]
name = "Timothy Bourke"
[bourke.emails]
bourke_email = "tim@tbrk.org"
[bourke.homepages]
bourke_homepage = "http://www.tbrk.org"
[schaeffeler]
name = "Maximilian Schäffeler"
[schaeffeler.emails]
schaeffeler_email = "schaeffm@in.tum.de"
[schaeffeler.homepages]
[liut]
name = "Tao Liu"
[liut.emails]
[liut.homepages]
[sickert]
name = "Salomon Sickert"
[sickert.emails]
sickert_email = "s.sickert@tum.de"
-sickert_email1 = "sickert@in.tum.de"
[sickert.homepages]
sickert_homepage = "https://www7.in.tum.de/~sickert"
[lutz]
name = "Bianca Lutz"
[lutz.emails]
lutz_email = "sowilo@cs.tu-berlin.de"
[lutz.homepages]
[rizkallah]
name = "Christine Rizkallah"
[rizkallah.emails]
[rizkallah.homepages]
rizkallah_homepage = "https://www.mpi-inf.mpg.de/~crizkall/"
[pollak]
name = "Florian Pollak"
[pollak.emails]
pollak_email = "florian.pollak@gmail.com"
[pollak.homepages]
[sulejmani]
name = "Ujkan Sulejmani"
[sulejmani.emails]
[sulejmani.homepages]
[biendarra]
name = "Julian Biendarra"
[biendarra.emails]
[biendarra.homepages]
[saile]
name = "Christian Saile"
[saile.emails]
[saile.homepages]
saile_homepage = "http://dss.in.tum.de/staff/christian-saile.html"
[friedrich]
name = "Stefan Friedrich"
[friedrich.emails]
[friedrich.homepages]
[maximova]
name = "Alexandra Maximova"
[maximova.emails]
maximova_email = "amaximov@student.ethz.ch"
[maximova.homepages]
[stricker]
name = "Christian Stricker"
[stricker.emails]
[stricker.homepages]
stricker_homepage = "http://dss.in.tum.de/staff/christian-stricker.html"
[dyckhoff]
name = "Roy Dyckhoff"
[dyckhoff.emails]
[dyckhoff.homepages]
dyckhoff_homepage = "https://rd.host.cs.st-andrews.ac.uk"
[weidner]
name = "Arno Wilhelm-Weidner"
[weidner.emails]
weidner_email = "arno.wilhelm-weidner@tu-berlin.de"
[weidner.homepages]
[thomson]
name = "Fox Thomson"
[thomson.emails]
thomson_email = "foxthomson0@gmail.com"
[thomson.homepages]
[he]
name = "Yijun He"
[he.emails]
he_email = "yh403@cam.ac.uk"
[he.homepages]
[kammueller]
name = "Florian Kammüller"
[kammueller.emails]
kammueller_email = "flokam@cs.tu-berlin.de"
kammueller_email1 = "florian.kammuller@gmail.com"
[kammueller.homepages]
kammueller_homepage = "http://www.cs.mdx.ac.uk/people/florian-kammueller/"
[oostrom]
name = "Vincent van Oostrom"
[oostrom.emails]
[oostrom.homepages]
[klein]
name = "Gerwin Klein"
[klein.emails]
-klein_email = "kleing@cse.unsw.edu.au"
-klein_email1 = "kleing@unsw.edu.au"
+klein_email = "kleing@unsw.edu.au"
[klein.homepages]
klein_homepage = "http://www.cse.unsw.edu.au/~kleing/"
[simic]
name = "Danijela Simić"
[simic.emails]
simic_email = "danijela@matf.bg.ac.rs"
[simic.homepages]
simic_homepage = "http://poincare.matf.bg.ac.rs/~danijela"
[gao]
name = "Xin Gao"
[gao.emails]
[gao.homepages]
[guttmann]
name = "Walter Guttmann"
[guttmann.emails]
guttmann_email = "walter.guttmann@canterbury.ac.nz"
-guttmann_email1 = "walter.guttman@canterbury.ac.nz"
[guttmann.homepages]
-guttmann_homepage = "http://www.cosc.canterbury.ac.nz/walter.guttmann/"
-guttmann_homepage1 = "https://www.cosc.canterbury.ac.nz/walter.guttmann/"
+guttmann_homepage = "https://www.cosc.canterbury.ac.nz/walter.guttmann/"
[mantel]
name = "Heiko Mantel"
[mantel.emails]
mantel_email = "mantel@mais.informatik.tu-darmstadt.de"
[mantel.homepages]
[schlichtkrull]
name = "Anders Schlichtkrull"
[schlichtkrull.emails]
schlichtkrull_email = "andschl@dtu.dk"
[schlichtkrull.homepages]
schlichtkrull_homepage = "https://people.compute.dtu.dk/andschl/"
[jaskolka]
name = "Jason Jaskolka"
[jaskolka.emails]
jaskolka_email = "jason.jaskolka@carleton.ca"
[jaskolka.homepages]
jaskolka_homepage = "https://carleton.ca/jaskolka/"
[rau]
name = "Martin Rau"
[rau.emails]
rau_email = "martin.rau@tum.de"
rau_email1 = "mrtnrau@googlemail.com"
[rau.homepages]
[bottesch]
name = "Ralph Bottesch"
[bottesch.emails]
bottesch_email = "ralph.bottesch@uibk.ac.at"
[bottesch.homepages]
bottesch_homepage = "http://cl-informatik.uibk.ac.at/users/bottesch/"
[bella]
name = "Giampaolo Bella"
[bella.emails]
bella_email = "giamp@dmi.unict.it"
[bella.homepages]
bella_homepage = "http://www.dmi.unict.it/~giamp/"
[dirix]
name = "Stefan Dirix"
[dirix.emails]
[dirix.homepages]
[nielsen]
name = "Finn Nielsen"
[nielsen.emails]
nielsen_email = "finn.nielsen@uni-muenster.de"
[nielsen.homepages]
[mansky]
name = "Susannah Mansky"
[mansky.emails]
mansky_email = "sjohnsn2@illinois.edu"
mansky_email1 = "susannahej@gmail.com"
[mansky.homepages]
[dunaev]
name = "Georgy Dunaev"
[dunaev.emails]
dunaev_email = "georgedunaev@gmail.com"
[dunaev.homepages]
[li]
name = "Wenda Li"
[li.emails]
li_email = "wl302@cam.ac.uk"
li_email1 = "liwenda1990@hotmail.com"
[li.homepages]
li_homepage = "https://www.cl.cam.ac.uk/~wl302/"
-li_homepage1 = "http://www.cl.cam.ac.uk/~wl302/"
[stevens]
name = "Lukas Stevens"
[stevens.emails]
[stevens.homepages]
stevens_homepage = "https://www21.in.tum.de/team/stevensl"
[tourret]
name = "Sophie Tourret"
[tourret.emails]
tourret_email = "stourret@mpi-inf.mpg.de"
[tourret.homepages]
tourret_homepage = "https://www.mpi-inf.mpg.de/departments/automation-of-logic/people/sophie-tourret/"
-tourret_homepage1 = "https://www.mpi-inf.mpg.de/departments/automation-of-logic/people/sophie-tourret"
[yu]
name = "Lei Yu"
[yu.emails]
yu_email = "ly271@cam.ac.uk"
[yu.homepages]
[grewe]
name = "Sylvia Grewe"
[grewe.emails]
-grewe_email = "grewe@st.informatik.tu-darmstadt.de"
-grewe_email1 = "grewe@cs.tu-darmstadt.de"
+grewe_email = "grewe@cs.tu-darmstadt.de"
[grewe.homepages]
[coghetto]
name = "Roland Coghetto"
[coghetto.emails]
coghetto_email = "roland_coghetto@hotmail.com"
[coghetto.homepages]
[schirmer]
name = "Norbert Schirmer"
[schirmer.emails]
schirmer_email = "norbert.schirmer@web.de"
-schirmer_email1 = "schirmer@in.tum.de"
[schirmer.homepages]
[immler]
name = "Fabian Immler"
[immler.emails]
immler_email = "immler@in.tum.de"
immler_email1 = "fimmler@cs.cmu.edu"
-immler_email2 = "fimmler@andrew.cmu.edu"
[immler.homepages]
-immler_homepage = "http://www21.in.tum.de/~immler"
-immler_homepage1 = "https://home.in.tum.de/~immler/"
-immler_homepage2 = "http://home.in.tum.de/~immler/"
+immler_homepage = "https://home.in.tum.de/~immler/"
[tiu]
name = "Alwen Tiu"
[tiu.emails]
tiu_email = "ATiu@ntu.edu.sg"
[tiu.homepages]
tiu_homepage = "http://users.cecs.anu.edu.au/~tiu/"
[henrio]
name = "Ludovic Henrio"
[henrio.emails]
henrio_email = "Ludovic.Henrio@sophia.inria.fr"
[henrio.homepages]
[urban]
name = "Christian Urban"
[urban.emails]
urban_email = "christian.urban@kcl.ac.uk"
[urban.homepages]
-urban_homepage = "http://www.inf.kcl.ac.uk/staff/urbanc/"
-urban_homepage1 = "https://nms.kcl.ac.uk/christian.urban/"
+urban_homepage = "https://nms.kcl.ac.uk/christian.urban/"
[michaelis]
name = "Julius Michaelis"
[michaelis.emails]
michaelis_email = "isabelleopenflow@liftm.de"
michaelis_email1 = "maintainafpppt@liftm.de"
michaelis_email2 = "bdd@liftm.de"
michaelis_email3 = "afp@liftm.de"
[michaelis.homepages]
-michaelis_homepage = "http://liftm.de"
-michaelis_homepage1 = "http://liftm.de/"
+michaelis_homepage = "http://liftm.de/"
[scott]
name = "Dana Scott"
[scott.emails]
[scott.homepages]
scott_homepage = "http://www.cs.cmu.edu/~scott/"
[wenzel]
name = "Makarius Wenzel"
[wenzel.emails]
wenzel_email = "Makarius.wenzel@lri.fr"
[wenzel.homepages]
[dardinier]
name = "Thibault Dardinier"
[dardinier.emails]
dardinier_email = "thibault.dardinier@inf.ethz.ch"
-dardinier_email1 = "tdardini@student.ethz.ch"
[dardinier.homepages]
dardinier_homepage = "https://dardinier.me/"
[lallemand]
name = "Joseph Lallemand"
[lallemand.emails]
lallemand_email = "joseph.lallemand@loria.fr"
[lallemand.homepages]
[schmoetten]
name = "Richard Schmoetten"
[schmoetten.emails]
schmoetten_email = "s1311325@sms.ed.ac.uk"
[schmoetten.homepages]
[cohen]
name = "Ernie Cohen"
[cohen.emails]
cohen_email = "ecohen@amazon.com"
[cohen.homepages]
[chapman]
name = "Peter Chapman"
[chapman.emails]
chapman_email = "pc@cs.st-andrews.ac.uk"
[chapman.homepages]
[rickmann]
name = "Christina Rickmann"
[rickmann.emails]
rickmann_email = "c.rickmann@tu-berlin.de"
[rickmann.homepages]
[raya]
name = "Rodrigo Raya"
[raya.emails]
[raya.homepages]
raya_homepage = "https://people.epfl.ch/rodrigo.raya"
[kadzioka]
name = "Jakub Kądziołka"
[kadzioka.emails]
kadzioka_email = "kuba@kadziolka.net"
[kadzioka.homepages]
[foster]
name = "Michael Foster"
[foster.emails]
foster_email = "m.foster@sheffield.ac.uk"
[foster.homepages]
[zhann]
name = "Naijun Zhan"
[zhann.emails]
[zhann.homepages]
[fuenmayor]
name = "David Fuenmayor"
[fuenmayor.emails]
fuenmayor_email = "davfuenmayor@gmail.com"
[fuenmayor.homepages]
[iwama]
name = "Fumiya Iwama"
[iwama.emails]
iwama_email = "d1623001@s.konan-u.ac.jp"
[iwama.homepages]
[feliachi]
name = "Abderrahmane Feliachi"
[feliachi.emails]
feliachi_email = "abderrahmane.feliachi@lri.fr"
[feliachi.homepages]
[sudbrock]
name = "Henning Sudbrock"
[sudbrock.emails]
sudbrock_email = "sudbrock@mais.informatik.tu-darmstadt.de"
[sudbrock.homepages]
[bauer]
name = "Gertrud Bauer"
[bauer.emails]
[bauer.homepages]
[lux]
name = "Alexander Lux"
[lux.emails]
lux_email = "lux@mais.informatik.tu-darmstadt.de"
[lux.homepages]
[seidler]
name = "Henning Seidler"
[seidler.emails]
seidler_email = "henning.seidler@mailbox.tu-berlin.de"
[seidler.homepages]
[desharnais]
name = "Martin Desharnais"
[desharnais.emails]
desharnais_email = "martin.desharnais@unibw.de"
[desharnais.homepages]
desharnais_homepage = "https://martin.desharnais.me"
[wasserrab]
name = "Daniel Wasserrab"
[wasserrab.emails]
[wasserrab.homepages]
wasserrab_homepage = "http://pp.info.uni-karlsruhe.de/personhp/daniel_wasserrab.php"
[fell]
name = "Julian Fell"
[fell.emails]
fell_email = "julian.fell@uq.net.au"
[fell.homepages]
[zhan]
name = "Bohua Zhan"
[zhan.emails]
zhan_email = "bzhan@ios.ac.cn"
[zhan.homepages]
zhan_homepage = "http://lcs.ios.ac.cn/~bzhan/"
[stephan]
name = "Werner Stephan"
[stephan.emails]
stephan_email = "stephan@dfki.de"
[stephan.homepages]
[david]
name = "Marco David"
[david.emails]
david_email = "marco.david@hotmail.de"
[david.homepages]
[olm]
name = "Markus Müller-Olm"
[olm.emails]
[olm.homepages]
olm_homepage = "http://cs.uni-muenster.de/u/mmo/"
[havle]
name = "Oto Havle"
[havle.emails]
havle_email = "oha@sysgo.com"
[havle.homepages]
[margetson]
name = "James Margetson"
[margetson.emails]
[margetson.homepages]
[scharager]
name = "Matias Scharager"
[scharager.emails]
scharager_email = "mscharag@cs.cmu.edu"
[scharager.homepages]
[haftmann]
name = "Florian Haftmann"
[haftmann.emails]
haftmann_email = "florian.haftmann@informatik.tu-muenchen.de"
[haftmann.homepages]
haftmann_homepage = "http://isabelle.in.tum.de/~haftmann"
[schoepe]
name = "Daniel Schoepe"
[schoepe.emails]
schoepe_email = "daniel@schoepe.org"
[schoepe.homepages]
[nagele]
name = "Julian Nagele"
[nagele.emails]
nagele_email = "julian.nagele@uibk.ac.at"
[nagele.homepages]
[hoefner]
name = "Peter Höfner"
[hoefner.emails]
hoefner_email = "peter@hoefner-online.de"
[hoefner.homepages]
hoefner_homepage = "http://www.hoefner-online.de/"
[derrick]
name = "John Derrick"
[derrick.emails]
derrick_email = "j.derrick@sheffield.ac.uk"
[derrick.homepages]
[beeren]
name = "Joel Beeren"
[beeren.emails]
[beeren.homepages]
[echenim]
name = "Mnacho Echenim"
[echenim.emails]
echenim_email = "mnacho.echenim@univ-grenoble-alpes.fr"
[echenim.homepages]
-echenim_homepage = "http://lig-membres.imag.fr/mechenim/"
-echenim_homepage1 = "https://lig-membres.imag.fr/mechenim/"
+echenim_homepage = "https://lig-membres.imag.fr/mechenim/"
[vytiniotis]
name = "Dimitrios Vytiniotis"
[vytiniotis.emails]
[vytiniotis.homepages]
vytiniotis_homepage = "http://research.microsoft.com/en-us/people/dimitris/"
[beringer]
name = "Lennart Beringer"
[beringer.emails]
beringer_email = "lennart.beringer@ifi.lmu.de"
[beringer.homepages]
[wickerson]
name = "John Wickerson"
[wickerson.emails]
[wickerson.homepages]
wickerson_homepage = "http://www.doc.ic.ac.uk/~jpw48"
[eberl]
name = "Manuel Eberl"
[eberl.emails]
eberl_email = "manuel@pruvisto.org"
eberl_email1 = "manuel.eberl@tum.de"
[eberl.homepages]
-eberl_homepage = "https://pruvisto.org"
-eberl_homepage1 = "https://pruvisto.org/"
+eberl_homepage = "https://pruvisto.org/"
eberl_homepage2 = "https://www.in.tum.de/~eberlm"
[noce]
name = "Pasquale Noce"
[noce.emails]
noce_email = "pasquale.noce.lavoro@gmail.com"
[noce.homepages]
[gunther]
name = "Emmanuel Gunther"
[gunther.emails]
gunther_email = "gunther@famaf.unc.edu.ar"
[gunther.homepages]
[nemouchi]
name = "Yakoub Nemouchi"
[nemouchi.emails]
nemouchi_email = "nemouchi@lri.fr"
nemouchi_email1 = "yakoub.nemouchi@york.ac.uk"
[nemouchi.homepages]
[stueber]
name = "Anke Stüber"
[stueber.emails]
stueber_email = "anke.stueber@campus.tu-berlin.de"
[stueber.homepages]
[sternagel]
name = "Christian Sternagel"
[sternagel.emails]
sternagel_email = "c.sternagel@gmail.com"
sternagel_email1 = "christian.sternagel@uibk.ac.at"
[sternagel.homepages]
sternagel_homepage = "http://cl-informatik.uibk.ac.at/users/griff/"
[sachtleben]
name = "Robert Sachtleben"
[sachtleben.emails]
sachtleben_email = "rob_sac@uni-bremen.de"
[sachtleben.homepages]
[kastermans]
name = "Bart Kastermans"
[kastermans.emails]
[kastermans.homepages]
kastermans_homepage = "http://kasterma.net"
[pal]
name = "Abhik Pal"
[pal.emails]
[pal.homepages]
[thiemann]
name = "René Thiemann"
[thiemann.emails]
thiemann_email = "rene.thiemann@uibk.ac.at"
[thiemann.homepages]
-thiemann_homepage = "http://cl-informatik.uibk.ac.at/~thiemann"
-thiemann_homepage1 = "http://cl-informatik.uibk.ac.at/~thiemann/"
-thiemann_homepage2 = "http://cl-informatik.uibk.ac.at/users/thiemann/"
+thiemann_homepage = "http://cl-informatik.uibk.ac.at/users/thiemann/"
[campo]
name = "Alejandro del Campo"
[campo.emails]
campo_email = "alejandro.del-campo@alum.unirioja.es"
[campo.homepages]
[raska]
name = "Martin Raška"
[raska.emails]
[raska.homepages]
[stannett]
name = "Mike Stannett"
[stannett.emails]
stannett_email = "m.stannett@sheffield.ac.uk"
[stannett.homepages]
[back]
name = "Ralph-Johan Back"
[back.emails]
[back.homepages]
back_homepage = "http://users.abo.fi/Ralph-Johan.Back/"
[bruegger]
name = "Lukas Brügger"
[bruegger.emails]
bruegger_email = "lukas.a.bruegger@gmail.com"
[bruegger.homepages]
[kuncak]
name = "Viktor Kuncak"
[kuncak.emails]
[kuncak.homepages]
kuncak_homepage = "http://lara.epfl.ch/~kuncak/"
[gaudel]
name = "Marie-Claude Gaudel"
[gaudel.emails]
gaudel_email = "mcg@lri.fr"
[gaudel.homepages]
[sison]
name = "Robert Sison"
[sison.emails]
[sison.homepages]
[benzmueller]
name = "Christoph Benzmüller"
[benzmueller.emails]
benzmueller_email = "c.benzmueller@gmail.com"
benzmueller_email1 = "c.benzmueller@fu-berlin.de"
[benzmueller.homepages]
benzmueller_homepage = "http://christoph-benzmueller.de"
benzmueller_homepage1 = "http://page.mi.fu-berlin.de/cbenzmueller/"
-benzmueller_homepage2 = "http://www.christoph-benzmueller.de"
[from]
name = "Asta Halkjær From"
[from.emails]
from_email = "ahfrom@dtu.dk"
[from.homepages]
from_homepage = "https://people.compute.dtu.dk/ahfrom/"
[tuongj]
name = "Joseph Tuong"
[tuongj.emails]
[tuongj.homepages]
[zeller]
name = "Peter Zeller"
[zeller.emails]
zeller_email = "p_zeller@cs.uni-kl.de"
[zeller.homepages]
[schmidinger]
name = "Lukas Schmidinger"
[schmidinger.emails]
[schmidinger.homepages]
[ravindran]
name = "Binoy Ravindran"
[ravindran.emails]
[ravindran.homepages]
[rizaldi]
name = "Albert Rizaldi"
[rizaldi.emails]
rizaldi_email = "albert.rizaldi@ntu.edu.sg"
[rizaldi.homepages]
[sabouret]
name = "Nicolas Sabouret"
[sabouret.emails]
[sabouret.homepages]
[butler]
name = "David Butler"
[butler.emails]
butler_email = "dbutler@turing.ac.uk"
[butler.homepages]
butler_homepage = "https://www.turing.ac.uk/people/doctoral-students/david-butler"
[sprenger]
name = "Christoph Sprenger"
[sprenger.emails]
sprenger_email = "sprenger@inf.ethz.ch"
[sprenger.homepages]
[bordg]
name = "Anthony Bordg"
[bordg.emails]
bordg_email = "apdb3@cam.ac.uk"
[bordg.homepages]
bordg_homepage = "https://sites.google.com/site/anthonybordg/"
[bhatt]
name = "Bhargav Bhatt"
[bhatt.emails]
bhatt_email = "bhargav.bhatt@inf.ethz.ch"
[bhatt.homepages]
[gonzalez]
name = "Edgar Gonzàlez"
[gonzalez.emails]
gonzalez_email = "edgargip@google.com"
[gonzalez.homepages]
[adelsberger]
name = "Stephan Adelsberger"
[adelsberger.emails]
adelsberger_email = "stvienna@gmail.com"
[adelsberger.homepages]
adelsberger_homepage = "http://nm.wu.ac.at/nm/sadelsbe"
[schleicher]
name = "Dierk Schleicher"
[schleicher.emails]
[schleicher.homepages]
[ying]
name = "Shenggang Ying"
[ying.emails]
[ying.homepages]
[brien]
name = "Nicolas Robinson-O'Brien"
[brien.emails]
[brien.homepages]
[kobayashi]
name = "Hidetsune Kobayashi"
[kobayashi.emails]
[kobayashi.homepages]
[krauss]
name = "Alexander Krauss"
[krauss.emails]
krauss_email = "krauss@in.tum.de"
[krauss.homepages]
krauss_homepage = "http://www.in.tum.de/~krauss"
[somaini]
name = "Ivano Somaini"
[somaini.emails]
[somaini.homepages]
[seidl]
name = "Benedikt Seidl"
[seidl.emails]
seidl_email = "benedikt.seidl@tum.de"
[seidl.homepages]
[edmonds]
name = "Chelsea Edmonds"
[edmonds.emails]
edmonds_email = "cle47@cam.ac.uk"
[edmonds.homepages]
edmonds_homepage = "https://www.cst.cam.ac.uk/people/cle47"
[debrat]
name = "Henri Debrat"
[debrat.emails]
debrat_email = "henri.debrat@loria.fr"
[debrat.homepages]
[holub]
name = "Štěpán Holub"
[holub.emails]
holub_email = "holub@karlin.mff.cuni.cz"
[holub.homepages]
holub_homepage = "https://www2.karlin.mff.cuni.cz/~holub/"
[beresford]
name = "Alastair R. Beresford"
[beresford.emails]
beresford_email = "arb33@cam.ac.uk"
-beresford_email1 = "arb33@cl.cam.ac.uk"
[beresford.homepages]
[terraf]
name = "Pedro Sánchez Terraf"
[terraf.emails]
-terraf_email = "sterraf@famaf.unc.edu.ar"
-terraf_email1 = "psterraf@unc.edu.ar"
+terraf_email = "psterraf@unc.edu.ar"
[terraf.homepages]
terraf_homepage = "https://cs.famaf.unc.edu.ar/~pedro/home_en.html"
-terraf_homepage1 = "https://cs.famaf.unc.edu.ar/~pedro/home_en"
-terraf_homepage2 = "https://cs.famaf.unc.edu.ar/~pedro"
[raumer]
name = "Jakob von Raumer"
[raumer.emails]
raumer_email = "psxjv4@nottingham.ac.uk"
[raumer.homepages]
[pagano]
name = "Miguel Pagano"
[pagano.emails]
-pagano_email = "pagano@famaf.unc.edu.ar"
-pagano_email1 = "miguel.pagano@unc.edu.ar"
+pagano_email = "miguel.pagano@unc.edu.ar"
[pagano.homepages]
pagano_homepage = "https://cs.famaf.unc.edu.ar/~mpagano/"
[coglio]
name = "Alessandro Coglio"
[coglio.emails]
coglio_email = "coglio@kestrel.edu"
[coglio.homepages]
coglio_homepage = "http://www.kestrel.edu/~coglio"
[caballero]
name = "José Manuel Rodríguez Caballero"
[caballero.emails]
caballero_email = "jose.manuel.rodriguez.caballero@ut.ee"
[caballero.homepages]
caballero_homepage = "https://josephcmac.github.io/"
[grechuk]
name = "Bogdan Grechuk"
[grechuk.emails]
grechuk_email = "grechukbogdan@yandex.ru"
[grechuk.homepages]
[sewell]
name = "Thomas Sewell"
[sewell.emails]
[sewell.homepages]
[leustean]
name = "Laurentiu Leustean"
[leustean.emails]
[leustean.homepages]
[rosskopf]
name = "Simon Roßkopf"
[rosskopf.emails]
rosskopf_email = "rosskops@in.tum.de"
[rosskopf.homepages]
rosskopf_homepage = "http://www21.in.tum.de/~rosskops"
[aspinall]
name = "David Aspinall"
[aspinall.emails]
[aspinall.homepages]
aspinall_homepage = "http://homepages.inf.ed.ac.uk/da/"
[ausaf]
name = "Fahad Ausaf"
[ausaf.emails]
[ausaf.homepages]
ausaf_homepage = "http://kcl.academia.edu/FahadAusaf"
[diekmann]
name = "Cornelius Diekmann"
[diekmann.emails]
diekmann_email = "diekmann@net.in.tum.de"
[diekmann.homepages]
diekmann_homepage = "http://net.in.tum.de/~diekmann"
[fouillard]
name = "Valentin Fouillard"
[fouillard.emails]
fouillard_email = "valentin.fouillard@limsi.fr"
[fouillard.homepages]
[willenbrink]
name = "Sebastian Willenbrink"
[willenbrink.emails]
willenbrink_email = "sebastian.willenbrink@tum.de"
[willenbrink.homepages]
[xu]
name = "Jian Xu"
[xu.emails]
[xu.homepages]
[wang]
name = "Shuling Wang"
[wang.emails]
[wang.homepages]
[ridge]
name = "Tom Ridge"
[ridge.emails]
[ridge.homepages]
[kleppmann]
name = "Martin Kleppmann"
[kleppmann.emails]
kleppmann_email = "martin.kleppmann@cl.cam.ac.uk"
-kleppmann_email1 = "mk428@cl.cam.ac.uk"
[kleppmann.homepages]
[kreuzer]
name = "Katharina Kreuzer"
[kreuzer.emails]
kreuzer_email = "kreuzerk@in.tum.de"
[kreuzer.homepages]
kreuzer_homepage = "https://www21.in.tum.de/team/kreuzer/"
[traut]
name = "Christoph Traut"
[traut.emails]
[traut.homepages]
[blanchette]
name = "Jasmin Christian Blanchette"
[blanchette.emails]
blanchette_email = "jasmin.blanchette@gmail.com"
blanchette_email1 = "j.c.blanchette@vu.nl"
[blanchette.homepages]
blanchette_homepage = "http://www21.in.tum.de/~blanchet"
blanchette_homepage1 = "https://www.cs.vu.nl/~jbe248/"
[van]
name = "Hai Nguyen Van"
[van.emails]
van_email = "hai.nguyenvan.phie@gmail.com"
[van.homepages]
[brunner]
name = "Julian Brunner"
[brunner.emails]
brunner_email = "brunnerj@in.tum.de"
[brunner.homepages]
brunner_homepage = "http://www21.in.tum.de/~brunnerj/"
[reiche]
name = "Sebastian Reiche"
[reiche.emails]
[reiche.homepages]
reiche_homepage = "https://www.linkedin.com/in/sebastian-reiche-0b2093178"
[eriksson]
name = "Lars-Henrik Eriksson"
[eriksson.emails]
eriksson_email = "lhe@it.uu.se"
[eriksson.homepages]
[nipkow]
name = "Tobias Nipkow"
[nipkow.emails]
nipkow_email = "nipkow@in.tum.de"
[nipkow.homepages]
-nipkow_homepage = "http://www21.in.tum.de/~nipkow"
-nipkow_homepage1 = "http://www.in.tum.de/~nipkow/"
-nipkow_homepage2 = "http://www.in.tum.de/~nipkow"
-nipkow_homepage3 = "https://www.in.tum.de/~nipkow"
-nipkow_homepage4 = "http://www21.in.tum.de/~nipkow/"
+nipkow_homepage = "https://www21.in.tum.de/~nipkow/"
[divason]
name = "Jose Divasón"
[divason.emails]
-divason_email = "jose.divasonm@unirioja.es"
-divason_email1 = "jose.divason@unirioja.es"
+divason_email = "jose.divason@unirioja.es"
[divason.homepages]
-divason_homepage = "http://www.unirioja.es/cu/jodivaso"
-divason_homepage1 = "http://www.unirioja.es/cu/jodivaso/"
-divason_homepage2 = "https://www.unirioja.es/cu/jodivaso/"
+divason_homepage = "https://www.unirioja.es/cu/jodivaso/"
[barsotti]
name = "Damián Barsotti"
[barsotti.emails]
[barsotti.homepages]
barsotti_homepage = "http://www.cs.famaf.unc.edu.ar/~damian/"
[nagashima]
name = "Yutaka Nagashima"
[nagashima.emails]
nagashima_email = "Yutaka.Nagashima@data61.csiro.au"
[nagashima.homepages]
[avigad]
name = "Jeremy Avigad"
[avigad.emails]
avigad_email = "avigad@cmu.edu"
[avigad.homepages]
avigad_homepage = "http://www.andrew.cmu.edu/user/avigad/"
[abdulaziz]
name = "Mohammad Abdulaziz"
[abdulaziz.emails]
-abdulaziz_email = "mansour@in.tum.de"
+abdulaziz_email = "mohammad.abdulaziz@in.tum.de"
abdulaziz_email1 = "mohammad.abdulaziz8@gmail.com"
-abdulaziz_email2 = "mohammad.abdulaziz@in.tum.de"
[abdulaziz.homepages]
abdulaziz_homepage = "http://home.in.tum.de/~mansour/"
[kuncar]
name = "Ondřej Kunčar"
[kuncar.emails]
[kuncar.homepages]
kuncar_homepage = "http://www21.in.tum.de/~kuncar/"
[borgstroem]
name = "Johannes Borgström"
[borgstroem.emails]
borgstroem_email = "johannes.borgstrom@it.uu.se"
[borgstroem.homepages]
[kurz]
name = "Friedrich Kurz"
[kurz.emails]
kurz_email = "friedrich.kurz@tum.de"
[kurz.homepages]
[noschinski]
name = "Lars Noschinski"
[noschinski.emails]
noschinski_email = "noschinl@gmail.com"
[noschinski.homepages]
noschinski_homepage = "http://www21.in.tum.de/~noschinl/"
[liy]
name = "Yangjia Li"
[liy.emails]
[liy.homepages]
[wimmer]
name = "Simon Wimmer"
[wimmer.emails]
wimmer_email = "simon.wimmer@tum.de"
-wimmer_email1 = "wimmers@in.tum.de"
[wimmer.homepages]
-wimmer_homepage = "http://in.tum.de/~wimmers"
-wimmer_homepage1 = "http://home.in.tum.de/~wimmers/"
+wimmer_homepage = "http://home.in.tum.de/~wimmers/"
[gammie]
name = "Peter Gammie"
[gammie.emails]
gammie_email = "peteg42@gmail.com"
[gammie.homepages]
gammie_homepage = "http://peteg.org"
[snelting]
name = "Gregor Snelting"
[snelting.emails]
[snelting.homepages]
snelting_homepage = "http://pp.info.uni-karlsruhe.de/personhp/gregor_snelting.php"
[stark]
name = "Eugene W. Stark"
[stark.emails]
stark_email = "stark@cs.stonybrook.edu"
[stark.homepages]
[engelhardt]
name = "Kai Engelhardt"
[engelhardt.emails]
[engelhardt.homepages]
[fleury]
name = "Mathias Fleury"
[fleury.emails]
fleury_email = "fleury@mpi-inf.mpg.de"
fleury_email1 = "mathias.fleury@jku.at"
[fleury.homepages]
fleury_homepage = "http://fmv.jku.at/fleury"
[griebel]
name = "Simon Griebel"
[griebel.emails]
griebel_email = "s.griebel@tum.de"
[griebel.homepages]
[zhang]
name = "Yu Zhang"
[zhang.emails]
[zhang.homepages]
[spichkova]
name = "Maria Spichkova"
[spichkova.emails]
spichkova_email = "maria.spichkova@rmit.edu.au"
[spichkova.homepages]
[andronick]
name = "June Andronick"
[andronick.emails]
[andronick.homepages]
[lammich]
name = "Peter Lammich"
[lammich.emails]
lammich_email = "lammich@in.tum.de"
lammich_email1 = "peter.lammich@uni-muenster.de"
[lammich.homepages]
lammich_homepage = "http://www21.in.tum.de/~lammich"
[maricf]
name = "Filip Marić"
[maricf.emails]
maricf_email = "filip@matf.bg.ac.rs"
[maricf.homepages]
maricf_homepage = "http://www.matf.bg.ac.rs/~filip"
-maricf_homepage1 = "http://poincare.matf.bg.ac.rs/~filip/"
[popescu]
name = "Andrei Popescu"
[popescu.emails]
popescu_email = "a.popescu@sheffield.ac.uk"
popescu_email1 = "uuomul@yahoo.com"
popescu_email2 = "a.popescu@mdx.ac.uk"
[popescu.homepages]
popescu_homepage = "https://www.andreipopescu.uk"
[chen]
name = "L. Chen"
[chen.emails]
[chen.homepages]
[helke]
name = "Steffen Helke"
[helke.emails]
helke_email = "helke@cs.tu-berlin.de"
[helke.homepages]
[pohjola]
name = "Johannes Åman Pohjola"
[pohjola.emails]
[pohjola.homepages]
[yingm]
name = "Mingsheng Ying"
[yingm.emails]
[yingm.homepages]
[fosterj]
name = "J. Nathan Foster"
[fosterj.emails]
[fosterj.homepages]
fosterj_homepage = "http://www.cs.cornell.edu/~jnfoster/"
[verbeek]
name = "Freek Verbeek"
[verbeek.emails]
verbeek_email = "Freek.Verbeek@ou.nl"
verbeek_email1 = "freek@vt.edu"
[verbeek.homepages]
[struth]
name = "Georg Struth"
[struth.emails]
struth_email = "g.struth@sheffield.ac.uk"
-struth_email1 = "g.struth@dcs.shef.ac.uk"
[struth.homepages]
-struth_homepage = "http://www.dcs.shef.ac.uk/~georg"
-struth_homepage1 = "http://staffwww.dcs.shef.ac.uk/people/G.Struth/"
+struth_homepage = "http://staffwww.dcs.shef.ac.uk/people/G.Struth/"
[mitsch]
name = "Stefan Mitsch"
[mitsch.emails]
mitsch_email = "smitsch@cs.cmu.edu"
[mitsch.homepages]
[nordhoff]
name = "Benedikt Nordhoff"
[nordhoff.emails]
nordhoff_email = "b.n@wwu.de"
nordhoff_email1 = "b_nord01@uni-muenster.de"
[nordhoff.homepages]
[kirchner]
name = "Daniel Kirchner"
[kirchner.emails]
kirchner_email = "daniel@ekpyron.org"
[kirchner.homepages]
[merz]
name = "Stephan Merz"
[merz.emails]
merz_email = "Stephan.Merz@loria.fr"
[merz.homepages]
merz_homepage = "http://www.loria.fr/~merz"
[fernandez]
name = "Matthew Fernandez"
[fernandez.emails]
[fernandez.homepages]
[kolanski]
name = "Rafal Kolanski"
[kolanski.emails]
kolanski_email = "rafal.kolanski@nicta.com.au"
[kolanski.homepages]
[strnisa]
name = "Rok Strniša"
[strnisa.emails]
strnisa_email = "rok@strnisa.com"
[strnisa.homepages]
strnisa_homepage = "http://rok.strnisa.com/lj/"
[liuy]
name = "Yang Liu"
[liuy.emails]
liuy_email = "yangliu@ntu.edu.sg"
[liuy.homepages]
[jacobsen]
name = "Frederik Krogsdal Jacobsen"
[jacobsen.emails]
jacobsen_email = "fkjac@dtu.dk"
[jacobsen.homepages]
jacobsen_homepage = "http://people.compute.dtu.dk/fkjac/"
[crighton]
name = "Aaron Crighton"
[crighton.emails]
crighton_email = "crightoa@mcmaster.ca"
[crighton.homepages]
[matiyasevich]
name = "Yuri Matiyasevich"
[matiyasevich.emails]
[matiyasevich.homepages]
[kaliszyk]
name = "Cezary Kaliszyk"
[kaliszyk.emails]
kaliszyk_email = "cezary.kaliszyk@uibk.ac.at"
[kaliszyk.homepages]
kaliszyk_homepage = "http://cl-informatik.uibk.ac.at/users/cek/"
-kaliszyk_homepage1 = "http://cl-informatik.uibk.ac.at/cek/"
[voisin]
name = "Frederic Voisin"
[voisin.emails]
[voisin.homepages]
[kaufmann]
name = "Daniela Kaufmann"
[kaufmann.emails]
[kaufmann.homepages]
kaufmann_homepage = "http://fmv.jku.at/kaufmann"
[milehins]
name = "Mihails Milehins"
[milehins.emails]
milehins_email = "user9716869@gmail.com"
milehins_email1 = "mihailsmilehins@gmail.com"
[milehins.homepages]
[cordwell]
name = "Katherine Cordwell"
[cordwell.emails]
cordwell_email = "kcordwel@cs.cmu.edu"
[cordwell.homepages]
cordwell_homepage = "https://www.cs.cmu.edu/~kcordwel/"
[smaus]
name = "Jan-Georg Smaus"
[smaus.emails]
[smaus.homepages]
smaus_homepage = "http://www.irit.fr/~Jan-Georg.Smaus"
[sefidgar]
name = "S. Reza Sefidgar"
[sefidgar.emails]
sefidgar_email = "reza.sefidgar@inf.ethz.ch"
[sefidgar.homepages]
[steinberg]
name = "Matías Steinberg"
[steinberg.emails]
steinberg_email = "matias.steinberg@mi.unc.edu.ar"
[steinberg.homepages]
[furusawa]
name = "Hitoshi Furusawa"
[furusawa.emails]
[furusawa.homepages]
furusawa_homepage = "http://www.sci.kagoshima-u.ac.jp/~furusawa/"
[unruh]
name = "Dominique Unruh"
[unruh.emails]
unruh_email = "unruh@ut.ee"
[unruh.homepages]
-unruh_homepage = "http://kodu.ut.ee/~unruh/"
-unruh_homepage1 = "https://www.ut.ee/~unruh/"
+unruh_homepage = "https://www.ut.ee/~unruh/"
[yamada]
name = "Akihisa Yamada"
[yamada.emails]
yamada_email = "akihisa.yamada@uibk.ac.at"
yamada_email1 = "ayamada@trs.cm.is.nagoya-u.ac.jp"
yamada_email2 = "akihisa.yamada@aist.go.jp"
yamada_email3 = "akihisayamada@nii.ac.jp"
[yamada.homepages]
yamada_homepage = "http://group-mmm.org/~ayamada/"
[smola]
name = "Filip Smola"
[smola.emails]
smola_email = "f.smola@sms.ed.ac.uk"
[smola.homepages]
[pusch]
name = "Cornelia Pusch"
[pusch.emails]
[pusch.homepages]
[ogawa]
name = "Mizuhito Ogawa"
[ogawa.emails]
[ogawa.homepages]
[bulwahn]
name = "Lukas Bulwahn"
[bulwahn.emails]
bulwahn_email = "lukas.bulwahn@gmail.com"
[bulwahn.homepages]
[obua]
name = "Steven Obua"
[obua.emails]
obua_email = "steven@recursivemind.com"
[obua.homepages]
[bayer]
name = "Jonas Bayer"
[bayer.emails]
bayer_email = "jonas.bayer999@gmail.com"
[bayer.homepages]
[brun]
name = "Matthias Brun"
[brun.emails]
brun_email = "matthias.brun@inf.ethz.ch"
[brun.homepages]
[aransay]
name = "Jesús Aransay"
[aransay.emails]
aransay_email = "jesus-maria.aransay@unirioja.es"
[aransay.homepages]
-aransay_homepage = "http://www.unirioja.es/cu/jearansa"
-aransay_homepage1 = "https://www.unirioja.es/cu/jearansa"
+aransay_homepage = "https://www.unirioja.es/cu/jearansa"
[gioiosa]
name = "Gianpaolo Gioiosa"
[gioiosa.emails]
[gioiosa.homepages]
[karayel]
name = "Emin Karayel"
[karayel.emails]
karayel_email = "me@eminkarayel.de"
karayel_email1 = "eminkarayel@google.com"
[karayel.homepages]
karayel_homepage = "https://orcid.org/0000-0003-3290-5034"
[koller]
name = "Lukas Koller"
[koller.emails]
koller_email = "lukas.koller@tum.de"
[koller.homepages]
[starosta]
name = "Štěpán Starosta"
[starosta.emails]
starosta_email = "stepan.starosta@fit.cvut.cz"
[starosta.homepages]
starosta_homepage = "https://users.fit.cvut.cz/~staroste/"
[zhangx]
name = "Xingyuan Zhang"
[zhangx.emails]
[zhangx.homepages]
[katovsky]
name = "Alexander Katovsky"
[katovsky.emails]
katovsky_email = "apk32@cam.ac.uk"
katovsky_email1 = "alexander.katovsky@cantab.net"
[katovsky.homepages]
[roessle]
name = "Ian Roessle"
[roessle.emails]
[roessle.homepages]
[schoepf]
name = "Jonas Schöpf"
[schoepf.emails]
schoepf_email = "jonas.schoepf@uibk.ac.at"
[schoepf.homepages]
[gheri]
name = "Lorenzo Gheri"
[gheri.emails]
gheri_email = "lor.gheri@gmail.com"
[gheri.homepages]
[oosterhuis]
name = "Roelof Oosterhuis"
[oosterhuis.emails]
oosterhuis_email = "roelofoosterhuis@gmail.com"
[oosterhuis.homepages]
[robillard]
name = "Simon Robillard"
[robillard.emails]
[robillard.homepages]
robillard_homepage = "https://simon-robillard.net/"
[rauch]
name = "Nicole Rauch"
[rauch.emails]
rauch_email = "rauch@informatik.uni-kl.de"
[rauch.homepages]
[wolff]
name = "Burkhart Wolff"
[wolff.emails]
-wolff_email = "wolff@lri.fr"
-wolff_email1 = "burkhart.wolff@lri.fr"
+wolff_email = "burkhart.wolff@lri.fr"
[wolff.homepages]
wolff_homepage = "https://www.lri.fr/~wolff/"
[lattuada]
name = "Andrea Lattuada"
[lattuada.emails]
[lattuada.homepages]
lattuada_homepage = "https://andrea.lattuada.me"
[pierzchalski]
name = "Edward Pierzchalski"
[pierzchalski.emails]
[pierzchalski.homepages]
[chaieb]
name = "Amine Chaieb"
[chaieb.emails]
[chaieb.homepages]
[raszyk]
name = "Martin Raszyk"
[raszyk.emails]
raszyk_email = "martin.raszyk@inf.ethz.ch"
raszyk_email1 = "m.raszyk@gmail.com"
[raszyk.homepages]
[zee]
name = "Karen Zee"
[zee.emails]
zee_email = "kkz@mit.edu"
[zee.homepages]
zee_homepage = "http://www.mit.edu/~kkz/"
[jensen]
name = "Alexander Birch Jensen"
[jensen.emails]
jensen_email = "aleje@dtu.dk"
[jensen.homepages]
jensen_homepage = "https://people.compute.dtu.dk/aleje/"
[murray]
name = "Toby Murray"
[murray.emails]
murray_email = "toby.murray@unimelb.edu.au"
[murray.homepages]
-murray_homepage = "http://people.eng.unimelb.edu.au/tobym/"
-murray_homepage1 = "https://people.eng.unimelb.edu.au/tobym/"
+murray_homepage = "https://people.eng.unimelb.edu.au/tobym/"
[hupel]
name = "Lars Hupel"
[hupel.emails]
hupel_email = "hupel@in.tum.de"
hupel_email1 = "lars@hupel.info"
[hupel.homepages]
hupel_homepage = "https://www21.in.tum.de/~hupel/"
hupel_homepage1 = "https://lars.hupel.info/"
[rowat]
name = "Colin Rowat"
[rowat.emails]
rowat_email = "c.rowat@bham.ac.uk"
[rowat.homepages]
[schneider]
name = "Joshua Schneider"
[schneider.emails]
schneider_email = "joshua.schneider@inf.ethz.ch"
[schneider.homepages]
[cousin]
name = "Marie Cousin"
[cousin.emails]
cousin_email = "marie.cousin@grenoble-inp.org"
[cousin.homepages]
[parsert]
name = "Julian Parsert"
[parsert.emails]
parsert_email = "julian.parsert@gmail.com"
parsert_email1 = "julian.parsert@uibk.ac.at"
[parsert.homepages]
parsert_homepage = "http://www.parsert.com/"
[aehlig]
name = "Klaus Aehlig"
[aehlig.emails]
[aehlig.homepages]
aehlig_homepage = "http://www.linta.de/~aehlig/"
[argyraki]
name = "Angeliki Koutsoukou-Argyraki"
[argyraki.emails]
argyraki_email = "ak2110@cam.ac.uk"
[argyraki.homepages]
argyraki_homepage = "https://www.cl.cam.ac.uk/~ak2110/"
-argyraki_homepage1 = "http://www.cl.cam.ac.uk/~ak2110/"
argyraki_homepage2 = "https://www.cst.cam.ac.uk/people/ak2110"
[ye]
name = "Lina Ye"
[ye.emails]
ye_email = "lina.ye@lri.fr"
[ye.homepages]
[moeller]
name = "Bernhard Möller"
[moeller.emails]
[moeller.homepages]
moeller_homepage = "https://www.informatik.uni-augsburg.de/en/chairs/dbis/pmi/staff/moeller/"
[zankl]
name = "Harald Zankl"
[zankl.emails]
zankl_email = "Harald.Zankl@uibk.ac.at"
[zankl.homepages]
zankl_homepage = "http://cl-informatik.uibk.ac.at/users/hzankl"
[langenstein]
name = "Bruno Langenstein"
[langenstein.emails]
langenstein_email = "langenstein@dfki.de"
[langenstein.homepages]
[breitner]
name = "Joachim Breitner"
[breitner.emails]
breitner_email = "mail@joachim-breitner.de"
breitner_email1 = "joachim@cis.upenn.edu"
[breitner.homepages]
breitner_homepage = "http://pp.ipd.kit.edu/~breitner"
[thommes]
name = "Joseph Thommes"
[thommes.emails]
thommes_email = "joseph-thommes@gmx.de"
[thommes.homepages]
[loibl]
name = "Matthias Loibl"
[loibl.emails]
[loibl.homepages]
[cock]
name = "David Cock"
[cock.emails]
cock_email = "david.cock@nicta.com.au"
[cock.homepages]
[stuewe]
name = "Daniel Stüwe"
[stuewe.emails]
[stuewe.homepages]
[decova]
name = "Sára Decova"
[decova.emails]
[decova.homepages]
[nikiforov]
name = "Denis Nikiforov"
[nikiforov.emails]
nikiforov_email = "denis.nikif@gmail.com"
[nikiforov.homepages]
[traytel]
name = "Dmitriy Traytel"
[traytel.emails]
traytel_email = "traytel@in.tum.de"
traytel_email1 = "traytel@inf.ethz.ch"
traytel_email2 = "traytel@di.ku.dk"
[traytel.homepages]
-traytel_homepage = "https://traytel.bitbucket.io"
-traytel_homepage1 = "https://traytel.bitbucket.io/"
+traytel_homepage = "https://traytel.bitbucket.io/"
[tverdyshev]
name = "Sergey Tverdyshev"
[tverdyshev.emails]
tverdyshev_email = "stv@sysgo.com"
[tverdyshev.homepages]
[wassell]
name = "Mark Wassell"
[wassell.emails]
wassell_email = "mpwassell@gmail.com"
[wassell.homepages]
[ijbema]
name = "Mark Ijbema"
[ijbema.emails]
ijbema_email = "ijbema@fmf.nl"
[ijbema.homepages]
[wirt]
name = "Kai Wirt"
[wirt.emails]
[wirt.homepages]
[matache]
name = "Cristina Matache"
[matache.emails]
matache_email = "cris.matache@gmail.com"
[matache.homepages]
[ito]
name = "Yosuke Ito"
[ito.emails]
ito_email = "glacier345@gmail.com"
[ito.homepages]
[maric]
name = "Ognjen Marić"
[maric.emails]
maric_email = "ogi.afp@mynosefroze.com"
[maric.homepages]
[heimes]
name = "Lukas Heimes"
[heimes.emails]
heimes_email = "heimesl@student.ethz.ch"
[heimes.homepages]
[parrow]
name = "Joachim Parrow"
[parrow.emails]
parrow_email = "joachim.parrow@it.uu.se"
[parrow.homepages]
[naraschewski]
name = "Wolfgang Naraschewski"
[naraschewski.emails]
[naraschewski.homepages]
[amani]
name = "Sidney Amani"
[amani.emails]
amani_email = "sidney.amani@data61.csiro.au"
[amani.homepages]
[palmer]
name = "Jake Palmer"
[palmer.emails]
palmer_email = "jake.palmer@ed.ac.uk"
[palmer.homepages]
[bockenek]
name = "Joshua Bockenek"
[bockenek.emails]
[bockenek.homepages]
[lachnitt]
name = "Hanna Lachnitt"
[lachnitt.emails]
lachnitt_email = "lachnitt@stanford.edu"
[lachnitt.homepages]
[rabe]
name = "Markus N. Rabe"
[rabe.emails]
[rabe.homepages]
rabe_homepage = "http://www.react.uni-saarland.de/people/rabe.html"
[munive]
name = "Jonathan Julian Huerta y Munive"
[munive.emails]
munive_email = "jjhuertaymunive1@sheffield.ac.uk"
munive_email1 = "jonjulian23@gmail.com"
[munive.homepages]
[gouezel]
name = "Sebastien Gouezel"
[gouezel.emails]
gouezel_email = "sebastien.gouezel@univ-rennes1.fr"
[gouezel.homepages]
gouezel_homepage = "http://www.math.sciences.univ-nantes.fr/~gouezel/"
[brucker]
name = "Achim D. Brucker"
[brucker.emails]
brucker_email = "a.brucker@exeter.ac.uk"
brucker_email1 = "brucker@spamfence.net"
brucker_email2 = "adbrucker@0x5f.org"
[brucker.homepages]
brucker_homepage = "https://www.brucker.ch/"
-brucker_homepage1 = "https://www.brucker.ch"
[sato]
name = "Tetsuya Sato"
[sato.emails]
sato_email = "tsato@c.titech.ac.jp"
[sato.homepages]
sato_homepage = "https://sites.google.com/view/tetsuyasato/"
[huffman]
name = "Brian Huffman"
[huffman.emails]
huffman_email = "huffman@in.tum.de"
huffman_email1 = "brianh@cs.pdx.edu"
[huffman.homepages]
-huffman_homepage = "http://web.cecs.pdx.edu/~brianh/"
-huffman_homepage1 = "http://cs.pdx.edu/~brianh"
+huffman_homepage = "http://cs.pdx.edu/~brianh"
[fleuriot]
name = "Jacques D. Fleuriot"
[fleuriot.emails]
fleuriot_email = "Jacques.Fleuriot@ed.ac.uk"
fleuriot_email1 = "jdf@ed.ac.uk"
[fleuriot.homepages]
fleuriot_homepage = "https://www.inf.ed.ac.uk/people/staff/Jacques_Fleuriot.html"
[hess]
name = "Andreas V. Hess"
[hess.emails]
hess_email = "avhe@dtu.dk"
hess_email1 = "andreasvhess@gmail.com"
[hess.homepages]
[marmsoler]
name = "Diego Marmsoler"
[marmsoler.emails]
marmsoler_email = "diego.marmsoler@tum.de"
[marmsoler.homepages]
marmsoler_homepage = "http://marmsoler.com"
[hibon]
name = "Quentin Hibon"
[hibon.emails]
hibon_email = "qh225@cl.cam.ac.uk"
[hibon.homepages]
[tasch]
name = "Markus Tasch"
[tasch.emails]
tasch_email = "tasch@mais.informatik.tu-darmstadt.de"
[tasch.homepages]
[doty]
name = "Matthew Wampler-Doty"
[doty.emails]
[doty.homepages]
[berghofer]
name = "Stefan Berghofer"
[berghofer.emails]
berghofer_email = "berghofe@in.tum.de"
[berghofer.homepages]
berghofer_homepage = "http://www.in.tum.de/~berghofe"
[hirata]
name = "Michikazu Hirata"
[hirata.emails]
hirata_email = "hirata.m.ac@m.titech.ac.jp"
[hirata.homepages]
[sylvestre]
name = "Jeremy Sylvestre"
[sylvestre.emails]
sylvestre_email = "jeremy.sylvestre@ualberta.ca"
sylvestre_email1 = "jsylvest@ualberta.ca"
[sylvestre.homepages]
sylvestre_homepage = "http://ualberta.ca/~jsylvest/"
[lindenberg]
name = "Christina Lindenberg"
[lindenberg.emails]
[lindenberg.homepages]
[joosten]
name = "Sebastiaan J. C. Joosten"
[joosten.emails]
joosten_email = "sebastiaan.joosten@uibk.ac.at"
joosten_email1 = "sjcjoosten@gmail.com"
joosten_email2 = "s.j.c.joosten@utwente.nl"
[joosten.homepages]
-joosten_homepage = "https://sjcjoosten.nl"
-joosten_homepage1 = "https://sjcjoosten.nl/"
+joosten_homepage = "https://sjcjoosten.nl/"
[blumson]
name = "Ben Blumson"
[blumson.emails]
blumson_email = "benblumson@gmail.com"
[blumson.homepages]
-blumson_homepage = "https://philpapers.org/profile/805"
-blumson_homepage1 = "https://philpeople.org/profiles/ben-blumson"
+blumson_homepage = "https://philpeople.org/profiles/ben-blumson"
[hosking]
name = "Tony Hosking"
[hosking.emails]
[hosking.homepages]
hosking_homepage = "https://www.cs.purdue.edu/homes/hosking/"
[preoteasa]
name = "Viorel Preoteasa"
[preoteasa.emails]
preoteasa_email = "viorel.preoteasa@aalto.fi"
[preoteasa.homepages]
preoteasa_homepage = "http://users.abo.fi/vpreotea/"
[gutkovas]
name = "Ramunas Gutkovas"
[gutkovas.emails]
gutkovas_email = "ramunas.gutkovas@it.uu.se"
[gutkovas.homepages]
[minamide]
name = "Yasuhiko Minamide"
[minamide.emails]
minamide_email = "minamide@is.titech.ac.jp"
[minamide.homepages]
minamide_homepage = "https://sv.c.titech.ac.jp/minamide/index.en.html"
[waldmann]
name = "Uwe Waldmann"
[waldmann.emails]
waldmann_email = "waldmann@mpi-inf.mpg.de"
-waldmann_email1 = "uwe@mpi-inf.mpg.de"
[waldmann.homepages]
[lewis]
name = "Corey Lewis"
[lewis.emails]
lewis_email = "corey.lewis@data61.csiro.au"
[lewis.homepages]
[peters]
name = "Kirstin Peters"
[peters.emails]
peters_email = "kirstin.peters@tu-berlin.de"
[peters.homepages]
[ortner]
name = "Veronika Ortner"
[ortner.emails]
[ortner.homepages]
[fiedler]
name = "Ben Fiedler"
[fiedler.emails]
fiedler_email = "ben.fiedler@inf.ethz.ch"
[fiedler.homepages]
[becker]
name = "Heiko Becker"
[becker.emails]
becker_email = "hbecker@mpi-sws.org"
[becker.homepages]
[bracevac]
name = "Oliver Bračevac"
[bracevac.emails]
bracevac_email = "bracevac@st.informatik.tu-darmstadt.de"
[bracevac.homepages]
[bohrer]
name = "Rose Bohrer"
[bohrer.emails]
bohrer_email = "rose.bohrer.cs@gmail.com"
[bohrer.homepages]
[londono]
name = "Alejandro Gómez-Londoño"
[londono.emails]
londono_email = "alejandro.gomez@chalmers.se"
[londono.homepages]
[armstrong]
name = "Alasdair Armstrong"
[armstrong.emails]
[armstrong.homepages]
[glabbeek]
name = "Rob van Glabbeek"
[glabbeek.emails]
[glabbeek.homepages]
glabbeek_homepage = "http://theory.stanford.edu/~rvg/"
[jiangd]
name = "Dongchen Jiang"
[jiangd.emails]
jiangd_email = "dongchenjiang@googlemail.com"
[jiangd.homepages]
[meis]
name = "Rene Meis"
[meis.emails]
meis_email = "rene.meis@uni-muenster.de"
meis_email1 = "rene.meis@uni-due.de"
[meis.homepages]
[hofmann]
name = "Martin Hofmann"
[hofmann.emails]
[hofmann.homepages]
hofmann_homepage = "http://www.tcs.informatik.uni-muenchen.de/~mhofmann"
[kerber]
name = "Manfred Kerber"
[kerber.emails]
kerber_email = "mnfrd.krbr@gmail.com"
[kerber.homepages]
kerber_homepage = "http://www.cs.bham.ac.uk/~mmk"
[linker]
name = "Sven Linker"
[linker.emails]
linker_email = "s.linker@liverpool.ac.uk"
[linker.homepages]
[balbach]
name = "Frank J. Balbach"
[balbach.emails]
balbach_email = "frank-balbach@gmx.de"
[balbach.homepages]
[weber]
name = "Tjark Weber"
[weber.emails]
weber_email = "tjark.weber@it.uu.se"
[weber.homepages]
weber_homepage = "http://user.it.uu.se/~tjawe125/"
[hellauer]
name = "Fabian Hellauer"
[hellauer.emails]
hellauer_email = "hellauer@in.tum.de"
[hellauer.homepages]
[reiter]
name = "Markus Reiter"
[reiter.emails]
[reiter.homepages]
[herzberg]
name = "Michael Herzberg"
[herzberg.emails]
herzberg_email = "mail@michael-herzberg.de"
[herzberg.homepages]
herzberg_homepage = "http://www.dcs.shef.ac.uk/cgi-bin/makeperson?M.Herzberg"
[overbeek]
name = "Roy Overbeek"
[overbeek.emails]
overbeek_email = "Roy.Overbeek@cwi.nl"
[overbeek.homepages]
[koerner]
name = "Stefan Körner"
[koerner.emails]
koerner_email = "s_koer03@uni-muenster.de"
[koerner.homepages]
[tan]
name = "Yong Kiam Tan"
[tan.emails]
tan_email = "yongkiat@cs.cmu.edu"
[tan.homepages]
tan_homepage = "https://www.cs.cmu.edu/~yongkiat/"
[esparza]
name = "Javier Esparza"
[esparza.emails]
[esparza.homepages]
esparza_homepage = "https://www7.in.tum.de/~esparza/"
[platzer]
name = "André Platzer"
[platzer.emails]
platzer_email = "aplatzer@cs.cmu.edu"
[platzer.homepages]
platzer_homepage = "https://www.cs.cmu.edu/~aplatzer/"
-platzer_homepage1 = "http://www.cs.cmu.edu/~aplatzer/"
[matichuk]
name = "Daniel Matichuk"
[matichuk.emails]
[matichuk.homepages]
[rawson]
name = "Michael Rawson"
[rawson.emails]
rawson_email = "michaelrawson76@gmail.com"
rawson_email1 = "mr644@cam.ac.uk"
[rawson.homepages]
[bisping]
name = "Benjamin Bisping"
[bisping.emails]
bisping_email = "benjamin.bisping@campus.tu-berlin.de"
[bisping.homepages]
[sanan]
name = "David Sanan"
[sanan.emails]
sanan_email = "sanan@ntu.edu.sg"
[sanan.homepages]
[jungnickel]
name = "Tim Jungnickel"
[jungnickel.emails]
jungnickel_email = "tim.jungnickel@tu-berlin.de"
[jungnickel.homepages]
[ammer]
name = "Thomas Ammer"
[ammer.emails]
[ammer.homepages]
[mulligan]
name = "Dominic P. Mulligan"
[mulligan.emails]
mulligan_email = "dominic.p.mulligan@googlemail.com"
mulligan_email1 = "Dominic.Mulligan@arm.com"
[mulligan.homepages]
[georgescu]
name = "George Georgescu"
[georgescu.emails]
[georgescu.homepages]
[aissat]
name = "Romain Aissat"
[aissat.emails]
[aissat.homepages]
[gore]
name = "Rajeev Gore"
[gore.emails]
gore_email = "rajeev.gore@anu.edu.au"
[gore.homepages]
[gomes]
name = "Victor B. F. Gomes"
[gomes.emails]
gomes_email = "victor.gomes@cl.cam.ac.uk"
-gomes_email1 = "vb358@cam.ac.uk"
gomes_email2 = "victorborgesfg@gmail.com"
-gomes_email3 = "vb358@cl.cam.ac.uk"
gomes_email4 = "vborgesferreiragomes1@sheffield.ac.uk"
[gomes.homepages]
gomes_homepage = "http://www.dcs.shef.ac.uk/~victor"
[trachtenherz]
name = "David Trachtenherz"
[trachtenherz.emails]
[trachtenherz.homepages]
[lohner]
name = "Denis Lohner"
[lohner.emails]
lohner_email = "denis.lohner@kit.edu"
[lohner.homepages]
lohner_homepage = "http://pp.ipd.kit.edu/person.php?id=88"
[richter]
name = "Stefan Richter"
[richter.emails]
richter_email = "richter@informatik.rwth-aachen.de"
[richter.homepages]
richter_homepage = "http://www-lti.informatik.rwth-aachen.de/~richter/"
[nemeti]
name = "István Németi"
[nemeti.emails]
[nemeti.homepages]
nemeti_homepage = "http://www.renyi.hu/~nemeti/"
[bentkamp]
name = "Alexander Bentkamp"
[bentkamp.emails]
bentkamp_email = "bentkamp@gmail.com"
bentkamp_email1 = "a.bentkamp@vu.nl"
[bentkamp.homepages]
bentkamp_homepage = "https://www.cs.vu.nl/~abp290/"
[zeyda]
name = "Frank Zeyda"
[zeyda.emails]
zeyda_email = "frank.zeyda@york.ac.uk"
[zeyda.homepages]
[tuerk]
name = "Thomas Tuerk"
[tuerk.emails]
[tuerk.homepages]
[ribeiro]
name = "Pedro Ribeiro"
[ribeiro.emails]
[ribeiro.homepages]
[raedle]
name = "Jonas Rädle"
[raedle.emails]
raedle_email = "jonas.raedle@gmail.com"
raedle_email1 = "jonas.raedle@tum.de"
[raedle.homepages]
[peltier]
name = "Nicolas Peltier"
[peltier.emails]
peltier_email = "Nicolas.Peltier@imag.fr"
[peltier.homepages]
peltier_homepage = "http://membres-lig.imag.fr/peltier/"
[kappelmann]
name = "Kevin Kappelmann"
[kappelmann.emails]
kappelmann_email = "kevin.kappelmann@tum.de"
[kappelmann.homepages]
kappelmann_homepage = "https://www21.in.tum.de/team/kappelmk/"
[hoelzl]
name = "Johannes Hölzl"
[hoelzl.emails]
hoelzl_email = "hoelzl@in.tum.de"
[hoelzl.homepages]
-hoelzl_homepage = "http://in.tum.de/~hoelzl"
-hoelzl_homepage1 = "http://home.in.tum.de/~hoelzl"
+hoelzl_homepage = "http://home.in.tum.de/~hoelzl"
[ballarin]
name = "Clemens Ballarin"
[ballarin.emails]
ballarin_email = "ballarin@in.tum.de"
[ballarin.homepages]
ballarin_homepage = "http://www21.in.tum.de/~ballarin/"
[wu]
name = "Chunhan Wu"
[wu.emails]
[wu.homepages]
[paulson]
name = "Lawrence C. Paulson"
[paulson.emails]
paulson_email = "lp15@cam.ac.uk"
-paulson_email1 = "lcp@cl.cam.ac.uk"
[paulson.homepages]
-paulson_homepage = "http://www.cl.cam.ac.uk/~lp15/"
-paulson_homepage1 = "https://www.cl.cam.ac.uk/~lp15/"
+paulson_homepage = "https://www.cl.cam.ac.uk/~lp15/"
[paleo]
name = "Bruno Woltzenlogel Paleo"
[paleo.emails]
[paleo.homepages]
paleo_homepage = "http://www.logic.at/staff/bruno/"
[wagner]
name = "Max Wagner"
[wagner.emails]
wagner_email = "max@trollbu.de"
[wagner.homepages]
[lochbihler]
name = "Andreas Lochbihler"
[lochbihler.emails]
lochbihler_email = "andreas.lochbihler@digitalasset.com"
lochbihler_email1 = "mail@andreas-lochbihler.de"
[lochbihler.homepages]
-lochbihler_homepage = "http://www.andreas-lochbihler.de"
-lochbihler_homepage1 = "http://www.andreas-lochbihler.de/"
+lochbihler_homepage = "http://www.andreas-lochbihler.de/"
[porter]
name = "Benjamin Porter"
[porter.emails]
[porter.homepages]
[boehme]
name = "Sascha Böhme"
[boehme.emails]
boehme_email = "boehmes@in.tum.de"
[boehme.homepages]
boehme_homepage = "http://www21.in.tum.de/~boehmes/"
[hou]
name = "Zhe Hou"
[hou.emails]
hou_email = "zhe.hou@ntu.edu.sg"
[hou.homepages]
[mitchell]
name = "Neil Mitchell"
[mitchell.emails]
[mitchell.homepages]
[gay]
name = "Richard Gay"
[gay.emails]
gay_email = "gay@mais.informatik.tu-darmstadt.de"
[gay.homepages]
[oldenburg]
name = "Lennart Oldenburg"
[oldenburg.emails]
[oldenburg.homepages]
[lochmann]
name = "Alexander Lochmann"
[lochmann.emails]
lochmann_email = "alexander.lochmann@uibk.ac.at"
[lochmann.homepages]
[lim]
name = "Japheth Lim"
[lim.emails]
[lim.homepages]
[keinholz]
name = "Jonas Keinholz"
[keinholz.emails]
[keinholz.homepages]
[taylor]
name = "Ramsay G. Taylor"
[taylor.emails]
taylor_email = "r.g.taylor@sheffield.ac.uk"
[taylor.homepages]
[bauereiss]
name = "Thomas Bauereiss"
[bauereiss.emails]
bauereiss_email = "thomas@bauereiss.name"
[bauereiss.homepages]
[lee]
name = "Holden Lee"
[lee.emails]
lee_email = "holdenl@princeton.edu"
[lee.homepages]
[weerwag]
name = "Timmy Weerwag"
[weerwag.emails]
[weerwag.homepages]
[sudhof]
name = "Henry Sudhof"
[sudhof.emails]
sudhof_email = "hsudhof@cs.tu-berlin.de"
[sudhof.homepages]
[nedzelsky]
name = "Michael Nedzelsky"
[nedzelsky.emails]
nedzelsky_email = "MichaelNedzelsky@yandex.ru"
[nedzelsky.homepages]
[romanos]
name = "Ralph Romanos"
[romanos.emails]
romanos_email = "ralph.romanos@student.ecp.fr"
[romanos.homepages]
[doczkal]
name = "Christian Doczkal"
[doczkal.emails]
doczkal_email = "doczkal@ps.uni-saarland.de"
[doczkal.homepages]
[buyse]
name = "Maxime Buyse"
[buyse.emails]
buyse_email = "maxime.buyse@polytechnique.edu"
[buyse.homepages]
[guiol]
name = "Hervé Guiol"
[guiol.emails]
guiol_email = "herve.guiol@univ-grenoble-alpes.fr"
[guiol.homepages]
[brodmann]
name = "Paul-David Brodmann"
[brodmann.emails]
brodmann_email = "p.brodmann@tu-berlin.de"
[brodmann.homepages]
[moedersheim]
name = "Sebastian Mödersheim"
[moedersheim.emails]
moedersheim_email = "samo@dtu.dk"
[moedersheim.homepages]
moedersheim_homepage = "https://people.compute.dtu.dk/samo/"
[hu]
name = "Shuwei Hu"
[hu.emails]
hu_email = "shuwei.hu@tum.de"
[hu.homepages]
[wand]
name = "Daniel Wand"
[wand.emails]
wand_email = "dwand@mpi-inf.mpg.de"
[wand.homepages]
[velykis]
name = "Andrius Velykis"
[velykis.emails]
[velykis.homepages]
velykis_homepage = "http://andrius.velykis.lt"
[felgenhauer]
name = "Bertram Felgenhauer"
[felgenhauer.emails]
felgenhauer_email = "bertram.felgenhauer@uibk.ac.at"
felgenhauer_email1 = "int-e@gmx.de"
[felgenhauer.homepages]
[sauer]
name = "Jens Sauer"
[sauer.emails]
sauer_email = "sauer@mais.informatik.tu-darmstadt.de"
[sauer.homepages]
[dittmann]
name = "Christoph Dittmann"
[dittmann.emails]
dittmann_email = "isabelle@christoph-d.de"
[dittmann.homepages]
dittmann_homepage = "http://logic.las.tu-berlin.de/Members/Dittmann/"
[muendler]
name = "Niels Mündler"
[muendler.emails]
muendler_email = "n.muendler@tum.de"
[muendler.homepages]
[somogyi]
name = "Dániel Somogyi"
[somogyi.emails]
[somogyi.homepages]
[makarios]
name = "T. J. M. Makarios"
[makarios.emails]
makarios_email = "tjm1983@gmail.com"
[makarios.homepages]
[nishihara]
name = "Toshiaki Nishihara"
[nishihara.emails]
[nishihara.homepages]
[losa]
name = "Giuliano Losa"
[losa.emails]
losa_email = "giuliano.losa@epfl.ch"
losa_email1 = "giuliano@galois.com"
losa_email2 = "giuliano@losa.fr"
[losa.homepages]
[grov]
name = "Gudmund Grov"
[grov.emails]
grov_email = "ggrov@inf.ed.ac.uk"
[grov.homepages]
grov_homepage = "http://homepages.inf.ed.ac.uk/ggrov"
[bharadwaj]
name = "Abhijith Bharadwaj"
[bharadwaj.emails]
[bharadwaj.homepages]
[clouston]
name = "Ranald Clouston"
[clouston.emails]
clouston_email = "ranald.clouston@cs.au.dk"
[clouston.homepages]
[jaskelioff]
name = "Mauro Jaskelioff"
[jaskelioff.emails]
[jaskelioff.homepages]
jaskelioff_homepage = "http://www.fceia.unr.edu.ar/~mauro/"
[caminati]
name = "Marco B. Caminati"
[caminati.emails]
[caminati.homepages]
[bengtson]
name = "Jesper Bengtson"
[bengtson.emails]
[bengtson.homepages]
bengtson_homepage = "http://www.itu.dk/people/jebe"
[boutry]
name = "Pierre Boutry"
[boutry.emails]
boutry_email = "boutry@unistra.fr"
[boutry.homepages]
[sternagelt]
name = "Thomas Sternagel"
[sternagelt.emails]
[sternagelt.homepages]
[guerraoui]
name = "Rachid Guerraoui"
[guerraoui.emails]
guerraoui_email = "rachid.guerraoui@epfl.ch"
[guerraoui.homepages]
[dongol]
name = "Brijesh Dongol"
[dongol.emails]
dongol_email = "brijesh.dongol@brunel.ac.uk"
[dongol.homepages]
[tuong]
name = "Frédéric Tuong"
[tuong.emails]
tuong_email = "tuong@users.gforge.inria.fr"
tuong_email1 = "ftuong@lri.fr"
[tuong.homepages]
tuong_homepage = "https://www.lri.fr/~ftuong/"
[ullrich]
name = "Sebastian Ullrich"
[ullrich.emails]
ullrich_email = "sebasti@nullri.ch"
[ullrich.homepages]
[essmann]
name = "Robin Eßmann"
[essmann.emails]
essmann_email = "robin.essmann@tum.de"
[essmann.homepages]
[liu]
name = "Junyi Liu"
[liu.emails]
[liu.homepages]
[villadsen]
name = "Jørgen Villadsen"
[villadsen.emails]
villadsen_email = "jovi@dtu.dk"
[villadsen.homepages]
villadsen_homepage = "https://people.compute.dtu.dk/jovi/"
[fosters]
name = "Simon Foster"
[fosters.emails]
fosters_email = "simon.foster@york.ac.uk"
[fosters.homepages]
fosters_homepage = "https://www-users.cs.york.ac.uk/~simonf/"
-fosters_homepage1 = "http://www-users.cs.york.ac.uk/~simonf"
[dubut]
name = "Jérémy Dubut"
[dubut.emails]
dubut_email = "dubut@nii.ac.jp"
[dubut.homepages]
dubut_homepage = "http://group-mmm.org/~dubut/"
[jiang]
name = "Nan Jiang"
[jiang.emails]
jiang_email = "nanjiang@whu.edu.cn"
[jiang.homepages]
[hetzl]
name = "Stefan Hetzl"
[hetzl.emails]
hetzl_email = "hetzl@logic.at"
[hetzl.homepages]
hetzl_homepage = "http://www.logic.at/people/hetzl/"
[parkinson]
name = "Matthew Parkinson"
[parkinson.emails]
[parkinson.homepages]
parkinson_homepage = "http://research.microsoft.com/people/mattpark/"
[maletzky]
name = "Alexander Maletzky"
[maletzky.emails]
maletzky_email = "alexander.maletzky@risc.jku.at"
maletzky_email1 = "alexander.maletzky@risc-software.at"
[maletzky.homepages]
maletzky_homepage = "https://risc.jku.at/m/alexander-maletzky/"
[keefe]
name = "Greg O'Keefe"
[keefe.emails]
[keefe.homepages]
keefe_homepage = "http://users.rsise.anu.edu.au/~okeefe/"
[brandt]
name = "Felix Brandt"
[brandt.emails]
[brandt.homepages]
brandt_homepage = "http://dss.in.tum.de/staff/brandt.html"
[blasum]
name = "Holger Blasum"
[blasum.emails]
blasum_email = "holger.blasum@sysgo.com"
[blasum.homepages]
[messner]
name = "Florian Messner"
[messner.emails]
messner_email = "florian.g.messner@uibk.ac.at"
[messner.homepages]
[diaz]
name = "Javier Díaz"
[diaz.emails]
diaz_email = "javier.diaz.manzi@gmail.com"
[diaz.homepages]
[prathamesh]
name = "T.V.H. Prathamesh"
[prathamesh.emails]
prathamesh_email = "prathamesh@imsc.res.in"
[prathamesh.homepages]
[nestmann]
name = "Uwe Nestmann"
[nestmann.emails]
[nestmann.homepages]
nestmann_homepage = "https://www.mtv.tu-berlin.de/nestmann/"
[siek]
name = "Jeremy Siek"
[siek.emails]
siek_email = "jsiek@indiana.edu"
[siek.homepages]
siek_homepage = "http://homes.soic.indiana.edu/jsiek/"
[stock]
name = "Benedikt Stock"
[stock.emails]
stock_email = "benedikt1999@freenet.de"
[stock.homepages]
[boyton]
name = "Andrew Boyton"
[boyton.emails]
boyton_email = "andrew.boyton@nicta.com.au"
[boyton.homepages]
[taha]
name = "Safouan Taha"
[taha.emails]
taha_email = "safouan.taha@lri.fr"
[taha.homepages]
[ghourabi]
name = "Fadoua Ghourabi"
[ghourabi.emails]
ghourabi_email = "fadouaghourabi@gmail.com"
[ghourabi.homepages]
[brinkop]
name = "Hauke Brinkop"
[brinkop.emails]
brinkop_email = "hauke.brinkop@googlemail.com"
[brinkop.homepages]
[neumann]
name = "René Neumann"
[neumann.emails]
-neumann_email = "neumannr@in.tum.de"
-neumann_email1 = "rene.neumann@in.tum.de"
+neumann_email = "rene.neumann@in.tum.de"
[neumann.homepages]
[schmaltz]
name = "Julien Schmaltz"
[schmaltz.emails]
schmaltz_email = "Julien.Schmaltz@ou.nl"
[schmaltz.homepages]
[haslbeck]
name = "Max W. Haslbeck"
[haslbeck.emails]
haslbeck_email = "maximilian.haslbeck@uibk.ac.at"
haslbeck_email1 = "haslbecm@in.tum.de"
haslbeck_email2 = "max.haslbeck@gmx.de"
[haslbeck.homepages]
haslbeck_homepage = "http://cl-informatik.uibk.ac.at/users/mhaslbeck/"
-haslbeck_homepage1 = "http://cl-informatik.uibk.ac.at/users/mhaslbeck//"
[reynaud]
name = "Alban Reynaud"
[reynaud.emails]
[reynaud.homepages]
[boulanger]
name = "Frédéric Boulanger"
[boulanger.emails]
boulanger_email = "frederic.boulanger@centralesupelec.fr"
[boulanger.homepages]
[spasic]
name = "Mirko Spasić"
[spasic.emails]
spasic_email = "mirko@matf.bg.ac.rs"
[spasic.homepages]
[heller]
name = "Armin Heller"
[heller.emails]
[heller.homepages]
[watt]
name = "Conrad Watt"
[watt.emails]
watt_email = "caw77@cam.ac.uk"
[watt.homepages]
watt_homepage = "http://www.cl.cam.ac.uk/~caw77/"
[hayes]
name = "Ian J. Hayes"
[hayes.emails]
hayes_email = "ian.hayes@itee.uq.edu.au"
-hayes_email1 = "Ian.Hayes@itee.uq.edu.au"
[hayes.homepages]
[haslbeckm]
name = "Maximilian P. L. Haslbeck"
[haslbeckm.emails]
haslbeckm_email = "haslbema@in.tum.de"
[haslbeckm.homepages]
-haslbeckm_homepage = "http://www.in.tum.de/~haslbema"
-haslbeckm_homepage1 = "http://in.tum.de/~haslbema/"
+haslbeckm_homepage = "http://in.tum.de/~haslbema/"
[bortin]
name = "Maksym Bortin"
[bortin.emails]
-bortin_email = "Maksym.Bortin@nicta.com.au"
-bortin_email1 = "maksym.bortin@nicta.com.au"
+bortin_email = "maksym.bortin@nicta.com.au"
[bortin.homepages]
diff --git a/metadata/entries/Approximation_Algorithms.toml b/metadata/entries/Approximation_Algorithms.toml
--- a/metadata/entries/Approximation_Algorithms.toml
+++ b/metadata/entries/Approximation_Algorithms.toml
@@ -1,36 +1,36 @@
title = "Verified Approximation Algorithms"
date = 2020-01-16
topics = [
"Computer science/Algorithms/Approximation",
]
abstract = """
We present the first formal verification of approximation algorithms
for NP-complete optimization problems: vertex cover, set cover, independent set,
center selection, load balancing, and bin packing. The proofs correct incompletenesses
in existing proofs and improve the approximation ratio in one case.
A detailed description of our work (excluding center selection) has been published in the proceedings of
IJCAR 2020."""
license = "bsd"
note = ""
[authors]
[authors.essmann]
email = "essmann_email"
[authors.nipkow]
-homepage = "nipkow_homepage1"
+homepage = "nipkow_homepage"
[authors.robillard]
homepage = "robillard_homepage"
[authors.sulejmani]
[contributors]
[notify]
nipkow = "nipkow_email"
[history]
[extra]
diff --git a/metadata/entries/CAVA_LTL_Modelchecker.toml b/metadata/entries/CAVA_LTL_Modelchecker.toml
--- a/metadata/entries/CAVA_LTL_Modelchecker.toml
+++ b/metadata/entries/CAVA_LTL_Modelchecker.toml
@@ -1,53 +1,53 @@
title = "A Fully Verified Executable LTL Model Checker"
date = 2014-05-28
topics = [
"Computer science/Automata and formal languages",
]
abstract = """
We present an LTL model checker whose code has been completely verified
using the Isabelle theorem prover. The checker consists of over 4000
lines of ML code. The code is produced using the Isabelle Refinement
Framework, which allows us to split its correctness proof into (1) the
proof of an abstract version of the checker, consisting of a few hundred
lines of ``formalized pseudocode'', and (2) a verified refinement step
in which mathematical sets and other abstract structures are replaced by
implementations of efficient structures like red-black trees and
functional arrays. This leads to a checker that,
while still slower than unverified checkers, can already be used as a
trusted reference implementation against which advanced implementations
can be tested.
An early version of this model checker is described in the
CAV 2013 paper
with the same title."""
license = "bsd"
note = ""
[authors]
[authors.esparza]
homepage = "esparza_homepage"
[authors.lammich]
homepage = "lammich_homepage"
[authors.neumann]
-email = "neumann_email1"
+email = "neumann_email"
[authors.nipkow]
homepage = "nipkow_homepage"
[authors.schimpf]
email = "schimpf_email"
[authors.smaus]
homepage = "smaus_homepage"
[contributors]
[notify]
lammich = "lammich_email"
[history]
[extra]
diff --git a/metadata/entries/CRDT.toml b/metadata/entries/CRDT.toml
--- a/metadata/entries/CRDT.toml
+++ b/metadata/entries/CRDT.toml
@@ -1,47 +1,47 @@
title = "A framework for establishing Strong Eventual Consistency for Conflict-free Replicated Datatypes"
date = 2017-07-07
topics = [
"Computer science/Algorithms/Distributed",
"Computer science/Data structures",
]
abstract = """
In this work, we focus on the correctness of Conflict-free Replicated
Data Types (CRDTs), a class of algorithm that provides strong eventual
consistency guarantees for replicated data. We develop a modular and
reusable framework for verifying the correctness of CRDT algorithms.
We avoid correctness issues that have dogged previous mechanised
proofs in this area by including a network model in our formalisation,
and proving that our theorems hold in all possible network behaviours.
Our axiomatic network model is a standard abstraction that accurately
reflects the behaviour of real-world computer networks. Moreover, we
identify an abstract convergence theorem, a property of order
relations, which provides a formal definition of strong eventual
consistency. We then obtain the first machine-checked correctness
theorems for three concrete CRDTs: the Replicated Growable Array, the
Observed-Remove Set, and an Increment-Decrement Counter."""
license = "bsd"
note = ""
[authors]
[authors.gomes]
-email = "gomes_email1"
+email = "gomes_email"
[authors.kleppmann]
email = "kleppmann_email"
[authors.mulligan]
email = "mulligan_email"
[authors.beresford]
email = "beresford_email"
[contributors]
[notify]
-gomes = "gomes_email1"
+gomes = "gomes_email"
mulligan = "mulligan_email"
[history]
[extra]
diff --git a/metadata/entries/CYK.toml b/metadata/entries/CYK.toml
--- a/metadata/entries/CYK.toml
+++ b/metadata/entries/CYK.toml
@@ -1,28 +1,28 @@
title = "A formalisation of the Cocke-Younger-Kasami algorithm"
date = 2016-04-27
topics = [
"Computer science/Algorithms",
"Computer science/Automata and formal languages",
]
abstract = """
The theory provides a formalisation of the Cocke-Younger-Kasami
algorithm (CYK for short), an approach to solving the word problem
for context-free languages. CYK decides if a word is in the
languages generated by a context-free grammar in Chomsky normal form.
The formalized algorithm is executable."""
license = "bsd"
note = ""
[authors]
[authors.bortin]
email = "bortin_email"
[contributors]
[notify]
-bortin = "bortin_email1"
+bortin = "bortin_email"
[history]
[extra]
diff --git a/metadata/entries/Category.toml b/metadata/entries/Category.toml
--- a/metadata/entries/Category.toml
+++ b/metadata/entries/Category.toml
@@ -1,23 +1,23 @@
title = "Category Theory to Yoneda's Lemma"
date = 2005-04-21
topics = [
"Mathematics/Category theory",
]
abstract = "This development proves Yoneda's lemma and aims to be readable by humans. It only defines what is needed for the lemma: categories, functors and natural transformations. Limits, adjunctions and other important concepts are not included."
license = "lgpl"
note = ""
[authors]
[authors.keefe]
homepage = "keefe_homepage"
[contributors]
[notify]
-paulson = "paulson_email1"
+paulson = "paulson_email"
[history]
2010-04-23 = "The definition of the constant equinumerous was slightly too weak in the original submission and has been fixed in revision 8c2b5b3c995f."
[extra]
diff --git a/metadata/entries/Closest_Pair_Points.toml b/metadata/entries/Closest_Pair_Points.toml
--- a/metadata/entries/Closest_Pair_Points.toml
+++ b/metadata/entries/Closest_Pair_Points.toml
@@ -1,33 +1,33 @@
title = "Closest Pair of Points Algorithms"
date = 2020-01-13
topics = [
"Computer science/Algorithms/Geometry",
]
abstract = """
This entry provides two related verified divide-and-conquer algorithms
solving the fundamental Closest Pair of Points
problem in Computational Geometry. Functional correctness and the
optimal running time of O(n log n) are
proved. Executable code is generated which is empirically competitive
with handwritten reference implementations."""
license = "bsd"
note = ""
[authors]
[authors.rau]
email = "rau_email"
[authors.nipkow]
-homepage = "nipkow_homepage2"
+homepage = "nipkow_homepage"
[contributors]
[notify]
rau = "rau_email"
nipkow = "nipkow_email"
[history]
2020-04-14 = "Incorporate Time_Monad of the AFP entry Root_Balanced_Tree."
[extra]
diff --git a/metadata/entries/Complex_Bounded_Operators.toml b/metadata/entries/Complex_Bounded_Operators.toml
--- a/metadata/entries/Complex_Bounded_Operators.toml
+++ b/metadata/entries/Complex_Bounded_Operators.toml
@@ -1,37 +1,37 @@
title = "Complex Bounded Operators"
date = 2021-09-18
topics = [
"Mathematics/Analysis",
]
abstract = """
We present a formalization of bounded operators on complex vector
spaces. Our formalization contains material on complex vector spaces
(normed spaces, Banach spaces, Hilbert spaces) that complements and
goes beyond the developments of real vectors spaces in the
Isabelle/HOL standard library. We define the type of bounded
operators between complex vector spaces
(cblinfun) and develop the theory of unitaries,
projectors, extension of bounded linear functions (BLT theorem),
adjoints, Loewner order, closed subspaces and more. For the
finite-dimensional case, we provide code generation support by
identifying finite-dimensional operators with matrices as formalized
in the Jordan_Normal_Form AFP entry."""
license = "bsd"
note = ""
[authors]
[authors.caballero]
homepage = "caballero_homepage"
[authors.unruh]
-homepage = "unruh_homepage1"
+homepage = "unruh_homepage"
[contributors]
[notify]
unruh = "unruh_email"
[history]
[extra]
diff --git a/metadata/entries/Concurrent_Ref_Alg.toml b/metadata/entries/Concurrent_Ref_Alg.toml
--- a/metadata/entries/Concurrent_Ref_Alg.toml
+++ b/metadata/entries/Concurrent_Ref_Alg.toml
@@ -1,46 +1,46 @@
title = "Concurrent Refinement Algebra and Rely Quotients"
date = 2016-12-30
topics = [
"Computer science/Concurrency",
]
abstract = """
The concurrent refinement algebra developed here is designed to
provide a foundation for rely/guarantee reasoning about concurrent
programs. The algebra builds on a complete lattice of commands by
providing sequential composition, parallel composition and a novel
weak conjunction operator. The weak conjunction operator coincides
with the lattice supremum providing its arguments are non-aborting,
but aborts if either of its arguments do. Weak conjunction provides an
abstract version of a guarantee condition as a guarantee process. We
distinguish between models that distribute sequential composition over
non-deterministic choice from the left (referred to as being
conjunctive in the refinement calculus literature) and those that
don't. Least and greatest fixed points of monotone functions are
provided to allow recursion and iteration operators to be added to the
language. Additional iteration laws are available for conjunctive
models. The rely quotient of processes c and
i is the process that, if executed in parallel with
i implements c. It represents an
abstract version of a rely condition generalised to a process."""
license = "bsd"
note = ""
[authors]
[authors.fell]
email = "fell_email"
[authors.hayes]
email = "hayes_email"
[authors.velykis]
homepage = "velykis_homepage"
[contributors]
[notify]
-hayes = "hayes_email1"
+hayes = "hayes_email"
[history]
[extra]
diff --git a/metadata/entries/Constructive_Cryptography.toml b/metadata/entries/Constructive_Cryptography.toml
--- a/metadata/entries/Constructive_Cryptography.toml
+++ b/metadata/entries/Constructive_Cryptography.toml
@@ -1,36 +1,36 @@
title = "Constructive Cryptography in HOL"
date = 2018-12-17
topics = [
"Computer science/Security/Cryptography",
"Mathematics/Probability theory",
]
abstract = """
Inspired by Abstract Cryptography, we extend CryptHOL, a framework for
formalizing game-based proofs, with an abstract model of Random
Systems and provide proof rules about their composition and equality.
This foundation facilitates the formalization of Constructive
Cryptography proofs, where the security of a cryptographic scheme is
realized as a special form of construction in which a complex random
system is built from simpler ones. This is a first step towards a
fully-featured compositional framework, similar to Universal
Composability framework, that supports formalization of
simulation-based proofs."""
license = "bsd"
note = ""
[authors]
[authors.lochbihler]
-homepage = "lochbihler_homepage1"
+homepage = "lochbihler_homepage"
[authors.sefidgar]
[contributors]
[notify]
lochbihler = "lochbihler_email1"
sefidgar = "sefidgar_email"
[history]
[extra]
diff --git a/metadata/entries/Core_SC_DOM.toml b/metadata/entries/Core_SC_DOM.toml
--- a/metadata/entries/Core_SC_DOM.toml
+++ b/metadata/entries/Core_SC_DOM.toml
@@ -1,40 +1,40 @@
title = "The Safely Composable DOM"
date = 2020-09-28
topics = [
"Computer science/Data structures",
]
abstract = """
In this AFP entry, we formalize the core of the Safely Composable
Document Object Model (SC DOM). The SC DOM improves the standard DOM
(as formalized in the AFP entry \"Core DOM\") by strengthening
the tree boundaries set by shadow roots: in the SC DOM, the shadow
root is a sub-class of the document class (instead of a base class).
This modification also results in changes to some API methods (e.g.,
getOwnerDocument) to return the nearest shadow root rather than the
document root. As a result, many API methods that, when called on a
node inside a shadow tree, would previously ``break out''
and return or modify nodes that are possibly outside the shadow tree,
now stay within its boundaries. This change in behavior makes programs
that operate on shadow trees more predictable for the developer and
allows them to make more assumptions about other code accessing the
DOM."""
license = "bsd"
note = ""
[authors]
[authors.brucker]
-homepage = "brucker_homepage1"
+homepage = "brucker_homepage"
[authors.herzberg]
homepage = "herzberg_homepage"
[contributors]
[notify]
brucker = "brucker_email2"
herzberg = "herzberg_email"
[history]
[extra]
diff --git a/metadata/entries/Correctness_Algebras.toml b/metadata/entries/Correctness_Algebras.toml
--- a/metadata/entries/Correctness_Algebras.toml
+++ b/metadata/entries/Correctness_Algebras.toml
@@ -1,34 +1,34 @@
title = "Algebras for Iteration, Infinite Executions and Correctness of Sequential Computations"
date = 2021-10-12
topics = [
"Computer science/Programming languages/Logics",
]
abstract = """
We study models of state-based non-deterministic sequential
computations and describe them using algebras. We propose algebras
that describe iteration for strict and non-strict computations. They
unify computation models which differ in the fixpoints used to
represent iteration. We propose algebras that describe the infinite
executions of a computation. They lead to a unified approximation
order and results that connect fixpoints in the approximation and
refinement orders. This unifies the semantics of recursion for a range
of computation models. We propose algebras that describe preconditions
and the effect of while-programs under postconditions. They unify
correctness statements in two dimensions: one statement applies in
various computation models to various correctness claims."""
license = "bsd"
note = ""
[authors]
[authors.guttmann]
-homepage = "guttmann_homepage1"
+homepage = "guttmann_homepage"
[contributors]
[notify]
guttmann = "guttmann_email"
[history]
[extra]
diff --git a/metadata/entries/DOM_Components.toml b/metadata/entries/DOM_Components.toml
--- a/metadata/entries/DOM_Components.toml
+++ b/metadata/entries/DOM_Components.toml
@@ -1,38 +1,38 @@
title = "A Formalization of Web Components"
date = 2020-09-28
topics = [
"Computer science/Data structures",
]
abstract = """
While the DOM with shadow trees provide the technical basis for
defining web components, the DOM standard neither defines the concept
of web components nor specifies the safety properties that web
components should guarantee. Consequently, the standard also does not
discuss how or even if the methods for modifying the DOM respect
component boundaries. In this AFP entry, we present a formally verified
model of web components and define safety properties which ensure that
different web components can only interact with each other using
well-defined interfaces. Moreover, our verification of the application
programming interface (API) of the DOM revealed numerous invariants
that implementations of the DOM API need to preserve to ensure the
integrity of components."""
license = "bsd"
note = ""
[authors]
[authors.brucker]
-homepage = "brucker_homepage1"
+homepage = "brucker_homepage"
[authors.herzberg]
homepage = "herzberg_homepage"
[contributors]
[notify]
brucker = "brucker_email2"
herzberg = "herzberg_email"
[history]
[extra]
diff --git a/metadata/entries/Design_Theory.toml b/metadata/entries/Design_Theory.toml
--- a/metadata/entries/Design_Theory.toml
+++ b/metadata/entries/Design_Theory.toml
@@ -1,41 +1,41 @@
title = "Combinatorial Design Theory"
date = 2021-08-13
topics = [
"Mathematics/Combinatorics",
]
abstract = """
Combinatorial design theory studies incidence set systems with certain
balance and symmetry properties. It is closely related to hypergraph
theory. This formalisation presents a general library for formal
reasoning on incidence set systems, designs and their applications,
including formal definitions and proofs for many key properties,
operations, and theorems on the construction and existence of designs.
Notably, this includes formalising t-designs, balanced incomplete
block designs (BIBD), group divisible designs (GDD), pairwise balanced
designs (PBD), design isomorphisms, and the relationship between
graphs and designs. A locale-centric approach has been used to manage
the relationships between the many different types of designs.
Theorems of particular interest include the necessary conditions for
existence of a BIBD, Wilson's construction on GDDs, and
Bose's inequality on resolvable designs. Parts of this
formalisation are explored in the paper \"A Modular First
Formalisation of Combinatorial Design Theory\", presented at CICM 2021."""
license = "bsd"
note = ""
[authors]
[authors.edmonds]
homepage = "edmonds_homepage"
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
edmonds = "edmonds_email"
[history]
[extra]
diff --git a/metadata/entries/Differential_Game_Logic.toml b/metadata/entries/Differential_Game_Logic.toml
--- a/metadata/entries/Differential_Game_Logic.toml
+++ b/metadata/entries/Differential_Game_Logic.toml
@@ -1,35 +1,35 @@
title = "Differential Game Logic"
date = 2019-06-03
topics = [
"Computer science/Programming languages/Logics",
]
abstract = """
This formalization provides differential game logic (dGL), a logic for
proving properties of hybrid games. In addition to the syntax and
semantics, it formalizes a uniform substitution calculus for dGL.
Church's uniform substitutions substitute a term or formula for a
function or predicate symbol everywhere. The uniform substitutions for
dGL also substitute hybrid games for a game symbol everywhere. We
prove soundness of one-pass uniform substitutions and the axioms of
differential game logic with respect to their denotational semantics.
One-pass uniform substitutions are faster by postponing
soundness-critical admissibility checks with a linear pass homomorphic
application and regain soundness by a variable condition at the
replacements. The formalization is based on prior non-mechanized
soundness proofs for dGL."""
license = "bsd"
note = ""
[authors]
[authors.platzer]
-homepage = "platzer_homepage1"
+homepage = "platzer_homepage"
[contributors]
[notify]
platzer = "platzer_email"
[history]
[extra]
diff --git a/metadata/entries/Factored_Transition_System_Bounding.toml b/metadata/entries/Factored_Transition_System_Bounding.toml
--- a/metadata/entries/Factored_Transition_System_Bounding.toml
+++ b/metadata/entries/Factored_Transition_System_Bounding.toml
@@ -1,43 +1,43 @@
title = "Upper Bounding Diameters of State Spaces of Factored Transition Systems"
date = 2018-10-12
topics = [
"Computer science/Automata and formal languages",
"Mathematics/Graph theory",
]
abstract = """
A completeness threshold is required to guarantee the completeness of
planning as satisfiability, and bounded model checking of safety
properties. One valid completeness threshold is the diameter of the
underlying transition system. The diameter is the maximum element in
the set of lengths of all shortest paths between pairs of states. The
diameter is not calculated exactly in our setting, where the
transition system is succinctly described using a (propositionally)
factored representation. Rather, an upper bound on the diameter is
calculated compositionally, by bounding the diameters of small
abstract subsystems, and then composing those. We port a HOL4
formalisation of a compositional algorithm for computing a relatively
tight upper bound on the system diameter. This compositional algorithm
exploits acyclicity in the state space to achieve compositionality,
and it was introduced by Abdulaziz et al. The formalisation that we
port is described as a part of another paper by Abdulaziz et al. As a
part of this porting we developed a library about transition systems,
which shall be of use in future related mechanisation efforts."""
license = "bsd"
note = ""
[authors]
[authors.kurz]
[authors.abdulaziz]
homepage = "abdulaziz_homepage"
[contributors]
[notify]
kurz = "kurz_email"
-abdulaziz = "abdulaziz_email2"
+abdulaziz = "abdulaziz_email"
[history]
[extra]
diff --git a/metadata/entries/Farkas.toml b/metadata/entries/Farkas.toml
--- a/metadata/entries/Farkas.toml
+++ b/metadata/entries/Farkas.toml
@@ -1,36 +1,36 @@
title = "Farkas' Lemma and Motzkin's Transposition Theorem"
date = 2019-01-17
topics = [
"Mathematics/Algebra",
]
abstract = """
We formalize a proof of Motzkin's transposition theorem and
Farkas' lemma in Isabelle/HOL. Our proof is based on the
formalization of the simplex algorithm which, given a set of linear
constraints, either returns a satisfying assignment to the problem or
detects unsatisfiability. By reusing facts about the simplex algorithm
we show that a set of linear constraints is unsatisfiable if and only
if there is a linear combination of the constraints which evaluates to
a trivially unsatisfiable inequality."""
license = "bsd"
note = ""
[authors]
[authors.bottesch]
homepage = "bottesch_homepage"
[authors.haslbeck]
homepage = "haslbeck_homepage"
[authors.thiemann]
-homepage = "thiemann_homepage1"
+homepage = "thiemann_homepage"
[contributors]
[notify]
thiemann = "thiemann_email"
[history]
[extra]
diff --git a/metadata/entries/First_Order_Terms.toml b/metadata/entries/First_Order_Terms.toml
--- a/metadata/entries/First_Order_Terms.toml
+++ b/metadata/entries/First_Order_Terms.toml
@@ -1,36 +1,36 @@
title = "First-Order Terms"
date = 2018-02-06
topics = [
"Logic/Rewriting",
"Computer science/Algorithms",
]
abstract = """
We formalize basic results on first-order terms, including matching and a
first-order unification algorithm, as well as well-foundedness of the
subsumption order. This entry is part of the Isabelle
Formalization of RewritingIsaFoR,
where first-order terms are omni-present: the unification algorithm is
used to certify several confluence and termination techniques, like
critical-pair computation and dependency graph approximations; and the
subsumption order is a crucial ingredient for completion."""
license = "lgpl"
note = ""
[authors]
[authors.sternagel]
email = "sternagel_email"
[authors.thiemann]
-homepage = "thiemann_homepage2"
+homepage = "thiemann_homepage"
[contributors]
[notify]
sternagel = "sternagel_email"
thiemann = "thiemann_email"
[history]
[extra]
diff --git a/metadata/entries/Floyd_Warshall.toml b/metadata/entries/Floyd_Warshall.toml
--- a/metadata/entries/Floyd_Warshall.toml
+++ b/metadata/entries/Floyd_Warshall.toml
@@ -1,38 +1,38 @@
title = "The Floyd-Warshall Algorithm for Shortest Paths"
date = 2017-05-08
topics = [
"Computer science/Algorithms/Graph",
]
abstract = """
The Floyd-Warshall algorithm [Flo62, Roy59, War62] is a classic
dynamic programming algorithm to compute the length of all shortest
paths between any two vertices in a graph (i.e. to solve the all-pairs
shortest path problem, or APSP for short). Given a representation of
the graph as a matrix of weights M, it computes another matrix M'
which represents a graph with the same path lengths and contains the
length of the shortest path between any two vertices i and j. This is
only possible if the graph does not contain any negative cycles.
However, in this case the Floyd-Warshall algorithm will detect the
situation by calculating a negative diagonal entry. This entry
includes a formalization of the algorithm and of these key properties.
The algorithm is refined to an efficient imperative version using the
Imperative Refinement Framework."""
license = "bsd"
note = ""
[authors]
[authors.wimmer]
homepage = "wimmer_homepage"
[authors.lammich]
homepage = "lammich_homepage"
[contributors]
[notify]
-wimmer = "wimmer_email1"
+wimmer = "wimmer_email"
[history]
[extra]
diff --git a/metadata/entries/Forcing.toml b/metadata/entries/Forcing.toml
--- a/metadata/entries/Forcing.toml
+++ b/metadata/entries/Forcing.toml
@@ -1,34 +1,34 @@
title = "Formalization of Forcing in Isabelle/ZF"
date = 2020-05-06
topics = [
"Logic/Set theory",
]
abstract = """
We formalize the theory of forcing in the set theory framework of
Isabelle/ZF. Under the assumption of the existence of a countable
transitive model of ZFC, we construct a proper generic extension and
show that the latter also satisfies ZFC."""
license = "bsd"
note = ""
[authors]
[authors.gunther]
email = "gunther_email"
[authors.pagano]
homepage = "pagano_homepage"
[authors.terraf]
-homepage = "terraf_homepage1"
+homepage = "terraf_homepage"
[contributors]
[notify]
gunther = "gunther_email"
pagano = "pagano_email"
terraf = "terraf_email"
[history]
[extra]
diff --git a/metadata/entries/Fourier.toml b/metadata/entries/Fourier.toml
--- a/metadata/entries/Fourier.toml
+++ b/metadata/entries/Fourier.toml
@@ -1,27 +1,27 @@
title = "Fourier Series"
date = 2019-09-06
topics = [
"Mathematics/Analysis",
]
abstract = """
This development formalises the square integrable functions over the
reals and the basics of Fourier series. It culminates with a proof
that every well-behaved periodic function can be approximated by a
Fourier series. The material is ported from HOL Light:
https://github.com/jrh13/hol-light/blob/master/100/fourier.ml"""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Generic_Join.toml b/metadata/entries/Generic_Join.toml
--- a/metadata/entries/Generic_Join.toml
+++ b/metadata/entries/Generic_Join.toml
@@ -1,31 +1,31 @@
title = "Formalization of Multiway-Join Algorithms"
date = 2019-09-16
topics = [
"Computer science/Algorithms",
]
abstract = """
Worst-case optimal multiway-join algorithms are recent seminal
achievement of the database community. These algorithms compute the
natural join of multiple relational databases and improve in the worst
case over traditional query plan optimizations of nested binary joins.
In 2014, Ngo, Ré,
and Rudra gave a unified presentation of different multi-way
join algorithms. We formalized and proved correct their \"Generic
Join\" algorithm and extended it to support negative joins."""
license = "bsd"
note = ""
[authors]
[authors.dardinier]
[contributors]
[notify]
-dardinier = "dardinier_email1"
+dardinier = "dardinier_email"
traytel = "traytel_email1"
[history]
[extra]
diff --git a/metadata/entries/Grothendieck_Schemes.toml b/metadata/entries/Grothendieck_Schemes.toml
--- a/metadata/entries/Grothendieck_Schemes.toml
+++ b/metadata/entries/Grothendieck_Schemes.toml
@@ -1,35 +1,35 @@
title = "Grothendieck's Schemes in Algebraic Geometry"
date = 2021-03-29
topics = [
"Mathematics/Algebra",
"Mathematics/Geometry",
]
abstract = """
We formalize mainstream structures in algebraic geometry culminating
in Grothendieck's schemes: presheaves of rings, sheaves of rings,
ringed spaces, locally ringed spaces, affine schemes and schemes. We
prove that the spectrum of a ring is a locally ringed space, hence an
affine scheme. Finally, we prove that any affine scheme is a scheme."""
license = "bsd"
note = ""
[authors]
[authors.bordg]
homepage = "bordg_homepage"
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[authors.li]
homepage = "li_homepage"
[contributors]
[notify]
bordg = "bordg_email"
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Hidden_Markov_Models.toml b/metadata/entries/Hidden_Markov_Models.toml
--- a/metadata/entries/Hidden_Markov_Models.toml
+++ b/metadata/entries/Hidden_Markov_Models.toml
@@ -1,33 +1,33 @@
title = "Hidden Markov Models"
date = 2018-05-25
topics = [
"Mathematics/Probability theory",
"Computer science/Algorithms",
]
abstract = """
This entry contains a formalization of hidden Markov models [3] based
on Johannes Hölzl's formalization of discrete time Markov chains
[1]. The basic definitions are provided and the correctness of two
main (dynamic programming) algorithms for hidden Markov models is
proved: the forward algorithm for computing the likelihood of an
observed sequence, and the Viterbi algorithm for decoding the most
probable hidden state sequence. The Viterbi algorithm is made
executable including memoization. Hidden Markov models have various
applications in natural language processing. For an introduction see
Jurafsky and Martin [2]."""
license = "bsd"
note = ""
[authors]
[authors.wimmer]
homepage = "wimmer_homepage"
[contributors]
[notify]
-wimmer = "wimmer_email1"
+wimmer = "wimmer_email"
[history]
[extra]
diff --git a/metadata/entries/Hoare_Time.toml b/metadata/entries/Hoare_Time.toml
--- a/metadata/entries/Hoare_Time.toml
+++ b/metadata/entries/Hoare_Time.toml
@@ -1,36 +1,36 @@
title = "Hoare Logics for Time Bounds"
date = 2018-02-26
topics = [
"Computer science/Programming languages/Logics",
]
abstract = """
We study three different Hoare logics for reasoning about time bounds
of imperative programs and formalize them in Isabelle/HOL: a classical
Hoare like logic due to Nielson, a logic with potentials due to
Carbonneaux et al. and a separation
logic following work by Atkey, Chaguérand and Pottier.
These logics are formally shown to be sound and complete. Verification
condition generators are developed and are shown sound and complete
too. We also consider variants of the systems where we abstract from
multiplicative constants in the running time bounds, thus supporting a
big-O style of reasoning. Finally we compare the expressive power of
the three systems."""
license = "bsd"
note = ""
[authors]
[authors.haslbeckm]
homepage = "haslbeckm_homepage"
[authors.nipkow]
-homepage = "nipkow_homepage3"
+homepage = "nipkow_homepage"
[contributors]
[notify]
haslbeckm = "haslbeckm_email"
[history]
[extra]
diff --git a/metadata/entries/Independence_CH.toml b/metadata/entries/Independence_CH.toml
--- a/metadata/entries/Independence_CH.toml
+++ b/metadata/entries/Independence_CH.toml
@@ -1,36 +1,36 @@
title = "The Independence of the Continuum Hypothesis in Isabelle/ZF"
date = 2022-03-06
topics = [
"Logic/Set theory",
]
abstract = """
We redeveloped our formalization of forcing in the set theory
framework of Isabelle/ZF. Under the assumption of the existence of a
countable transitive model of ZFC, we construct proper generic
extensions that satisfy the Continuum Hypothesis and its negation."""
license = "bsd"
note = ""
[authors]
[authors.gunther]
email = "gunther_email"
[authors.pagano]
homepage = "pagano_homepage"
[authors.terraf]
-homepage = "terraf_homepage2"
+homepage = "terraf_homepage"
[authors.steinberg]
email = "steinberg_email"
[contributors]
[notify]
-terraf = "terraf_email1"
-pagano = "pagano_email1"
+terraf = "terraf_email"
+pagano = "pagano_email"
[history]
[extra]
diff --git a/metadata/entries/Irrationality_J_Hancl.toml b/metadata/entries/Irrationality_J_Hancl.toml
--- a/metadata/entries/Irrationality_J_Hancl.toml
+++ b/metadata/entries/Irrationality_J_Hancl.toml
@@ -1,33 +1,33 @@
title = "Irrational Rapidly Convergent Series"
date = 2018-05-23
topics = [
"Mathematics/Number theory",
"Mathematics/Analysis",
]
abstract = """
We formalize with Isabelle/HOL a proof of a theorem by J. Hancl asserting the
irrationality of the sum of a series consisting of rational numbers, built up
by sequences that fulfill certain properties. Even though the criterion is a
number theoretic result, the proof makes use only of analytical arguments. We
also formalize a corollary of the theorem for a specific series fulfilling the
assumptions of the theorem."""
license = "bsd"
note = ""
[authors]
[authors.argyraki]
-homepage = "argyraki_homepage1"
+homepage = "argyraki_homepage"
[authors.li]
-homepage = "li_homepage1"
+homepage = "li_homepage"
[contributors]
[notify]
argyraki = "argyraki_email"
li = "li_email"
[history]
[extra]
diff --git a/metadata/entries/Irrationals_From_THEBOOK.toml b/metadata/entries/Irrationals_From_THEBOOK.toml
--- a/metadata/entries/Irrationals_From_THEBOOK.toml
+++ b/metadata/entries/Irrationals_From_THEBOOK.toml
@@ -1,32 +1,32 @@
title = "Irrational numbers from THE BOOK"
date = 2022-01-08
topics = [
"Mathematics/Number theory",
]
abstract = """
An elementary proof is formalised: that exp r is irrational for
every nonzero rational number r. The mathematical development comes
from the well-known volume Proofs from THE BOOK,
by Aigner and Ziegler, who credit the idea to Hermite. The development
illustrates a number of basic Isabelle techniques: the manipulation of
summations, the calculation of quite complicated derivatives and the
estimation of integrals. We also see how to import another AFP entry (Stirling's formula).
As for the theorem itself, note that a much stronger and more general
result (the Hermite--Lindemann--Weierstraß transcendence theorem) is
already available in the AFP."""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/JiveDataStoreModel.toml b/metadata/entries/JiveDataStoreModel.toml
--- a/metadata/entries/JiveDataStoreModel.toml
+++ b/metadata/entries/JiveDataStoreModel.toml
@@ -1,25 +1,25 @@
title = "Jive Data and Store Model"
date = 2005-06-20
topics = [
"Computer science/Programming languages/Misc",
]
abstract = "This document presents the formalization of an object-oriented data and store model in Isabelle/HOL. This model is being used in the Java Interactive Verification Environment, Jive."
license = "lgpl"
note = ""
[authors]
[authors.rauch]
email = "rauch_email"
[authors.schirmer]
[contributors]
[notify]
klein = "klein_email"
-schirmer = "schirmer_email1"
+schirmer = "schirmer_email"
[history]
[extra]
diff --git a/metadata/entries/KAD.toml b/metadata/entries/KAD.toml
--- a/metadata/entries/KAD.toml
+++ b/metadata/entries/KAD.toml
@@ -1,49 +1,49 @@
title = "Kleene Algebras with Domain"
date = 2016-04-12
topics = [
"Computer science/Programming languages/Logics",
"Computer science/Automata and formal languages",
"Mathematics/Algebra",
]
abstract = """
Kleene algebras with domain are Kleene algebras endowed with an
operation that maps each element of the algebra to its domain of
definition (or its complement) in abstract fashion. They form a simple
algebraic basis for Hoare logics, dynamic logics or predicate
transformer semantics. We formalise a modular hierarchy of algebras
with domain and antidomain (domain complement) operations in
Isabelle/HOL that ranges from domain and antidomain semigroups to
modal Kleene algebras and divergence Kleene algebras. We link these
algebras with models of binary relations and program traces. We
include some examples from modal logics, termination and program
analysis."""
license = "bsd"
note = ""
[authors]
[authors.gomes]
homepage = "gomes_homepage"
[authors.guttmann]
homepage = "guttmann_homepage"
[authors.hoefner]
homepage = "hoefner_homepage"
[authors.struth]
homepage = "struth_homepage"
[authors.weber]
homepage = "weber_homepage"
[contributors]
[notify]
-guttmann = "guttmann_email1"
+guttmann = "guttmann_email"
struth = "struth_email"
weber = "weber_email"
[history]
[extra]
diff --git a/metadata/entries/KAT_and_DRA.toml b/metadata/entries/KAT_and_DRA.toml
--- a/metadata/entries/KAT_and_DRA.toml
+++ b/metadata/entries/KAT_and_DRA.toml
@@ -1,38 +1,38 @@
title = "Kleene Algebra with Tests and Demonic Refinement Algebras"
date = 2014-01-23
topics = [
"Computer science/Programming languages/Logics",
"Computer science/Automata and formal languages",
"Mathematics/Algebra",
]
abstract = """
We formalise Kleene algebra with tests (KAT) and demonic refinement
algebra (DRA) in Isabelle/HOL. KAT is relevant for program verification
and correctness proofs in the partial correctness setting, while DRA
targets similar applications in the context of total correctness. Our
formalisation contains the two most important models of these algebras:
binary relations in the case of KAT and predicate transformers in the
case of DRA. In addition, we derive the inference rules for Hoare logic
in KAT and its relational model and present a simple formally verified
program verification tool prototype based on the algebraic approach."""
license = "bsd"
note = ""
[authors]
[authors.armstrong]
[authors.gomes]
homepage = "gomes_homepage"
[authors.struth]
homepage = "struth_homepage"
[contributors]
[notify]
-struth = "struth_email1"
+struth = "struth_email"
[history]
[extra]
diff --git a/metadata/entries/Kleene_Algebra.toml b/metadata/entries/Kleene_Algebra.toml
--- a/metadata/entries/Kleene_Algebra.toml
+++ b/metadata/entries/Kleene_Algebra.toml
@@ -1,52 +1,52 @@
title = "Kleene Algebra"
date = 2013-01-15
topics = [
"Computer science/Programming languages/Logics",
"Computer science/Automata and formal languages",
"Mathematics/Algebra",
]
abstract = """
These files contain a formalisation of variants of Kleene algebras and
their most important models as axiomatic type classes in Isabelle/HOL.
Kleene algebras are foundational structures in computing with
applications ranging from automata and language theory to computational
modeling, program construction and verification.
We start with formalising dioids, which are additively idempotent
semirings, and expand them by axiomatisations of the Kleene star for
finite iteration and an omega operation for infinite iteration. We
show that powersets over a given monoid, (regular) languages, sets of
paths in a graph, sets of computation traces, binary relations and
formal power series form Kleene algebras, and consider further models
based on lattices, max-plus semirings and min-plus semirings. We also
demonstrate that dioids are closed under the formation of matrices
(proofs for Kleene algebras remain to be completed).
On the one hand we have aimed at a reference formalisation of variants
of Kleene algebras that covers a wide range of variants and the core
theorems in a structured and modular way and provides readable proofs
at text book level. On the other hand, we intend to use this algebraic
hierarchy and its models as a generic algebraic middle-layer from which
programming applications can quickly be explored, implemented and verified."""
license = "bsd"
note = ""
[authors]
[authors.armstrong]
[authors.struth]
-homepage = "struth_homepage1"
+homepage = "struth_homepage"
[authors.weber]
homepage = "weber_homepage"
[contributors]
[notify]
struth = "struth_email"
weber = "weber_email"
[history]
[extra]
diff --git a/metadata/entries/Knuth_Bendix_Order.toml b/metadata/entries/Knuth_Bendix_Order.toml
--- a/metadata/entries/Knuth_Bendix_Order.toml
+++ b/metadata/entries/Knuth_Bendix_Order.toml
@@ -1,31 +1,31 @@
title = "A Formalization of Knuth–Bendix Orders"
date = 2020-05-13
topics = [
"Logic/Rewriting",
]
abstract = """
We define a generalized version of Knuth–Bendix orders,
including subterm coefficient functions. For these orders we formalize
several properties such as strong normalization, the subterm property,
closure properties under substitutions and contexts, as well as ground
totality."""
license = "bsd"
note = ""
[authors]
[authors.sternagel]
email = "sternagel_email"
[authors.thiemann]
-homepage = "thiemann_homepage1"
+homepage = "thiemann_homepage"
[contributors]
[notify]
sternagel = "sternagel_email"
thiemann = "thiemann_email"
[history]
[extra]
diff --git a/metadata/entries/Kruskal.toml b/metadata/entries/Kruskal.toml
--- a/metadata/entries/Kruskal.toml
+++ b/metadata/entries/Kruskal.toml
@@ -1,36 +1,36 @@
title = "Kruskal's Algorithm for Minimum Spanning Forest"
date = 2019-02-14
topics = [
"Computer science/Algorithms/Graph",
]
abstract = """
This Isabelle/HOL formalization defines a greedy algorithm for finding
a minimum weight basis on a weighted matroid and proves its
correctness. This algorithm is an abstract version of Kruskal's
algorithm. We interpret the abstract algorithm for the cycle matroid
(i.e. forests in a graph) and refine it to imperative executable code
using an efficient union-find data structure. Our formalization can
be instantiated for different graph representations. We provide
instantiations for undirected graphs and symmetric directed graphs."""
license = "bsd"
note = ""
[authors]
[authors.haslbeckm]
-homepage = "haslbeckm_homepage1"
+homepage = "haslbeckm_homepage"
[authors.lammich]
homepage = "lammich_homepage"
[authors.biendarra]
[contributors]
[notify]
haslbeckm = "haslbeckm_email"
lammich = "lammich_email"
[history]
[extra]
diff --git a/metadata/entries/LLL_Basis_Reduction.toml b/metadata/entries/LLL_Basis_Reduction.toml
--- a/metadata/entries/LLL_Basis_Reduction.toml
+++ b/metadata/entries/LLL_Basis_Reduction.toml
@@ -1,58 +1,58 @@
title = "A verified LLL algorithm"
date = 2018-02-02
topics = [
"Computer science/Algorithms/Mathematical",
"Mathematics/Algebra",
]
abstract = """
The Lenstra-Lenstra-Lovász basis reduction algorithm, also known as
LLL algorithm, is an algorithm to find a basis with short, nearly
orthogonal vectors of an integer lattice. Thereby, it can also be seen
as an approximation to solve the shortest vector problem (SVP), which
is an NP-hard problem, where the approximation quality solely depends
on the dimension of the lattice, but not the lattice itself. The
algorithm also possesses many applications in diverse fields of
computer science, from cryptanalysis to number theory, but it is
specially well-known since it was used to implement the first
polynomial-time algorithm to factor polynomials. In this work we
present the first mechanized soundness proof of the LLL algorithm to
compute short vectors in lattices. The formalization follows a
textbook by von zur Gathen and Gerhard."""
license = "bsd"
note = ""
[authors]
[authors.bottesch]
[authors.divason]
-homepage = "divason_homepage1"
+homepage = "divason_homepage"
[authors.haslbeck]
homepage = "haslbeck_homepage"
[authors.joosten]
-homepage = "joosten_homepage1"
+homepage = "joosten_homepage"
[authors.thiemann]
-homepage = "thiemann_homepage2"
+homepage = "thiemann_homepage"
[authors.yamada]
[contributors]
[notify]
bottesch = "bottesch_email"
-divason = "divason_email1"
+divason = "divason_email"
haslbeck = "haslbeck_email"
joosten = "joosten_email2"
thiemann = "thiemann_email"
yamada = "yamada_email1"
[history]
2018-04-16 = """
Integrated formal complexity bounds (Haslbeck, Thiemann)
"""
2018-05-25 = "Integrated much faster LLL implementation based on integer arithmetic (Bottesch, Haslbeck, Thiemann)"
[extra]
diff --git a/metadata/entries/LLL_Factorization.toml b/metadata/entries/LLL_Factorization.toml
--- a/metadata/entries/LLL_Factorization.toml
+++ b/metadata/entries/LLL_Factorization.toml
@@ -1,47 +1,47 @@
title = "A verified factorization algorithm for integer polynomials with polynomial complexity"
date = 2018-02-06
topics = [
"Mathematics/Algebra",
]
abstract = """
Short vectors in lattices and factors of integer polynomials are
related. Each factor of an integer polynomial belongs to a certain
lattice. When factoring polynomials, the condition that we are looking
for an irreducible polynomial means that we must look for a small
element in a lattice, which can be done by a basis reduction
algorithm. In this development we formalize this connection and
thereby one main application of the LLL basis reduction algorithm: an
algorithm to factor square-free integer polynomials which runs in
polynomial time. The work is based on our previous
Berlekamp–Zassenhaus development, where the exponential reconstruction
phase has been replaced by the polynomial-time basis reduction
algorithm. Thanks to this formalization we found a serious flaw in a
textbook."""
license = "bsd"
note = ""
[authors]
[authors.divason]
-homepage = "divason_homepage1"
+homepage = "divason_homepage"
[authors.joosten]
-homepage = "joosten_homepage1"
+homepage = "joosten_homepage"
[authors.thiemann]
-homepage = "thiemann_homepage2"
+homepage = "thiemann_homepage"
[authors.yamada]
email = "yamada_email1"
[contributors]
[notify]
-divason = "divason_email1"
+divason = "divason_email"
joosten = "joosten_email2"
thiemann = "thiemann_email"
yamada = "yamada_email1"
[history]
[extra]
diff --git a/metadata/entries/LTL.toml b/metadata/entries/LTL.toml
--- a/metadata/entries/LTL.toml
+++ b/metadata/entries/LTL.toml
@@ -1,32 +1,32 @@
title = "Linear Temporal Logic"
date = 2016-03-01
topics = [
"Logic/General logic/Temporal logic",
"Computer science/Automata and formal languages",
]
abstract = """
This theory provides a formalisation of linear temporal logic (LTL)
and unifies previous formalisations within the AFP. This entry
establishes syntax and semantics for this logic and decouples it from
existing entries, yielding a common environment for theories reasoning
about LTL. Furthermore a parser written in SML and an executable
simplifier are provided."""
license = "bsd"
note = ""
[authors]
[authors.sickert]
homepage = "sickert_homepage"
[contributors]
[contributors.seidl]
email = "seidl_email"
[notify]
-sickert = "sickert_email1"
+sickert = "sickert_email"
[history]
[extra]
diff --git a/metadata/entries/LTL_to_DRA.toml b/metadata/entries/LTL_to_DRA.toml
--- a/metadata/entries/LTL_to_DRA.toml
+++ b/metadata/entries/LTL_to_DRA.toml
@@ -1,26 +1,26 @@
title = "Converting Linear Temporal Logic to Deterministic (Generalized) Rabin Automata"
date = 2015-09-04
topics = [
"Computer science/Automata and formal languages",
]
abstract = "Recently, Javier Esparza and Jan Kretinsky proposed a new method directly translating linear temporal logic (LTL) formulas to deterministic (generalized) Rabin automata. Compared to the existing approaches of constructing a non-deterministic Buechi-automaton in the first step and then applying a determinization procedure (e.g. some variant of Safra's construction) in a second step, this new approach preservers a relation between the formula and the states of the resulting automaton. While the old approach produced a monolithic structure, the new method is compositional. Furthermore, in some cases the resulting automata are much smaller than the automata generated by existing approaches. In order to ensure the correctness of the construction, this entry contains a complete formalisation and verification of the translation. Furthermore from this basis executable code is generated."
license = "bsd"
note = ""
[authors]
[authors.sickert]
-email = "sickert_email1"
+email = "sickert_email"
[contributors]
[notify]
-sickert = "sickert_email1"
+sickert = "sickert_email"
[history]
2015-09-23 = """
Enable code export for the eager unfolding optimisation and reduce running time of the generated tool. Moreover, add support for the mlton SML compiler.
"""
2016-03-24 = "Make use of the LTL entry and include the simplifier."
[extra]
diff --git a/metadata/entries/Laplace_Transform.toml b/metadata/entries/Laplace_Transform.toml
--- a/metadata/entries/Laplace_Transform.toml
+++ b/metadata/entries/Laplace_Transform.toml
@@ -1,29 +1,29 @@
title = "Laplace Transform"
date = 2019-08-14
topics = [
"Mathematics/Analysis",
]
abstract = """
This entry formalizes the Laplace transform and concrete Laplace
transforms for arithmetic functions, frequency shift, integration and
(higher) differentiation in the time domain. It proves Lerch's
lemma and uniqueness of the Laplace transform for continuous
functions. In order to formalize the foundational assumptions, this
entry contains a formalization of piecewise continuous functions and
functions of exponential order."""
license = "bsd"
note = ""
[authors]
[authors.immler]
-homepage = "immler_homepage1"
+homepage = "immler_homepage"
[contributors]
[notify]
immler = "immler_email1"
[history]
[extra]
diff --git a/metadata/entries/Lazy-Lists-II.toml b/metadata/entries/Lazy-Lists-II.toml
--- a/metadata/entries/Lazy-Lists-II.toml
+++ b/metadata/entries/Lazy-Lists-II.toml
@@ -1,21 +1,21 @@
title = "Lazy Lists II"
date = 2004-04-26
topics = [
"Computer science/Data structures",
]
abstract = "This theory contains some useful extensions to the LList (lazy list) theory by Larry Paulson, including finite, infinite, and positive llists over an alphabet, as well as the new constants take and drop and the prefix order of llists. Finally, the notions of safety and liveness in the sense of Alpern and Schneider (1985) are defined."
license = "bsd"
note = ""
[authors]
[authors.friedrich]
[contributors]
[notify]
-paulson = "paulson_email1"
+paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Linear_Inequalities.toml b/metadata/entries/Linear_Inequalities.toml
--- a/metadata/entries/Linear_Inequalities.toml
+++ b/metadata/entries/Linear_Inequalities.toml
@@ -1,35 +1,35 @@
title = "Linear Inequalities"
date = 2019-06-21
topics = [
"Mathematics/Algebra",
]
abstract = """
We formalize results about linear inqualities, mainly from
Schrijver's book. The main results are the proof of the
fundamental theorem on linear inequalities, Farkas' lemma,
Carathéodory's theorem, the Farkas-Minkowsky-Weyl theorem, the
decomposition theorem of polyhedra, and Meyer's result that the
integer hull of a polyhedron is a polyhedron itself. Several theorems
include bounds on the appearing numbers, and in particular we provide
an a-priori bound on mixed-integer solutions of linear inequalities."""
license = "bsd"
note = ""
[authors]
[authors.bottesch]
homepage = "bottesch_homepage"
[authors.reynaud]
[authors.thiemann]
-homepage = "thiemann_homepage1"
+homepage = "thiemann_homepage"
[contributors]
[notify]
thiemann = "thiemann_email"
[history]
[extra]
diff --git a/metadata/entries/Linear_Programming.toml b/metadata/entries/Linear_Programming.toml
--- a/metadata/entries/Linear_Programming.toml
+++ b/metadata/entries/Linear_Programming.toml
@@ -1,32 +1,32 @@
title = "Linear Programming"
date = 2019-08-06
topics = [
"Mathematics/Algebra",
]
abstract = """
We use the previous formalization of the general simplex algorithm to
formulate an algorithm for solving linear programs. We encode the
linear programs using only linear constraints. Solving these
constraints also solves the original linear program. This algorithm is
proven to be sound by applying the weak duality theorem which is also
part of this formalization."""
license = "bsd"
note = ""
[authors]
[authors.parsert]
homepage = "parsert_homepage"
[authors.kaliszyk]
-homepage = "kaliszyk_homepage1"
+homepage = "kaliszyk_homepage"
[contributors]
[notify]
parsert = "parsert_email"
kaliszyk = "kaliszyk_email"
[history]
[extra]
diff --git a/metadata/entries/List_Update.toml b/metadata/entries/List_Update.toml
--- a/metadata/entries/List_Update.toml
+++ b/metadata/entries/List_Update.toml
@@ -1,35 +1,35 @@
title = "Analysis of List Update Algorithms"
date = 2016-02-17
topics = [
"Computer science/Algorithms/Online",
]
abstract = """
These theories formalize the quantitative analysis of a number of classical algorithms for the list update problem: 2-competitiveness of move-to-front, the lower bound of 2 for the competitiveness of deterministic list update algorithms and 1.6-competitiveness of the randomized COMB algorithm, the best randomized list update algorithm known to date.
The material is based on the first two chapters of Online Computation
and Competitive Analysis by Borodin and El-Yaniv.
"""
license = "bsd"
note = ""
[authors]
[authors.haslbeckm]
-homepage = "haslbeckm_homepage1"
+homepage = "haslbeckm_homepage"
[authors.nipkow]
homepage = "nipkow_homepage"
[contributors]
[notify]
nipkow = "nipkow_email"
[history]
[extra]
diff --git a/metadata/entries/Lowe_Ontological_Argument.toml b/metadata/entries/Lowe_Ontological_Argument.toml
--- a/metadata/entries/Lowe_Ontological_Argument.toml
+++ b/metadata/entries/Lowe_Ontological_Argument.toml
@@ -1,34 +1,34 @@
title = "Computer-assisted Reconstruction and Assessment of E. J. Lowe's Modal Ontological Argument"
date = 2017-09-21
topics = [
"Logic/Philosophical aspects",
]
abstract = """
Computers may help us to understand --not just verify-- philosophical
arguments. By utilizing modern proof assistants in an iterative
interpretive process, we can reconstruct and assess an argument by
fully formal means. Through the mechanization of a variant of St.
Anselm's ontological argument by E. J. Lowe, which is a
paradigmatic example of a natural-language argument with strong ties
to metaphysics and religion, we offer an ideal showcase for our
computer-assisted interpretive method."""
license = "bsd"
note = ""
[authors]
[authors.fuenmayor]
email = "fuenmayor_email"
[authors.benzmueller]
-homepage = "benzmueller_homepage2"
+homepage = "benzmueller_homepage"
[contributors]
[notify]
fuenmayor = "fuenmayor_email"
benzmueller = "benzmueller_email"
[history]
[extra]
diff --git a/metadata/entries/Mereology.toml b/metadata/entries/Mereology.toml
--- a/metadata/entries/Mereology.toml
+++ b/metadata/entries/Mereology.toml
@@ -1,24 +1,24 @@
title = "Mereology"
date = 2021-03-01
topics = [
"Logic/Philosophical aspects",
]
abstract = """
We use Isabelle/HOL to verify elementary theorems and alternative
axiomatizations of classical extensional mereology."""
license = "bsd"
note = ""
[authors]
[authors.blumson]
-homepage = "blumson_homepage1"
+homepage = "blumson_homepage"
[contributors]
[notify]
blumson = "blumson_email"
[history]
[extra]
diff --git a/metadata/entries/Modular_arithmetic_LLL_and_HNF_algorithms.toml b/metadata/entries/Modular_arithmetic_LLL_and_HNF_algorithms.toml
--- a/metadata/entries/Modular_arithmetic_LLL_and_HNF_algorithms.toml
+++ b/metadata/entries/Modular_arithmetic_LLL_and_HNF_algorithms.toml
@@ -1,39 +1,39 @@
title = "Two algorithms based on modular arithmetic: lattice basis reduction and Hermite normal form computation"
date = 2021-03-12
topics = [
"Computer science/Algorithms/Mathematical",
]
abstract = """
We verify two algorithms for which modular arithmetic plays an
essential role: Storjohann's variant of the LLL lattice basis
reduction algorithm and Kopparty's algorithm for computing the
Hermite normal form of a matrix. To do this, we also formalize some
facts about the modulo operation with symmetric range. Our
implementations are based on the original papers, but are otherwise
efficient. For basis reduction we formalize two versions: one that
includes all of the optimizations/heuristics from Storjohann's
paper, and one excluding a heuristic that we observed to often
decrease efficiency. We also provide a fast, self-contained certifier
for basis reduction, based on the efficient Hermite normal form
algorithm."""
license = "bsd"
note = ""
[authors]
[authors.bottesch]
[authors.divason]
-homepage = "divason_homepage2"
+homepage = "divason_homepage"
[authors.thiemann]
email = "thiemann_email"
[contributors]
[notify]
thiemann = "thiemann_email"
[history]
[extra]
diff --git a/metadata/entries/Monad_Memo_DP.toml b/metadata/entries/Monad_Memo_DP.toml
--- a/metadata/entries/Monad_Memo_DP.toml
+++ b/metadata/entries/Monad_Memo_DP.toml
@@ -1,40 +1,40 @@
title = "Monadification, Memoization and Dynamic Programming"
date = 2018-05-22
topics = [
"Computer science/Programming languages/Transformations",
"Computer science/Algorithms",
"Computer science/Functional programming",
]
abstract = """
We present a lightweight framework for the automatic verified
(functional or imperative) memoization of recursive functions. Our
tool can turn a pure Isabelle/HOL function definition into a
monadified version in a state monad or the Imperative HOL heap monad,
and prove a correspondence theorem. We provide a variety of memory
implementations for the two types of monads. A number of simple
techniques allow us to achieve bottom-up computation and
space-efficient memoization. The framework’s utility is demonstrated
on a number of representative dynamic programming problems. A detailed
description of our work can be found in the accompanying paper [2]."""
license = "bsd"
note = ""
[authors]
[authors.wimmer]
-homepage = "wimmer_homepage1"
+homepage = "wimmer_homepage"
[authors.hu]
email = "hu_email"
[authors.nipkow]
-homepage = "nipkow_homepage4"
+homepage = "nipkow_homepage"
[contributors]
[notify]
-wimmer = "wimmer_email1"
+wimmer = "wimmer_email"
[history]
[extra]
diff --git a/metadata/entries/Nash_Williams.toml b/metadata/entries/Nash_Williams.toml
--- a/metadata/entries/Nash_Williams.toml
+++ b/metadata/entries/Nash_Williams.toml
@@ -1,28 +1,28 @@
title = "The Nash-Williams Partition Theorem"
date = 2020-05-16
topics = [
"Mathematics/Combinatorics",
]
abstract = """
In 1965, Nash-Williams discovered a generalisation of the infinite
form of Ramsey's theorem. Where the latter concerns infinite sets
of n-element sets for some fixed n, the Nash-Williams theorem concerns
infinite sets of finite sets (or lists) subject to a “no initial
segment” condition. The present formalisation follows a
monograph on Ramsey Spaces by Todorčević."""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Octonions.toml b/metadata/entries/Octonions.toml
--- a/metadata/entries/Octonions.toml
+++ b/metadata/entries/Octonions.toml
@@ -1,31 +1,31 @@
title = "Octonions"
date = 2018-09-14
topics = [
"Mathematics/Algebra",
"Mathematics/Geometry",
]
abstract = """
We develop the basic theory of Octonions, including various identities
and properties of the octonions and of the octonionic product, a
description of 7D isometries and representations of orthogonal
transformations. To this end we first develop the theory of the vector
cross product in 7 dimensions. The development of the theory of
Octonions is inspired by that of the theory of Quaternions by Lawrence
Paulson. However, we do not work within the type class real_algebra_1
because the octonionic product is not associative."""
license = "bsd"
note = ""
[authors]
[authors.argyraki]
-homepage = "argyraki_homepage1"
+homepage = "argyraki_homepage"
[contributors]
[notify]
argyraki = "argyraki_email"
[history]
[extra]
diff --git a/metadata/entries/OpSets.toml b/metadata/entries/OpSets.toml
--- a/metadata/entries/OpSets.toml
+++ b/metadata/entries/OpSets.toml
@@ -1,47 +1,47 @@
title = "OpSets: Sequential Specifications for Replicated Datatypes"
date = 2018-05-10
topics = [
"Computer science/Algorithms/Distributed",
"Computer science/Data structures",
]
abstract = """
We introduce OpSets, an executable framework for specifying and
reasoning about the semantics of replicated datatypes that provide
eventual consistency in a distributed system, and for mechanically
verifying algorithms that implement these datatypes. Our approach is
simple but expressive, allowing us to succinctly specify a variety of
abstract datatypes, including maps, sets, lists, text, graphs, trees,
and registers. Our datatypes are also composable, enabling the
construction of complex data structures. To demonstrate the utility of
OpSets for analysing replication algorithms, we highlight an important
correctness property for collaborative text editing that has
traditionally been overlooked; algorithms that do not satisfy this
property can exhibit awkward interleaving of text. We use OpSets to
specify this correctness property and prove that although one existing
replication algorithm satisfies this property, several other published
algorithms do not."""
license = "bsd"
note = ""
[authors]
[authors.kleppmann]
-email = "kleppmann_email1"
+email = "kleppmann_email"
[authors.gomes]
-email = "gomes_email3"
+email = "gomes_email"
[authors.mulligan]
email = "mulligan_email1"
[authors.beresford]
-email = "beresford_email1"
+email = "beresford_email"
[contributors]
[notify]
-gomes = "gomes_email1"
+gomes = "gomes_email"
[history]
[extra]
diff --git a/metadata/entries/Optimal_BST.toml b/metadata/entries/Optimal_BST.toml
--- a/metadata/entries/Optimal_BST.toml
+++ b/metadata/entries/Optimal_BST.toml
@@ -1,31 +1,31 @@
title = "Optimal Binary Search Trees"
date = 2018-05-27
topics = [
"Computer science/Algorithms",
"Computer science/Data structures",
]
abstract = """
This article formalizes recursive algorithms for the construction
of optimal binary search trees given fixed access frequencies.
We follow Knuth (1971), Yao (1980) and Mehlhorn (1984).
The algorithms are memoized with the help of the AFP article
Monadification, Memoization and Dynamic Programming,
thus yielding dynamic programming algorithms."""
license = "bsd"
note = ""
[authors]
[authors.nipkow]
-homepage = "nipkow_homepage3"
+homepage = "nipkow_homepage"
[authors.somogyi]
[contributors]
[notify]
nipkow = "nipkow_email"
[history]
[extra]
diff --git a/metadata/entries/Order_Lattice_Props.toml b/metadata/entries/Order_Lattice_Props.toml
--- a/metadata/entries/Order_Lattice_Props.toml
+++ b/metadata/entries/Order_Lattice_Props.toml
@@ -1,30 +1,30 @@
title = "Properties of Orderings and Lattices"
date = 2018-12-11
topics = [
"Mathematics/Order",
]
abstract = """
These components add further fundamental order and lattice-theoretic
concepts and properties to Isabelle's libraries. They follow by
and large the introductory sections of the Compendium of Continuous
Lattices, covering directed and filtered sets, down-closed and
up-closed sets, ideals and filters, Galois connections, closure and
co-closure operators. Some emphasis is on duality and morphisms
between structures, as in the Compendium. To this end, three ad-hoc
approaches to duality are compared."""
license = "bsd"
note = ""
[authors]
[authors.struth]
-homepage = "struth_homepage1"
+homepage = "struth_homepage"
[contributors]
[notify]
struth = "struth_email"
[history]
[extra]
diff --git a/metadata/entries/Ordered_Resolution_Prover.toml b/metadata/entries/Ordered_Resolution_Prover.toml
--- a/metadata/entries/Ordered_Resolution_Prover.toml
+++ b/metadata/entries/Ordered_Resolution_Prover.toml
@@ -1,39 +1,39 @@
title = "Formalization of Bachmair and Ganzinger's Ordered Resolution Prover"
date = 2018-01-18
topics = [
"Logic/General logic/Mechanization of proofs",
]
abstract = """
This Isabelle/HOL formalization covers Sections 2 to 4 of Bachmair and
Ganzinger's \"Resolution Theorem Proving\" chapter in the
Handbook of Automated Reasoning. This includes
soundness and completeness of unordered and ordered variants of ground
resolution with and without literal selection, the standard redundancy
criterion, a general framework for refutational theorem proving, and
soundness and completeness of an abstract first-order prover."""
license = "bsd"
note = ""
[authors]
[authors.schlichtkrull]
homepage = "schlichtkrull_homepage"
[authors.blanchette]
email = "blanchette_email1"
[authors.traytel]
homepage = "traytel_homepage"
[authors.waldmann]
-email = "waldmann_email1"
+email = "waldmann_email"
[contributors]
[notify]
schlichtkrull = "schlichtkrull_email"
blanchette = "blanchette_email1"
[history]
[extra]
diff --git a/metadata/entries/Ordinal.toml b/metadata/entries/Ordinal.toml
--- a/metadata/entries/Ordinal.toml
+++ b/metadata/entries/Ordinal.toml
@@ -1,22 +1,22 @@
title = "Countable Ordinals"
date = 2005-11-11
topics = [
"Logic/Set theory",
]
abstract = "This development defines a well-ordered type of countable ordinals. It includes notions of continuous and normal functions, recursively defined functions over ordinals, least fixed-points, and derivatives. Much of ordinal arithmetic is formalized, including exponentials and logarithms. The development concludes with formalizations of Cantor Normal Form and Veblen hierarchies over normal functions."
license = "bsd"
note = ""
[authors]
[authors.huffman]
homepage = "huffman_homepage"
[contributors]
[notify]
-paulson = "paulson_email1"
+paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Ordinal_Partitions.toml b/metadata/entries/Ordinal_Partitions.toml
--- a/metadata/entries/Ordinal_Partitions.toml
+++ b/metadata/entries/Ordinal_Partitions.toml
@@ -1,39 +1,39 @@
title = "Ordinal Partitions"
date = 2020-08-03
topics = [
"Mathematics/Combinatorics",
"Logic/Set theory",
]
abstract = """
The theory of partition relations concerns generalisations of
Ramsey's theorem. For any ordinal $\\alpha$, write $\\alpha \\to
(\\alpha, m)^2$ if for each function $f$ from unordered pairs of
elements of $\\alpha$ into $\\{0,1\\}$, either there is a subset
$X\\subseteq \\alpha$ order-isomorphic to $\\alpha$ such that
$f\\{x,y\\}=0$ for all $\\{x,y\\}\\subseteq X$, or there is an $m$ element
set $Y\\subseteq \\alpha$ such that $f\\{x,y\\}=1$ for all
$\\{x,y\\}\\subseteq Y$. (In both cases, with $\\{x,y\\}$ we require
$x\\not=y$.) In particular, the infinite Ramsey theorem can be written
in this notation as $\\omega \\to (\\omega, \\omega)^2$, or if we
restrict $m$ to the positive integers as above, then $\\omega \\to
(\\omega, m)^2$ for all $m$. This entry formalises Larson's proof
of $\\omega^\\omega \\to (\\omega^\\omega, m)^2$ along with a similar proof
of a result due to Specker: $\\omega^2 \\to (\\omega^2, m)^2$. Also
proved is a necessary result by Erdős and Milner:
$\\omega^{1+\\alpha\\cdot n} \\to (\\omega^{1+\\alpha}, 2^n)^2$."""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Poincare_Bendixson.toml b/metadata/entries/Poincare_Bendixson.toml
--- a/metadata/entries/Poincare_Bendixson.toml
+++ b/metadata/entries/Poincare_Bendixson.toml
@@ -1,37 +1,37 @@
title = "The Poincaré-Bendixson Theorem"
date = 2019-12-18
topics = [
"Mathematics/Analysis",
]
abstract = """
The Poincaré-Bendixson theorem is a classical result in the study of
(continuous) dynamical systems. Colloquially, it restricts the
possible behaviors of planar dynamical systems: such systems cannot be
chaotic. In practice, it is a useful tool for proving the existence of
(limiting) periodic behavior in planar systems. The theorem is an
interesting and challenging benchmark for formalized mathematics
because proofs in the literature rely on geometric sketches and only
hint at symmetric cases. It also requires a substantial background of
mathematical theories, e.g., the Jordan curve theorem, real analysis,
ordinary differential equations, and limiting (long-term) behavior of
dynamical systems."""
license = "bsd"
note = ""
[authors]
[authors.immler]
-homepage = "immler_homepage2"
+homepage = "immler_homepage"
[authors.tan]
homepage = "tan_homepage"
[contributors]
[notify]
immler = "immler_email1"
tan = "tan_email"
[history]
[extra]
diff --git a/metadata/entries/Prime_Number_Theorem.toml b/metadata/entries/Prime_Number_Theorem.toml
--- a/metadata/entries/Prime_Number_Theorem.toml
+++ b/metadata/entries/Prime_Number_Theorem.toml
@@ -1,60 +1,60 @@
title = "The Prime Number Theorem"
date = 2018-09-19
topics = [
"Mathematics/Number theory",
]
abstract = """
This article provides a short proof of the Prime Number
Theorem in several equivalent forms, most notably
π(x) ~ x/ln
x where π(x) is the
number of primes no larger than x. It also
defines other basic number-theoretic functions related to primes like
Chebyshev's functions ϑ and ψ and the
“n-th prime number” function
pn. We also show various
bounds and relationship between these functions are shown. Lastly, we
derive Mertens' First and Second Theorem, i. e.
∑p≤x
ln p/p = ln
x + O(1) and
∑p≤x
1/p = ln ln x + M +
O(1/ln x). We also give
explicit bounds for the remainder terms.
The proof
of the Prime Number Theorem builds on a library of Dirichlet series
and analytic combinatorics. We essentially follow the presentation by
Newman. The core part of the proof is a Tauberian theorem for
Dirichlet series, which is proven using complex analysis and then used
to strengthen Mertens' First Theorem to
∑p≤x
ln p/p = ln
x + c + o(1).
A variant of this proof has been formalised before by
Harrison in HOL Light, and formalisations of Selberg's elementary
proof exist both by Avigad et al. in Isabelle and
by Carneiro in Metamath. The advantage of the analytic proof is that,
while it requires more powerful mathematical tools, it is considerably
shorter and clearer. This article attempts to provide a short and
clear formalisation of all components of that proof using the full
range of mathematical machinery available in Isabelle, staying as
close as possible to Newman's simple paper proof.
"""
license = "bsd"
note = ""
[authors]
[authors.eberl]
homepage = "eberl_homepage"
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
eberl = "eberl_email"
[history]
[extra]
diff --git a/metadata/entries/Probabilistic_Timed_Automata.toml b/metadata/entries/Probabilistic_Timed_Automata.toml
--- a/metadata/entries/Probabilistic_Timed_Automata.toml
+++ b/metadata/entries/Probabilistic_Timed_Automata.toml
@@ -1,38 +1,38 @@
title = "Probabilistic Timed Automata"
date = 2018-05-24
topics = [
"Mathematics/Probability theory",
"Computer science/Automata and formal languages",
]
abstract = """
We present a formalization of probabilistic timed automata (PTA) for
which we try to follow the formula MDP + TA = PTA as far as possible:
our work starts from our existing formalizations of Markov decision
processes (MDP) and timed automata (TA) and combines them modularly.
We prove the fundamental result for probabilistic timed automata: the
region construction that is known from timed automata carries over to
the probabilistic setting. In particular, this allows us to prove that
minimum and maximum reachability probabilities can be computed via a
reduction to MDP model checking, including the case where one wants to
disregard unrealizable behavior. Further information can be found in
our ITP paper [2]."""
license = "bsd"
note = ""
[authors]
[authors.wimmer]
homepage = "wimmer_homepage"
[authors.hoelzl]
-homepage = "hoelzl_homepage1"
+homepage = "hoelzl_homepage"
[contributors]
[notify]
-wimmer = "wimmer_email1"
+wimmer = "wimmer_email"
hoelzl = "hoelzl_email"
[history]
[extra]
diff --git a/metadata/entries/Progress_Tracking.toml b/metadata/entries/Progress_Tracking.toml
--- a/metadata/entries/Progress_Tracking.toml
+++ b/metadata/entries/Progress_Tracking.toml
@@ -1,41 +1,41 @@
title = "Formalization of Timely Dataflow's Progress Tracking Protocol"
date = 2021-04-13
topics = [
"Computer science/Algorithms/Distributed",
]
abstract = """
Large-scale stream processing systems often follow the dataflow
paradigm, which enforces a program structure that exposes a high
degree of parallelism. The Timely Dataflow distributed system supports
expressive cyclic dataflows for which it offers low-latency data- and
pipeline-parallel stream processing. To achieve high expressiveness
and performance, Timely Dataflow uses an intricate distributed
protocol for tracking the computation’s progress. We formalize this
progress tracking protocol and verify its safety. Our formalization is
described in detail in our forthcoming ITP'21
paper."""
license = "bsd"
note = ""
[authors]
[authors.brun]
[authors.decova]
[authors.lattuada]
homepage = "lattuada_homepage"
[authors.traytel]
-homepage = "traytel_homepage1"
+homepage = "traytel_homepage"
[contributors]
[notify]
brun = "brun_email"
traytel = "traytel_email2"
[history]
[extra]
diff --git a/metadata/entries/Projective_Measurements.toml b/metadata/entries/Projective_Measurements.toml
--- a/metadata/entries/Projective_Measurements.toml
+++ b/metadata/entries/Projective_Measurements.toml
@@ -1,28 +1,28 @@
title = "Quantum projective measurements and the CHSH inequality"
date = 2021-03-03
topics = [
"Computer science/Algorithms/Quantum computing",
"Mathematics/Physics/Quantum information",
]
abstract = """
This work contains a formalization of quantum projective measurements,
also known as von Neumann measurements, which are based on elements of
spectral theory. We also formalized the CHSH inequality, an inequality
involving expectations in a probability space that is violated by
quantum measurements, thus proving that quantum mechanics cannot be modeled with an underlying local hidden-variable theory."""
license = "bsd"
note = ""
[authors]
[authors.echenim]
-homepage = "echenim_homepage1"
+homepage = "echenim_homepage"
[contributors]
[notify]
echenim = "echenim_email"
[history]
[extra]
diff --git a/metadata/entries/Promela.toml b/metadata/entries/Promela.toml
--- a/metadata/entries/Promela.toml
+++ b/metadata/entries/Promela.toml
@@ -1,31 +1,31 @@
title = "Promela Formalization"
date = 2014-05-28
topics = [
"Computer science/System description languages",
]
abstract = """
We present an executable formalization of the language Promela, the
description language for models of the model checker SPIN. This
formalization is part of the work for a completely verified model
checker (CAVA), but also serves as a useful (and executable!)
description of the semantics of the language itself, something that is
currently missing.
The formalization uses three steps: It takes an abstract syntax tree
generated from an SML parser, removes syntactic sugar and enriches it
with type information. This further gets translated into a transition
system, on which the semantic engine (read: successor function) operates."""
license = "bsd"
note = ""
[authors]
[authors.neumann]
-email = "neumann_email1"
+email = "neumann_email"
[contributors]
[notify]
[history]
[extra]
diff --git a/metadata/entries/Quantales.toml b/metadata/entries/Quantales.toml
--- a/metadata/entries/Quantales.toml
+++ b/metadata/entries/Quantales.toml
@@ -1,25 +1,25 @@
title = "Quantales"
date = 2018-12-11
topics = [
"Mathematics/Algebra",
]
abstract = """
These mathematical components formalise basic properties of quantales,
together with some important models, constructions, and concepts,
including quantic nuclei and conuclei."""
license = "bsd"
note = ""
[authors]
[authors.struth]
-homepage = "struth_homepage1"
+homepage = "struth_homepage"
[contributors]
[notify]
struth = "struth_email"
[history]
[extra]
diff --git a/metadata/entries/Quaternions.toml b/metadata/entries/Quaternions.toml
--- a/metadata/entries/Quaternions.toml
+++ b/metadata/entries/Quaternions.toml
@@ -1,32 +1,32 @@
title = "Quaternions"
date = 2018-09-05
topics = [
"Mathematics/Algebra",
"Mathematics/Geometry",
]
abstract = """
This theory is inspired by the HOL Light development of quaternions,
but follows its own route. Quaternions are developed coinductively, as
in the existing formalisation of the complex numbers. Quaternions are
quickly shown to belong to the type classes of real normed division
algebras and real inner product spaces. And therefore they inherit a
great body of facts involving algebraic laws, limits, continuity,
etc., which must be proved explicitly in the HOL Light version. The
development concludes with the geometric interpretation of the product
of imaginary quaternions."""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/ROBDD.toml b/metadata/entries/ROBDD.toml
--- a/metadata/entries/ROBDD.toml
+++ b/metadata/entries/ROBDD.toml
@@ -1,40 +1,40 @@
title = "Algorithms for Reduced Ordered Binary Decision Diagrams"
date = 2016-04-27
topics = [
"Computer science/Algorithms",
"Computer science/Data structures",
]
abstract = """
We present a verified and executable implementation of ROBDDs in
Isabelle/HOL. Our implementation relates pointer-based computation in
the Heap monad to operations on an abstract definition of boolean
functions. Internally, we implemented the if-then-else combinator in a
recursive fashion, following the Shannon decomposition of the argument
functions. The implementation mixes and adapts known techniques and is
built with efficiency in mind."""
license = "bsd"
note = ""
[authors]
[authors.michaelis]
homepage = "michaelis_homepage"
[authors.haslbeck]
-homepage = "haslbeck_homepage1"
+homepage = "haslbeck_homepage"
[authors.lammich]
homepage = "lammich_homepage"
[authors.hupel]
homepage = "hupel_homepage"
[contributors]
[notify]
michaelis = "michaelis_email2"
haslbeck = "haslbeck_email1"
[history]
[extra]
diff --git a/metadata/entries/Registers.toml b/metadata/entries/Registers.toml
--- a/metadata/entries/Registers.toml
+++ b/metadata/entries/Registers.toml
@@ -1,31 +1,31 @@
title = "Quantum and Classical Registers"
date = 2021-10-28
topics = [
"Computer science/Algorithms/Quantum computing",
"Computer science/Programming languages/Logics",
"Computer science/Semantics",
]
abstract = """
A formalization of the theory of quantum and classical registers as
developed by (Unruh, Quantum and Classical Registers). In a nutshell,
a register refers to a part of a larger memory or system that can be
accessed independently. Registers can be constructed from other
registers and several (compatible) registers can be composed. This
formalization develops both the generic theory of registers as well as
specific instantiations for classical and quantum registers."""
license = "bsd"
note = ""
[authors]
[authors.unruh]
-homepage = "unruh_homepage1"
+homepage = "unruh_homepage"
[contributors]
[notify]
unruh = "unruh_email"
[history]
[extra]
diff --git a/metadata/entries/Regular_Algebras.toml b/metadata/entries/Regular_Algebras.toml
--- a/metadata/entries/Regular_Algebras.toml
+++ b/metadata/entries/Regular_Algebras.toml
@@ -1,35 +1,35 @@
title = "Regular Algebras"
date = 2014-05-21
topics = [
"Computer science/Automata and formal languages",
"Mathematics/Algebra",
]
abstract = """
Regular algebras axiomatise the equational theory of regular expressions as induced by
regular language identity. We use Isabelle/HOL for a detailed systematic study of regular
algebras given by Boffa, Conway, Kozen and Salomaa. We investigate the relationships between
these classes, formalise a soundness proof for the smallest class (Salomaa's) and obtain
completeness of the largest one (Boffa's) relative to a deep result by Krob. In addition
we provide a large collection of regular identities in the general setting of Boffa's axiom.
Our regular algebra hierarchy is orthogonal to the Kleene algebra hierarchy in the Archive
of Formal Proofs; we have not aimed at an integration for pragmatic reasons."""
license = "bsd"
note = ""
[authors]
[authors.fosters]
-homepage = "fosters_homepage1"
+homepage = "fosters_homepage"
[authors.struth]
homepage = "struth_homepage"
[contributors]
[notify]
fosters = "fosters_email"
struth = "struth_email"
[history]
[extra]
diff --git a/metadata/entries/Regular_Tree_Relations.toml b/metadata/entries/Regular_Tree_Relations.toml
--- a/metadata/entries/Regular_Tree_Relations.toml
+++ b/metadata/entries/Regular_Tree_Relations.toml
@@ -1,48 +1,48 @@
title = "Regular Tree Relations"
date = 2021-12-15
topics = [
"Computer science/Automata and formal languages",
]
abstract = """
Tree automata have good closure properties and are therefore commonly
used to prove/disprove properties. This formalization contains among
other things the proofs of many closure properties of tree automata,
(anchored) ground tree transducers and regular relations. Additionally
it includes the well known pumping lemma and a lifting of the Myhill
Nerode theorem for regular languages to tree languages. We want to
mention the existence of a tree
automata APF-entry developed by Peter Lammich. His work is
based on epsilon free top-down tree automata, while this entry builds
on bottom-up tree automata with epsilon transitions. Moreover, our
formalization relies on the Collections
Framework, also by Peter Lammich, to obtain efficient code.
All proven constructions of the closure properties are exportable
using the Isabelle/HOL code generation facilities."""
license = "bsd"
note = ""
[authors]
[authors.lochmann]
email = "lochmann_email"
[authors.felgenhauer]
[authors.sternagel]
homepage = "sternagel_homepage"
[authors.thiemann]
-homepage = "thiemann_homepage2"
+homepage = "thiemann_homepage"
[authors.sternagelt]
[contributors]
[notify]
lochmann = "lochmann_email"
[history]
[extra]
diff --git a/metadata/entries/Relation_Algebra.toml b/metadata/entries/Relation_Algebra.toml
--- a/metadata/entries/Relation_Algebra.toml
+++ b/metadata/entries/Relation_Algebra.toml
@@ -1,38 +1,38 @@
title = "Relation Algebra"
date = 2014-01-25
topics = [
"Mathematics/Algebra",
]
abstract = """
Tarski's algebra of binary relations is formalised along the lines of
the standard textbooks of Maddux and Schmidt and Ströhlein. This
includes relation-algebraic concepts such as subidentities, vectors and
a domain operation as well as various notions associated to functions.
Relation algebras are also expanded by a reflexive transitive closure
operation, and they are linked with Kleene algebras and models of binary
relations and Boolean matrices."""
license = "bsd"
note = ""
[authors]
[authors.armstrong]
[authors.fosters]
email = "fosters_email"
[authors.struth]
-homepage = "struth_homepage1"
+homepage = "struth_homepage"
[authors.weber]
homepage = "weber_homepage"
[contributors]
[notify]
struth = "struth_email"
weber = "weber_email"
[history]
[extra]
diff --git a/metadata/entries/Relational-Incorrectness-Logic.toml b/metadata/entries/Relational-Incorrectness-Logic.toml
--- a/metadata/entries/Relational-Incorrectness-Logic.toml
+++ b/metadata/entries/Relational-Incorrectness-Logic.toml
@@ -1,35 +1,35 @@
title = "An Under-Approximate Relational Logic"
date = 2020-03-12
topics = [
"Computer science/Programming languages/Logics",
"Computer science/Security",
]
abstract = """
Recently, authors have proposed under-approximate logics for reasoning
about programs. So far, all such logics have been confined to
reasoning about individual program behaviours. Yet there exist many
over-approximate relational logics for reasoning about pairs of
programs and relating their behaviours. We present the first
under-approximate relational logic, for the simple imperative language
IMP. We prove our logic is both sound and complete. Additionally, we
show how reasoning in this logic can be decomposed into non-relational
reasoning in an under-approximate Hoare logic, mirroring Beringer’s
result for over-approximate relational logics. We illustrate the
application of our logic on some small examples in which we provably
demonstrate the presence of insecurity."""
license = "bsd"
note = ""
[authors]
[authors.murray]
-homepage = "murray_homepage1"
+homepage = "murray_homepage"
[contributors]
[notify]
murray = "murray_email"
[history]
[extra]
diff --git a/metadata/entries/Relational_Forests.toml b/metadata/entries/Relational_Forests.toml
--- a/metadata/entries/Relational_Forests.toml
+++ b/metadata/entries/Relational_Forests.toml
@@ -1,29 +1,29 @@
title = "Relational Forests"
date = 2021-08-03
topics = [
"Mathematics/Graph theory",
]
abstract = """
We study second-order formalisations of graph properties expressed as
first-order formulas in relation algebras extended with a Kleene star.
The formulas quantify over relations while still avoiding
quantification over elements of the base set. We formalise the
property of undirected graphs being acyclic this way. This involves a
study of various kinds of orientation of graphs. We also verify basic
algorithms to constructively prove several second-order properties."""
license = "bsd"
note = ""
[authors]
[authors.guttmann]
-homepage = "guttmann_homepage1"
+homepage = "guttmann_homepage"
[contributors]
[notify]
guttmann = "guttmann_email"
[history]
[extra]
diff --git a/metadata/entries/Roth_Arithmetic_Progressions.toml b/metadata/entries/Roth_Arithmetic_Progressions.toml
--- a/metadata/entries/Roth_Arithmetic_Progressions.toml
+++ b/metadata/entries/Roth_Arithmetic_Progressions.toml
@@ -1,44 +1,44 @@
title = "Roth's Theorem on Arithmetic Progressions"
date = 2021-12-28
topics = [
"Mathematics/Graph theory",
"Mathematics/Combinatorics",
]
abstract = """
We formalise a proof of Roth's Theorem on Arithmetic
Progressions, a major result in additive combinatorics on the
existence of 3-term arithmetic progressions in subsets of natural
numbers. To this end, we follow a proof using graph regularity. We
employ our recent formalisation of Szemerédi's Regularity Lemma,
a major result in extremal graph theory, which we use here to prove
the Triangle Counting Lemma and the Triangle Removal Lemma. Our
sources are Yufei Zhao's MIT lecture notes
\"Graph Theory and Additive Combinatorics\"
(revised version here)
and W.T. Gowers's Cambridge lecture notes
\"Topics in Combinatorics\".
We also refer to the University of
Georgia notes by Stephanie Bell and Will Grodzicki,
\"Using Szemerédi's Regularity Lemma to Prove Roth's Theorem\"."""
license = "bsd"
note = ""
[authors]
[authors.edmonds]
homepage = "edmonds_homepage"
[authors.argyraki]
homepage = "argyraki_homepage"
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/SATSolverVerification.toml b/metadata/entries/SATSolverVerification.toml
--- a/metadata/entries/SATSolverVerification.toml
+++ b/metadata/entries/SATSolverVerification.toml
@@ -1,21 +1,21 @@
title = "Formal Verification of Modern SAT Solvers"
date = 2008-07-23
topics = [
"Computer science/Algorithms",
]
abstract = "This document contains formal correctness proofs of modern SAT solvers. Following (Krstic et al, 2007) and (Nieuwenhuis et al., 2006), solvers are described using state-transition systems. Several different SAT solver descriptions are given and their partial correctness and termination is proved. These include:
a solver based on classical DPLL procedure (using only a backtrack-search with unit propagation),
a very general solver with backjumping and learning (similar to the description given in (Nieuwenhuis et al., 2006)), and
a solver with a specific conflict analysis algorithm (similar to the description given in (Krstic et al., 2007)).
Within the SAT solver correctness proofs, a large number of lemmas about propositional logic and CNF formulae are proved. This theory is self-contained and could be used for further exploring of properties of CNF based SAT algorithms."
license = "bsd"
note = ""
[authors]
[authors.maricf]
-homepage = "maricf_homepage1"
+homepage = "maricf_homepage"
[contributors]
[notify]
[history]
[extra]
diff --git a/metadata/entries/SC_DOM_Components.toml b/metadata/entries/SC_DOM_Components.toml
--- a/metadata/entries/SC_DOM_Components.toml
+++ b/metadata/entries/SC_DOM_Components.toml
@@ -1,42 +1,42 @@
title = "A Formalization of Safely Composable Web Components"
date = 2020-09-28
topics = [
"Computer science/Data structures",
]
abstract = """
While the (safely composable) DOM with shadow trees provide the
technical basis for defining web components, it neither defines
the concept of web components nor specifies the safety properties that
web components should guarantee. Consequently, the standard also does
not discuss how or even if the methods for modifying the DOM respect
component boundaries. In this AFP entry, we present a formally verified
model of safely composable web components and define safety properties
which ensure that different web components can only interact with each
other using well-defined interfaces. Moreover, our verification of the
application programming interface (API) of the DOM revealed numerous
invariants that implementations of the DOM API need to preserve to
ensure the integrity of components. In comparison to the strict
standard compliance formalization of Web Components in the AFP entry
\"DOM_Components\", the notion of components in this entry
(based on \"SC_DOM\" and \"Shadow_SC_DOM\") provides
much stronger safety guarantees."""
license = "bsd"
note = ""
[authors]
[authors.brucker]
-homepage = "brucker_homepage1"
+homepage = "brucker_homepage"
[authors.herzberg]
homepage = "herzberg_homepage"
[contributors]
[notify]
brucker = "brucker_email2"
herzberg = "herzberg_email"
[history]
[extra]
diff --git a/metadata/entries/SIFUM_Type_Systems.toml b/metadata/entries/SIFUM_Type_Systems.toml
--- a/metadata/entries/SIFUM_Type_Systems.toml
+++ b/metadata/entries/SIFUM_Type_Systems.toml
@@ -1,44 +1,44 @@
title = "A Formalization of Assumptions and Guarantees for Compositional Noninterference"
date = 2014-04-23
topics = [
"Computer science/Security",
"Computer science/Programming languages/Type systems",
]
abstract = """
Research in information-flow security aims at developing methods to
identify undesired information leaks within programs from private
(high) sources to public (low) sinks. For a concurrent system, it is
desirable to have compositional analysis methods that allow for
analyzing each thread independently and that nevertheless guarantee
that the parallel composition of successfully analyzed threads
satisfies a global security guarantee. However, such a compositional
analysis should not be overly pessimistic about what an environment
might do with shared resources. Otherwise, the analysis will reject
many intuitively secure programs.
The paper \"Assumptions and Guarantees for Compositional
Noninterference\" by Mantel et al. presents one solution for this problem:
an approach for compositionally reasoning about non-interference in
concurrent programs via rely-guarantee-style reasoning. We present an
Isabelle/HOL formalization of the concepts and proofs of this approach."""
license = "bsd"
note = ""
[authors]
[authors.grewe]
-email = "grewe_email1"
+email = "grewe_email"
[authors.mantel]
email = "mantel_email"
[authors.schoepe]
email = "schoepe_email"
[contributors]
[notify]
[history]
[extra]
diff --git a/metadata/entries/Safe_Distance.toml b/metadata/entries/Safe_Distance.toml
--- a/metadata/entries/Safe_Distance.toml
+++ b/metadata/entries/Safe_Distance.toml
@@ -1,37 +1,37 @@
title = "A Formally Verified Checker of the Safe Distance Traffic Rules for Autonomous Vehicles"
date = 2020-06-01
topics = [
"Computer science/Algorithms/Mathematical",
"Mathematics/Physics",
]
abstract = """
The Vienna Convention on Road Traffic defines the safe distance
traffic rules informally. This could make autonomous vehicle liable
for safe-distance-related accidents because there is no clear
definition of how large a safe distance is. We provide a formally
proven prescriptive definition of a safe distance, and checkers which
can decide whether an autonomous vehicle is obeying the safe distance
rule. Not only does our work apply to the domain of law, but it also
serves as a specification for autonomous vehicle manufacturers and for
online verification of path planners."""
license = "bsd"
note = ""
[authors]
[authors.rizaldi]
email = "rizaldi_email"
[authors.immler]
-homepage = "immler_homepage2"
+homepage = "immler_homepage"
[contributors]
[notify]
rizaldi = "rizaldi_email"
-immler = "immler_email2"
+immler = "immler_email1"
rau = "rau_email"
[history]
[extra]
diff --git a/metadata/entries/Saturation_Framework_Extensions.toml b/metadata/entries/Saturation_Framework_Extensions.toml
--- a/metadata/entries/Saturation_Framework_Extensions.toml
+++ b/metadata/entries/Saturation_Framework_Extensions.toml
@@ -1,38 +1,38 @@
title = "Extensions to the Comprehensive Framework for Saturation Theorem Proving"
date = 2020-08-25
topics = [
"Logic/General logic/Mechanization of proofs",
]
abstract = """
This Isabelle/HOL formalization extends the AFP entry
Saturation_Framework with the following
contributions:
an application of the framework
to prove Bachmair and Ganzinger's resolution prover RP
refutationally complete, which was formalized in a more ad hoc fashion
by Schlichtkrull et al. in the AFP entry
Ordered_Resolution_Prover;
generalizations of various basic concepts formalized by
Schlichtkrull et al., which were needed to verify RP and could be
useful to formalize other calculi, such as superposition;
alternative proofs of fairness (and hence saturation and
ultimately refutational completeness) for the given clause procedures
GC and LGC, based on invariance.
"""
license = "bsd"
note = ""
[authors]
[authors.blanchette]
homepage = "blanchette_homepage1"
[authors.tourret]
-homepage = "tourret_homepage1"
+homepage = "tourret_homepage"
[contributors]
[notify]
blanchette = "blanchette_email"
[history]
[extra]
diff --git a/metadata/entries/Shadow_DOM.toml b/metadata/entries/Shadow_DOM.toml
--- a/metadata/entries/Shadow_DOM.toml
+++ b/metadata/entries/Shadow_DOM.toml
@@ -1,41 +1,41 @@
title = "A Formal Model of the Document Object Model with Shadow Roots"
date = 2020-09-28
topics = [
"Computer science/Data structures",
]
abstract = """
In this AFP entry, we extend our formalization of the core DOM with
Shadow Roots. Shadow roots are a recent proposal of the web community
to support a component-based development approach for client-side web
applications. Shadow roots are a significant extension to the DOM
standard and, as web standards are condemned to be backward
compatible, such extensions often result in complex specifications that
may contain unwanted subtleties that can be detected by a
formalization. Our Isabelle/HOL formalization is, in the sense of
object-orientation, an extension of our formalization of the core DOM
and enjoys the same basic properties, i.e., it is extensible, i.e.,
can be extended without the need of re-proving already proven
properties and executable, i.e., we can generate executable code from
our specification. We exploit the executability to show that our
formalization complies to the official standard of the W3C,
respectively, the WHATWG."""
license = "bsd"
note = ""
[authors]
[authors.brucker]
-homepage = "brucker_homepage1"
+homepage = "brucker_homepage"
[authors.herzberg]
homepage = "herzberg_homepage"
[contributors]
[notify]
brucker = "brucker_email2"
herzberg = "herzberg_email"
[history]
[extra]
diff --git a/metadata/entries/Shadow_SC_DOM.toml b/metadata/entries/Shadow_SC_DOM.toml
--- a/metadata/entries/Shadow_SC_DOM.toml
+++ b/metadata/entries/Shadow_SC_DOM.toml
@@ -1,43 +1,43 @@
title = "A Formal Model of the Safely Composable Document Object Model with Shadow Roots"
date = 2020-09-28
topics = [
"Computer science/Data structures",
]
abstract = """
In this AFP entry, we extend our formalization of the safely
composable DOM with Shadow Roots. This is a proposal for Shadow Roots
with stricter safety guarantees than the standard-compliant
formalization (see \"Shadow DOM\"). Shadow Roots are a recent
proposal of the web community to support a component-based development
approach for client-side web applications. Shadow roots are a
significant extension to the DOM standard and, as web standards are
condemned to be backward compatible, such extensions often result in
complex specifications that may contain unwanted subtleties that can be
detected by a formalization. Our Isabelle/HOL formalization is, in
the sense of object-orientation, an extension of our formalization of
the core DOM and enjoys the same basic properties, i.e., it is
extensible, i.e., can be extended without the need of re-proving
already proven properties and executable, i.e., we can generate
executable code from our specification. We exploit the executability
to show that our formalization complies to the official standard of
the W3C, respectively, the WHATWG."""
license = "bsd"
note = ""
[authors]
[authors.brucker]
-homepage = "brucker_homepage1"
+homepage = "brucker_homepage"
[authors.herzberg]
homepage = "herzberg_homepage"
[contributors]
[notify]
brucker = "brucker_email2"
herzberg = "herzberg_email"
[history]
[extra]
diff --git a/metadata/entries/Simple_Firewall.toml b/metadata/entries/Simple_Firewall.toml
--- a/metadata/entries/Simple_Firewall.toml
+++ b/metadata/entries/Simple_Firewall.toml
@@ -1,44 +1,44 @@
title = "Simple Firewall"
date = 2016-08-24
topics = [
"Computer science/Networks",
]
abstract = """
We present a simple model of a firewall. The firewall can accept or
drop a packet and can match on interfaces, IP addresses, protocol, and
ports. It was designed to feature nice mathematical properties: The
type of match expressions was carefully crafted such that the
conjunction of two match expressions is only one match expression.
This model is too simplistic to mirror all aspects of the real world.
In the upcoming entry \"Iptables Semantics\", we will translate the
Linux firewall iptables to this model. For a fixed service (e.g. ssh,
http), we provide an algorithm to compute an overview of the
firewall's filtering behavior. The algorithm computes minimal service
matrices, i.e. graphs which partition the complete IPv4 and IPv6
address space and visualize the allowed accesses between partitions.
For a detailed description, see
Verified iptables Firewall
Analysis, IFIP Networking 2016."""
license = "bsd"
note = ""
[authors]
[authors.diekmann]
homepage = "diekmann_homepage"
[authors.michaelis]
homepage = "michaelis_homepage"
[authors.haslbeck]
-homepage = "haslbeck_homepage1"
+homepage = "haslbeck_homepage"
[contributors]
[notify]
diekmann = "diekmann_email"
haslbeck = "haslbeck_email2"
[history]
[extra]
diff --git a/metadata/entries/Simplex.toml b/metadata/entries/Simplex.toml
--- a/metadata/entries/Simplex.toml
+++ b/metadata/entries/Simplex.toml
@@ -1,37 +1,37 @@
title = "An Incremental Simplex Algorithm with Unsatisfiable Core Generation"
date = 2018-08-24
topics = [
"Computer science/Algorithms/Optimization",
]
abstract = """
We present an Isabelle/HOL formalization and total correctness proof
for the incremental version of the Simplex algorithm which is used in
most state-of-the-art SMT solvers. It supports extraction of
satisfying assignments, extraction of minimal unsatisfiable cores, incremental
assertion of constraints and backtracking. The formalization relies on
stepwise program refinement, starting from a simple specification,
going through a number of refinement steps, and ending up in a fully
executable functional implementation. Symmetries present in the
algorithm are handled with special care."""
license = "bsd"
note = ""
[authors]
[authors.maricf]
email = "maricf_email"
[authors.spasic]
email = "spasic_email"
[authors.thiemann]
-homepage = "thiemann_homepage1"
+homepage = "thiemann_homepage"
[contributors]
[notify]
thiemann = "thiemann_email"
[history]
[extra]
diff --git a/metadata/entries/Simplicial_complexes_and_boolean_functions.toml b/metadata/entries/Simplicial_complexes_and_boolean_functions.toml
--- a/metadata/entries/Simplicial_complexes_and_boolean_functions.toml
+++ b/metadata/entries/Simplicial_complexes_and_boolean_functions.toml
@@ -1,37 +1,37 @@
title = "Simplicial Complexes and Boolean functions"
date = 2021-11-29
topics = [
"Mathematics/Topology",
]
abstract = """
In this work we formalise the isomorphism between simplicial complexes
of dimension $n$ and monotone Boolean functions in $n$ variables,
mainly following the definitions and results as introduced by N. A.
Scoville. We also take advantage of the AFP
representation of ROBDD
(Reduced Ordered Binary Decision Diagrams) to compute the ROBDD representation of a
given simplicial complex (by means of the isomorphism to Boolean
functions). Some examples of simplicial complexes and associated
Boolean functions are also presented."""
license = "bsd"
note = ""
[authors]
[authors.aransay]
-homepage = "aransay_homepage1"
+homepage = "aransay_homepage"
[authors.campo]
email = "campo_email"
[authors.michaelis]
-homepage = "michaelis_homepage1"
+homepage = "michaelis_homepage"
[contributors]
[notify]
aransay = "aransay_email"
[history]
[extra]
diff --git a/metadata/entries/Skip_Lists.toml b/metadata/entries/Skip_Lists.toml
--- a/metadata/entries/Skip_Lists.toml
+++ b/metadata/entries/Skip_Lists.toml
@@ -1,34 +1,34 @@
title = "Skip Lists"
date = 2020-01-09
topics = [
"Computer science/Data structures",
]
abstract = """
Skip lists are sorted linked lists enhanced with shortcuts
and are an alternative to binary search trees. A skip list consists
of multiple levels of sorted linked lists where a list on level n is a
subsequence of the list on level n − 1. In the ideal case, elements
are skipped in such a way that a lookup in a skip list takes O(log n)
time. In a randomised skip list, the skipped elements are chosen
randomly.
This entry contains formalized proofs
of the textbook results about the expected height and the expected
length of a search path in a randomised skip list.
"""
license = "bsd"
note = ""
[authors]
[authors.haslbeck]
homepage = "haslbeck_homepage"
[authors.eberl]
-homepage = "eberl_homepage1"
+homepage = "eberl_homepage"
[contributors]
[notify]
haslbeck = "haslbeck_email2"
[history]
[extra]
diff --git a/metadata/entries/Smith_Normal_Form.toml b/metadata/entries/Smith_Normal_Form.toml
--- a/metadata/entries/Smith_Normal_Form.toml
+++ b/metadata/entries/Smith_Normal_Form.toml
@@ -1,36 +1,36 @@
title = "A verified algorithm for computing the Smith normal form of a matrix"
date = 2020-05-23
topics = [
"Mathematics/Algebra",
"Computer science/Algorithms/Mathematical",
]
abstract = """
This work presents a formal proof in Isabelle/HOL of an algorithm to
transform a matrix into its Smith normal form, a canonical matrix
form, in a general setting: the algorithm is parameterized by
operations to prove its existence over elementary divisor rings, while
execution is guaranteed over Euclidean domains. We also provide a
formal proof on some results about the generality of this algorithm as
well as the uniqueness of the Smith normal form. Since Isabelle/HOL
does not feature dependent types, the development is carried out
switching conveniently between two different existing libraries: the
Hermite normal form (based on HOL Analysis) and the Jordan normal form
AFP entries. This permits reusing results from both developments and
it is done by means of the lifting and transfer package together with
the use of local type definitions."""
license = "bsd"
note = ""
[authors]
[authors.divason]
-homepage = "divason_homepage2"
+homepage = "divason_homepage"
[contributors]
[notify]
-divason = "divason_email1"
+divason = "divason_email"
[history]
[extra]
diff --git a/metadata/entries/Smooth_Manifolds.toml b/metadata/entries/Smooth_Manifolds.toml
--- a/metadata/entries/Smooth_Manifolds.toml
+++ b/metadata/entries/Smooth_Manifolds.toml
@@ -1,34 +1,34 @@
title = "Smooth Manifolds"
date = 2018-10-22
topics = [
"Mathematics/Analysis",
"Mathematics/Topology",
]
abstract = """
We formalize the definition and basic properties of smooth manifolds
in Isabelle/HOL. Concepts covered include partition of unity, tangent
and cotangent spaces, and the fundamental theorem of path integrals.
We also examine some concrete manifolds such as spheres and projective
spaces. The formalization makes extensive use of the analysis and
linear algebra libraries in Isabelle/HOL, in particular its
“types-to-sets” mechanism."""
license = "bsd"
note = ""
[authors]
[authors.immler]
-homepage = "immler_homepage2"
+homepage = "immler_homepage"
[authors.zhan]
homepage = "zhan_homepage"
[contributors]
[notify]
immler = "immler_email"
zhan = "zhan_email"
[history]
[extra]
diff --git a/metadata/entries/Stream-Fusion.toml b/metadata/entries/Stream-Fusion.toml
--- a/metadata/entries/Stream-Fusion.toml
+++ b/metadata/entries/Stream-Fusion.toml
@@ -1,22 +1,22 @@
title = "Stream Fusion"
date = 2009-04-29
topics = [
"Computer science/Functional programming",
]
abstract = "Stream Fusion is a system for removing intermediate list structures from Haskell programs; it consists of a Haskell library along with several compiler rewrite rules. (The library is available online.)
These theories contain a formalization of much of the Stream Fusion library in HOLCF. Lazy list and stream types are defined, along with coercions between the two types, as well as an equivalence relation for streams that generate the same list. List and stream versions of map, filter, foldr, enumFromTo, append, zipWith, and concatMap are defined, and the stream versions are shown to respect stream equivalence."
license = "bsd"
note = ""
[authors]
[authors.huffman]
-homepage = "huffman_homepage1"
+homepage = "huffman_homepage"
[contributors]
[notify]
huffman = "huffman_email1"
[history]
[extra]
diff --git a/metadata/entries/Strong_Security.toml b/metadata/entries/Strong_Security.toml
--- a/metadata/entries/Strong_Security.toml
+++ b/metadata/entries/Strong_Security.toml
@@ -1,51 +1,51 @@
title = "A Formalization of Strong Security"
date = 2014-04-23
topics = [
"Computer science/Security",
"Computer science/Programming languages/Type systems",
]
abstract = """
Research in information-flow security aims at developing methods to
identify undesired information leaks within programs from private
sources to public sinks. Noninterference captures this
intuition. Strong security from Sabelfeld and Sands
formalizes noninterference for concurrent systems.
We present an Isabelle/HOL formalization of strong security for
arbitrary security lattices (Sabelfeld and Sands use
a two-element security lattice in the original publication).
The formalization includes
compositionality proofs for strong security and a soundness proof
for a security type system that checks strong security for programs
in a simple while language with dynamic thread creation.
Our formalization of the security type system is abstract in the
language for expressions and in the semantic side conditions for
expressions. It can easily be instantiated with different syntactic
approximations for these side conditions. The soundness proof of
such an instantiation boils down to showing that these syntactic
approximations imply the semantic side conditions."""
license = "bsd"
note = ""
[authors]
[authors.grewe]
-email = "grewe_email1"
+email = "grewe_email"
[authors.lux]
email = "lux_email"
[authors.mantel]
email = "mantel_email"
[authors.sauer]
email = "sauer_email"
[contributors]
[notify]
[history]
[extra]
diff --git a/metadata/entries/Szemeredi_Regularity.toml b/metadata/entries/Szemeredi_Regularity.toml
--- a/metadata/entries/Szemeredi_Regularity.toml
+++ b/metadata/entries/Szemeredi_Regularity.toml
@@ -1,42 +1,42 @@
title = "Szemerédi's Regularity Lemma"
date = 2021-11-05
topics = [
"Mathematics/Graph theory",
"Mathematics/Combinatorics",
]
abstract = """
Szemerédi's
regularity lemma is a key result in the study of large
graphs. It asserts the existence of an upper bound on the number of parts
the vertices of a graph need to be partitioned into such that the
edges between the parts are random in a certain sense. This bound
depends only on the desired precision and not on the graph itself, in
the spirit of Ramsey's theorem. The formalisation follows online
course notes by Tim
Gowers and Yufei
Zhao."""
license = "bsd"
note = ""
[authors]
[authors.edmonds]
homepage = "edmonds_homepage"
[authors.argyraki]
homepage = "argyraki_homepage2"
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/TESL_Language.toml b/metadata/entries/TESL_Language.toml
--- a/metadata/entries/TESL_Language.toml
+++ b/metadata/entries/TESL_Language.toml
@@ -1,59 +1,59 @@
title = "A Formal Development of a Polychronous Polytimed Coordination Language"
date = 2019-07-30
topics = [
"Computer science/System description languages",
"Computer science/Semantics",
"Computer science/Concurrency",
]
abstract = """
The design of complex systems involves different formalisms for
modeling their different parts or aspects. The global model of a
system may therefore consist of a coordination of concurrent
sub-models that use different paradigms. We develop here a theory for
a language used to specify the timed coordination of such
heterogeneous subsystems by addressing the following issues:
the
behavior of the sub-systems is observed only at a series of discrete
instants,
events may occur in different sub-systems at unrelated
times, leading to polychronous systems, which do not necessarily have
a common base clock,
coordination between subsystems involves
causality, so the occurrence of an event may enforce the occurrence of
other events, possibly after a certain duration has elapsed or an
event has occurred a given number of times,
the domain of time
(discrete, rational, continuous...) may be different in the
subsystems, leading to polytimed systems,
the time frames of
different sub-systems may be related (for instance, time in a GPS
satellite and in a GPS receiver on Earth are related although they are
not the same).
Firstly, a denotational semantics of the language is
defined. Then, in order to be able to incrementally check the behavior
of systems, an operational semantics is given, with proofs of
progress, soundness and completeness with regard to the denotational
semantics. These proofs are made according to a setup that can scale
up when new operators are added to the language. In order for
specifications to be composed in a clean way, the language should be
invariant by stuttering (i.e., adding observation instants at which
nothing happens). The proof of this invariance is also given."""
license = "bsd"
note = ""
[authors]
[authors.van]
email = "van_email"
[authors.boulanger]
email = "boulanger_email"
[authors.wolff]
-email = "wolff_email1"
+email = "wolff_email"
[contributors]
[notify]
boulanger = "boulanger_email"
-wolff = "wolff_email1"
+wolff = "wolff_email"
[history]
[extra]
diff --git a/metadata/entries/Timed_Automata.toml b/metadata/entries/Timed_Automata.toml
--- a/metadata/entries/Timed_Automata.toml
+++ b/metadata/entries/Timed_Automata.toml
@@ -1,43 +1,43 @@
title = "Timed Automata"
date = 2016-03-08
topics = [
"Computer science/Automata and formal languages",
]
abstract = """
Timed automata are a widely used formalism for modeling real-time
systems, which is employed in a class of successful model checkers
such as UPPAAL [LPY97], HyTech [HHWt97] or Kronos [Yov97]. This work
formalizes the theory for the subclass of diagonal-free timed
automata, which is sufficient to model many interesting problems. We
first define the basic concepts and semantics of diagonal-free timed
automata. Based on this, we prove two types of decidability results
for the language emptiness problem. The first is the classic result
of Alur and Dill [AD90, AD94], which uses a finite partitioning of
the state space into so-called `regions`. Our second result focuses
on an approach based on `Difference Bound Matrices (DBMs)`, which is
practically used by model checkers. We prove the correctness of the
basic forward analysis operations on DBMs. One of these operations is
the Floyd-Warshall algorithm for the all-pairs shortest paths problem.
To obtain a finite search space, a widening operation has to be used
for this kind of analysis. We use Patricia Bouyer's [Bou04] approach
to prove that this widening operation is correct in the sense that
DBM-based forward analysis in combination with the widening operation
also decides language emptiness. The interesting property of this
proof is that the first decidability result is reused to obtain the
second one."""
license = "bsd"
note = ""
[authors]
[authors.wimmer]
homepage = "wimmer_homepage"
[contributors]
[notify]
-wimmer = "wimmer_email1"
+wimmer = "wimmer_email"
[history]
[extra]
diff --git a/metadata/entries/Topology.toml b/metadata/entries/Topology.toml
--- a/metadata/entries/Topology.toml
+++ b/metadata/entries/Topology.toml
@@ -1,21 +1,21 @@
title = "Topology"
date = 2004-04-26
topics = [
"Mathematics/Topology",
]
abstract = "This entry contains two theories. The first, Topology, develops the basic notions of general topology. The second, which can be viewed as a demonstration of the first, is called LList_Topology. It develops the topology of lazy lists."
license = "bsd"
note = ""
[authors]
[authors.friedrich]
[contributors]
[notify]
-paulson = "paulson_email1"
+paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/Transformer_Semantics.toml b/metadata/entries/Transformer_Semantics.toml
--- a/metadata/entries/Transformer_Semantics.toml
+++ b/metadata/entries/Transformer_Semantics.toml
@@ -1,33 +1,33 @@
title = "Transformer Semantics"
date = 2018-12-11
topics = [
"Mathematics/Algebra",
"Computer science/Semantics",
]
abstract = """
These mathematical components formalise predicate transformer
semantics for programs, yet currently only for partial correctness and
in the absence of faults. A first part for isotone (or monotone),
Sup-preserving and Inf-preserving transformers follows Back and von
Wright's approach, with additional emphasis on the quantalic
structure of algebras of transformers. The second part develops
Sup-preserving and Inf-preserving predicate transformers from the
powerset monad, via its Kleisli category and Eilenberg-Moore algebras,
with emphasis on adjunctions and dualities, as well as isomorphisms
between relations, state transformers and predicate transformers."""
license = "bsd"
note = ""
[authors]
[authors.struth]
-homepage = "struth_homepage1"
+homepage = "struth_homepage"
[contributors]
[notify]
struth = "struth_email"
[history]
[extra]
diff --git a/metadata/entries/Transitive_Models.toml b/metadata/entries/Transitive_Models.toml
--- a/metadata/entries/Transitive_Models.toml
+++ b/metadata/entries/Transitive_Models.toml
@@ -1,38 +1,38 @@
title = "Transitive Models of Fragments of ZFC"
date = 2022-03-03
topics = [
"Logic/Set theory",
]
abstract = """
We extend the ZF-Constructibility library by relativizing theories of
the Isabelle/ZF and Delta System Lemma sessions to a transitive class.
We also relativize Paulson's work on Aleph and our former
treatment of the Axiom of Dependent Choices. This work is a
prerequisite to our formalization of the independence of the
Continuum Hypothesis."""
license = "bsd"
note = ""
[authors]
[authors.gunther]
email = "gunther_email"
[authors.pagano]
homepage = "pagano_homepage"
[authors.terraf]
-homepage = "terraf_homepage2"
+homepage = "terraf_homepage"
[authors.steinberg]
email = "steinberg_email"
[contributors]
[notify]
terraf = "terraf_email"
-pagano = "pagano_email1"
+pagano = "pagano_email"
[history]
[extra]
diff --git a/metadata/entries/Treaps.toml b/metadata/entries/Treaps.toml
--- a/metadata/entries/Treaps.toml
+++ b/metadata/entries/Treaps.toml
@@ -1,44 +1,44 @@
title = "Treaps"
date = 2018-02-06
topics = [
"Computer science/Data structures",
]
abstract = """
A Treap is a binary tree whose nodes contain pairs
consisting of some payload and an associated priority. It must have
the search-tree property w.r.t. the payloads and the heap property
w.r.t. the priorities. Treaps are an interesting data structure that
is related to binary search trees (BSTs) in the following way: if one
forgets all the priorities of a treap, the resulting BST is exactly
the same as if one had inserted the elements into an empty BST in
order of ascending priority. This means that a treap behaves like a
BST where we can pretend the elements were inserted in a different
order from the one in which they were actually inserted.
In particular, by choosing these priorities at random upon
insertion of an element, we can pretend that we inserted the elements
in random order, so that the shape of the
resulting tree is that of a random BST no matter in what order we
insert the elements. This is the main result of this
formalisation.
"""
license = "bsd"
note = ""
[authors]
[authors.haslbeck]
homepage = "haslbeck_homepage"
[authors.eberl]
homepage = "eberl_homepage2"
[authors.nipkow]
-homepage = "nipkow_homepage3"
+homepage = "nipkow_homepage"
[contributors]
[notify]
eberl = "eberl_email"
[history]
[extra]
diff --git a/metadata/entries/Types_Tableaus_and_Goedels_God.toml b/metadata/entries/Types_Tableaus_and_Goedels_God.toml
--- a/metadata/entries/Types_Tableaus_and_Goedels_God.toml
+++ b/metadata/entries/Types_Tableaus_and_Goedels_God.toml
@@ -1,37 +1,37 @@
title = "Types, Tableaus and Gödel’s God in Isabelle/HOL"
date = 2017-05-01
topics = [
"Logic/Philosophical aspects",
]
abstract = """
A computer-formalisation of the essential parts of Fitting's
textbook \"Types, Tableaus and Gödel's God\" in
Isabelle/HOL is presented. In particular, Fitting's (and
Anderson's) variant of the ontological argument is verified and
confirmed. This variant avoids the modal collapse, which has been
criticised as an undesirable side-effect of Kurt Gödel's (and
Dana Scott's) versions of the ontological argument.
Fitting's work is employing an intensional higher-order modal
logic, which we shallowly embed here in classical higher-order logic.
We then utilize the embedded logic for the formalisation of
Fitting's argument. (See also the earlier AFP entry ``Gödel's God in Isabelle/HOL''.)"""
license = "bsd"
note = ""
[authors]
[authors.fuenmayor]
email = "fuenmayor_email"
[authors.benzmueller]
-homepage = "benzmueller_homepage2"
+homepage = "benzmueller_homepage"
[contributors]
[notify]
fuenmayor = "fuenmayor_email"
benzmueller = "benzmueller_email"
[history]
[extra]
diff --git a/metadata/entries/UPF_Firewall.toml b/metadata/entries/UPF_Firewall.toml
--- a/metadata/entries/UPF_Firewall.toml
+++ b/metadata/entries/UPF_Firewall.toml
@@ -1,35 +1,35 @@
title = "Formal Network Models and Their Application to Firewall Policies"
date = 2017-01-08
topics = [
"Computer science/Security",
"Computer science/Networks",
]
abstract = """
We present a formal model of network protocols and their application
to modeling firewall policies. The formalization is based on the
Unified Policy Framework (UPF). The formalization was originally
developed for generating test cases for testing the security
configuration of actual firewalls and routers (middle-boxes) using
HOL-TestGen. Our work focuses on modeling application level protocols
on top of tcp/ip."""
license = "bsd"
note = ""
[authors]
[authors.brucker]
-homepage = "brucker_homepage1"
+homepage = "brucker_homepage"
[authors.bruegger]
[authors.wolff]
homepage = "wolff_homepage"
[contributors]
[notify]
brucker = "brucker_email2"
[history]
[extra]
diff --git a/metadata/entries/Universal_Turing_Machine.toml b/metadata/entries/Universal_Turing_Machine.toml
--- a/metadata/entries/Universal_Turing_Machine.toml
+++ b/metadata/entries/Universal_Turing_Machine.toml
@@ -1,36 +1,36 @@
title = "Universal Turing Machine"
date = 2019-02-08
topics = [
"Logic/Computability",
"Computer science/Automata and formal languages",
]
abstract = """
We formalise results from computability theory: recursive functions,
undecidability of the halting problem, and the existence of a
universal Turing machine. This formalisation is the AFP entry
corresponding to the paper Mechanising Turing Machines and Computability Theory
in Isabelle/HOL, ITP 2013."""
license = "bsd"
note = ""
[authors]
[authors.xu]
[authors.zhangx]
[authors.urban]
-homepage = "urban_homepage1"
+homepage = "urban_homepage"
[authors.joosten]
-homepage = "joosten_homepage1"
+homepage = "joosten_homepage"
[contributors]
[notify]
joosten = "joosten_email1"
urban = "urban_email"
[history]
[extra]
diff --git a/metadata/entries/Van_der_Waerden.toml b/metadata/entries/Van_der_Waerden.toml
--- a/metadata/entries/Van_der_Waerden.toml
+++ b/metadata/entries/Van_der_Waerden.toml
@@ -1,36 +1,36 @@
title = "Van der Waerden's Theorem"
date = 2021-06-22
topics = [
"Mathematics/Combinatorics",
]
abstract = """
This article formalises the proof of Van der Waerden's Theorem
from Ramsey theory. Van der Waerden's Theorem states that for
integers $k$ and $l$ there exists a number $N$ which guarantees that
if an integer interval of length at least $N$ is coloured with $k$
colours, there will always be an arithmetic progression of length $l$
of the same colour in said interval. The proof goes along the lines of
\\cite{Swan}. The smallest number $N_{k,l}$ fulfilling Van der
Waerden's Theorem is then called the Van der Waerden Number.
Finding the Van der Waerden Number is still an open problem for most
values of $k$ and $l$."""
license = "bsd"
note = ""
[authors]
[authors.kreuzer]
homepage = "kreuzer_homepage"
[authors.eberl]
-homepage = "eberl_homepage1"
+homepage = "eberl_homepage"
[contributors]
[notify]
kreuzer = "kreuzer_email"
eberl = "eberl_email"
[history]
[extra]
diff --git a/metadata/entries/VerifyThis2019.toml b/metadata/entries/VerifyThis2019.toml
--- a/metadata/entries/VerifyThis2019.toml
+++ b/metadata/entries/VerifyThis2019.toml
@@ -1,30 +1,30 @@
title = "VerifyThis 2019 -- Polished Isabelle Solutions"
date = 2019-10-16
topics = [
"Computer science/Algorithms",
]
abstract = """
VerifyThis 2019 (http://www.pm.inf.ethz.ch/research/verifythis.html)
was a program verification competition associated with ETAPS 2019. It
was the 8th event in the VerifyThis competition series. In this entry,
we present polished and completed versions of our solutions that we
created during the competition."""
license = "bsd"
note = ""
[authors]
[authors.lammich]
[authors.wimmer]
-homepage = "wimmer_homepage1"
+homepage = "wimmer_homepage"
[contributors]
[notify]
lammich = "lammich_email"
-wimmer = "wimmer_email1"
+wimmer = "wimmer_email"
[history]
[extra]
diff --git a/metadata/entries/WHATandWHERE_Security.toml b/metadata/entries/WHATandWHERE_Security.toml
--- a/metadata/entries/WHATandWHERE_Security.toml
+++ b/metadata/entries/WHATandWHERE_Security.toml
@@ -1,58 +1,58 @@
title = "A Formalization of Declassification with WHAT-and-WHERE-Security"
date = 2014-04-23
topics = [
"Computer science/Security",
"Computer science/Programming languages/Type systems",
]
abstract = """
Research in information-flow security aims at developing methods to
identify undesired information leaks within programs from private
sources to public sinks. Noninterference captures this intuition by
requiring that no information whatsoever flows from private sources
to public sinks. However, in practice this definition is often too
strict: Depending on the intuitive desired security policy, the
controlled declassification of certain private information (WHAT) at
certain points in the program (WHERE) might not result in an
undesired information leak.
We present an Isabelle/HOL formalization of such a security property
for controlled declassification, namely WHAT&WHERE-security from
\"Scheduler-Independent Declassification\" by Lux, Mantel, and Perner.
The formalization includes
compositionality proofs for WHAT&WHERE-security and a soundness proof
for a security type system that checks WHAT&WHERE-security for
programs in a simple while language with dynamic thread creation.
Our formalization of the security type system is abstract in the
language for expressions and in the semantic side conditions for
expressions. It can easily be instantiated with different syntactic
approximations for these side conditions. The soundness proof of
such an instantiation boils down to showing that these syntactic
approximations imply the semantic side conditions.
This Isabelle/HOL formalization uses theories from the entry
Strong Security."""
license = "bsd"
note = ""
[authors]
[authors.grewe]
-email = "grewe_email1"
+email = "grewe_email"
[authors.lux]
email = "lux_email"
[authors.mantel]
email = "mantel_email"
[authors.sauer]
email = "sauer_email"
[contributors]
[notify]
[history]
[extra]
diff --git a/metadata/entries/Weight_Balanced_Trees.toml b/metadata/entries/Weight_Balanced_Trees.toml
--- a/metadata/entries/Weight_Balanced_Trees.toml
+++ b/metadata/entries/Weight_Balanced_Trees.toml
@@ -1,32 +1,32 @@
title = "Weight-Balanced Trees"
date = 2018-03-13
topics = [
"Computer science/Data structures",
]
abstract = """
This theory provides a verified implementation of weight-balanced
trees following the work of Hirai
and Yamamoto who proved that all parameters in a certain
range are valid, i.e. guarantee that insertion and deletion preserve
weight-balance. Instead of a general theorem we provide parameterized
proofs of preservation of the invariant that work for many (all?)
valid parameters."""
license = "bsd"
note = ""
[authors]
[authors.nipkow]
-homepage = "nipkow_homepage3"
+homepage = "nipkow_homepage"
[authors.dirix]
[contributors]
[notify]
nipkow = "nipkow_email"
[history]
[extra]
diff --git a/metadata/entries/Word_Lib.toml b/metadata/entries/Word_Lib.toml
--- a/metadata/entries/Word_Lib.toml
+++ b/metadata/entries/Word_Lib.toml
@@ -1,45 +1,45 @@
title = "Finite Machine Word Library"
date = 2016-06-09
topics = [
"Computer science/Data structures",
]
abstract = """
This entry contains an extension to the Isabelle library for
fixed-width machine words. In particular, the entry adds quickcheck setup
for words, printing as hexadecimals, additional operations, reasoning
about alignment, signed words, enumerations of words, normalisation of
word numerals, and an extensive library of properties about generic
fixed-width words, as well as an instantiation of many of these to the
commonly used 32 and 64-bit bases."""
license = "bsd"
note = ""
[authors]
[authors.beeren]
[authors.fernandez]
[authors.gao]
[authors.klein]
homepage = "klein_homepage"
[authors.kolanski]
[authors.lim]
[authors.lewis]
[authors.matichuk]
[authors.sewell]
[contributors]
[notify]
-klein = "klein_email1"
+klein = "klein_email"
[history]
[extra]
diff --git a/metadata/entries/Youngs_Inequality.toml b/metadata/entries/Youngs_Inequality.toml
--- a/metadata/entries/Youngs_Inequality.toml
+++ b/metadata/entries/Youngs_Inequality.toml
@@ -1,31 +1,31 @@
title = "Young's Inequality for Increasing Functions"
date = 2022-01-31
topics = [
"Mathematics/Analysis",
]
abstract = """
Young's inequality states that $$ ab \\leq \\int_0^a f(x)dx +
\\int_0^b f^{-1}(y) dy $$ where $a\\geq 0$, $b\\geq 0$ and $f$ is
strictly increasing and continuous. Its proof is formalised following
the
development by Cunningham and Grossman. Their idea is to
make the intuitive, geometric folklore proof rigorous by reasoning
about step functions. The lack of the Riemann integral makes the
development longer than one would like, but their argument is
reproduced faithfully."""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/metadata/entries/ZFC_in_HOL.toml b/metadata/entries/ZFC_in_HOL.toml
--- a/metadata/entries/ZFC_in_HOL.toml
+++ b/metadata/entries/ZFC_in_HOL.toml
@@ -1,48 +1,48 @@
title = "Zermelo Fraenkel Set Theory in Higher-Order Logic"
date = 2019-10-24
topics = [
"Logic/Set theory",
]
abstract = """
This entry is a new formalisation of ZFC set theory in Isabelle/HOL. It is
logically equivalent to Obua's HOLZF; the point is to have the closest
possible integration with the rest of Isabelle/HOL, minimising the amount of
new notations and exploiting type classes.
There is a type V of sets and a function elts :: V => V
set mapping a set to its elements. Classes simply have type V
set, and a predicate identifies the small classes: those that correspond
to actual sets. Type classes connected with orders and lattices are used to
minimise the amount of new notation for concepts such as the subset relation,
union and intersection. Basic concepts — Cartesian products, disjoint sums,
natural numbers, functions, etc. — are formalised.
More advanced set-theoretic concepts, such as transfinite induction,
ordinals, cardinals and the transitive closure of a set, are also provided.
The definition of addition and multiplication for general sets (not just
ordinals) follows Kirby.
The theory provides two type classes with the aim of facilitating
developments that combine V with other Isabelle/HOL types:
embeddable, the class of types that can be injected into V
(including V itself as well as V*V, etc.), and
small, the class of types that correspond to some ZF set.
extra-history =
Change history:
[2020-01-28]: Generalisation of the \"small\" predicate and order types to arbitrary sets;
ordinal exponentiation;
introduction of the coercion ord_of_nat :: \"nat => V\";
numerous new lemmas. (revision 6081d5be8d08)"""
license = "bsd"
note = ""
[authors]
[authors.paulson]
-homepage = "paulson_homepage1"
+homepage = "paulson_homepage"
[contributors]
[notify]
paulson = "paulson_email"
[history]
[extra]
diff --git a/web/authors/abdulaziz/index.html b/web/authors/abdulaziz/index.html
--- a/web/authors/abdulaziz/index.html
+++ b/web/authors/abdulaziz/index.html
@@ -1,140 +1,140 @@
abdulaziz - Archive of Formal Proofs
by Mohammad Abdulaziz🌐 and Lawrence C. Paulson🌐
Jan 11
\ No newline at end of file
diff --git a/web/authors/aehlig/index.html b/web/authors/aehlig/index.html
--- a/web/authors/aehlig/index.html
+++ b/web/authors/aehlig/index.html
@@ -1,98 +1,98 @@
aehlig - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/aissat/index.html b/web/authors/aissat/index.html
--- a/web/authors/aissat/index.html
+++ b/web/authors/aissat/index.html
@@ -1,95 +1,95 @@
aissat - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/aransay/index.html b/web/authors/aransay/index.html
--- a/web/authors/aransay/index.html
+++ b/web/authors/aransay/index.html
@@ -1,139 +1,139 @@
aransay - Archive of Formal Proofs
by Jose Divasón🌐 and Jesús Aransay🌐
Jan 16
\ No newline at end of file
diff --git a/web/authors/argyraki/index.html b/web/authors/argyraki/index.html
--- a/web/authors/argyraki/index.html
+++ b/web/authors/argyraki/index.html
@@ -1,162 +1,162 @@
argyraki - Archive of Formal Proofs
May 23
\ No newline at end of file
diff --git a/web/authors/armstrong/index.html b/web/authors/armstrong/index.html
--- a/web/authors/armstrong/index.html
+++ b/web/authors/armstrong/index.html
@@ -1,111 +1,111 @@
armstrong - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/ausaf/index.html b/web/authors/ausaf/index.html
--- a/web/authors/ausaf/index.html
+++ b/web/authors/ausaf/index.html
@@ -1,98 +1,98 @@
ausaf - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bauer/index.html b/web/authors/bauer/index.html
--- a/web/authors/bauer/index.html
+++ b/web/authors/bauer/index.html
@@ -1,95 +1,95 @@
bauer - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bentkamp/index.html b/web/authors/bentkamp/index.html
--- a/web/authors/bentkamp/index.html
+++ b/web/authors/bentkamp/index.html
@@ -1,128 +1,128 @@
bentkamp - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/benzmueller/index.html b/web/authors/benzmueller/index.html
--- a/web/authors/benzmueller/index.html
+++ b/web/authors/benzmueller/index.html
@@ -1,146 +1,146 @@
benzmueller - Archive of Formal Proofs
Nov 12
\ No newline at end of file
diff --git a/web/authors/beresford/index.html b/web/authors/beresford/index.html
--- a/web/authors/beresford/index.html
+++ b/web/authors/beresford/index.html
@@ -1,107 +1,107 @@
beresford - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bhatt/index.html b/web/authors/bhatt/index.html
--- a/web/authors/bhatt/index.html
+++ b/web/authors/bhatt/index.html
@@ -1,98 +1,98 @@
bhatt - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/biendarra/index.html b/web/authors/biendarra/index.html
--- a/web/authors/biendarra/index.html
+++ b/web/authors/biendarra/index.html
@@ -1,104 +1,104 @@
biendarra - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/blanchette/index.html b/web/authors/blanchette/index.html
--- a/web/authors/blanchette/index.html
+++ b/web/authors/blanchette/index.html
@@ -1,183 +1,183 @@
blanchette - Archive of Formal Proofs
Oct 15
\ No newline at end of file
diff --git a/web/authors/blasum/index.html b/web/authors/blasum/index.html
--- a/web/authors/blasum/index.html
+++ b/web/authors/blasum/index.html
@@ -1,98 +1,98 @@
blasum - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/blumson/index.html b/web/authors/blumson/index.html
--- a/web/authors/blumson/index.html
+++ b/web/authors/blumson/index.html
@@ -1,107 +1,107 @@
blumson - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bortin/index.html b/web/authors/bortin/index.html
--- a/web/authors/bortin/index.html
+++ b/web/authors/bortin/index.html
@@ -1,105 +1,105 @@
bortin - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bottesch/index.html b/web/authors/bottesch/index.html
--- a/web/authors/bottesch/index.html
+++ b/web/authors/bottesch/index.html
@@ -1,123 +1,123 @@
bottesch - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bracevac/index.html b/web/authors/bracevac/index.html
--- a/web/authors/bracevac/index.html
+++ b/web/authors/bracevac/index.html
@@ -1,98 +1,98 @@
bracevac - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/brandt/index.html b/web/authors/brandt/index.html
--- a/web/authors/brandt/index.html
+++ b/web/authors/brandt/index.html
@@ -1,98 +1,98 @@
brandt - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/brien/index.html b/web/authors/brien/index.html
--- a/web/authors/brien/index.html
+++ b/web/authors/brien/index.html
@@ -1,95 +1,95 @@
brien - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/brinkop/index.html b/web/authors/brinkop/index.html
--- a/web/authors/brinkop/index.html
+++ b/web/authors/brinkop/index.html
@@ -1,98 +1,98 @@
brinkop - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/brucker/index.html b/web/authors/brucker/index.html
--- a/web/authors/brucker/index.html
+++ b/web/authors/brucker/index.html
@@ -1,191 +1,191 @@
brucker - Archive of Formal Proofs
by Achim D. Brucker📧, Frédéric Tuong📧 and Burkhart Wolff📧
Jan 16
\ No newline at end of file
diff --git a/web/authors/bruegger/index.html b/web/authors/bruegger/index.html
--- a/web/authors/bruegger/index.html
+++ b/web/authors/bruegger/index.html
@@ -1,107 +1,107 @@
bruegger - Archive of Formal Proofs
Nov 28
\ No newline at end of file
diff --git a/web/authors/brun/index.html b/web/authors/brun/index.html
--- a/web/authors/brun/index.html
+++ b/web/authors/brun/index.html
@@ -1,104 +1,104 @@
brun - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/bulwahn/index.html b/web/authors/bulwahn/index.html
--- a/web/authors/bulwahn/index.html
+++ b/web/authors/bulwahn/index.html
@@ -1,195 +1,195 @@
bulwahn - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/butler/index.html b/web/authors/butler/index.html
--- a/web/authors/butler/index.html
+++ b/web/authors/butler/index.html
@@ -1,108 +1,108 @@
butler - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/caballero/index.html b/web/authors/caballero/index.html
--- a/web/authors/caballero/index.html
+++ b/web/authors/caballero/index.html
@@ -1,117 +1,117 @@
caballero - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/dardinier/index.html b/web/authors/dardinier/index.html
--- a/web/authors/dardinier/index.html
+++ b/web/authors/dardinier/index.html
@@ -1,123 +1,123 @@
dardinier - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/diekmann/index.html b/web/authors/diekmann/index.html
--- a/web/authors/diekmann/index.html
+++ b/web/authors/diekmann/index.html
@@ -1,144 +1,144 @@
diekmann - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/divason/index.html b/web/authors/divason/index.html
--- a/web/authors/divason/index.html
+++ b/web/authors/divason/index.html
@@ -1,180 +1,180 @@
divason - Archive of Formal Proofs
by Jose Divasón🌐 and Jesús Aransay🌐
Jan 16
\ No newline at end of file
diff --git a/web/authors/dyckhoff/index.html b/web/authors/dyckhoff/index.html
--- a/web/authors/dyckhoff/index.html
+++ b/web/authors/dyckhoff/index.html
@@ -1,98 +1,98 @@
dyckhoff - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/eberl/index.html b/web/authors/eberl/index.html
--- a/web/authors/eberl/index.html
+++ b/web/authors/eberl/index.html
@@ -1,509 +1,509 @@
eberl - Archive of Formal Proofs
by Manuel Eberl🌐
Jan 11
\ No newline at end of file
diff --git a/web/authors/echenim/index.html b/web/authors/echenim/index.html
--- a/web/authors/echenim/index.html
+++ b/web/authors/echenim/index.html
@@ -1,117 +1,117 @@
echenim - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/esparza/index.html b/web/authors/esparza/index.html
--- a/web/authors/esparza/index.html
+++ b/web/authors/esparza/index.html
@@ -1,98 +1,98 @@
esparza - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/essmann/index.html b/web/authors/essmann/index.html
--- a/web/authors/essmann/index.html
+++ b/web/authors/essmann/index.html
@@ -1,98 +1,98 @@
essmann - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/feliachi/index.html b/web/authors/feliachi/index.html
--- a/web/authors/feliachi/index.html
+++ b/web/authors/feliachi/index.html
@@ -1,107 +1,107 @@
feliachi - Archive of Formal Proofs
May 27
\ No newline at end of file
diff --git a/web/authors/fiedler/index.html b/web/authors/fiedler/index.html
--- a/web/authors/fiedler/index.html
+++ b/web/authors/fiedler/index.html
@@ -1,98 +1,98 @@
fiedler - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/fleury/index.html b/web/authors/fleury/index.html
--- a/web/authors/fleury/index.html
+++ b/web/authors/fleury/index.html
@@ -1,110 +1,110 @@
fleury - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/fosters/index.html b/web/authors/fosters/index.html
--- a/web/authors/fosters/index.html
+++ b/web/authors/fosters/index.html
@@ -1,135 +1,135 @@
fosters - Archive of Formal Proofs
Jan 25
\ No newline at end of file
diff --git a/web/authors/fuenmayor/index.html b/web/authors/fuenmayor/index.html
--- a/web/authors/fuenmayor/index.html
+++ b/web/authors/fuenmayor/index.html
@@ -1,123 +1,123 @@
fuenmayor - Archive of Formal Proofs
May 01
\ No newline at end of file
diff --git a/web/authors/furusawa/index.html b/web/authors/furusawa/index.html
--- a/web/authors/furusawa/index.html
+++ b/web/authors/furusawa/index.html
@@ -1,98 +1,98 @@
furusawa - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/gammie/index.html b/web/authors/gammie/index.html
--- a/web/authors/gammie/index.html
+++ b/web/authors/gammie/index.html
@@ -1,185 +1,185 @@
gammie - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/gaudel/index.html b/web/authors/gaudel/index.html
--- a/web/authors/gaudel/index.html
+++ b/web/authors/gaudel/index.html
@@ -1,98 +1,98 @@
gaudel - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/gay/index.html b/web/authors/gay/index.html
--- a/web/authors/gay/index.html
+++ b/web/authors/gay/index.html
@@ -1,98 +1,98 @@
gay - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/gomes/index.html b/web/authors/gomes/index.html
--- a/web/authors/gomes/index.html
+++ b/web/authors/gomes/index.html
@@ -1,158 +1,158 @@
gomes - Archive of Formal Proofs
by Alasdair Armstrong, Victor B. F. Gomes🌐 and Georg Struth🌐
Jan 23
\ No newline at end of file
diff --git a/web/authors/grewe/index.html b/web/authors/grewe/index.html
--- a/web/authors/grewe/index.html
+++ b/web/authors/grewe/index.html
@@ -1,121 +1,121 @@
grewe - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/gunther/index.html b/web/authors/gunther/index.html
--- a/web/authors/gunther/index.html
+++ b/web/authors/gunther/index.html
@@ -1,114 +1,114 @@
gunther - Archive of Formal Proofs
by Emmanuel Gunther📧, Miguel Pagano🌐 and Pedro Sánchez Terraf🌐
May 06
\ No newline at end of file
diff --git a/web/authors/guttmann/index.html b/web/authors/guttmann/index.html
--- a/web/authors/guttmann/index.html
+++ b/web/authors/guttmann/index.html
@@ -1,176 +1,176 @@
guttmann - Archive of Formal Proofs
by Victor B. F. Gomes🌐, Walter Guttmann🌐, Peter Höfner🌐, Georg Struth🌐 and Tjark Weber🌐
Apr 12
\ No newline at end of file
diff --git a/web/authors/haftmann/index.html b/web/authors/haftmann/index.html
--- a/web/authors/haftmann/index.html
+++ b/web/authors/haftmann/index.html
@@ -1,107 +1,107 @@
haftmann - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/haslbeck/index.html b/web/authors/haslbeck/index.html
--- a/web/authors/haslbeck/index.html
+++ b/web/authors/haslbeck/index.html
@@ -1,139 +1,139 @@
haslbeck - Archive of Formal Proofs
by Julius Michaelis🌐, Max W. Haslbeck🌐, Peter Lammich🌐 and Lars Hupel🌐
Apr 27
\ No newline at end of file
diff --git a/web/authors/haslbeckm/index.html b/web/authors/haslbeckm/index.html
--- a/web/authors/haslbeckm/index.html
+++ b/web/authors/haslbeckm/index.html
@@ -1,116 +1,116 @@
haslbeckm - Archive of Formal Proofs
Feb 17
\ No newline at end of file
diff --git a/web/authors/havle/index.html b/web/authors/havle/index.html
--- a/web/authors/havle/index.html
+++ b/web/authors/havle/index.html
@@ -1,98 +1,98 @@
havle - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/heimes/index.html b/web/authors/heimes/index.html
--- a/web/authors/heimes/index.html
+++ b/web/authors/heimes/index.html
@@ -1,102 +1,102 @@
heimes - Archive of Formal Proofs
Dec 26
\ No newline at end of file
diff --git a/web/authors/hoefner/index.html b/web/authors/hoefner/index.html
--- a/web/authors/hoefner/index.html
+++ b/web/authors/hoefner/index.html
@@ -1,116 +1,116 @@
hoefner - Archive of Formal Proofs
Oct 23
\ No newline at end of file
diff --git a/web/authors/hoelzl/index.html b/web/authors/hoelzl/index.html
--- a/web/authors/hoelzl/index.html
+++ b/web/authors/hoelzl/index.html
@@ -1,156 +1,156 @@
hoelzl - Archive of Formal Proofs
by Johannes Hölzl🌐 and Tobias Nipkow🌐
Jan 03
\ No newline at end of file
diff --git a/web/authors/hu/index.html b/web/authors/hu/index.html
--- a/web/authors/hu/index.html
+++ b/web/authors/hu/index.html
@@ -1,98 +1,98 @@
hu - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/huffman/index.html b/web/authors/huffman/index.html
--- a/web/authors/huffman/index.html
+++ b/web/authors/huffman/index.html
@@ -1,137 +1,137 @@
huffman - Archive of Formal Proofs
by Brian Huffman🌐
Nov 11
\ No newline at end of file
diff --git a/web/authors/hupel/index.html b/web/authors/hupel/index.html
--- a/web/authors/hupel/index.html
+++ b/web/authors/hupel/index.html
@@ -1,188 +1,188 @@
hupel - Archive of Formal Proofs
Feb 13
\ No newline at end of file
diff --git a/web/authors/immler/index.html b/web/authors/immler/index.html
--- a/web/authors/immler/index.html
+++ b/web/authors/immler/index.html
@@ -1,178 +1,178 @@
immler - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/joosten/index.html b/web/authors/joosten/index.html
--- a/web/authors/joosten/index.html
+++ b/web/authors/joosten/index.html
@@ -1,160 +1,160 @@
joosten - Archive of Formal Proofs
Dec 22
\ No newline at end of file
diff --git a/web/authors/kaliszyk/index.html b/web/authors/kaliszyk/index.html
--- a/web/authors/kaliszyk/index.html
+++ b/web/authors/kaliszyk/index.html
@@ -1,125 +1,125 @@
kaliszyk - Archive of Formal Proofs
Feb 21
\ No newline at end of file
diff --git a/web/authors/klein/index.html b/web/authors/klein/index.html
--- a/web/authors/klein/index.html
+++ b/web/authors/klein/index.html
@@ -1,119 +1,119 @@
klein - Archive of Formal Proofs
Jun 01
\ No newline at end of file
diff --git a/web/authors/kleppmann/index.html b/web/authors/kleppmann/index.html
--- a/web/authors/kleppmann/index.html
+++ b/web/authors/kleppmann/index.html
@@ -1,107 +1,107 @@
kleppmann - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/krauss/index.html b/web/authors/krauss/index.html
--- a/web/authors/krauss/index.html
+++ b/web/authors/krauss/index.html
@@ -1,98 +1,98 @@
krauss - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/kuncar/index.html b/web/authors/kuncar/index.html
--- a/web/authors/kuncar/index.html
+++ b/web/authors/kuncar/index.html
@@ -1,98 +1,98 @@
kuncar - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/lammich/index.html b/web/authors/lammich/index.html
--- a/web/authors/lammich/index.html
+++ b/web/authors/lammich/index.html
@@ -1,340 +1,340 @@
lammich - Archive of Formal Proofs
Dec 14
\ No newline at end of file
diff --git a/web/authors/langenstein/index.html b/web/authors/langenstein/index.html
--- a/web/authors/langenstein/index.html
+++ b/web/authors/langenstein/index.html
@@ -1,98 +1,98 @@
langenstein - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/li/index.html b/web/authors/li/index.html
--- a/web/authors/li/index.html
+++ b/web/authors/li/index.html
@@ -1,173 +1,173 @@
li - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/lochbihler/index.html b/web/authors/lochbihler/index.html
--- a/web/authors/lochbihler/index.html
+++ b/web/authors/lochbihler/index.html
@@ -1,267 +1,267 @@
lochbihler - Archive of Formal Proofs
by Andreas Lochbihler🌐
Dec 03
\ No newline at end of file
diff --git a/web/authors/maletzky/index.html b/web/authors/maletzky/index.html
--- a/web/authors/maletzky/index.html
+++ b/web/authors/maletzky/index.html
@@ -1,132 +1,132 @@
maletzky - Archive of Formal Proofs
Aug 10
\ No newline at end of file
diff --git a/web/authors/mantel/index.html b/web/authors/mantel/index.html
--- a/web/authors/mantel/index.html
+++ b/web/authors/mantel/index.html
@@ -1,121 +1,121 @@
mantel - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/maric/index.html b/web/authors/maric/index.html
--- a/web/authors/maric/index.html
+++ b/web/authors/maric/index.html
@@ -1,107 +1,107 @@
maric - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/maricf/index.html b/web/authors/maricf/index.html
--- a/web/authors/maricf/index.html
+++ b/web/authors/maricf/index.html
@@ -1,126 +1,126 @@
maricf - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/michaelis/index.html b/web/authors/michaelis/index.html
--- a/web/authors/michaelis/index.html
+++ b/web/authors/michaelis/index.html
@@ -1,144 +1,144 @@
michaelis - Archive of Formal Proofs
by Julius Michaelis🌐, Max W. Haslbeck🌐, Peter Lammich🌐 and Lars Hupel🌐
Apr 27
\ No newline at end of file
diff --git a/web/authors/moeller/index.html b/web/authors/moeller/index.html
--- a/web/authors/moeller/index.html
+++ b/web/authors/moeller/index.html
@@ -1,98 +1,98 @@
moeller - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/mulligan/index.html b/web/authors/mulligan/index.html
--- a/web/authors/mulligan/index.html
+++ b/web/authors/mulligan/index.html
@@ -1,114 +1,114 @@
mulligan - Archive of Formal Proofs
Jun 25
\ No newline at end of file
diff --git a/web/authors/naraschewski/index.html b/web/authors/naraschewski/index.html
--- a/web/authors/naraschewski/index.html
+++ b/web/authors/naraschewski/index.html
@@ -1,95 +1,95 @@
naraschewski - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/nemouchi/index.html b/web/authors/nemouchi/index.html
--- a/web/authors/nemouchi/index.html
+++ b/web/authors/nemouchi/index.html
@@ -1,107 +1,107 @@
nemouchi - Archive of Formal Proofs
by René Neumann📧
Oct 28
\ No newline at end of file
diff --git a/web/authors/nipkow/index.html b/web/authors/nipkow/index.html
--- a/web/authors/nipkow/index.html
+++ b/web/authors/nipkow/index.html
@@ -1,434 +1,434 @@
nipkow - Archive of Formal Proofs
by Tobias Nipkow🌐 and Cornelia Pusch
Mar 19
\ No newline at end of file
diff --git a/web/authors/pagano/index.html b/web/authors/pagano/index.html
--- a/web/authors/pagano/index.html
+++ b/web/authors/pagano/index.html
@@ -1,114 +1,114 @@
pagano - Archive of Formal Proofs
by Emmanuel Gunther📧, Miguel Pagano🌐 and Pedro Sánchez Terraf🌐
May 06
\ No newline at end of file
diff --git a/web/authors/parsert/index.html b/web/authors/parsert/index.html
--- a/web/authors/parsert/index.html
+++ b/web/authors/parsert/index.html
@@ -1,126 +1,126 @@
parsert - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/paulson/index.html b/web/authors/paulson/index.html
--- a/web/authors/paulson/index.html
+++ b/web/authors/paulson/index.html
@@ -1,296 +1,296 @@
paulson - Archive of Formal Proofs
by Tobias Nipkow🌐 and Lawrence C. Paulson🌐
Nov 07
\ No newline at end of file
diff --git a/web/authors/pierzchalski/index.html b/web/authors/pierzchalski/index.html
--- a/web/authors/pierzchalski/index.html
+++ b/web/authors/pierzchalski/index.html
@@ -1,102 +1,102 @@
pierzchalski - Archive of Formal Proofs
Jun 25
\ No newline at end of file
diff --git a/web/authors/platzer/index.html b/web/authors/platzer/index.html
--- a/web/authors/platzer/index.html
+++ b/web/authors/platzer/index.html
@@ -1,117 +1,117 @@
platzer - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/popescu/index.html b/web/authors/popescu/index.html
--- a/web/authors/popescu/index.html
+++ b/web/authors/popescu/index.html
@@ -1,245 +1,245 @@
popescu - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/raszyk/index.html b/web/authors/raszyk/index.html
--- a/web/authors/raszyk/index.html
+++ b/web/authors/raszyk/index.html
@@ -1,114 +1,114 @@
raszyk - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/rau/index.html b/web/authors/rau/index.html
--- a/web/authors/rau/index.html
+++ b/web/authors/rau/index.html
@@ -1,107 +1,107 @@
rau - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/raya/index.html b/web/authors/raya/index.html
--- a/web/authors/raya/index.html
+++ b/web/authors/raya/index.html
@@ -1,98 +1,98 @@
raya - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/reynaud/index.html b/web/authors/reynaud/index.html
--- a/web/authors/reynaud/index.html
+++ b/web/authors/reynaud/index.html
@@ -1,95 +1,95 @@
reynaud - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/ribeiro/index.html b/web/authors/ribeiro/index.html
--- a/web/authors/ribeiro/index.html
+++ b/web/authors/ribeiro/index.html
@@ -1,95 +1,95 @@
ribeiro - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/rizaldi/index.html b/web/authors/rizaldi/index.html
--- a/web/authors/rizaldi/index.html
+++ b/web/authors/rizaldi/index.html
@@ -1,98 +1,98 @@
rizaldi - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/rizkallah/index.html b/web/authors/rizkallah/index.html
--- a/web/authors/rizkallah/index.html
+++ b/web/authors/rizkallah/index.html
@@ -1,130 +1,130 @@
rizkallah - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/romanos/index.html b/web/authors/romanos/index.html
--- a/web/authors/romanos/index.html
+++ b/web/authors/romanos/index.html
@@ -1,98 +1,98 @@
romanos - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/rosskopf/index.html b/web/authors/rosskopf/index.html
--- a/web/authors/rosskopf/index.html
+++ b/web/authors/rosskopf/index.html
@@ -1,98 +1,98 @@
rosskopf - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/saile/index.html b/web/authors/saile/index.html
--- a/web/authors/saile/index.html
+++ b/web/authors/saile/index.html
@@ -1,98 +1,98 @@
saile - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/schaeffeler/index.html b/web/authors/schaeffeler/index.html
--- a/web/authors/schaeffeler/index.html
+++ b/web/authors/schaeffeler/index.html
@@ -1,105 +1,105 @@
schaeffeler - Archive of Formal Proofs
Dec 16
\ No newline at end of file
diff --git a/web/authors/schimpf/index.html b/web/authors/schimpf/index.html
--- a/web/authors/schimpf/index.html
+++ b/web/authors/schimpf/index.html
@@ -1,105 +1,105 @@
schimpf - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/schlichtkrull/index.html b/web/authors/schlichtkrull/index.html
--- a/web/authors/schlichtkrull/index.html
+++ b/web/authors/schlichtkrull/index.html
@@ -1,139 +1,139 @@
schlichtkrull - Archive of Formal Proofs
Jun 30
\ No newline at end of file
diff --git a/web/authors/schmaltz/index.html b/web/authors/schmaltz/index.html
--- a/web/authors/schmaltz/index.html
+++ b/web/authors/schmaltz/index.html
@@ -1,98 +1,98 @@
schmaltz - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/schneider/index.html b/web/authors/schneider/index.html
--- a/web/authors/schneider/index.html
+++ b/web/authors/schneider/index.html
@@ -1,141 +1,141 @@
schneider - Archive of Formal Proofs
by Andreas Lochbihler🌐 and Joshua Schneider
Dec 22
\ No newline at end of file
diff --git a/web/authors/sefidgar/index.html b/web/authors/sefidgar/index.html
--- a/web/authors/sefidgar/index.html
+++ b/web/authors/sefidgar/index.html
@@ -1,136 +1,136 @@
sefidgar - Archive of Formal Proofs
Aug 12
\ No newline at end of file
diff --git a/web/authors/sickert/index.html b/web/authors/sickert/index.html
--- a/web/authors/sickert/index.html
+++ b/web/authors/sickert/index.html
@@ -1,128 +1,128 @@
sickert - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/sison/index.html b/web/authors/sison/index.html
--- a/web/authors/sison/index.html
+++ b/web/authors/sison/index.html
@@ -1,102 +1,102 @@
sison - Archive of Formal Proofs
Jun 25
\ No newline at end of file
diff --git a/web/authors/smaus/index.html b/web/authors/smaus/index.html
--- a/web/authors/smaus/index.html
+++ b/web/authors/smaus/index.html
@@ -1,98 +1,98 @@
smaus - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/somogyi/index.html b/web/authors/somogyi/index.html
--- a/web/authors/somogyi/index.html
+++ b/web/authors/somogyi/index.html
@@ -1,95 +1,95 @@
somogyi - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/spasic/index.html b/web/authors/spasic/index.html
--- a/web/authors/spasic/index.html
+++ b/web/authors/spasic/index.html
@@ -1,98 +1,98 @@
spasic - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/steinberg/index.html b/web/authors/steinberg/index.html
--- a/web/authors/steinberg/index.html
+++ b/web/authors/steinberg/index.html
@@ -1,105 +1,105 @@
steinberg - Archive of Formal Proofs
Mar 03
\ No newline at end of file
diff --git a/web/authors/stephan/index.html b/web/authors/stephan/index.html
--- a/web/authors/stephan/index.html
+++ b/web/authors/stephan/index.html
@@ -1,98 +1,98 @@
stephan - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/sternagel/index.html b/web/authors/sternagel/index.html
--- a/web/authors/sternagel/index.html
+++ b/web/authors/sternagel/index.html
@@ -1,245 +1,245 @@
sternagel - Archive of Formal Proofs
by Christian Sternagel📧 and René Thiemann🌐
Jun 14
\ No newline at end of file
diff --git a/web/authors/stricker/index.html b/web/authors/stricker/index.html
--- a/web/authors/stricker/index.html
+++ b/web/authors/stricker/index.html
@@ -1,98 +1,98 @@
stricker - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/struth/index.html b/web/authors/struth/index.html
--- a/web/authors/struth/index.html
+++ b/web/authors/struth/index.html
@@ -1,188 +1,188 @@
struth - Archive of Formal Proofs
Jan 15
\ No newline at end of file
diff --git a/web/authors/stuewe/index.html b/web/authors/stuewe/index.html
--- a/web/authors/stuewe/index.html
+++ b/web/authors/stuewe/index.html
@@ -1,95 +1,95 @@
stuewe - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/sudbrock/index.html b/web/authors/sudbrock/index.html
--- a/web/authors/sudbrock/index.html
+++ b/web/authors/sudbrock/index.html
@@ -1,98 +1,98 @@
sudbrock - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/sulejmani/index.html b/web/authors/sulejmani/index.html
--- a/web/authors/sulejmani/index.html
+++ b/web/authors/sulejmani/index.html
@@ -1,95 +1,95 @@
sulejmani - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/taha/index.html b/web/authors/taha/index.html
--- a/web/authors/taha/index.html
+++ b/web/authors/taha/index.html
@@ -1,116 +1,116 @@
taha - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/tan/index.html b/web/authors/tan/index.html
--- a/web/authors/tan/index.html
+++ b/web/authors/tan/index.html
@@ -1,107 +1,107 @@
tan - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/tasch/index.html b/web/authors/tasch/index.html
--- a/web/authors/tasch/index.html
+++ b/web/authors/tasch/index.html
@@ -1,98 +1,98 @@
tasch - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/terraf/index.html b/web/authors/terraf/index.html
--- a/web/authors/terraf/index.html
+++ b/web/authors/terraf/index.html
@@ -1,121 +1,121 @@
terraf - Archive of Formal Proofs
by Emmanuel Gunther📧, Miguel Pagano🌐 and Pedro Sánchez Terraf🌐
May 06
\ No newline at end of file
diff --git a/web/authors/thiemann/index.html b/web/authors/thiemann/index.html
--- a/web/authors/thiemann/index.html
+++ b/web/authors/thiemann/index.html
@@ -1,384 +1,384 @@
thiemann - Archive of Formal Proofs
by Christian Sternagel📧 and René Thiemann🌐
Jun 14
\ No newline at end of file
diff --git a/web/authors/thommes/index.html b/web/authors/thommes/index.html
--- a/web/authors/thommes/index.html
+++ b/web/authors/thommes/index.html
@@ -1,95 +1,95 @@
thommes - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/tourret/index.html b/web/authors/tourret/index.html
--- a/web/authors/tourret/index.html
+++ b/web/authors/tourret/index.html
@@ -1,105 +1,105 @@
tourret - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/traut/index.html b/web/authors/traut/index.html
--- a/web/authors/traut/index.html
+++ b/web/authors/traut/index.html
@@ -1,95 +1,95 @@
traut - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/traytel/index.html b/web/authors/traytel/index.html
--- a/web/authors/traytel/index.html
+++ b/web/authors/traytel/index.html
@@ -1,261 +1,261 @@
traytel - Archive of Formal Proofs
by Dmitriy Traytel🌐
Nov 15
\ No newline at end of file
diff --git a/web/authors/tuong/index.html b/web/authors/tuong/index.html
--- a/web/authors/tuong/index.html
+++ b/web/authors/tuong/index.html
@@ -1,126 +1,126 @@
tuong - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/tverdyshev/index.html b/web/authors/tverdyshev/index.html
--- a/web/authors/tverdyshev/index.html
+++ b/web/authors/tverdyshev/index.html
@@ -1,98 +1,98 @@
tverdyshev - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/unruh/index.html b/web/authors/unruh/index.html
--- a/web/authors/unruh/index.html
+++ b/web/authors/unruh/index.html
@@ -1,117 +1,117 @@
unruh - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/urban/index.html b/web/authors/urban/index.html
--- a/web/authors/urban/index.html
+++ b/web/authors/urban/index.html
@@ -1,125 +1,125 @@
urban - Archive of Formal Proofs
by Chunhan Wu, Xingyuan Zhang and Christian Urban🌐
Aug 26
\ No newline at end of file
diff --git a/web/authors/verbeek/index.html b/web/authors/verbeek/index.html
--- a/web/authors/verbeek/index.html
+++ b/web/authors/verbeek/index.html
@@ -1,107 +1,107 @@
verbeek - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/voisin/index.html b/web/authors/voisin/index.html
--- a/web/authors/voisin/index.html
+++ b/web/authors/voisin/index.html
@@ -1,95 +1,95 @@
voisin - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/waldmann/index.html b/web/authors/waldmann/index.html
--- a/web/authors/waldmann/index.html
+++ b/web/authors/waldmann/index.html
@@ -1,114 +1,114 @@
waldmann - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/weber/index.html b/web/authors/weber/index.html
--- a/web/authors/weber/index.html
+++ b/web/authors/weber/index.html
@@ -1,126 +1,126 @@
weber - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/wimmer/index.html b/web/authors/wimmer/index.html
--- a/web/authors/wimmer/index.html
+++ b/web/authors/wimmer/index.html
@@ -1,172 +1,172 @@
wimmer - Archive of Formal Proofs
by Abderrahmane Feliachi📧, Burkhart Wolff📧 and Marie-Claude Gaudel📧
May 27
\ No newline at end of file
diff --git a/web/authors/wu/index.html b/web/authors/wu/index.html
--- a/web/authors/wu/index.html
+++ b/web/authors/wu/index.html
@@ -1,95 +1,95 @@
wu - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/yamada/index.html b/web/authors/yamada/index.html
--- a/web/authors/yamada/index.html
+++ b/web/authors/yamada/index.html
@@ -1,181 +1,181 @@
yamada - Archive of Formal Proofs
Aug 21
\ No newline at end of file
diff --git a/web/authors/ye/index.html b/web/authors/ye/index.html
--- a/web/authors/ye/index.html
+++ b/web/authors/ye/index.html
@@ -1,107 +1,107 @@
ye - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/zeyda/index.html b/web/authors/zeyda/index.html
--- a/web/authors/zeyda/index.html
+++ b/web/authors/zeyda/index.html
@@ -1,107 +1,107 @@
zeyda - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/zhan/index.html b/web/authors/zhan/index.html
--- a/web/authors/zhan/index.html
+++ b/web/authors/zhan/index.html
@@ -1,121 +1,121 @@
zhan - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/authors/zhangx/index.html b/web/authors/zhangx/index.html
--- a/web/authors/zhangx/index.html
+++ b/web/authors/zhangx/index.html
@@ -1,104 +1,104 @@
zhangx - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/entries/ADS_Functor.html b/web/entries/ADS_Functor.html
--- a/web/entries/ADS_Functor.html
+++ b/web/entries/ADS_Functor.html
@@ -1,186 +1,186 @@
Authenticated Data Structures As Functors - Archive of Formal Proofs
Authenticated data structures allow several systems to convince each
other that they are referring to the same data structure, even if each
of them knows only a part of the data structure. Using inclusion
proofs, knowledgeable systems can selectively share their knowledge
with other systems and the latter can verify the authenticity of what
is being shared. In this article, we show how to modularly define
authenticated data structures, their inclusion proofs, and operations
thereon as datatypes in Isabelle/HOL, using a shallow embedding.
Modularity allows us to construct complicated trees from reusable
building blocks, which we call Merkle functors. Merkle functors
include sums, products, and function spaces and are closed under
composition and least fixpoints. As a practical application, we model
the hierarchical transactions of Canton, a
practical interoperability protocol for distributed ledgers, as
authenticated data structures. This is a first step towards
formalizing the Canton protocol and verifying its integrity and
security guarantees.
@article{ADS_Functor-AFP,
author = {lochbihler and maric},
title = {Authenticated Data Structures As Functors},
journal = {Archive of Formal Proofs},
month = April,
year = 2020,
note = {\url{https://isa-afp.org/entries/ADS_Functor.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/AVL-Trees.html b/web/entries/AVL-Trees.html
--- a/web/entries/AVL-Trees.html
+++ b/web/entries/AVL-Trees.html
@@ -1,155 +1,155 @@
AVL Trees - Archive of Formal Proofs
Two formalizations of AVL trees with room for extensions. The first formalization is monolithic and shorter, the second one in two stages, longer and a bit simpler. The final implementation is the same. If you are interested in developing this further, please contact gerwin.klein@nicta.com.au.
\ No newline at end of file
diff --git a/web/entries/Abs_Int_ITP2012.html b/web/entries/Abs_Int_ITP2012.html
--- a/web/entries/Abs_Int_ITP2012.html
+++ b/web/entries/Abs_Int_ITP2012.html
@@ -1,186 +1,186 @@
Abstract Interpretation of Annotated Commands - Archive of Formal Proofs
This is the Isabelle formalization of the material described in the
eponymous ITP 2012 paper.
It develops a generic abstract interpreter for a
while-language, including widening and narrowing. The collecting
semantics and the abstract interpreter operate on annotated commands:
the program is represented as a syntax tree with the semantic
information directly embedded, without auxiliary labels. The aim of
the formalization is simplicity, not efficiency or
precision. This is motivated by the inclusion of the material in a
theorem prover based course on semantics. A similar (but more
polished) development is covered in the book
Concrete Semantics.
These theories describe Hoare logics for a number of imperative language constructs, from while-loops to mutually recursive procedures. Both partial and total correctness are treated. In particular a proof system for total correctness of recursive procedures in the presence of unbounded nondeterminism is presented.
We present an Isabelle formalization of abstract rewriting (see, e.g.,
the book by Baader and Nipkow). First, we define standard relations like
joinability, meetability, conversion, etc. Then, we
formalize important properties of abstract rewrite systems, e.g.,
confluence and strong normalization. Our main concern is on strong
normalization, since this formalization is the basis of CeTA (which is
mainly about strong normalization of term rewrite systems). Hence lemmas
involving strong normalization constitute by far the biggest part of this
theory. One of those is Newman's lemma.
[2010-09-17] Added theories defining several (ordered)
semirings related to strong normalization and giving some standard
instances.
[2013-10-16] Generalized delta-orders from rationals to Archimedean fields.
A formalization of an abstract property of possibly infinite derivation trees (modeled by a codatatype), representing the core of a proof (in Beth/Hintikka style) of the first-order logic completeness theorem, independent of the concrete syntax or inference rules. This work is described in detail in the IJCAR 2014 publication by the authors.
The abstract proof can be instantiated for a wide range of Gentzen and tableau systems as well as various flavors of FOL---e.g., with or without predicates, equality, or sorts. Here, we give only a toy example instantiation with classical propositional logic. A more serious instance---many-sorted FOL with equality---is described elsewhere [Blanchette and Popescu, FroCoS 2013].
A formalized coinductive account of the abstract development of
Brotherston, Gorogiannis, and Petersen [APLAS 2012], in a slightly
more general form since we work with arbitrary infinite proofs, which
may be acyclic. This work is described in detail in an article by the
authors, published in 2017 in the Journal of Automated
Reasoning. The abstract proof can be instantiated for
various formalisms, including first-order logic with inductive
predicates.
\ No newline at end of file
diff --git a/web/entries/Ackermanns_not_PR.html b/web/entries/Ackermanns_not_PR.html
--- a/web/entries/Ackermanns_not_PR.html
+++ b/web/entries/Ackermanns_not_PR.html
@@ -1,168 +1,168 @@
Ackermann's Function Is Not Primitive Recursive - Archive of Formal Proofs
Ackermann's function is defined in the usual way and a number of
its elementary properties are proved. Then, the primitive recursive
functions are defined inductively: as a predicate on the functions
that map lists of numbers to numbers. It is shown that every
primitive recursive function is strictly dominated by Ackermann's
function. The formalisation follows an earlier one by Nora Szasz.
We give a formalization of affine forms as abstract representations of zonotopes.
We provide affine operations as well as overapproximations of some non-affine operations like multiplication and division.
Expressions involving those operations can automatically be turned into (executable) functions approximating the original
expression in affine arithmetic.
BSD License
Change history
[2015-01-31] added algorithm for zonotope/hyperplane intersection
[2017-09-20] linear approximations for all symbols from the floatarith data
type
We develop algebras for aggregation and minimisation for weight
matrices and for edge weights in graphs. We verify the correctness of
Prim's and Kruskal's minimum spanning tree algorithms based
on these algebras. We also show numerous instances of these algebras
based on linearly ordered commutative semigroups.
\ No newline at end of file
diff --git a/web/entries/Akra_Bazzi.html b/web/entries/Akra_Bazzi.html
--- a/web/entries/Akra_Bazzi.html
+++ b/web/entries/Akra_Bazzi.html
@@ -1,188 +1,188 @@
The Akra-Bazzi theorem and the Master theorem - Archive of Formal Proofs
This article contains a formalisation of the Akra-Bazzi method
based on a proof by Leighton. It is a generalisation of the well-known
Master Theorem for analysing the complexity of Divide & Conquer algorithms.
We also include a generalised version of the Master theorem based on the
Akra-Bazzi theorem, which is easier to apply than the Akra-Bazzi theorem
itself.
Some proof methods that facilitate applying the Master theorem are also
included. For a more detailed explanation of the formalisation and the
proof methods, see the accompanying paper (publication forthcoming).
\ No newline at end of file
diff --git a/web/entries/Algebraic_Numbers.html b/web/entries/Algebraic_Numbers.html
--- a/web/entries/Algebraic_Numbers.html
+++ b/web/entries/Algebraic_Numbers.html
@@ -1,196 +1,196 @@
Algebraic Numbers in Isabelle/HOL - Archive of Formal Proofs
Based on existing libraries for matrices, factorization of rational polynomials, and Sturm's theorem, we formalized algebraic numbers in Isabelle/HOL. Our development serves as an implementation for real and complex numbers, and it admits to compute roots and completely factorize real and complex polynomials, provided that all coefficients are rational numbers. Moreover, we provide two implementations to display algebraic numbers, an injective and expensive one, or a faster but approximative version.
To this end, we mechanized several results on resultants, which also required us to prove that polynomials over a unique factorization domain form again a unique factorization domain.
BSD License
Change history
[2016-01-29] Split off Polynomial Interpolation and Polynomial Factorization
[2017-04-16] Use certified Berlekamp-Zassenhaus factorization, use subresultant algorithm for computing resultants, improved bisection algorithm
A framework for the analysis of the amortized complexity of functional
data structures is formalized in Isabelle/HOL and applied to a number of
standard examples and to the following non-trivial ones: skew heaps,
splay trees, splay heaps and pairing heaps.
A preliminary version of this work (without pairing heaps) is described
in a paper
published in the proceedings of the conference on Interactive
Theorem Proving ITP 2015. An extended version of this publication
is available here.
BSD License
Change history
[2015-03-17] Added pairing heaps by Hauke Brinkop.
[2016-07-12] Moved splay heaps from here to Splay_Tree
[2016-07-14] Moved pairing heaps from here to the new Pairing_Heap
\ No newline at end of file
diff --git a/web/entries/AnselmGod.html b/web/entries/AnselmGod.html
--- a/web/entries/AnselmGod.html
+++ b/web/entries/AnselmGod.html
@@ -1,170 +1,170 @@
Anselm's God in Isabelle/HOL - Archive of Formal Proofs
Paul Oppenheimer and Edward Zalta's formalisation of
Anselm's ontological argument for the existence of God is
automated by embedding a free logic for definite descriptions within
Isabelle/HOL.
Applicative functors augment computations with effects by lifting function application to types which model the effects. As the structure of the computation cannot depend on the effects, applicative expressions can be analysed statically. This allows us to lift universally quantified equations to the effectful types, as observed by Hinze. Thus, equational reasoning over effectful computations can be reduced to pure types.
This entry provides a package for registering applicative functors and two proof methods for lifting of equations over applicative functors. The first method normalises applicative expressions according to the laws of applicative functors. This way, equations whose two sides contain the same list of variables can be lifted to every applicative functor.
To lift larger classes of equations, the second method exploits a number of additional properties (e.g., commutativity of effects) provided the properties have been declared for the concrete applicative functor at hand upon registration.
We declare several types from the Isabelle library as applicative functors and illustrate the use of the methods with two examples: the lifting of the arithmetic type class hierarchy to streams and the verification of a relabelling function on binary trees. We also formalise and verify the normalisation algorithm used by the first proof method.
BSD License
Change history
[2016-03-03] added formalisation of lifting with combinators
We present the first formal verification of approximation algorithms
for NP-complete optimization problems: vertex cover, set cover, independent set,
center selection, load balancing, and bin packing. The proofs correct incompletenesses
in existing proofs and improve the approximation ratio in one case.
A detailed description of our work (excluding center selection) has been published in the proceedings of
IJCAR 2020.
\ No newline at end of file
diff --git a/web/entries/ArrowImpossibilityGS.html b/web/entries/ArrowImpossibilityGS.html
--- a/web/entries/ArrowImpossibilityGS.html
+++ b/web/entries/ArrowImpossibilityGS.html
@@ -1,167 +1,167 @@
Arrow and Gibbard-Satterthwaite - Archive of Formal Proofs
This article formalizes two proofs of Arrow's impossibility theorem due to Geanakoplos and derives the Gibbard-Satterthwaite theorem as a corollary. One formalization is based on utility functions, the other one on strict partial orders.
\ No newline at end of file
diff --git a/web/entries/BNF_CC.html b/web/entries/BNF_CC.html
--- a/web/entries/BNF_CC.html
+++ b/web/entries/BNF_CC.html
@@ -1,190 +1,190 @@
Bounded Natural Functors with Covariance and Contravariance - Archive of Formal Proofs
Bounded Natural Functors With Covariance and Contravariance
Bounded natural functors (BNFs) provide a modular framework for the
construction of (co)datatypes in higher-order logic. Their functorial
operations, the mapper and relator, are restricted to a subset of the
parameters, namely those where recursion can take place. For certain
applications, such as free theorems, data refinement, quotients, and
generalised rewriting, it is desirable that these operations do not
ignore the other parameters. In this article, we formalise the
generalisation BNFCC that extends the mapper
and relator to covariant and contravariant parameters. We show that
BNFCCs are closed under
functor composition and least and greatest fixpoints,
subtypes inherit the BNFCC structure
under conditions that generalise those for the BNF case,
and
BNFCCs preserve
quotients under mild conditions.
These proofs
are carried out for abstract BNFCCs similar to
the AFP entry BNF Operations. In addition, we apply the
BNFCC theory to several concrete functors.
\ No newline at end of file
diff --git a/web/entries/BNF_Operations.html b/web/entries/BNF_Operations.html
--- a/web/entries/BNF_Operations.html
+++ b/web/entries/BNF_Operations.html
@@ -1,182 +1,182 @@
Operations on Bounded Natural Functors - Archive of Formal Proofs
This entry formalizes the closure property of bounded natural functors
(BNFs) under seven operations. These operations and the corresponding
proofs constitute the core of Isabelle's (co)datatype package. To
be close to the implemented tactics, the proofs are deliberately
formulated as detailed apply scripts. The (co)datatypes together with
(co)induction principles and (co)recursors are byproducts of the
fixpoint operations LFP and GFP. Composition of BNFs is subdivided
into four simpler operations: Compose, Kill, Lift, and Permute. The
N2M operation provides mutual (co)induction principles and
(co)recursors for nested (co)datatypes.
We formalize in Isabelle/HOL a result
due to S. Banach and H. Steinhaus known as
the Banach-Steinhaus theorem or Uniform boundedness principle: a
pointwise-bounded family of continuous linear operators from a Banach
space to a normed space is uniformly bounded. Our approach is an
adaptation to Isabelle/HOL of a proof due to A. Sokal.
\ No newline at end of file
diff --git a/web/entries/Berlekamp_Zassenhaus.html b/web/entries/Berlekamp_Zassenhaus.html
--- a/web/entries/Berlekamp_Zassenhaus.html
+++ b/web/entries/Berlekamp_Zassenhaus.html
@@ -1,218 +1,218 @@
The Factorization Algorithm of Berlekamp and Zassenhaus - Archive of Formal Proofs
The Factorization Algorithm of Berlekamp and Zassenhaus
We formalize the Berlekamp-Zassenhaus algorithm for factoring
square-free integer polynomials in Isabelle/HOL. We further adapt an
existing formalization of Yun’s square-free factorization algorithm to
integer polynomials, and thus provide an efficient and certified
factorization algorithm for arbitrary univariate polynomials.
The algorithm first performs a factorization in the prime field GF(p) and
then performs computations in the integer ring modulo p^k, where both
p and k are determined at runtime. Since a natural modeling of these
structures via dependent types is not possible in Isabelle/HOL, we
formalize the whole algorithm using Isabelle’s recent addition of
local type definitions.
Through experiments we verify that our algorithm factors polynomials of degree
100 within seconds.
Bernoulli numbers were first discovered in the closed-form
expansion of the sum 1^m + 2^m + … + n^m
for a fixed m and appear in many other places. This entry provides
three different definitions for them: a recursive one, an explicit
one, and one through their exponential generating function.
In addition, we prove some basic facts, e.g. their relation
to sums of powers of integers and that all odd Bernoulli numbers
except the first are zero, and some advanced facts like their
relationship to the Riemann zeta function on positive even
integers.
We also prove the correctness of the
Akiyama–Tanigawa algorithm for computing Bernoulli numbers
with reasonable efficiency, and we define the periodic Bernoulli
polynomials (which appear e.g. in the Euler–MacLaurin
summation formula and the expansion of the log-Gamma function) and
prove their basic properties.
Bertrand's postulate is an early result on the
distribution of prime numbers: For every positive integer n, there
exists a prime number that lies strictly between n and 2n.
The proof is ported from John Harrison's formalisation
in HOL Light. It proceeds by first showing that the property is true
for all n greater than or equal to 600 and then showing that it also
holds for all n below 600 by case distinction.
Priority queues are an important data structure and efficient implementations of them are crucial. We implement a functional variant of binomial queues in Isabelle/HOL and show its functional correctness. A verification against an abstract reference specification of priority queues has also been attempted, but could not be achieved to the full extent.
This entry provides executable checkers for the following properties of
boolean expressions: satisfiability, tautology and equivalence. Internally,
the checkers operate on binary decision trees and are reasonably efficient
(for purely functional algorithms).
BSD License
Change history
[2015-09-23] Salomon Sickert added an interface that does not require the usage of the Boolean formula datatype. Furthermore the general Mapping type is used instead of an association list.
\ No newline at end of file
diff --git a/web/entries/Buffons_Needle.html b/web/entries/Buffons_Needle.html
--- a/web/entries/Buffons_Needle.html
+++ b/web/entries/Buffons_Needle.html
@@ -1,164 +1,164 @@
Buffon's Needle Problem - Archive of Formal Proofs
In the 18th century, Georges-Louis Leclerc, Comte de Buffon posed and
later solved the following problem, which is often called the first
problem ever solved in geometric probability: Given a floor divided
into vertical strips of the same width, what is the probability that a
needle thrown onto the floor randomly will cross two strips? This
entry formally defines the problem in the case where the needle's
position is chosen uniformly at random in a single strip around the
origin (which is equivalent to larger arrangements due to symmetry).
It then provides proofs of the simple solution in the case where the
needle's length is no greater than the width of the strips and
the more complicated solution in the opposite case.
\ No newline at end of file
diff --git a/web/entries/CAVA_LTL_Modelchecker.html b/web/entries/CAVA_LTL_Modelchecker.html
--- a/web/entries/CAVA_LTL_Modelchecker.html
+++ b/web/entries/CAVA_LTL_Modelchecker.html
@@ -1,227 +1,227 @@
A Fully Verified Executable LTL Model Checker - Archive of Formal Proofs
We present an LTL model checker whose code has been completely verified
using the Isabelle theorem prover. The checker consists of over 4000
lines of ML code. The code is produced using the Isabelle Refinement
Framework, which allows us to split its correctness proof into (1) the
proof of an abstract version of the checker, consisting of a few hundred
lines of ``formalized pseudocode'', and (2) a verified refinement step
in which mathematical sets and other abstract structures are replaced by
implementations of efficient structures like red-black trees and
functional arrays. This leads to a checker that,
while still slower than unverified checkers, can already be used as a
trusted reference implementation against which advanced implementations
can be tested.
An early version of this model checker is described in the
CAV 2013 paper
with the same title.
\ No newline at end of file
diff --git a/web/entries/CISC-Kernel.html b/web/entries/CISC-Kernel.html
--- a/web/entries/CISC-Kernel.html
+++ b/web/entries/CISC-Kernel.html
@@ -1,192 +1,192 @@
Formal Specification of a Generic Separation Kernel - Archive of Formal Proofs
Formal Specification of a Generic Separation Kernel
Intransitive noninterference has been a widely studied topic in the last
few decades. Several well-established methodologies apply interactive
theorem proving to formulate a noninterference theorem over abstract
academic models. In joint work with several industrial and academic partners
throughout Europe, we are helping in the certification process of PikeOS, an
industrial separation kernel developed at SYSGO. In this process,
established theories could not be applied. We present a new generic model of
separation kernels and a new theory of intransitive noninterference. The
model is rich in detail, making it suitable for formal verification of
realistic and industrial systems such as PikeOS. Using a refinement-based
theorem proving approach, we ensure that proofs remain manageable.
This document corresponds to the deliverable D31.1 of the EURO-MILS
Project http://www.euromils.eu.
\ No newline at end of file
diff --git a/web/entries/CRDT.html b/web/entries/CRDT.html
--- a/web/entries/CRDT.html
+++ b/web/entries/CRDT.html
@@ -1,176 +1,176 @@
A framework for establishing Strong Eventual Consistency for Conflict-free Replicated Datatypes - Archive of Formal Proofs
A Framework for Establishing Strong Eventual Consistency for Conflict-Free Replicated Datatypes
In this work, we focus on the correctness of Conflict-free Replicated
Data Types (CRDTs), a class of algorithm that provides strong eventual
consistency guarantees for replicated data. We develop a modular and
reusable framework for verifying the correctness of CRDT algorithms.
We avoid correctness issues that have dogged previous mechanised
proofs in this area by including a network model in our formalisation,
and proving that our theorems hold in all possible network behaviours.
Our axiomatic network model is a standard abstraction that accurately
reflects the behaviour of real-world computer networks. Moreover, we
identify an abstract convergence theorem, a property of order
relations, which provides a formal definition of strong eventual
consistency. We then obtain the first machine-checked correctness
theorems for three concrete CRDTs: the Replicated Growable Array, the
Observed-Remove Set, and an Increment-Decrement Counter.
\ No newline at end of file
diff --git a/web/entries/CYK.html b/web/entries/CYK.html
--- a/web/entries/CYK.html
+++ b/web/entries/CYK.html
@@ -1,167 +1,167 @@
A formalisation of the Cocke-Younger-Kasami algorithm - Archive of Formal Proofs
A Formalisation of the Cocke-Younger-Kasami Algorithm
The theory provides a formalisation of the Cocke-Younger-Kasami
algorithm (CYK for short), an approach to solving the word problem
for context-free languages. CYK decides if a word is in the
languages generated by a context-free grammar in Chomsky normal form.
The formalized algorithm is executable.
\ No newline at end of file
diff --git a/web/entries/Cartan_FP.html b/web/entries/Cartan_FP.html
--- a/web/entries/Cartan_FP.html
+++ b/web/entries/Cartan_FP.html
@@ -1,171 +1,171 @@
The Cartan Fixed Point Theorems - Archive of Formal Proofs
The Cartan fixed point theorems concern the group of holomorphic
automorphisms on a connected open set of ℂ^n. Ciolli et al.
have formalised the one-dimensional case of these theorems in HOL
Light. This entry contains their proofs, ported to Isabelle/HOL. Thus
it addresses the authors' remark that "it would be important to write
a formal proof in a language that can be read by both humans and
machines".
In this work, we define the Catalan numbers C_n
and prove several equivalent definitions (including some closed-form
formulae). We also show one of their applications (counting the number
of binary trees of size n), prove the asymptotic growth
approximation C_n ∼ 4^n / (√π · n^1.5), and provide reasonably efficient executable
code to compute them.
The derivation of the closed-form
formulae uses algebraic manipulations of the ordinary generating
function of the Catalan numbers, and the asymptotic approximation is
then done using generalised binomial coefficients and the Gamma
function. Thanks to these highly non-elementary mathematical tools,
the proofs are very short and simple.
\ No newline at end of file
diff --git a/web/entries/Chandy_Lamport.html b/web/entries/Chandy_Lamport.html
--- a/web/entries/Chandy_Lamport.html
+++ b/web/entries/Chandy_Lamport.html
@@ -1,180 +1,180 @@
A Formal Proof of The Chandy--Lamport Distributed Snapshot Algorithm - Archive of Formal Proofs
A Formal Proof of the Chandy--Lamport Distributed Snapshot Algorithm
We provide a suitable distributed system model and implementation of the
Chandy--Lamport distributed snapshot algorithm [ACM Transactions on
Computer Systems, 3, 63-75, 1985]. Our main result is a formal
termination and correctness proof of the Chandy--Lamport algorithm and
its use in stable property detection.
\ No newline at end of file
diff --git a/web/entries/Circus.html b/web/entries/Circus.html
--- a/web/entries/Circus.html
+++ b/web/entries/Circus.html
@@ -1,180 +1,180 @@
Isabelle/Circus - Archive of Formal Proofs
The Circus specification language combines elements for complex data and behavior specifications, using an integration of Z and CSP with a refinement calculus. Its semantics is based on Hoare and He's Unifying Theories of Programming (UTP). Isabelle/Circus is a formalization of the UTP and the Circus language in Isabelle/HOL. It contains proof rules and tactic support that allows for proofs of refinement for Circus processes (involving both data and behavioral aspects).
The Isabelle/Circus environment supports a syntax for the semantic definitions which is close to textbook presentations of Circus. This article contains an extended version of corresponding VSTTE Paper together with the complete formal development of its underlying commented theories.
BSD License
Change history
[2014-06-05] More polishing, shorter proofs, added Circus syntax, added Makarius Wenzel as contributor.
\ No newline at end of file
diff --git a/web/entries/Closest_Pair_Points.html b/web/entries/Closest_Pair_Points.html
--- a/web/entries/Closest_Pair_Points.html
+++ b/web/entries/Closest_Pair_Points.html
@@ -1,177 +1,177 @@
Closest Pair of Points Algorithms - Archive of Formal Proofs
This entry provides two related verified divide-and-conquer algorithms
solving the fundamental Closest Pair of Points
problem in Computational Geometry. Functional correctness and the
optimal running time of O(n log n) are
proved. Executable code is generated which is empirically competitive
with handwritten reference implementations.
BSD License
Change history
[2020-04-14] Incorporate Time_Monad of the AFP entry Root_Balanced_Tree.
\ No newline at end of file
diff --git a/web/entries/Coinductive.html b/web/entries/Coinductive.html
--- a/web/entries/Coinductive.html
+++ b/web/entries/Coinductive.html
@@ -1,183 +1,183 @@
Coinductive - Archive of Formal Proofs
This article collects formalisations of general-purpose coinductive data types and sets. Currently, it contains coinductive natural numbers, coinductive lists, i.e. lazy lists or streams, infinite streams, coinductive terminated lists, coinductive resumptions, a library of operations on coinductive lists, and a version of König's lemma as an application for coinductive lists. The initial theory was contributed by Paulson and Wenzel. Extensions and other coinductive formalisations of general interest are welcome.
\ No newline at end of file
diff --git a/web/entries/Coinductive_Languages.html b/web/entries/Coinductive_Languages.html
--- a/web/entries/Coinductive_Languages.html
+++ b/web/entries/Coinductive_Languages.html
@@ -1,185 +1,185 @@
A Codatatype of Formal Languages - Archive of Formal Proofs
We define formal languages as a codataype of infinite trees
branching over the alphabet. Each node in such a tree indicates whether the
path to this node constitutes a word inside or outside of the language. This
codatatype is isomorphic to the set of lists representation of languages,
but caters for definitions by corecursion and proofs by coinduction.
Regular operations on languages are then defined by primitive corecursion.
A difficulty arises here, since the standard definitions of concatenation and
iteration from the coalgebraic literature are not primitively
corecursive—they require guardedness up-to union/concatenation.
Without support for up-to corecursion, these operation must be defined as a
composition of primitive ones (and proved being equal to the standard
definitions). As an exercise in coinduction we also prove the axioms of
Kleene algebra for the defined regular operations.
Furthermore, a language for context-free grammars given by productions in
Greibach normal form and an initial nonterminal is constructed by primitive
corecursion, yielding an executable decision procedure for the word problem
without further ado.
This development provides an efficient, extensible, machine checked collections framework. The library adopts the concepts of interface, implementation and generic algorithm from object-oriented programming and implements them in Isabelle/HOL. The framework features the use of data refinement techniques to refine an abstract specification (using high-level concepts like sets) to a more concrete implementation (using collection datastructures, like red-black-trees). The code-generator of Isabelle/HOL can be used to generate efficient code.
BSD License
Change history
[2010-10-08] New Interfaces: OrderedSet, OrderedMap, List.
Fifo now implements list-interface: Function names changed: put/get --> enqueue/dequeue.
New Implementations: ArrayList, ArrayHashMap, ArrayHashSet, TrieMap, TrieSet.
Invariant-free datastructures: Invariant implicitly hidden in typedef.
Record-interfaces: All operations of an interface encapsulated as record.
Examples moved to examples subdirectory.
[2010-12-01] New Interfaces: Priority Queues, Annotated Lists. Implemented by finger trees, (skew) binomial queues.
[2011-10-10] SetSpec: Added operations: sng, isSng, bexists, size_abort, diff, filter, iterate_rule_insertP
MapSpec: Added operations: sng, isSng, iterate_rule_insertP, bexists, size, size_abort, restrict,
map_image_filter, map_value_image_filter
Some maintenance changes
[2012-04-25] New iterator foundation by Tuerk. Various maintenance changes.
\ No newline at end of file
diff --git a/web/entries/Comparison_Sort_Lower_Bound.html b/web/entries/Comparison_Sort_Lower_Bound.html
--- a/web/entries/Comparison_Sort_Lower_Bound.html
+++ b/web/entries/Comparison_Sort_Lower_Bound.html
@@ -1,183 +1,183 @@
Lower bound on comparison-based sorting algorithms - Archive of Formal Proofs
Lower Bound on Comparison-Based Sorting Algorithms
This article contains a formal proof of the well-known fact
that number of comparisons that a comparison-based sorting algorithm
needs to perform to sort a list of length n is at
least log₂(n!)
in the worst case, i.e. Ω(n log
n).
For this purpose, a shallow
embedding for comparison-based sorting algorithms is defined: a
sorting algorithm is a recursive datatype containing either a HOL
function or a query of a comparison oracle with a continuation
containing the remaining computation. This makes it possible to force
the algorithm to use only comparisons and to track the number of
comparisons made.
An exception compilation scheme that dynamically creates and removes exception handler entries on the stack. A formalization of an article of the same name by Hutton and Wright.
\ No newline at end of file
diff --git a/web/entries/Constructive_Cryptography_CM.html b/web/entries/Constructive_Cryptography_CM.html
--- a/web/entries/Constructive_Cryptography_CM.html
+++ b/web/entries/Constructive_Cryptography_CM.html
@@ -1,195 +1,195 @@
Constructive Cryptography in HOL: the Communication Modeling Aspect - Archive of Formal Proofs
Constructive Cryptography in HOL: The Communication Modeling Aspect
Constructive Cryptography (CC) [ICS
2011, TOSCA
2011, TCC
2016] introduces an abstract approach to composable security
statements that allows one to focus on a particular aspect of security
proofs at a time. Instead of proving the properties of concrete
systems, CC studies system classes, i.e., the shared behavior of
similar systems, and their transformations. Modeling of systems
communication plays a crucial role in composability and reusability of
security statements; yet, this aspect has not been studied in any of
the existing CC results. We extend our previous CC formalization
[Constructive_Cryptography,
CSF
2019] with a new semantic domain called Fused Resource
Templates (FRT) that abstracts over the systems communication patterns
in CC proofs. This widens the scope of cryptography proof
formalizations in the CryptHOL library [CryptHOL,
ESOP
2016, J
Cryptol 2020]. This formalization is described in Abstract
Modeling of Systems Communication in Constructive Cryptography using
CryptHOL.
This development provides a framework for container types like sets and maps such that generated code implements these containers with different (efficient) data structures.
Thanks to type classes and refinement during code generation, this light-weight approach can seamlessly replace Isabelle's default setup for code generation.
Heuristics automatically pick one of the available data structures depending on the type of elements to be stored, but users can also choose on their own.
The extensible design permits to add more implementations at any time.
To support arbitrary nesting of sets, we define a linear order on sets based on a linear order of the elements and provide efficient implementations.
It even allows to compare complements with non-complements.
BSD License
Change history
[2013-07-11] add pretty printing for sets (revision 7f3f52c5f5fa)
[2014-07-08] add support for going from partial functions to mappings (revision 7a6fc957e8ed)
[2018-03-05] add two application examples: depth-first search and 2SAT (revision e5e1a1da2411)
\ No newline at end of file
diff --git a/web/entries/Core_SC_DOM.html b/web/entries/Core_SC_DOM.html
--- a/web/entries/Core_SC_DOM.html
+++ b/web/entries/Core_SC_DOM.html
@@ -1,214 +1,214 @@
The Safely Composable DOM - Archive of Formal Proofs
In this AFP entry, we formalize the core of the Safely Composable
Document Object Model (SC DOM). The SC DOM improves the standard DOM
(as formalized in the AFP entry "Core DOM") by strengthening
the tree boundaries set by shadow roots: in the SC DOM, the shadow
root is a sub-class of the document class (instead of a base class).
These modifications also result in changes to some API methods (e.g.,
getOwnerDocument) to return the nearest shadow root rather than the
document root. As a result, many API methods that, when called on a
node inside a shadow tree, would previously ``break out''
and return or modify nodes that are possibly outside the shadow tree,
now stay within its boundaries. This change in behavior makes programs
that operate on shadow trees more predictable for the developer and
allows them to make more assumptions about other code accessing the
DOM.
\ No newline at end of file
diff --git a/web/entries/Cotangent_PFD_Formula.html b/web/entries/Cotangent_PFD_Formula.html
--- a/web/entries/Cotangent_PFD_Formula.html
+++ b/web/entries/Cotangent_PFD_Formula.html
@@ -1,159 +1,159 @@
A Proof from THE BOOK: The Partial Fraction Expansion of the Cotangent - Archive of Formal Proofs
A Proof from THE BOOK: The Partial Fraction Expansion of the Cotangent
In this article, I formalise a proof from THE
BOOK; namely a formula that was called ‘one of the most
beautiful formulas involving elementary functions’:
@article{Cotangent_PFD_Formula-AFP,
author = {eberl},
title = {A Proof from THE BOOK: The Partial Fraction Expansion of the Cotangent},
journal = {Archive of Formal Proofs},
month = March,
year = 2022,
note = {\url{https://isa-afp.org/entries/Cotangent_PFD_Formula.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/CryptHOL.html b/web/entries/CryptHOL.html
--- a/web/entries/CryptHOL.html
+++ b/web/entries/CryptHOL.html
@@ -1,189 +1,189 @@
CryptHOL - Archive of Formal Proofs
CryptHOL provides a framework for formalising cryptographic arguments
in Isabelle/HOL. It shallowly embeds a probabilistic functional
programming language in higher order logic. The language features
monadic sequencing, recursion, random sampling, failures and failure
handling, and black-box access to oracles. Oracles are probabilistic
functions which maintain hidden state between different invocations.
All operators are defined in the new semantic domain of
generative probabilistic values, a codatatype. We derive proof rules for
the operators and establish a connection with the theory of relational
parametricity. Thus, the resulting proofs are trustworthy and
comprehensible, and the framework is extensible and widely applicable.
The framework is used in the accompanying AFP entry "Game-based
Cryptography in HOL". There, we show-case our framework by formalizing
different game-based proofs from the literature. This formalisation
continues the work described in the author's ESOP 2016 paper.
\ No newline at end of file
diff --git a/web/entries/DFS_Framework.html b/web/entries/DFS_Framework.html
--- a/web/entries/DFS_Framework.html
+++ b/web/entries/DFS_Framework.html
@@ -1,195 +1,195 @@
A Framework for Verifying Depth-First Search Algorithms - Archive of Formal Proofs
A Framework for Verifying Depth-First Search Algorithms
This entry presents a framework for the modular verification of
DFS-based algorithms, which is described in our [CPP-2015] paper. It
provides a generic DFS algorithm framework, that can be parameterized
with user-defined actions on certain events (e.g. discovery of new
node). It comes with an extensible library of invariants, which can
be used to derive invariants of a specific parameterization. Using
refinement techniques, efficient implementations of the algorithms can
easily be derived. Here, the framework comes with templates for a
recursive and a tail-recursive implementation, and also with several
templates for implementing the data structures required by the DFS
algorithm. Finally, this entry contains a set of re-usable DFS-based
algorithms, which illustrate the application of the framework.
[CPP-2015] Peter Lammich, René Neumann: A Framework for Verifying
Depth-First Search Algorithms. CPP 2015: 137-146
\ No newline at end of file
diff --git a/web/entries/DOM_Components.html b/web/entries/DOM_Components.html
--- a/web/entries/DOM_Components.html
+++ b/web/entries/DOM_Components.html
@@ -1,183 +1,183 @@
A Formalization of Web Components - Archive of Formal Proofs
While the DOM with shadow trees provide the technical basis for
defining web components, the DOM standard neither defines the concept
of web components nor specifies the safety properties that web
components should guarantee. Consequently, the standard also does not
discuss how or even if the methods for modifying the DOM respect
component boundaries. In this AFP entry, we present a formally verified
model of web components and define safety properties which ensure that
different web components can only interact with each other using
well-defined interfaces. Moreover, our verification of the application
programming interface (API) of the DOM revealed numerous invariants
that implementations of the DOM API need to preserve to ensure the
integrity of components.
\ No newline at end of file
diff --git a/web/entries/Density_Compiler.html b/web/entries/Density_Compiler.html
--- a/web/entries/Density_Compiler.html
+++ b/web/entries/Density_Compiler.html
@@ -1,188 +1,188 @@
A Verified Compiler for Probability Density Functions - Archive of Formal Proofs
A Verified Compiler for Probability Density Functions
Bhat et al. [TACAS 2013] developed an inductive compiler that computes
density functions for probability spaces described by programs in a
probabilistic functional language. In this work, we implement such a
compiler for a modified version of this language within the theorem prover
Isabelle and give a formal proof of its soundness w.r.t. the semantics of
the source and target language. Together with Isabelle's code generation
for inductive predicates, this yields a fully verified, executable density
compiler. The proof is done in two steps: First, an abstract compiler
working with abstract functions modelled directly in the theorem prover's
logic is defined and proved sound. Then, this compiler is refined to a
concrete version that returns a target-language expression.
An article with the same title and authors is published in the proceedings
of ESOP 2015.
A detailed presentation of this work can be found in the first author's
master's thesis.
The paper "Compositional Verification and Refinement of Concurrent
Value-Dependent Noninterference" by Murray et al. (CSF 2016) presents
a compositional theory of refinement for a value-dependent
noninterference property, defined in (Murray, PLAS 2015), for
concurrent programs. This development formalises that refinement
theory, and demonstrates its application on some small examples.
\ No newline at end of file
diff --git a/web/entries/Dependent_SIFUM_Type_Systems.html b/web/entries/Dependent_SIFUM_Type_Systems.html
--- a/web/entries/Dependent_SIFUM_Type_Systems.html
+++ b/web/entries/Dependent_SIFUM_Type_Systems.html
@@ -1,187 +1,187 @@
A Dependent Security Type System for Concurrent Imperative Programs - Archive of Formal Proofs
A Dependent Security Type System for Concurrent Imperative Programs
The paper "Compositional Verification and Refinement of Concurrent
Value-Dependent Noninterference" by Murray et al. (CSF 2016) presents
a dependent security type system for compositionally verifying a
value-dependent noninterference property, defined in (Murray, PLAS
2015), for concurrent programs. This development formalises that
security definition, the type system and its soundness proof, and
demonstrates its application on some small examples. It was derived
from the SIFUM_Type_Systems AFP entry, by Sylvia Grewe, Heiko Mantel
and Daniel Schoepe, and whose structure it inherits.
\ No newline at end of file
diff --git a/web/entries/Descartes_Sign_Rule.html b/web/entries/Descartes_Sign_Rule.html
--- a/web/entries/Descartes_Sign_Rule.html
+++ b/web/entries/Descartes_Sign_Rule.html
@@ -1,174 +1,174 @@
Descartes' Rule of Signs - Archive of Formal Proofs
Descartes' Rule of Signs relates the number of positive real roots of a
polynomial with the number of sign changes in its coefficient sequence.
Our proof follows the simple inductive proof given by Rob Arthan, which was also
used by John Harrison in his HOL Light formalisation. We proved most of the
lemmas for arbitrary linearly-ordered integrity domains (e.g. integers,
rationals, reals); the main result, however, requires the intermediate value
theorem and was therefore only proven for real polynomials.
\ No newline at end of file
diff --git a/web/entries/Differential_Game_Logic.html b/web/entries/Differential_Game_Logic.html
--- a/web/entries/Differential_Game_Logic.html
+++ b/web/entries/Differential_Game_Logic.html
@@ -1,186 +1,186 @@
Differential Game Logic - Archive of Formal Proofs
This formalization provides differential game logic (dGL), a logic for
proving properties of hybrid games. In addition to the syntax and
semantics, it formalizes a uniform substitution calculus for dGL.
Church's uniform substitutions substitute a term or formula for a
function or predicate symbol everywhere. The uniform substitutions for
dGL also substitute hybrid games for a game symbol everywhere. We
prove soundness of one-pass uniform substitutions and the axioms of
differential game logic with respect to their denotational semantics.
One-pass uniform substitutions are faster by postponing
soundness-critical admissibility checks with a linear pass homomorphic
application and regain soundness by a variable condition at the
replacements. The formalization is based on prior non-mechanized
soundness proofs for dGL.
\ No newline at end of file
diff --git a/web/entries/Dirichlet_L.html b/web/entries/Dirichlet_L.html
--- a/web/entries/Dirichlet_L.html
+++ b/web/entries/Dirichlet_L.html
@@ -1,186 +1,186 @@
Dirichlet L-Functions and Dirichlet's Theorem - Archive of Formal Proofs
This article provides a formalisation of Dirichlet characters
and Dirichlet L-functions including proofs of
their basic properties – most notably their analyticity,
their areas of convergence, and their non-vanishing for ℜ(s)
≥ 1. All of this is built in a very high-level style using
Dirichlet series. The proof of the non-vanishing follows a very short
and elegant proof by Newman, which we attempt to reproduce faithfully
in a similar level of abstraction in Isabelle.
This
also leads to a relatively short proof of Dirichlet’s Theorem, which
states that, if h and n are
coprime, there are infinitely many primes p with
p ≡ h (mod
n).
\ No newline at end of file
diff --git a/web/entries/Dirichlet_Series.html b/web/entries/Dirichlet_Series.html
--- a/web/entries/Dirichlet_Series.html
+++ b/web/entries/Dirichlet_Series.html
@@ -1,201 +1,201 @@
Dirichlet Series - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/entries/DiscretePricing.html b/web/entries/DiscretePricing.html
--- a/web/entries/DiscretePricing.html
+++ b/web/entries/DiscretePricing.html
@@ -1,165 +1,165 @@
Pricing in discrete financial models - Archive of Formal Proofs
We have formalized the computation of fair prices for derivative
products in discrete financial models. As an application, we derive a
way to compute fair prices of derivative products in the
Cox-Ross-Rubinstein model of a financial market, thus completing the
work that was presented in this paper.
This article formalizes the amortized analysis of dynamic tables
parameterized with their minimal and maximal load factors and the
expansion and contraction factors.
\ No newline at end of file
diff --git a/web/entries/E_Transcendental.html b/web/entries/E_Transcendental.html
--- a/web/entries/E_Transcendental.html
+++ b/web/entries/E_Transcendental.html
@@ -1,159 +1,159 @@
The Transcendence of e - Archive of Formal Proofs
This work contains a proof that Euler's number e is transcendental. The
proof follows the standard approach of assuming that e is algebraic and
then using a specific integer polynomial to derive two inconsistent bounds,
leading to a contradiction.
This kind of approach can be found in
many different sources; this formalisation mostly follows a PlanetMath article by Roger Lipsett.
\ No newline at end of file
diff --git a/web/entries/Echelon_Form.html b/web/entries/Echelon_Form.html
--- a/web/entries/Echelon_Form.html
+++ b/web/entries/Echelon_Form.html
@@ -1,182 +1,182 @@
Echelon Form - Archive of Formal Proofs
We formalize an algorithm to compute the Echelon Form of a matrix. We have proved its existence over Bézout domains and made it executable over Euclidean domains, such as the integer ring and the univariate polynomials over a field. This allows us to compute determinants, inverses and characteristic polynomials of matrices. The work is based on the HOL-Multivariate Analysis library, and on both the Gauss-Jordan and Cayley-Hamilton AFP entries. As a by-product, some algebraic structures have been implemented (principal ideal domains, Bézout domains...). The algorithm has been refined to immutable arrays and code can be generated to functional languages as well.
\ No newline at end of file
diff --git a/web/entries/Ergodic_Theory.html b/web/entries/Ergodic_Theory.html
--- a/web/entries/Ergodic_Theory.html
+++ b/web/entries/Ergodic_Theory.html
@@ -1,180 +1,180 @@
Ergodic Theory - Archive of Formal Proofs
Ergodic theory is the branch of mathematics that studies the behaviour of measure preserving transformations, in finite or infinite measure. It interacts both with probability theory (mainly through measure theory) and with geometry as a lot of interesting examples are from geometric origin. We implement the first definitions and theorems of ergodic theory, including notably Poincaré recurrence theorem for finite measure preserving systems (together with the notion of conservativity in general), induced maps, Kac's theorem, Birkhoff theorem (arguably the most important theorem in ergodic theory), and variations around it such as conservativity of the corresponding skew product, or Atkinson lemma.
\ No newline at end of file
diff --git a/web/entries/Error_Function.html b/web/entries/Error_Function.html
--- a/web/entries/Error_Function.html
+++ b/web/entries/Error_Function.html
@@ -1,172 +1,172 @@
The Error Function - Archive of Formal Proofs
This entry provides the definitions and basic properties of
the complex and real error function erf and the complementary error
function erfc. Additionally, it gives their full asymptotic
expansions.
\ No newline at end of file
diff --git a/web/entries/Euler_MacLaurin.html b/web/entries/Euler_MacLaurin.html
--- a/web/entries/Euler_MacLaurin.html
+++ b/web/entries/Euler_MacLaurin.html
@@ -1,180 +1,180 @@
The Euler–MacLaurin Formula - Archive of Formal Proofs
The Euler-MacLaurin formula relates the value of a
discrete sum to that of the corresponding integral in terms of the
derivatives at the borders of the summation and a remainder term.
Since the remainder term is often very small as the summation bounds
grow, this can be used to compute asymptotic expansions for
sums.
This entry contains a proof of this formula
for functions from the reals to an arbitrary Banach space. Two
variants of the formula are given: the standard textbook version and a
variant outlined in Concrete Mathematics that is
more useful for deriving asymptotic estimates.
As
example applications, we use that formula to derive the full
asymptotic expansion of the harmonic numbers and the sum of inverse
squares.
\ No newline at end of file
diff --git a/web/entries/Factor_Algebraic_Polynomial.html b/web/entries/Factor_Algebraic_Polynomial.html
--- a/web/entries/Factor_Algebraic_Polynomial.html
+++ b/web/entries/Factor_Algebraic_Polynomial.html
@@ -1,193 +1,193 @@
Factorization of Polynomials with Algebraic Coefficients - Archive of Formal Proofs
Factorization of Polynomials With Algebraic Coefficients
The AFP already contains a verified implementation of algebraic
numbers. However, it has a severe limitation in its factorization
algorithm of real and complex polynomials: the factorization is only
guaranteed to succeed if the coefficients of the polynomial are
rational numbers. In this work, we verify an algorithm to factor all
real and complex polynomials whose coefficients are algebraic. The
existence of such an algorithm proves in a constructive way that the
set of complex algebraic numbers is algebraically closed. Internally,
the algorithm is based on resultants of multivariate polynomials and
an approximation algorithm using interval arithmetic.
\ No newline at end of file
diff --git a/web/entries/Farkas.html b/web/entries/Farkas.html
--- a/web/entries/Farkas.html
+++ b/web/entries/Farkas.html
@@ -1,182 +1,182 @@
Farkas' Lemma and Motzkin's Transposition Theorem - Archive of Formal Proofs
We formalize a proof of Motzkin's transposition theorem and
Farkas' lemma in Isabelle/HOL. Our proof is based on the
formalization of the simplex algorithm which, given a set of linear
constraints, either returns a satisfying assignment to the problem or
detects unsatisfiability. By reusing facts about the simplex algorithm
we show that a set of linear constraints is unsatisfiable if and only
if there is a linear combination of the constraints which evaluates to
a trivially unsatisfiable inequality.
\ No newline at end of file
diff --git a/web/entries/Featherweight_OCL.html b/web/entries/Featherweight_OCL.html
--- a/web/entries/Featherweight_OCL.html
+++ b/web/entries/Featherweight_OCL.html
@@ -1,194 +1,194 @@
Featherweight OCL: A Proposal for a Machine-Checked Formal Semantics for OCL 2.5 - Archive of Formal Proofs
Featherweight OCL: A Proposal for a Machine-Checked Formal Semantics for OCL 2.5
The Unified Modeling Language (UML) is one of the few
modeling languages that is widely used in industry. While
UML is mostly known as diagrammatic modeling language
(e.g., visualizing class models), it is complemented by a
textual language, called Object Constraint Language
(OCL). The current version of OCL is based on a four-valued
logic that turns UML into a formal language. Any type
comprises the elements "invalid" and "null" which are
propagated as strict and non-strict, respectively.
Unfortunately, the former semi-formal semantics of this
specification language, captured in the "Annex A" of the
OCL standard, leads to different interpretations of corner
cases. We formalize the core of OCL: denotational
definitions, a logical calculus and operational rules that
allow for the execution of OCL expressions by a mixture of
term rewriting and code compilation. Our formalization
reveals several inconsistencies and contradictions in the
current version of the OCL standard. Overall, this document
is intended to provide the basis for a machine-checked text
"Annex A" of the OCL standard targeting at tool
implementors.
\ No newline at end of file
diff --git a/web/entries/FinFun.html b/web/entries/FinFun.html
--- a/web/entries/FinFun.html
+++ b/web/entries/FinFun.html
@@ -1,169 +1,169 @@
Code Generation for Functions as Data - Archive of Formal Proofs
FinFuns are total functions that are constant except for a finite set of points, i.e. a generalisation of finite maps. They are formalised as a new type in Isabelle/HOL such that the code generator can handle equality tests and quantification on FinFuns. On the code output level, FinFuns are explicitly represented by constant functions and pointwise updates, similarly to associative lists. Inside the logic, they behave like ordinary functions with extensionality. Via the update/constant pattern, a recursion combinator and an induction rule for FinFuns allow for defining and reasoning about operators on FinFun that are also executable.
\ No newline at end of file
diff --git a/web/entries/Finite_Automata_HF.html b/web/entries/Finite_Automata_HF.html
--- a/web/entries/Finite_Automata_HF.html
+++ b/web/entries/Finite_Automata_HF.html
@@ -1,173 +1,173 @@
Finite Automata in Hereditarily Finite Set Theory - Archive of Formal Proofs
Finite Automata, both deterministic and non-deterministic, for regular languages.
The Myhill-Nerode Theorem. Closure under intersection, concatenation, etc.
Regular expressions define regular languages. Closure under reversal;
the powerset construction mapping NFAs to DFAs. Left and right languages; minimal DFAs.
Brzozowski's minimization algorithm. Uniqueness up to isomorphism of minimal DFAs.
\ No newline at end of file
diff --git a/web/entries/Finitely_Generated_Abelian_Groups.html b/web/entries/Finitely_Generated_Abelian_Groups.html
--- a/web/entries/Finitely_Generated_Abelian_Groups.html
+++ b/web/entries/Finitely_Generated_Abelian_Groups.html
@@ -1,186 +1,186 @@
Finitely Generated Abelian Groups - Archive of Formal Proofs
This article deals with the formalisation of some group-theoretic
results including the fundamental theorem of finitely generated
abelian groups characterising the structure of these groups as a
uniquely determined product of cyclic groups. Both the invariant
factor decomposition and the primary decomposition are covered.
Additional work includes results about the direct product, the
internal direct product and more group-theoretic lemmas.
\ No newline at end of file
diff --git a/web/entries/Fishburn_Impossibility.html b/web/entries/Fishburn_Impossibility.html
--- a/web/entries/Fishburn_Impossibility.html
+++ b/web/entries/Fishburn_Impossibility.html
@@ -1,178 +1,178 @@
The Incompatibility of Fishburn-Strategyproofness and Pareto-Efficiency - Archive of Formal Proofs
The Incompatibility of Fishburn-Strategyproofness and Pareto-Efficiency
This formalisation contains the proof that there is no
anonymous Social Choice Function for at least three agents and
alternatives that fulfils both Pareto-Efficiency and
Fishburn-Strategyproofness. It was derived from a proof of Brandt
et al., which relies on an unverified
translation of a fixed finite instance of the original problem to SAT.
This Isabelle proof contains a machine-checked version of both the
statement for exactly three agents and alternatives and the lifting to
the general case.
This work defines and proves the correctness of the Fisher–Yates
algorithm for shuffling – i.e. producing a random permutation – of a
list. The algorithm proceeds by traversing the list and in
each step swapping the current element with a random element from the
remaining list.
\ No newline at end of file
diff --git a/web/entries/Floyd_Warshall.html b/web/entries/Floyd_Warshall.html
--- a/web/entries/Floyd_Warshall.html
+++ b/web/entries/Floyd_Warshall.html
@@ -1,184 +1,184 @@
The Floyd-Warshall Algorithm for Shortest Paths - Archive of Formal Proofs
The Floyd-Warshall algorithm [Flo62, Roy59, War62] is a classic
dynamic programming algorithm to compute the length of all shortest
paths between any two vertices in a graph (i.e. to solve the all-pairs
shortest path problem, or APSP for short). Given a representation of
the graph as a matrix of weights M, it computes another matrix M'
which represents a graph with the same path lengths and contains the
length of the shortest path between any two vertices i and j. This is
only possible if the graph does not contain any negative cycles.
However, in this case the Floyd-Warshall algorithm will detect the
situation by calculating a negative diagonal entry. This entry
includes a formalization of the algorithm and of these key properties.
The algorithm is refined to an efficient imperative version using the
Imperative Refinement Framework.
\ No newline at end of file
diff --git a/web/entries/Flyspeck-Tame.html b/web/entries/Flyspeck-Tame.html
--- a/web/entries/Flyspeck-Tame.html
+++ b/web/entries/Flyspeck-Tame.html
@@ -1,212 +1,212 @@
Flyspeck I: Tame Graphs - Archive of Formal Proofs
These theories present the verified enumeration of tame plane graphs
as defined by Thomas C. Hales in his proof of the Kepler Conjecture in his
book Dense Sphere Packings. A Blueprint for Formal Proofs. [CUP 2012].
The values of the constants in the definition of tameness are identical to
those in the Flyspeck project.
The IJCAR 2006 paper by Nipkow, Bauer and Schultz refers to the original version of Hales' proof,
the ITP 2011 paper by Nipkow refers to the Blueprint version of the proof.
BSD License
Change history
[2010-11-02] modified theories to reflect the modified definition of tameness in Hales' revised proof.
[2014-07-03] modified constants in def of tameness and Archive according to the final state of the Flyspeck proof.
\ No newline at end of file
diff --git a/web/entries/Forcing.html b/web/entries/Forcing.html
--- a/web/entries/Forcing.html
+++ b/web/entries/Forcing.html
@@ -1,203 +1,203 @@
Formalization of Forcing in Isabelle/ZF - Archive of Formal Proofs
We formalize the theory of forcing in the set theory framework of
Isabelle/ZF. Under the assumption of the existence of a countable
transitive model of ZFC, we construct a proper generic extension and
show that the latter also satisfies ZFC.
\ No newline at end of file
diff --git a/web/entries/Formal_Puiseux_Series.html b/web/entries/Formal_Puiseux_Series.html
--- a/web/entries/Formal_Puiseux_Series.html
+++ b/web/entries/Formal_Puiseux_Series.html
@@ -1,179 +1,179 @@
Formal Puiseux Series - Archive of Formal Proofs
Formal Puiseux series are generalisations of formal power
series and formal Laurent series that also allow for fractional
exponents. They have the following general form: \[\sum_{i=N}^\infty
a_{i/d} X^{i/d}\] where N is an integer and
d is a positive integer.
This
entry defines these series including their basic algebraic properties.
Furthermore, it proves the Newton–Puiseux Theorem, namely that the
Puiseux series over an algebraically closed field of characteristic 0
are also algebraically closed.
\ No newline at end of file
diff --git a/web/entries/Formula_Derivatives.html b/web/entries/Formula_Derivatives.html
--- a/web/entries/Formula_Derivatives.html
+++ b/web/entries/Formula_Derivatives.html
@@ -1,188 +1,188 @@
Derivatives of Logical Formulas - Archive of Formal Proofs
We formalize new decision procedures for WS1S, M2L(Str), and Presburger
Arithmetics. Formulas of these logics denote regular languages. Unlike
traditional decision procedures, we do not translate formulas into automata
(nor into regular expressions), at least not explicitly. Instead we devise
notions of derivatives (inspired by Brzozowski derivatives for regular
expressions) that operate on formulas directly and compute a syntactic
bisimulation using these derivatives. The treatment of Boolean connectives and
quantifiers is uniform for all mentioned logics and is abstracted into a
locale. This locale is then instantiated by different atomic formulas and their
derivatives (which may differ even for the same logic under different encodings
of interpretations as formal words).
This theory defines a type constructor representing the free Boolean algebra over a set of generators. Values of type (α)formula represent propositional formulas with uninterpreted variables from type α, ordered by implication. In addition to all the standard Boolean algebra operations, the library also provides a function for building homomorphisms to any other Boolean algebra type.
\ No newline at end of file
diff --git a/web/entries/FunWithFunctions.html b/web/entries/FunWithFunctions.html
--- a/web/entries/FunWithFunctions.html
+++ b/web/entries/FunWithFunctions.html
@@ -1,152 +1,152 @@
Fun With Functions - Archive of Formal Proofs
This is a collection of cute puzzles of the form ``Show that if a function satisfies the following constraints, it must be ...'' Please add further examples to this collection!
\ No newline at end of file
diff --git a/web/entries/FunWithTilings.html b/web/entries/FunWithTilings.html
--- a/web/entries/FunWithTilings.html
+++ b/web/entries/FunWithTilings.html
@@ -1,161 +1,161 @@
Fun With Tilings - Archive of Formal Proofs
Tilings are defined inductively. It is shown that one form of mutilated chess board cannot be tiled with dominoes, while another one can be tiled with L-shaped tiles. Please add further fun examples of this kind!
This theory defines deterministic and nondeterministic automata in a functional representation: the transition function/relation and the finality predicate are just functions. Hence the state space may be infinite. It is shown how to convert regular expressions into such automata. A scanner (generator) is implemented with the help of functional automata: the scanner chops the input up into longest recognized substrings. Finally we also show how to convert a certain subclass of functional automata (essentially the finite deterministic ones) into regular sets.
\ No newline at end of file
diff --git a/web/entries/Functional_Ordered_Resolution_Prover.html b/web/entries/Functional_Ordered_Resolution_Prover.html
--- a/web/entries/Functional_Ordered_Resolution_Prover.html
+++ b/web/entries/Functional_Ordered_Resolution_Prover.html
@@ -1,176 +1,176 @@
A Verified Functional Implementation of Bachmair and Ganzinger's Ordered Resolution Prover - Archive of Formal Proofs
A Verified Functional Implementation of Bachmair and Ganzinger's Ordered Resolution Prover
This Isabelle/HOL formalization refines the abstract ordered
resolution prover presented in Section 4.3 of Bachmair and
Ganzinger's "Resolution Theorem Proving" chapter in the
Handbook of Automated Reasoning. The result is a
functional implementation of a first-order prover.
\ No newline at end of file
diff --git a/web/entries/Furstenberg_Topology.html b/web/entries/Furstenberg_Topology.html
--- a/web/entries/Furstenberg_Topology.html
+++ b/web/entries/Furstenberg_Topology.html
@@ -1,169 +1,169 @@
Furstenberg's topology and his proof of the infinitude of primes - Archive of Formal Proofs
Furstenberg's Topology and His Proof of the Infinitude of Primes
This article gives a formal version of Furstenberg's
topological proof of the infinitude of primes. He defines a topology
on the integers based on arithmetic progressions (or, equivalently,
residue classes). Using some fairly obvious properties of this
topology, the infinitude of primes is then easily obtained.
Apart from this, this topology is also fairly ‘nice’ in
general: it is second countable, metrizable, and perfect. All of these
(well-known) facts are formally proven, including an explicit metric
for the topology given by Zulfeqarr.
@article{Furstenberg_Topology-AFP,
author = {eberl},
title = {Furstenberg's topology and his proof of the infinitude of primes},
journal = {Archive of Formal Proofs},
month = March,
year = 2020,
note = {\url{https://isa-afp.org/entries/Furstenberg_Topology.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/GaleStewart_Games.html b/web/entries/GaleStewart_Games.html
--- a/web/entries/GaleStewart_Games.html
+++ b/web/entries/GaleStewart_Games.html
@@ -1,186 +1,186 @@
Gale-Stewart Games - Archive of Formal Proofs
This is a formalisation of the main result of Gale and Stewart from
1953, showing that closed finite games are determined. This property
is now known as the Gale Stewart Theorem. While the original paper
shows some additional theorems as well, we only formalize this main
result, but do so in a somewhat general way. We formalize games of a
fixed arbitrary length, including infinite length, using co-inductive
lists, and show that defensive strategies exist unless the other
player is winning. For closed games, defensive strategies are winning
for the closed player, proving that such games are determined. For
finite games, which are a special case in our formalisation, all games
are closed.
This is a stepwise refinement and proof of the Gale-Shapley stable
matching (or marriage) algorithm down to executable code. Both a
purely functional implementation based on lists and a functional
implementation based on efficient arrays (provided by the Collections
Framework in the AFP) are developed. The latter implementation runs in
time O(n²) where
n is the cardinality of the two sets to be matched.
\ No newline at end of file
diff --git a/web/entries/Game_Based_Crypto.html b/web/entries/Game_Based_Crypto.html
--- a/web/entries/Game_Based_Crypto.html
+++ b/web/entries/Game_Based_Crypto.html
@@ -1,206 +1,206 @@
Game-based cryptography in HOL - Archive of Formal Proofs
In this AFP entry, we show how to specify game-based cryptographic
security notions and formally prove secure several cryptographic
constructions from the literature using the CryptHOL framework. Among
others, we formalise the notions of a random oracle, a pseudo-random
function, an unpredictable function, and of encryption schemes that are
indistinguishable under chosen plaintext and/or ciphertext attacks. We
prove the random-permutation/random-function switching lemma, security
of the Elgamal and hashed Elgamal public-key encryption scheme and
correctness and security of several constructions with pseudo-random
functions.
Our proofs follow the game-hopping style advocated by
Shoup and Bellare and Rogaway, from which most of the examples have
been taken. We generalise some of their results such that they can be
reused in other proofs. Thanks to CryptHOL's integration with
Isabelle's parametricity infrastructure, many simple hops are easily
justified using the theory of representation independence.
\ No newline at end of file
diff --git a/web/entries/Gauss-Jordan-Elim-Fun.html b/web/entries/Gauss-Jordan-Elim-Fun.html
--- a/web/entries/Gauss-Jordan-Elim-Fun.html
+++ b/web/entries/Gauss-Jordan-Elim-Fun.html
@@ -1,164 +1,164 @@
Gauss-Jordan Elimination for Matrices Represented as Functions - Archive of Formal Proofs
Gauss-Jordan Elimination for Matrices Represented as Functions
This theory provides a compact formulation of Gauss-Jordan elimination for matrices represented as functions. Its distinctive feature is succinctness. It is not meant for large computations.
\ No newline at end of file
diff --git a/web/entries/Gauss_Jordan.html b/web/entries/Gauss_Jordan.html
--- a/web/entries/Gauss_Jordan.html
+++ b/web/entries/Gauss_Jordan.html
@@ -1,198 +1,198 @@
Gauss-Jordan Algorithm and Its Applications - Archive of Formal Proofs
The Gauss-Jordan algorithm states that any matrix over a field can be transformed by means of elementary row operations to a matrix in reduced row echelon form. The formalization is based on the Rank Nullity Theorem entry of the AFP and on the HOL-Multivariate-Analysis session of Isabelle, where matrices are represented as functions over finite types. We have set up the code generator to make this representation executable. In order to improve the performance, a refinement to immutable arrays has been carried out. We have formalized some of the applications of the Gauss-Jordan algorithm. Thanks to this development, the following facts can be computed over matrices whose elements belong to a field: Ranks, Determinants, Inverses, Bases and dimensions and Solutions of systems of linear equations. Code can be exported to SML and Haskell.
\ No newline at end of file
diff --git a/web/entries/Gauss_Sums.html b/web/entries/Gauss_Sums.html
--- a/web/entries/Gauss_Sums.html
+++ b/web/entries/Gauss_Sums.html
@@ -1,171 +1,171 @@
Gauss Sums and the Pólya–Vinogradov Inequality - Archive of Formal Proofs
The Gaussian integers are the subring ℤ[i] of the
complex numbers, i. e. the ring of all complex numbers with integral
real and imaginary part. This article provides a definition of this
ring as well as proofs of various basic properties, such as that they
form a Euclidean ring and a full classification of their primes. An
executable (albeit not very efficient) factorisation algorithm is also
provided.
Lastly, this Gaussian integer
formalisation is used in two short applications:
The characterisation of all positive integers that can be
written as sums of two squares
Euclid's
formula for primitive Pythagorean triples
While elementary proofs for both of these are already
available in the AFP, the theory of Gaussian integers provides more
concise proofs and a more high-level view.
\ No newline at end of file
diff --git a/web/entries/Goedel_HFSet_Semantic.html b/web/entries/Goedel_HFSet_Semantic.html
--- a/web/entries/Goedel_HFSet_Semantic.html
+++ b/web/entries/Goedel_HFSet_Semantic.html
@@ -1,178 +1,178 @@
From Abstract to Concrete Gödel's Incompleteness Theorems—Part I - Archive of Formal Proofs
From Abstract to Concrete Gödel's Incompleteness Theorems—Part I
We validate an abstract formulation of Gödel's First and
Second Incompleteness Theorems from a separate
AFP entry by instantiating them to the case of
finite sound extensions of the Hereditarily Finite (HF) Set
theory, i.e., FOL theories extending the HF Set theory with
a finite set of axioms that are sound in the standard model. The
concrete results had been previously formalised in an AFP
entry by Larry Paulson; our instantiation reuses the
infrastructure developed in that entry.
\ No newline at end of file
diff --git a/web/entries/Goedel_HFSet_Semanticless.html b/web/entries/Goedel_HFSet_Semanticless.html
--- a/web/entries/Goedel_HFSet_Semanticless.html
+++ b/web/entries/Goedel_HFSet_Semanticless.html
@@ -1,195 +1,195 @@
From Abstract to Concrete Gödel's Incompleteness Theorems—Part II - Archive of Formal Proofs
From Abstract to Concrete Gödel's Incompleteness Theorems—Part II
We validate an abstract formulation of Gödel's Second
Incompleteness Theorem from a separate
AFP entry by instantiating it to the case of finite
consistent extensions of the Hereditarily Finite (HF) Set
theory, i.e., consistent FOL theories extending the HF Set
theory with a finite set of axioms. The instantiation draws heavily
on infrastructure previously developed by Larry Paulson in his direct
formalisation of the concrete result. It strengthens
Paulson's formalization of Gödel's Second from that
entry by not assuming soundness, and in fact not
relying on any notion of model or semantic interpretation. The
strengthening was obtained by first replacing some of Paulson’s
semantic arguments with proofs within his HF calculus, and then
plugging in some of Paulson's (modified) lemmas to instantiate
our soundness-free Gödel's Second locale.
\ No newline at end of file
diff --git a/web/entries/Goedel_Incompleteness.html b/web/entries/Goedel_Incompleteness.html
--- a/web/entries/Goedel_Incompleteness.html
+++ b/web/entries/Goedel_Incompleteness.html
@@ -1,204 +1,204 @@
An Abstract Formalization of Gödel's Incompleteness Theorems - Archive of Formal Proofs
An Abstract Formalization of Gödel's Incompleteness Theorems
We present an abstract formalization of Gödel's
incompleteness theorems. We analyze sufficient conditions for the
theorems' applicability to a partially specified logic. Our
abstract perspective enables a comparison between alternative
approaches from the literature. These include Rosser's variation
of the first theorem, Jeroslow's variation of the second theorem,
and the Swierczkowski–Paulson semantics-based approach. This
AFP entry is the main entry point to the results described in our
CADE-27 paper A
Formally Verified Abstract Account of Gödel's Incompleteness
Theorems. As part of our abstract formalization's
validation, we instantiate our locales twice in the separate AFP
entries Goedel_HFSet_Semantic
and Goedel_HFSet_Semanticless.
\ No newline at end of file
diff --git a/web/entries/Green.html b/web/entries/Green.html
--- a/web/entries/Green.html
+++ b/web/entries/Green.html
@@ -1,182 +1,182 @@
An Isabelle/HOL formalisation of Green's Theorem - Archive of Formal Proofs
We formalise a statement of Green’s theorem—the first formalisation to
our knowledge—in Isabelle/HOL. The theorem statement that we formalise
is enough for most applications, especially in physics and
engineering. Our formalisation is made possible by a novel proof that
avoids the ubiquitous line integral cancellation argument. This
eliminates the need to formalise orientations and region boundaries
explicitly with respect to the outwards-pointing normal vector.
Instead we appeal to a homological argument about equivalences between
paths.
\ No newline at end of file
diff --git a/web/entries/Groebner_Bases.html b/web/entries/Groebner_Bases.html
--- a/web/entries/Groebner_Bases.html
+++ b/web/entries/Groebner_Bases.html
@@ -1,201 +1,201 @@
Gröbner Bases Theory - Archive of Formal Proofs
This formalization is concerned with the theory of Gröbner bases in
(commutative) multivariate polynomial rings over fields, originally
developed by Buchberger in his 1965 PhD thesis. Apart from the
statement and proof of the main theorem of the theory, the
formalization also implements Buchberger's algorithm for actually
computing Gröbner bases as a tail-recursive function, thus allowing to
effectively decide ideal membership in finitely generated polynomial
ideals. Furthermore, all functions can be executed on a concrete
representation of multivariate polynomials as association lists.
BSD License
Change history
[2019-04-18] Specialized Gröbner bases to less abstract representation of polynomials, where
power-products are represented as polynomial mappings.
\ No newline at end of file
diff --git a/web/entries/HOL-CSP.html b/web/entries/HOL-CSP.html
--- a/web/entries/HOL-CSP.html
+++ b/web/entries/HOL-CSP.html
@@ -1,197 +1,197 @@
HOL-CSP Version 2.0 - Archive of Formal Proofs
This is a complete formalization of the work of Hoare and Roscoe on
the denotational semantics of the Failure/Divergence Model of CSP. It
follows essentially the presentation of CSP in Roscoe’s Book ”Theory
and Practice of Concurrency” [8] and the semantic details in a joint
Paper of Roscoe and Brooks ”An improved failures model for
communicating processes”. The present work is based on a prior
formalization attempt, called HOL-CSP 1.0, done in 1997 by H. Tej and
B. Wolff with the Isabelle proof technology available at that time.
This work revealed minor, but omnipresent foundational errors in key
concepts like the process invariant. The present version HOL-CSP
profits from substantially improved libraries (notably HOLCF),
improved automated proof techniques, and structured proof techniques
in Isar and is substantially shorter but more complete.
@article{HOL-CSP-AFP,
author = {taha, ye and wolff},
title = {HOL-CSP Version 2.0},
journal = {Archive of Formal Proofs},
month = April,
year = 2019,
note = {\url{https://isa-afp.org/entries/HOL-CSP.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/HereditarilyFinite.html b/web/entries/HereditarilyFinite.html
--- a/web/entries/HereditarilyFinite.html
+++ b/web/entries/HereditarilyFinite.html
@@ -1,183 +1,183 @@
The Hereditarily Finite Sets - Archive of Formal Proofs
The theory of hereditarily finite sets is formalised, following
the development of Swierczkowski.
An HF set is a finite collection of other HF sets; they enjoy an induction principle
and satisfy all the axioms of ZF set theory apart from the axiom of infinity, which is negated.
All constructions that are possible in ZF set theory (Cartesian products, disjoint sums, natural numbers,
functions) without using infinite sets are possible here.
The definition of addition for the HF sets follows Kirby.
This development forms the foundation for the Isabelle proof of Gödel's incompleteness theorems,
which has been formalised separately.
BSD License
Change history
[2015-02-23] Added the theory "Finitary" defining the class of types that can be embedded in hf, including int, char, option, list, etc.
\ No newline at end of file
diff --git a/web/entries/Hermite.html b/web/entries/Hermite.html
--- a/web/entries/Hermite.html
+++ b/web/entries/Hermite.html
@@ -1,172 +1,172 @@
Hermite Normal Form - Archive of Formal Proofs
Hermite Normal Form is a canonical matrix analogue of Reduced Echelon Form, but involving matrices over more general rings. In this work we formalise an algorithm to compute the Hermite Normal Form of a matrix by means of elementary row operations, taking advantage of the Echelon Form AFP entry. We have proven the correctness of such an algorithm and refined it to immutable arrays. Furthermore, we have also formalised the uniqueness of the Hermite Normal Form of a matrix. Code can be exported and some examples of execution involving integer matrices and polynomial matrices are presented as well.
\ No newline at end of file
diff --git a/web/entries/Hermite_Lindemann.html b/web/entries/Hermite_Lindemann.html
--- a/web/entries/Hermite_Lindemann.html
+++ b/web/entries/Hermite_Lindemann.html
@@ -1,202 +1,202 @@
The Hermite–Lindemann–Weierstraß Transcendence Theorem - Archive of Formal Proofs
The Hermite–Lindemann–Weierstraß Transcendence Theorem
This article provides a formalisation of the
Hermite-Lindemann-Weierstraß Theorem (also known as simply
Hermite-Lindemann or Lindemann-Weierstraß). This theorem is one of the
crowning achievements of 19th century number theory.
The theorem states that if $\alpha_1, \ldots,
\alpha_n\in\mathbb{C}$ are algebraic numbers that are linearly
independent over $\mathbb{Z}$, then $e^{\alpha_1},\ldots,e^{\alpha_n}$
are algebraically independent over $\mathbb{Q}$.
Like the previous
formalisation in Coq by Bernard, I proceeded by formalising
Baker's
version of the theorem and proof and then deriving the
original one from that. Baker's version states that for any
algebraic numbers $\beta_1, \ldots, \beta_n\in\mathbb{C}$ and distinct
algebraic numbers $\alpha_1, \ldots, \alpha_n\in\mathbb{C}$, we have
$\beta_1 e^{\alpha_1} + \ldots + \beta_n e^{\alpha_n} = 0$ if and only
if all the $\beta_i$ are zero.
This has a number of
direct corollaries, e.g.:
$e$ and $\pi$
are transcendental
$e^z$, $\sin z$, $\tan z$,
etc. are transcendental for algebraic
$z\in\mathbb{C}\setminus\{0\}$
$\ln z$ is
transcendental for algebraic $z\in\mathbb{C}\setminus\{0,
1\}$
This entry contains a formalization of hidden Markov models [3] based
on Johannes Hölzl's formalization of discrete time Markov chains
[1]. The basic definitions are provided and the correctness of two
main (dynamic programming) algorithms for hidden Markov models is
proved: the forward algorithm for computing the likelihood of an
observed sequence, and the Viterbi algorithm for decoding the most
probable hidden state sequence. The Viterbi algorithm is made
executable including memoization. Hidden Markov models have various
applications in natural language processing. For an introduction see
Jurafsky and Martin [2].
\ No newline at end of file
diff --git a/web/entries/Hoare_Time.html b/web/entries/Hoare_Time.html
--- a/web/entries/Hoare_Time.html
+++ b/web/entries/Hoare_Time.html
@@ -1,207 +1,207 @@
Hoare Logics for Time Bounds - Archive of Formal Proofs
We study three different Hoare logics for reasoning about time bounds
of imperative programs and formalize them in Isabelle/HOL: a classical
Hoare like logic due to Nielson, a logic with potentials due to
Carbonneaux et al. and a separation
logic following work by Atkey, Charguéraud and Pottier.
These logics are formally shown to be sound and complete. Verification
condition generators are developed and are shown sound and complete
too. We also consider variants of the systems where we abstract from
multiplicative constants in the running time bounds, thus supporting a
big-O style of reasoning. Finally we compare the expressive power of
the three systems.
\ No newline at end of file
diff --git a/web/entries/HotelKeyCards.html b/web/entries/HotelKeyCards.html
--- a/web/entries/HotelKeyCards.html
+++ b/web/entries/HotelKeyCards.html
@@ -1,157 +1,157 @@
Hotel Key Card System - Archive of Formal Proofs
Two models of an electronic hotel key card system are contrasted: a state based and a trace based one. Both are defined, verified, and proved equivalent in the theorem prover Isabelle/HOL. It is shown that if a guest follows a certain safety policy regarding her key cards, she can be sure that nobody but her can enter her room.
\ No newline at end of file
diff --git a/web/entries/IEEE_Floating_Point.html b/web/entries/IEEE_Floating_Point.html
--- a/web/entries/IEEE_Floating_Point.html
+++ b/web/entries/IEEE_Floating_Point.html
@@ -1,183 +1,183 @@
A Formal Model of IEEE Floating Point Arithmetic - Archive of Formal Proofs
This development provides a formal model of IEEE-754 floating-point arithmetic. This formalization, including formal specification of the standard and proofs of important properties of floating-point arithmetic, forms the foundation for verifying programs with floating-point computation. There is also a code generation setup for floats so that we can execute programs using this formalization in functional programming languages.
BSD License
Change history
[2017-09-25] Added conversions from and to software floating point numbers
(by Fabian Hellauer and Fabian Immler).
[2018-02-05] 'Modernized' representation following the formalization in HOL4:
former "float_format" and predicate "is_valid" is now encoded in a type "('e, 'f) float" where
'e and 'f encode the size of exponent and fraction.
\ No newline at end of file
diff --git a/web/entries/IMO2019.html b/web/entries/IMO2019.html
--- a/web/entries/IMO2019.html
+++ b/web/entries/IMO2019.html
@@ -1,174 +1,174 @@
Selected Problems from the International Mathematical Olympiad 2019 - Archive of Formal Proofs
Selected Problems From the International Mathematical Olympiad 2019
This entry contains formalisations of the answers to three of
the six problems of the International Mathematical Olympiad 2019,
namely Q1, Q4, and Q5.
The reason why these
problems were chosen is that they are particularly amenable to
formalisation: they can be solved with minimal use of libraries. The
remaining three concern geometry and graph theory, which, in the
author's opinion, are more difficult to formalise resp. require a
more complex library.
\ No newline at end of file
diff --git a/web/entries/IMP2.html b/web/entries/IMP2.html
--- a/web/entries/IMP2.html
+++ b/web/entries/IMP2.html
@@ -1,203 +1,203 @@
IMP2 – Simple Program Verification in Isabelle/HOL - Archive of Formal Proofs
IMP2 – Simple Program Verification in Isabelle/HOL
IMP2 is a simple imperative language together with Isabelle tooling to
create a program verification environment in Isabelle/HOL. The tools
include a C-like syntax, a verification condition generator, and
Isabelle commands for the specification of programs. The framework is
modular, i.e., it allows easy reuse of already proved programs within
larger programs. This entry comes with a quickstart guide and a large
collection of examples, spanning basic algorithms with simple proofs
to more advanced algorithms and proof techniques like data refinement.
Some highlights from the examples are:
Bisection
Square Root,
Extended Euclid,
Exponentiation by Squaring,
Binary
Search,
Insertion Sort,
Quicksort,
Depth First Search.
The abstract syntax and semantics are very
simple and well-documented. They are suitable to be used in a course,
as extension to the IMP language which comes with the Isabelle
distribution. While this entry is limited to a simple imperative
language, the ideas could be extended to more sophisticated languages.
\ No newline at end of file
diff --git a/web/entries/IP_Addresses.html b/web/entries/IP_Addresses.html
--- a/web/entries/IP_Addresses.html
+++ b/web/entries/IP_Addresses.html
@@ -1,195 +1,195 @@
IP Addresses - Archive of Formal Proofs
This entry contains a definition of IP addresses and a library to work
with them. Generic IP addresses are modeled as machine words of
arbitrary length. Derived from this generic definition, IPv4 addresses
are 32bit machine words, IPv6 addresses are 128bit words.
Additionally, IPv4 addresses can be represented in dot-decimal
notation and IPv6 addresses in (compressed) colon-separated notation.
We support toString functions and parsers for both notations. Sets of
IP addresses can be represented with a netmask (e.g.
192.168.0.0/255.255.0.0) or in CIDR notation (e.g. 192.168.0.0/16). To
provide executable code for set operations on IP address ranges, the
library includes a datatype to work on arbitrary intervals of machine
words.
\ No newline at end of file
diff --git a/web/entries/Impossible_Geometry.html b/web/entries/Impossible_Geometry.html
--- a/web/entries/Impossible_Geometry.html
+++ b/web/entries/Impossible_Geometry.html
@@ -1,165 +1,165 @@
Proving the Impossibility of Trisecting an Angle and Doubling the Cube - Archive of Formal Proofs
Proving the Impossibility of Trisecting an Angle and Doubling the Cube
Squaring the circle, doubling the cube and trisecting an angle, using a compass and straightedge alone, are classic unsolved problems first posed by the ancient Greeks. All three problems were proved to be impossible in the 19th century. The following document presents the proof of the impossibility of solving the latter two problems using Isabelle/HOL, following a proof by Carrega. The proof uses elementary methods: no Galois theory or field extensions. The set of points constructible using a compass and straightedge is defined inductively. Radical expressions, which involve only square roots and arithmetic of rational numbers, are defined, and we find that all constructive points have radical coordinates. Finally, doubling the cube and trisecting certain angles requires solving certain cubic equations that can be proved to have no rational roots. The Isabelle proofs require a great many detailed calculations.
@article{Impossible_Geometry-AFP,
author = {romanos and paulson},
title = {Proving the Impossibility of Trisecting an Angle and Doubling the Cube},
journal = {Archive of Formal Proofs},
month = August,
year = 2012,
note = {\url{https://isa-afp.org/entries/Impossible_Geometry.html},
Formal proof development},
ISSN = {2150-914x},
}
Gödel's two incompleteness theorems are formalised, following a careful presentation by Swierczkowski, in the theory of hereditarily finite sets. This represents the first ever machine-assisted proof of the second incompleteness theorem. Compared with traditional formalisations using Peano arithmetic (see e.g. Boolos), coding is simpler, with no need to formalise the notion
of multiplication (let alone that of a prime number)
in the formalised calculus upon which the theorem is based.
However, other technical problems had to be solved in order to complete the argument.
\ No newline at end of file
diff --git a/web/entries/Independence_CH.html b/web/entries/Independence_CH.html
--- a/web/entries/Independence_CH.html
+++ b/web/entries/Independence_CH.html
@@ -1,208 +1,208 @@
The Independence of the Continuum Hypothesis in Isabelle/ZF - Archive of Formal Proofs
The Independence of the Continuum Hypothesis in Isabelle/ZF
We redeveloped our formalization of forcing in the set theory
framework of Isabelle/ZF. Under the assumption of the existence of a
countable transitive model of ZFC, we construct proper generic
extensions that satisfy the Continuum Hypothesis and its negation.
\ No newline at end of file
diff --git a/web/entries/InfPathElimination.html b/web/entries/InfPathElimination.html
--- a/web/entries/InfPathElimination.html
+++ b/web/entries/InfPathElimination.html
@@ -1,191 +1,191 @@
Infeasible Paths Elimination by Symbolic Execution Techniques: Proof of Correctness and Preservation of Paths - Archive of Formal Proofs
Infeasible Paths Elimination by Symbolic Execution Techniques: Proof of Correctness and Preservation of Paths
TRACER is a tool for verifying safety properties of sequential C
programs. TRACER attempts at building a finite symbolic execution
graph which over-approximates the set of all concrete reachable states
and the set of feasible paths. We present an abstract framework for
TRACER and similar CEGAR-like systems. The framework provides 1) a
graph- transformation based method for reducing the feasible paths in
control-flow graphs, 2) a model for symbolic execution, subsumption,
predicate abstraction and invariant generation. In this framework we
formally prove two key properties: correct construction of the
symbolic states and preservation of feasible paths. The framework
focuses on core operations, leaving to concrete prototypes to “fit in”
heuristics for combining them. The accompanying paper (published in
ITP 2016) can be found at
https://www.lri.fr/~wolff/papers/conf/2016-itp-InfPathsNSE.pdf.
\ No newline at end of file
diff --git a/web/entries/Irrationality_J_Hancl.html b/web/entries/Irrationality_J_Hancl.html
--- a/web/entries/Irrationality_J_Hancl.html
+++ b/web/entries/Irrationality_J_Hancl.html
@@ -1,171 +1,171 @@
Irrational Rapidly Convergent Series - Archive of Formal Proofs
We formalize with Isabelle/HOL a proof of a theorem by J. Hancl asserting the
irrationality of the sum of a series consisting of rational numbers, built up
by sequences that fulfill certain properties. Even though the criterion is a
number theoretic result, the proof makes use only of analytical arguments. We
also formalize a corollary of the theorem for a specific series fulfilling the
assumptions of the theorem.
\ No newline at end of file
diff --git a/web/entries/Jinja.html b/web/entries/Jinja.html
--- a/web/entries/Jinja.html
+++ b/web/entries/Jinja.html
@@ -1,242 +1,242 @@
Jinja is not Java - Archive of Formal Proofs
We introduce Jinja, a Java-like programming language with a formal semantics designed to exhibit core features of the Java language architecture. Jinja is a compromise between realism of the language and tractability and clarity of the formal semantics. The following aspects are formalised: a big and a small step operational semantics for Jinja and a proof of their equivalence; a type system and a definite initialisation analysis; a type safety proof of the small step semantics; a virtual machine (JVM), its operational semantics and its type system; a type safety proof for the JVM; a bytecode verifier, i.e. data flow analyser for the JVM; a correctness proof of the bytecode verifier w.r.t. the type system; a compiler and a proof that it preserves semantics and well-typedness. The emphasis of this work is not on particular language features but on providing a unified model of the source language, the virtual machine and the compiler. The whole development has been carried out in the theorem prover Isabelle/HOL.
\ No newline at end of file
diff --git a/web/entries/JinjaThreads.html b/web/entries/JinjaThreads.html
--- a/web/entries/JinjaThreads.html
+++ b/web/entries/JinjaThreads.html
@@ -1,331 +1,331 @@
Jinja with Threads - Archive of Formal Proofs
We extend the Jinja source code semantics by Klein and Nipkow with Java-style arrays and threads. Concurrency is captured in a generic framework semantics for adding concurrency through interleaving to a sequential semantics, which features dynamic thread creation, inter-thread communication via shared memory, lock synchronisation and joins. Also, threads can suspend themselves and be notified by others. We instantiate the framework with the adapted versions of both Jinja source and byte code and show type safety for the multithreaded case. Equally, the compiler from source to byte code is extended, for which we prove weak bisimilarity between the source code small step semantics and the defensive Jinja virtual machine. On top of this, we formalise the JMM and show the DRF guarantee and consistency. For description of the different parts, see Lochbihler's papers at FOOL 2008, ESOP 2010, ITP 2011, and ESOP 2012.
\ No newline at end of file
diff --git a/web/entries/KAD.html b/web/entries/KAD.html
--- a/web/entries/KAD.html
+++ b/web/entries/KAD.html
@@ -1,188 +1,188 @@
Kleene Algebras with Domain - Archive of Formal Proofs
Kleene algebras with domain are Kleene algebras endowed with an
operation that maps each element of the algebra to its domain of
definition (or its complement) in abstract fashion. They form a simple
algebraic basis for Hoare logics, dynamic logics or predicate
transformer semantics. We formalise a modular hierarchy of algebras
with domain and antidomain (domain complement) operations in
Isabelle/HOL that ranges from domain and antidomain semigroups to
modal Kleene algebras and divergence Kleene algebras. We link these
algebras with models of binary relations and program traces. We
include some examples from modal logics, termination and program
analysis.
\ No newline at end of file
diff --git a/web/entries/KAT_and_DRA.html b/web/entries/KAT_and_DRA.html
--- a/web/entries/KAT_and_DRA.html
+++ b/web/entries/KAT_and_DRA.html
@@ -1,191 +1,191 @@
Kleene Algebra with Tests and Demonic Refinement Algebras - Archive of Formal Proofs
Kleene Algebra With Tests and Demonic Refinement Algebras
We formalise Kleene algebra with tests (KAT) and demonic refinement
algebra (DRA) in Isabelle/HOL. KAT is relevant for program verification
and correctness proofs in the partial correctness setting. While DRA
targets similar applications in the context of total correctness. Our
formalisation contains the two most important models of these algebras:
binary relations in the case of KAT and predicate transformers in the
case of DRA. In addition, we derive the inference rules for Hoare logic
in KAT and its relational model and present a simple formally verified
program verification tool prototype based on the algebraic approach.
\ No newline at end of file
diff --git a/web/entries/Knuth_Bendix_Order.html b/web/entries/Knuth_Bendix_Order.html
--- a/web/entries/Knuth_Bendix_Order.html
+++ b/web/entries/Knuth_Bendix_Order.html
@@ -1,181 +1,181 @@
A Formalization of Knuth–Bendix Orders - Archive of Formal Proofs
We define a generalized version of Knuth–Bendix orders,
including subterm coefficient functions. For these orders we formalize
several properties such as strong normalization, the subterm property,
closure properties under substitutions and contexts, as well as ground
totality.
\ No newline at end of file
diff --git a/web/entries/LLL_Basis_Reduction.html b/web/entries/LLL_Basis_Reduction.html
--- a/web/entries/LLL_Basis_Reduction.html
+++ b/web/entries/LLL_Basis_Reduction.html
@@ -1,202 +1,202 @@
A verified LLL algorithm - Archive of Formal Proofs
The Lenstra-Lenstra-Lovász basis reduction algorithm, also known as
LLL algorithm, is an algorithm to find a basis with short, nearly
orthogonal vectors of an integer lattice. Thereby, it can also be seen
as an approximation to solve the shortest vector problem (SVP), which
is an NP-hard problem, where the approximation quality solely depends
on the dimension of the lattice, but not the lattice itself. The
algorithm also possesses many applications in diverse fields of
computer science, from cryptanalysis to number theory, but it is
specially well-known since it was used to implement the first
polynomial-time algorithm to factor polynomials. In this work we
present the first mechanized soundness proof of the LLL algorithm to
compute short vectors in lattices. The formalization follows a
textbook by von zur Gathen and Gerhard.
BSD License
Change history
[2018-04-16] Integrated formal complexity bounds (Haslbeck, Thiemann)
[2018-05-25] Integrated much faster LLL implementation based on integer arithmetic (Bottesch, Haslbeck, Thiemann)
\ No newline at end of file
diff --git a/web/entries/LLL_Factorization.html b/web/entries/LLL_Factorization.html
--- a/web/entries/LLL_Factorization.html
+++ b/web/entries/LLL_Factorization.html
@@ -1,186 +1,186 @@
A verified factorization algorithm for integer polynomials with polynomial complexity - Archive of Formal Proofs
A Verified Factorization Algorithm for Integer Polynomials With Polynomial Complexity
Short vectors in lattices and factors of integer polynomials are
related. Each factor of an integer polynomial belongs to a certain
lattice. When factoring polynomials, the condition that we are looking
for an irreducible polynomial means that we must look for a small
element in a lattice, which can be done by a basis reduction
algorithm. In this development we formalize this connection and
thereby one main application of the LLL basis reduction algorithm: an
algorithm to factor square-free integer polynomials which runs in
polynomial time. The work is based on our previous
Berlekamp–Zassenhaus development, where the exponential reconstruction
phase has been replaced by the polynomial-time basis reduction
algorithm. Thanks to this formalization we found a serious flaw in a
textbook.
\ No newline at end of file
diff --git a/web/entries/LOFT.html b/web/entries/LOFT.html
--- a/web/entries/LOFT.html
+++ b/web/entries/LOFT.html
@@ -1,173 +1,173 @@
LOFT — Verified Migration of Linux Firewalls to SDN - Archive of Formal Proofs
LOFT — Verified Migration of Linux Firewalls to SDN
We present LOFT — Linux firewall OpenFlow Translator, a system that
transforms the main routing table and FORWARD chain of iptables of a
Linux-based firewall into a set of static OpenFlow rules. Our
implementation is verified against a model of a simplified Linux-based
router and we can directly show how much of the original functionality
is preserved.
\ No newline at end of file
diff --git a/web/entries/LTL_to_DRA.html b/web/entries/LTL_to_DRA.html
--- a/web/entries/LTL_to_DRA.html
+++ b/web/entries/LTL_to_DRA.html
@@ -1,178 +1,178 @@
Converting Linear Temporal Logic to Deterministic (Generalized) Rabin Automata - Archive of Formal Proofs
Converting Linear Temporal Logic to Deterministic (Generalized) Rabin Automata
Recently, Javier Esparza and Jan Kretinsky proposed a new method directly translating linear temporal logic (LTL) formulas to deterministic (generalized) Rabin automata. Compared to the existing approaches of constructing a non-deterministic Buechi-automaton in the first step and then applying a determinization procedure (e.g. some variant of Safra's construction) in a second step, this new approach preserves a relation between the formula and the states of the resulting automaton. While the old approach produced a monolithic structure, the new method is compositional. Furthermore, in some cases the resulting automata are much smaller than the automata generated by existing approaches. In order to ensure the correctness of the construction, this entry contains a complete formalisation and verification of the translation. Furthermore from this basis executable code is generated.
BSD License
Change history
[2015-09-23] Enable code export for the eager unfolding optimisation and reduce running time of the generated tool. Moreover, add support for the mlton SML compiler.
[2016-03-24] Make use of the LTL entry and include the simplifier.
\ No newline at end of file
diff --git a/web/entries/LambdaAuth.html b/web/entries/LambdaAuth.html
--- a/web/entries/LambdaAuth.html
+++ b/web/entries/LambdaAuth.html
@@ -1,187 +1,187 @@
Formalization of Generic Authenticated Data Structures - Archive of Formal Proofs
Formalization of Generic Authenticated Data Structures
Authenticated data structures are a technique for outsourcing data
storage and maintenance to an untrusted server. The server is required
to produce an efficiently checkable and cryptographically secure proof
that it carried out precisely the requested computation. Miller et
al. introduced λ• (pronounced
lambda auth)—a functional programming
language with a built-in primitive authentication construct, which
supports a wide range of user-specified authenticated data structures
while guaranteeing certain correctness and security properties for all
well-typed programs. We formalize λ• and prove its
correctness and security properties. With Isabelle's help, we
uncover and repair several mistakes in the informal proofs and lemma
statements. Our findings are summarized in an ITP'19 paper.
\ No newline at end of file
diff --git a/web/entries/Lambert_W.html b/web/entries/Lambert_W.html
--- a/web/entries/Lambert_W.html
+++ b/web/entries/Lambert_W.html
@@ -1,173 +1,173 @@
The Lambert W Function on the Reals - Archive of Formal Proofs
The Lambert W function is a multi-valued
function defined as the inverse function of x
↦ x
ex. Besides numerous
applications in combinatorics, physics, and engineering, it also
frequently occurs when solving equations containing both
ex and
x, or both x and log
x.
This article provides a
definition of the two real-valued branches
W0(x)
and
W-1(x)
and proves various properties such as basic identities and
inequalities, monotonicity, differentiability, asymptotic expansions,
and the MacLaurin series of
W0(x)
at x = 0.
@article{Lambert_W-AFP,
author = {eberl},
title = {The Lambert W Function on the Reals},
journal = {Archive of Formal Proofs},
month = April,
year = 2020,
note = {\url{https://isa-afp.org/entries/Lambert_W.html},
Formal proof development},
ISSN = {2150-914x},
}
This entry provides Landau symbols to describe and reason about the asymptotic growth of functions for sufficiently large inputs. A number of simplification procedures are provided for additional convenience: cancelling of dominated terms in sums under a Landau symbol, cancelling of common factors in products, and a decision procedure for Landau expressions containing products of powers of functions like x, ln(x), ln(ln(x)) etc.
\ No newline at end of file
diff --git a/web/entries/Laws_of_Large_Numbers.html b/web/entries/Laws_of_Large_Numbers.html
--- a/web/entries/Laws_of_Large_Numbers.html
+++ b/web/entries/Laws_of_Large_Numbers.html
@@ -1,187 +1,187 @@
The Laws of Large Numbers - Archive of Formal Proofs
The Law of Large Numbers states that, informally, if one
performs a random experiment $X$ many times and takes the average of
the results, that average will be very close to the expected value
$E[X]$.
More formally, let
$(X_i)_{i\in\mathbb{N}}$ be a sequence of independently identically
distributed random variables whose expected value $E[X_1]$ exists.
Denote the running average of $X_1, \ldots, X_n$ as $\overline{X}_n$.
Then:
The Weak Law of Large Numbers
states that $\overline{X}_{n} \longrightarrow E[X_1]$ in probability
for $n\to\infty$, i.e. $\mathcal{P}(|\overline{X}_{n} - E[X_1]| >
\varepsilon) \longrightarrow 0$ as $n\to\infty$ for any $\varepsilon
> 0$.
The Strong Law of Large Numbers states
that $\overline{X}_{n} \longrightarrow E[X_1]$ almost surely for
$n\to\infty$, i.e. $\mathcal{P}(\overline{X}_{n} \longrightarrow
E[X_1]) = 1$.
In this entry, I
formally prove the strong law and from it the weak law. The approach
used for the proof of the strong law is a particularly quick and slick
one based on ergodic theory, which was formalised by Gouëzel in
another AFP entry.
\ No newline at end of file
diff --git a/web/entries/LinearQuantifierElim.html b/web/entries/LinearQuantifierElim.html
--- a/web/entries/LinearQuantifierElim.html
+++ b/web/entries/LinearQuantifierElim.html
@@ -1,179 +1,179 @@
Quantifier Elimination for Linear Arithmetic - Archive of Formal Proofs
This article formalizes quantifier elimination procedures for dense linear orders, linear real arithmetic and Presburger arithmetic. In each case both a DNF-based non-elementary algorithm and one or more (doubly) exponential NNF-based algorithms are formalized, including the well-known algorithms by Ferrante and Rackoff and by Cooper. The NNF-based algorithms for dense linear orders are new but based on Ferrante and Rackoff and on an algorithm by Loos and Weispfenning which simulates infinitesimals. All algorithms are directly executable. In particular, they yield reflective quantifier elimination procedures for HOL itself. The formalization makes heavy use of locales and is therefore highly modular.
\ No newline at end of file
diff --git a/web/entries/Linear_Inequalities.html b/web/entries/Linear_Inequalities.html
--- a/web/entries/Linear_Inequalities.html
+++ b/web/entries/Linear_Inequalities.html
@@ -1,194 +1,194 @@
Linear Inequalities - Archive of Formal Proofs
We formalize results about linear inequalities, mainly from
Schrijver's book. The main results are the proof of the
fundamental theorem on linear inequalities, Farkas' lemma,
Carathéodory's theorem, the Farkas-Minkowsky-Weyl theorem, the
decomposition theorem of polyhedra, and Meyer's result that the
integer hull of a polyhedron is a polyhedron itself. Several theorems
include bounds on the appearing numbers, and in particular we provide
an a-priori bound on mixed-integer solutions of linear inequalities.
\ No newline at end of file
diff --git a/web/entries/Linear_Programming.html b/web/entries/Linear_Programming.html
--- a/web/entries/Linear_Programming.html
+++ b/web/entries/Linear_Programming.html
@@ -1,178 +1,178 @@
Linear Programming - Archive of Formal Proofs
We use the previous formalization of the general simplex algorithm to
formulate an algorithm for solving linear programs. We encode the
linear programs using only linear constraints. Solving these
constraints also solves the original linear program. This algorithm is
proven to be sound by applying the weak duality theorem which is also
part of this formalization.
\ No newline at end of file
diff --git a/web/entries/Linear_Recurrences.html b/web/entries/Linear_Recurrences.html
--- a/web/entries/Linear_Recurrences.html
+++ b/web/entries/Linear_Recurrences.html
@@ -1,190 +1,190 @@
Linear Recurrences - Archive of Formal Proofs
Linear recurrences with constant coefficients are an
interesting class of recurrence equations that can be solved
explicitly. The most famous example are certainly the Fibonacci
numbers with the equation f(n) =
f(n-1) +
f(n - 2) and the quite
non-obvious closed form
(φn
-
(-φ)-n)
/ √5 where φ is the golden ratio.
In this work, I build on existing tools in
Isabelle – such as formal power series and polynomial
factorisation algorithms – to develop a theory of these
recurrences and derive a fully executable solver for them that can be
exported to programming languages like Haskell.
Liouville numbers are a class of transcendental numbers that can be approximated
particularly well with rational numbers. Historically, they were the first
numbers whose transcendence was proven.
In this entry, we define the concept of Liouville numbers as well as the
standard construction to obtain Liouville numbers (including Liouville's
constant) and we prove their most important properties: irrationality and
transcendence.
The proof is very elementary and requires only standard arithmetic, the Mean
Value Theorem for polynomials, and the boundedness of polynomials on compact
intervals.
\ No newline at end of file
diff --git a/web/entries/List-Index.html b/web/entries/List-Index.html
--- a/web/entries/List-Index.html
+++ b/web/entries/List-Index.html
@@ -1,155 +1,155 @@
List Index - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/entries/List_Inversions.html b/web/entries/List_Inversions.html
--- a/web/entries/List_Inversions.html
+++ b/web/entries/List_Inversions.html
@@ -1,170 +1,170 @@
The Inversions of a List - Archive of Formal Proofs
This entry defines the set of inversions
of a list, i.e. the pairs of indices that violate sortedness. It also
proves the correctness of the well-known
O(n log n)
divide-and-conquer algorithm to compute the number of
inversions.
\ No newline at end of file
diff --git a/web/entries/List_Update.html b/web/entries/List_Update.html
--- a/web/entries/List_Update.html
+++ b/web/entries/List_Update.html
@@ -1,190 +1,190 @@
Analysis of List Update Algorithms - Archive of Formal Proofs
These theories formalize the quantitative analysis of a number of classical algorithms for the list update problem: 2-competitiveness of move-to-front, the lower bound of 2 for the competitiveness of deterministic list update algorithms and 1.6-competitiveness of the randomized COMB algorithm, the best randomized list update algorithm known to date.
The material is based on the first two chapters of Online Computation
and Competitive Analysis by Borodin and El-Yaniv.
\ No newline at end of file
diff --git a/web/entries/Lowe_Ontological_Argument.html b/web/entries/Lowe_Ontological_Argument.html
--- a/web/entries/Lowe_Ontological_Argument.html
+++ b/web/entries/Lowe_Ontological_Argument.html
@@ -1,182 +1,182 @@
Computer-assisted Reconstruction and Assessment of E. J. Lowe's Modal Ontological Argument - Archive of Formal Proofs
Computer-Assisted Reconstruction and Assessment of E. J. Lowe's Modal Ontological Argument
Computers may help us to understand --not just verify-- philosophical
arguments. By utilizing modern proof assistants in an iterative
interpretive process, we can reconstruct and assess an argument by
fully formal means. Through the mechanization of a variant of St.
Anselm's ontological argument by E. J. Lowe, which is a
paradigmatic example of a natural-language argument with strong ties
to metaphysics and religion, we offer an ideal showcase for our
computer-assisted interpretive method.
We present a formalization of algorithms for solving Markov Decision
Processes (MDPs) with formal guarantees on the optimality of their
solutions. In particular we build on our analysis of the Bellman
operator for discounted infinite horizon MDPs. From the iterator rule
on the Bellman operator we directly derive executable value iteration
and policy iteration algorithms to iteratively solve finite MDPs. We
also prove correct optimized versions of value iteration that use
matrix splittings to improve the convergence rate. In particular, we
formally verify Gauss-Seidel value iteration and modified policy
iteration. The algorithms are evaluated on two standard examples from
the literature, namely, inventory management and gridworld. Our
formalization covers most of chapter 6 in Puterman's book
"Markov Decision Processes: Discrete Stochastic Dynamic
Programming".
\ No newline at end of file
diff --git a/web/entries/MDP-Rewards.html b/web/entries/MDP-Rewards.html
--- a/web/entries/MDP-Rewards.html
+++ b/web/entries/MDP-Rewards.html
@@ -1,186 +1,186 @@
Markov Decision Processes with Rewards - Archive of Formal Proofs
We present a formalization of Markov Decision Processes with rewards.
In particular we first build on Hölzl's formalization of MDPs
(AFP entry: Markov_Models) and extend them with rewards. We proceed
with an analysis of the expected total discounted reward criterion for
infinite horizon MDPs. The central result is the construction of the
iteration rule for the Bellman operator. We prove the optimality
equations for this operator and show the existence of an optimal
stationary deterministic solution. The analysis can be used to obtain
dynamic programming algorithms such as value iteration and policy
iteration to solve MDPs with formal guarantees. Our formalization is
based on chapters 5 and 6 in Puterman's book "Markov
Decision Processes: Discrete Stochastic Dynamic Programming".
\ No newline at end of file
diff --git a/web/entries/MFMC_Countable.html b/web/entries/MFMC_Countable.html
--- a/web/entries/MFMC_Countable.html
+++ b/web/entries/MFMC_Countable.html
@@ -1,197 +1,197 @@
A Formal Proof of the Max-Flow Min-Cut Theorem for Countable Networks - Archive of Formal Proofs
A Formal Proof of the Max-Flow Min-Cut Theorem for Countable Networks
This article formalises a proof of the maximum-flow minimal-cut
theorem for networks with countably many edges. A network is a
directed graph with non-negative real-valued edge labels and two
dedicated vertices, the source and the sink. A flow in a network
assigns non-negative real numbers to the edges such that for all
vertices except for the source and the sink, the sum of values on
incoming edges equals the sum of values on outgoing edges. A cut is a
subset of the vertices which contains the source, but not the sink.
Our theorem states that in every network, there is a flow and a cut
such that the flow saturates all the edges going out of the cut and is
zero on all the incoming edges. The proof is based on the paper
The Max-Flow Min-Cut theorem for countable networks by
Aharoni et al. Additionally, we prove a characterisation of the
lifting operation for relations on discrete probability distributions,
which leads to a concise proof of its distributivity over relation
composition.
\ No newline at end of file
diff --git a/web/entries/MFODL_Monitor_Optimized.html b/web/entries/MFODL_Monitor_Optimized.html
--- a/web/entries/MFODL_Monitor_Optimized.html
+++ b/web/entries/MFODL_Monitor_Optimized.html
@@ -1,196 +1,196 @@
Formalization of an Optimized Monitoring Algorithm for Metric First-Order Dynamic Logic with Aggregations - Archive of Formal Proofs
Formalization of an Optimized Monitoring Algorithm for Metric First-Order Dynamic Logic With Aggregations
A monitor is a runtime verification tool that solves the following
problem: Given a stream of time-stamped events and a policy formulated
in a specification language, decide whether the policy is satisfied at
every point in the stream. We verify the correctness of an executable
monitor for specifications given as formulas in metric first-order
dynamic logic (MFODL), which combines the features of metric
first-order temporal logic (MFOTL) and metric dynamic logic. Thus,
MFODL supports real-time constraints, first-order parameters, and
regular expressions. Additionally, the monitor supports aggregation
operations such as count and sum. This formalization, which is
described in a
forthcoming paper at IJCAR 2020, significantly extends previous
work on a verified monitor for MFOTL. Apart from the
addition of regular expressions and aggregations, we implemented multi-way
joins and a specialized sliding window algorithm to further
optimize the monitor.
BSD License
Change history
[2021-10-19] corrected a mistake in the calculation of median aggregations
(reported by Nicolas Kaletsch, revision 02b14c9bf3da)
\ No newline at end of file
diff --git a/web/entries/MFOTL_Monitor.html b/web/entries/MFOTL_Monitor.html
--- a/web/entries/MFOTL_Monitor.html
+++ b/web/entries/MFOTL_Monitor.html
@@ -1,192 +1,192 @@
Formalization of a Monitoring Algorithm for Metric First-Order Temporal Logic - Archive of Formal Proofs
Formalization of a Monitoring Algorithm for Metric First-Order Temporal Logic
A monitor is a runtime verification tool that solves the following
problem: Given a stream of time-stamped events and a policy formulated
in a specification language, decide whether the policy is satisfied at
every point in the stream. We verify the correctness of an executable
monitor for specifications given as formulas in metric first-order
temporal logic (MFOTL), an expressive extension of linear temporal
logic with real-time constraints and first-order quantification. The
verified monitor implements a simplified variant of the algorithm used
in the efficient MonPoly monitoring tool. The formalization is
presented in a RV
2019 paper, which also compares the output of the verified
monitor to that of other monitoring tools on randomly generated
inputs. This case study revealed several errors in the optimized but
unverified tools.
\ No newline at end of file
diff --git a/web/entries/MSO_Regex_Equivalence.html b/web/entries/MSO_Regex_Equivalence.html
--- a/web/entries/MSO_Regex_Equivalence.html
+++ b/web/entries/MSO_Regex_Equivalence.html
@@ -1,201 +1,201 @@
Decision Procedures for MSO on Words Based on Derivatives of Regular Expressions - Archive of Formal Proofs
Decision Procedures for MSO on Words Based on Derivatives of Regular Expressions
Monadic second-order logic on finite words (MSO) is a decidable yet
expressive logic into which many decision problems can be encoded. Since MSO
formulas correspond to regular languages, equivalence of MSO formulas can be
reduced to the equivalence of some regular structures (e.g. automata). We
verify an executable decision procedure for MSO formulas that is not based
on automata but on regular expressions.
Decision procedures for regular expression equivalence have been formalized
before, usually based on Brzozowski derivatives. Yet, for a straightforward
embedding of MSO formulas into regular expressions an extension of regular
expressions with a projection operation is required. We prove total
correctness and completeness of an equivalence checker for regular
expressions extended in that way. We also define a language-preserving
translation of formulas into regular expressions with respect to two
different semantics of MSO.
@article{MSO_Regex_Equivalence-AFP,
author = {traytel and nipkow},
title = {Decision Procedures for MSO on Words Based on Derivatives of Regular Expressions},
journal = {Archive of Formal Proofs},
month = June,
year = 2014,
note = {\url{https://isa-afp.org/entries/MSO_Regex_Equivalence.html},
Formal proof development},
ISSN = {2150-914x},
}
This is a formalization of Markov models in Isabelle/HOL. It
builds on Isabelle's probability theory. The available models are
currently Discrete-Time Markov Chains and extensions of them with
rewards.
As application of these models we formalize probabilistic model
checking of pCTL formulas, analysis of IPv4 address allocation in
ZeroConf and an analysis of the anonymity of the Crowds protocol.
See here for the corresponding paper.
\ No newline at end of file
diff --git a/web/entries/Marriage.html b/web/entries/Marriage.html
--- a/web/entries/Marriage.html
+++ b/web/entries/Marriage.html
@@ -1,157 +1,157 @@
Hall's Marriage Theorem - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/entries/Mason_Stothers.html b/web/entries/Mason_Stothers.html
--- a/web/entries/Mason_Stothers.html
+++ b/web/entries/Mason_Stothers.html
@@ -1,182 +1,182 @@
The Mason–Stothers Theorem - Archive of Formal Proofs
This article provides a formalisation of Snyder’s simple and
elegant proof of the Mason–Stothers theorem, which is the
polynomial analogue of the famous abc Conjecture for integers.
Remarkably, Snyder found this very elegant proof when he was still a
high-school student.
In short, the statement of the
theorem is that three non-zero coprime polynomials
A, B, C
over a field which sum to 0 and do not all have vanishing derivatives
fulfil max{deg(A), deg(B),
deg(C)} < deg(rad(ABC))
where the rad(P) denotes the
radical of P,
i. e. the product of all unique irreducible factors of
P.
This theorem also implies a
kind of polynomial analogue of Fermat’s Last Theorem for polynomials:
except for trivial cases,
An +
Bn +
Cn = 0 implies
n ≤ 2 for coprime polynomials
A, B, C
over a field.
\ No newline at end of file
diff --git a/web/entries/Matrix.html b/web/entries/Matrix.html
--- a/web/entries/Matrix.html
+++ b/web/entries/Matrix.html
@@ -1,182 +1,182 @@
Executable Matrix Operations on Matrices of Arbitrary Dimensions - Archive of Formal Proofs
Executable Matrix Operations on Matrices of Arbitrary Dimensions
We provide the operations of matrix addition, multiplication,
transposition, and matrix comparisons as executable functions over
ordered semirings. Moreover, it is proven that strongly normalizing
(monotone) orders can be lifted to strongly normalizing (monotone) orders
over matrices. We further show that the standard semirings over the
naturals, integers, and rationals, as well as the arctic semirings
satisfy the axioms that are required by our matrix theory. Our
formalization is part of the CeTA system
which contains several termination techniques. The provided theories have
been essential to formalize matrix-interpretations and arctic
interpretations.
\ No newline at end of file
diff --git a/web/entries/Median_Of_Medians_Selection.html b/web/entries/Median_Of_Medians_Selection.html
--- a/web/entries/Median_Of_Medians_Selection.html
+++ b/web/entries/Median_Of_Medians_Selection.html
@@ -1,173 +1,173 @@
The Median-of-Medians Selection Algorithm - Archive of Formal Proofs
This entry provides an executable functional implementation
of the Median-of-Medians algorithm for selecting the
k-th smallest element of an unsorted list
deterministically in linear time. The size bounds for the recursive
call that lead to the linear upper bound on the run-time of the
algorithm are also proven.
\ No newline at end of file
diff --git a/web/entries/Mersenne_Primes.html b/web/entries/Mersenne_Primes.html
--- a/web/entries/Mersenne_Primes.html
+++ b/web/entries/Mersenne_Primes.html
@@ -1,177 +1,177 @@
Mersenne primes and the Lucas–Lehmer test - Archive of Formal Proofs
This article provides formal proofs of basic properties of
Mersenne numbers, i. e. numbers of the form
2n - 1, and especially of
Mersenne primes.
In particular, an efficient,
verified, and executable version of the Lucas–Lehmer test is
developed. This test decides primality for Mersenne numbers in time
polynomial in n.
\ No newline at end of file
diff --git a/web/entries/Metalogic_ProofChecker.html b/web/entries/Metalogic_ProofChecker.html
--- a/web/entries/Metalogic_ProofChecker.html
+++ b/web/entries/Metalogic_ProofChecker.html
@@ -1,194 +1,194 @@
Isabelle's Metalogic: Formalization and Proof Checker - Archive of Formal Proofs
Isabelle's Metalogic: Formalization and Proof Checker
In this entry we formalize Isabelle's metalogic in Isabelle/HOL.
Furthermore, we define a language of proof terms and an executable
proof checker and prove its soundness wrt. the metalogic. The
formalization is intentionally kept close to the Isabelle
implementation (for example using de Bruijn indices) to enable easy
integration of generated code with the Isabelle system without a
complicated translation layer. The formalization is described in our
CADE 28 paper.
\ No newline at end of file
diff --git a/web/entries/MiniML.html b/web/entries/MiniML.html
--- a/web/entries/MiniML.html
+++ b/web/entries/MiniML.html
@@ -1,166 +1,166 @@
Mini ML - Archive of Formal Proofs
This theory defines the type inference rules and the type inference algorithm W for MiniML (simply-typed lambda terms with let) due to Milner. It proves the soundness and completeness of W w.r.t. the rules.
Minkowski's theorem relates a subset of
ℝn, the Lebesgue measure, and the
integer lattice ℤn: It states that
any convex subset of ℝn with volume
greater than 2n contains at least one lattice
point from ℤn\{0}, i. e. a
non-zero point with integer coefficients.
A
related theorem which directly implies this is Blichfeldt's
theorem, which states that any subset of
ℝn with a volume greater than 1
contains two different points whose difference vector has integer
components.
\ No newline at end of file
diff --git a/web/entries/Modular_Assembly_Kit_Security.html b/web/entries/Modular_Assembly_Kit_Security.html
--- a/web/entries/Modular_Assembly_Kit_Security.html
+++ b/web/entries/Modular_Assembly_Kit_Security.html
@@ -1,190 +1,190 @@
An Isabelle/HOL Formalization of the Modular Assembly Kit for Security Properties - Archive of Formal Proofs
An Isabelle/HOL Formalization of the Modular Assembly Kit for Security Properties
The "Modular Assembly Kit for Security Properties" (MAKS) is
a framework for both the definition and verification of possibilistic
information-flow security properties at the specification-level. MAKS
supports the uniform representation of a wide range of possibilistic
information-flow properties and provides support for the verification
of such properties via unwinding results and compositionality results.
We provide a formalization of this framework in Isabelle/HOL.
\ No newline at end of file
diff --git a/web/entries/Monad_Memo_DP.html b/web/entries/Monad_Memo_DP.html
--- a/web/entries/Monad_Memo_DP.html
+++ b/web/entries/Monad_Memo_DP.html
@@ -1,208 +1,208 @@
Monadification, Memoization and Dynamic Programming - Archive of Formal Proofs
Monadification, Memoization and Dynamic Programming
We present a lightweight framework for the automatic verified
(functional or imperative) memoization of recursive functions. Our
tool can turn a pure Isabelle/HOL function definition into a
monadified version in a state monad or the Imperative HOL heap monad,
and prove a correspondence theorem. We provide a variety of memory
implementations for the two types of monads. A number of simple
techniques allow us to achieve bottom-up computation and
space-efficient memoization. The framework’s utility is demonstrated
on a number of representative dynamic programming problems. A detailed
description of our work can be found in the accompanying paper [2].
The usual monad laws can directly be used as rewrite rules for Isabelle’s
simplifier to normalise monadic HOL terms and decide equivalences.
In a commutative monad, however, the commutativity law is a
higher-order permutative rewrite rule that makes the simplifier loop.
This AFP entry implements a simproc that normalises monadic
expressions in commutative monads using ordered rewriting. The
simproc can also permute computations across control operators like if
and case.
\ No newline at end of file
diff --git a/web/entries/Monomorphic_Monad.html b/web/entries/Monomorphic_Monad.html
--- a/web/entries/Monomorphic_Monad.html
+++ b/web/entries/Monomorphic_Monad.html
@@ -1,181 +1,181 @@
Effect polymorphism in higher-order logic - Archive of Formal Proofs
The notion of a monad cannot be expressed within higher-order logic
(HOL) due to type system restrictions. We show that if a monad is used
with values of only one type, this notion can be formalised in HOL.
Based on this idea, we develop a library of effect specifications and
implementations of monads and monad transformers. Hence, we can
abstract over the concrete monad in HOL definitions and thus use the
same definition for different (combinations of) effects. We illustrate
the usefulness of effect polymorphism with a monadic interpreter for a
simple language.
Binary multirelations associate elements of a set with its subsets; hence
they are binary relations from a set to its power set. Applications include
alternating automata, models and logics for games, program semantics with
dual demonic and angelic nondeterministic choices and concurrent dynamic
logics. This proof document supports an arXiv article that formalises the
basic algebra of multirelations and proposes axiom systems for them,
ranging from weak bi-monoids to weak bi-quantales.
\ No newline at end of file
diff --git a/web/entries/Myhill-Nerode.html b/web/entries/Myhill-Nerode.html
--- a/web/entries/Myhill-Nerode.html
+++ b/web/entries/Myhill-Nerode.html
@@ -1,175 +1,175 @@
The Myhill-Nerode Theorem Based on Regular Expressions - Archive of Formal Proofs
The Myhill-Nerode Theorem Based on Regular Expressions
There are many proofs of the Myhill-Nerode theorem using automata. In this library we give a proof entirely based on regular expressions, since regularity of languages can be conveniently defined using regular expressions (it is more painful in HOL to define regularity in terms of automata). We prove the first direction of the Myhill-Nerode theorem by solving equational systems that involve regular expressions. For the second direction we give two proofs: one using tagging-functions and another using partial derivatives. We also establish various closure properties of regular languages. Most details of the theories are described in our ITP 2011 paper.
\ No newline at end of file
diff --git a/web/entries/Native_Word.html b/web/entries/Native_Word.html
--- a/web/entries/Native_Word.html
+++ b/web/entries/Native_Word.html
@@ -1,199 +1,199 @@
Native Word - Archive of Formal Proofs
This entry makes machine words and machine arithmetic available for code generation from Isabelle/HOL. It provides a common abstraction that hides the differences between the different target languages. The code generator maps these operations to the APIs of the target languages. Apart from that, we extend the available bit operations on types int and integer, and map them to the operations in the target languages.
\ No newline at end of file
diff --git a/web/entries/Nested_Multisets_Ordinals.html b/web/entries/Nested_Multisets_Ordinals.html
--- a/web/entries/Nested_Multisets_Ordinals.html
+++ b/web/entries/Nested_Multisets_Ordinals.html
@@ -1,178 +1,178 @@
Formalization of Nested Multisets, Hereditary Multisets, and Syntactic Ordinals - Archive of Formal Proofs
Formalization of Nested Multisets, Hereditary Multisets, and Syntactic Ordinals
This Isabelle/HOL formalization introduces a nested multiset datatype and defines Dershowitz and Manna's nested multiset order. The order is proved well founded and linear. By removing one constructor, we transform the nested multisets into hereditary multisets. These are isomorphic to the syntactic ordinals—the ordinals can be recursively expressed in Cantor normal form. Addition, subtraction, multiplication, and linear orders are provided on this type.
Dealing with binders, renaming of bound variables, capture-avoiding
substitution, etc., is very often a major problem in formal
proofs, especially in proofs by structural and rule
induction. Nominal Isabelle is designed to make such proofs easy to
formalise: it provides an infrastructure for declaring nominal
datatypes (that is alpha-equivalence classes) and for defining
functions over them by structural recursion. It also provides
induction principles that have Barendregt’s variable convention
already built in.
This entry can be used as a more advanced replacement for
HOL/Nominal in the Isabelle distribution.
\ No newline at end of file
diff --git a/web/entries/NormByEval.html b/web/entries/NormByEval.html
--- a/web/entries/NormByEval.html
+++ b/web/entries/NormByEval.html
@@ -1,152 +1,152 @@
Normalization by Evaluation - Archive of Formal Proofs
This article formalizes normalization by evaluation as implemented in Isabelle. Lambda calculus plus term rewriting is compiled into a functional program with pattern matching. It is proved that the result of a successful evaluation is a) correct, i.e. equivalent to the input, and b) in normal form.
\ No newline at end of file
diff --git a/web/entries/Octonions.html b/web/entries/Octonions.html
--- a/web/entries/Octonions.html
+++ b/web/entries/Octonions.html
@@ -1,175 +1,175 @@
Octonions - Archive of Formal Proofs
We develop the basic theory of Octonions, including various identities
and properties of the octonions and of the octonionic product, a
description of 7D isometries and representations of orthogonal
transformations. To this end we first develop the theory of the vector
cross product in 7 dimensions. The development of the theory of
Octonions is inspired by that of the theory of Quaternions by Lawrence
Paulson. However, we do not work within the type class real_algebra_1
because the octonionic product is not associative.
\ No newline at end of file
diff --git a/web/entries/OpSets.html b/web/entries/OpSets.html
--- a/web/entries/OpSets.html
+++ b/web/entries/OpSets.html
@@ -1,172 +1,172 @@
OpSets: Sequential Specifications for Replicated Datatypes - Archive of Formal Proofs
OpSets: Sequential Specifications for Replicated Datatypes
We introduce OpSets, an executable framework for specifying and
reasoning about the semantics of replicated datatypes that provide
eventual consistency in a distributed system, and for mechanically
verifying algorithms that implement these datatypes. Our approach is
simple but expressive, allowing us to succinctly specify a variety of
abstract datatypes, including maps, sets, lists, text, graphs, trees,
and registers. Our datatypes are also composable, enabling the
construction of complex data structures. To demonstrate the utility of
OpSets for analysing replication algorithms, we highlight an important
correctness property for collaborative text editing that has
traditionally been overlooked; algorithms that do not satisfy this
property can exhibit awkward interleaving of text. We use OpSets to
specify this correctness property and prove that although one existing
replication algorithm satisfies this property, several other published
algorithms do not.
\ No newline at end of file
diff --git a/web/entries/Optimal_BST.html b/web/entries/Optimal_BST.html
--- a/web/entries/Optimal_BST.html
+++ b/web/entries/Optimal_BST.html
@@ -1,175 +1,175 @@
Optimal Binary Search Trees - Archive of Formal Proofs
This article formalizes recursive algorithms for the construction
of optimal binary search trees given fixed access frequencies.
We follow Knuth (1971), Yao (1980) and Mehlhorn (1984).
The algorithms are memoized with the help of the AFP article
Monadification, Memoization and Dynamic Programming,
thus yielding dynamic programming algorithms.
\ No newline at end of file
diff --git a/web/entries/Ordered_Resolution_Prover.html b/web/entries/Ordered_Resolution_Prover.html
--- a/web/entries/Ordered_Resolution_Prover.html
+++ b/web/entries/Ordered_Resolution_Prover.html
@@ -1,190 +1,190 @@
Formalization of Bachmair and Ganzinger's Ordered Resolution Prover - Archive of Formal Proofs
Formalization of Bachmair and Ganzinger's Ordered Resolution Prover
This Isabelle/HOL formalization covers Sections 2 to 4 of Bachmair and
Ganzinger's "Resolution Theorem Proving" chapter in the
Handbook of Automated Reasoning. This includes
soundness and completeness of unordered and ordered variants of ground
resolution with and without literal selection, the standard redundancy
criterion, a general framework for refutational theorem proving, and
soundness and completeness of an abstract first-order prover.
This development defines a well-ordered type of countable ordinals. It includes notions of continuous and normal functions, recursively defined functions over ordinals, least fixed-points, and derivatives. Much of ordinal arithmetic is formalized, including exponentials and logarithms. The development concludes with formalizations of Cantor Normal Form and Veblen hierarchies over normal functions.
Session Ordinary-Differential-Equations formalizes ordinary differential equations (ODEs) and initial value
problems. This work comprises proofs for local and global existence of unique solutions
(Picard-Lindelöf theorem). Moreover, it contains a formalization of the (continuous or even
differentiable) dependency of the flow on initial conditions as the flow of ODEs.
Not in the generated document are the following sessions:
HOL-ODE-Numerics:
Rigorous numerical algorithms for computing enclosures of solutions based on Runge-Kutta methods
and affine arithmetic. Reachability analysis with splitting and reduction at hyperplanes.
HOL-ODE-Examples:
Applications of the numerical algorithms to concrete systems of ODEs.
Lorenz_C0, Lorenz_C1:
Verified algorithms for checking C1-information according to Tucker's proof,
computation of C0-information.
BSD License
Change history
[2014-02-13] added an implementation of the Euler method based on affine arithmetic
[2016-04-14] added flow and variational equation
[2016-08-03] numerical algorithms for reachability analysis (using second-order Runge-Kutta methods, splitting, and reduction) implemented using Lammich's framework for automatic refinement
[2017-09-20] added Poincare map and propagation of variational equation in
reachability analysis, verified algorithms for C1-information and computations
for C0-information of the Lorenz attractor.
This library defines three different versions of pairing heaps: a
functional version of the original design based on binary
trees [Fredman et al. 1986], the version by Okasaki [1998] and
a modified version of the latter that is free of structural invariants.
The amortized complexity of pairing heaps is analyzed in the AFP article
Amortized Complexity.
BSD License
Extra 0
Origin: This library was extracted from Amortized Complexity and extended.
This article gives the basic theory of Pell's equation
x² = 1 + Dy²,
where
D ∈ ℕ is
a parameter and x, y are
integer variables.
The main result that is proven
is the following: If D is not a perfect square,
then there exists a fundamental solution
(x₀, y₀) that is not the
trivial solution (1, 0) and which generates all other solutions
(x, y) in the sense that
there exists some n ∈ ℕ
such that |x| + |y|√D = (x₀ + y₀√D)ⁿ.
This also implies that the set of solutions is infinite, and it gives
us an explicit and executable characterisation of all the solutions.
Based on this, simple executable algorithms for
computing the fundamental solution and the infinite sequence of all
non-negative solutions are also provided.
\ No newline at end of file
diff --git a/web/entries/Perron_Frobenius.html b/web/entries/Perron_Frobenius.html
--- a/web/entries/Perron_Frobenius.html
+++ b/web/entries/Perron_Frobenius.html
@@ -1,200 +1,200 @@
Perron-Frobenius Theorem for Spectral Radius Analysis - Archive of Formal Proofs
Perron-Frobenius Theorem for Spectral Radius Analysis
The spectral radius of a matrix A is the maximum norm of all
eigenvalues of A. In previous work we already formalized that for a
complex matrix A, the values in Aⁿ grow polynomially in n
if and only if the spectral radius is at most one. One problem with
the above characterization is the determination of all
complex eigenvalues. In case A contains only non-negative
real values, a simplification is possible with the help of the
Perron–Frobenius theorem, which tells us that it suffices to consider only
the real eigenvalues of A, i.e., applying Sturm's method can
decide the polynomial growth of Aⁿ.
We formalize
the Perron–Frobenius theorem based on a proof via Brouwer's fixpoint
theorem, which is available in the HOL multivariate analysis (HMA)
library. Since the results on the spectral radius is based on matrices
in the Jordan normal form (JNF) library, we further develop a
connection which allows us to easily transfer theorems between HMA and
JNF. With this connection we derive the combined result: if A is a
non-negative real matrix, and no real eigenvalue of A is strictly
larger than one, then Aⁿ is polynomially bounded in n.
\ No newline at end of file
diff --git a/web/entries/Pi_Transcendental.html b/web/entries/Pi_Transcendental.html
--- a/web/entries/Pi_Transcendental.html
+++ b/web/entries/Pi_Transcendental.html
@@ -1,177 +1,177 @@
The Transcendence of π - Archive of Formal Proofs
This entry shows the transcendence of π based on the
classic proof using the fundamental theorem of symmetric polynomials
first given by von Lindemann in 1882, but the formalisation mostly
follows the version by Niven. The proof reuses much of the machinery
developed in the AFP entry on the transcendence of
e.
\ No newline at end of file
diff --git a/web/entries/Poincare_Bendixson.html b/web/entries/Poincare_Bendixson.html
--- a/web/entries/Poincare_Bendixson.html
+++ b/web/entries/Poincare_Bendixson.html
@@ -1,174 +1,174 @@
The Poincaré-Bendixson Theorem - Archive of Formal Proofs
The Poincaré-Bendixson theorem is a classical result in the study of
(continuous) dynamical systems. Colloquially, it restricts the
possible behaviors of planar dynamical systems: such systems cannot be
chaotic. In practice, it is a useful tool for proving the existence of
(limiting) periodic behavior in planar systems. The theorem is an
interesting and challenging benchmark for formalized mathematics
because proofs in the literature rely on geometric sketches and only
hint at symmetric cases. It also requires a substantial background of
mathematical theories, e.g., the Jordan curve theorem, real analysis,
ordinary differential equations, and limiting (long-term) behavior of
dynamical systems.
We define multivariate polynomials over arbitrary (ordered) semirings in
combination with (executable) operations like addition, multiplication,
and substitution. We also define (weak) monotonicity of polynomials and
comparison of polynomials where we provide standard estimations like
absolute positiveness or the more recent approach of Neurauter, Zankl,
and Middeldorp. Moreover, it is proven that strongly normalizing
(monotone) orders can be lifted to strongly normalizing (monotone) orders
over polynomials. Our formalization was performed as part of the IsaFoR/CeTA-system
which contains several termination techniques. The provided theories have
been essential to formalize polynomial interpretations.
This formalization also contains an abstract representation as coefficient functions with finite
support and a type of power-products. If this type is ordered by a linear (term) ordering, various
additional notions, such as leading power-product, leading coefficient etc., are introduced as
well. Furthermore, a lot of generic properties of, and functions on, multivariate polynomials are
formalized, including the substitution and evaluation homomorphisms, embeddings of polynomial rings
into larger rings (i.e. with one additional indeterminate), homogenization and dehomogenization of
polynomials, and the canonical isomorphism between R[X,Y] and R[X][Y].
[2010-09-17] Moved theories on arbitrary (ordered) semirings to Abstract Rewriting.
[2016-10-28] Added abstract representation of polynomials and authors Maletzky/Immler.
[2018-01-23] Added authors Haftmann, Lochbihler after incorporating
their formalization of multivariate polynomials based on Polynomial mappings.
Moved material from Bentkamp's entry "Deep Learning".
[2019-04-18] Added material about polynomials whose power-products are represented themselves
by polynomial mappings.
\ No newline at end of file
diff --git a/web/entries/Posix-Lexing.html b/web/entries/Posix-Lexing.html
--- a/web/entries/Posix-Lexing.html
+++ b/web/entries/Posix-Lexing.html
@@ -1,168 +1,168 @@
POSIX Lexing with Derivatives of Regular Expressions - Archive of Formal Proofs
POSIX Lexing with Derivatives of Regular Expressions
Brzozowski introduced the notion of derivatives for regular
expressions. They can be used for a very simple regular expression
matching algorithm. Sulzmann and Lu cleverly extended this algorithm
in order to deal with POSIX matching, which is the underlying
disambiguation strategy for regular expressions needed in lexers. In
this entry we give our inductive definition of what a POSIX value is
and show (i) that such a value is unique (for given regular expression
and string being matched) and (ii) that Sulzmann and Lu's algorithm
always generates such a value (provided that the regular expression
matches the string). We also prove the correctness of an optimised
version of the POSIX matching algorithm.
\ No newline at end of file
diff --git a/web/entries/Power_Sum_Polynomials.html b/web/entries/Power_Sum_Polynomials.html
--- a/web/entries/Power_Sum_Polynomials.html
+++ b/web/entries/Power_Sum_Polynomials.html
@@ -1,211 +1,211 @@
Power Sum Polynomials - Archive of Formal Proofs
This article provides a formalisation of the symmetric
multivariate polynomials known as power sum
polynomials. These are of the form
pₙ(X₁, …, Xₖ) = X₁ⁿ + … + Xₖⁿ.
A formal proof of the Girard–Newton Theorem is also given. This
theorem relates the power sum polynomials to the elementary symmetric
polynomials sₖ in the form
of a recurrence relation
(−1)ᵏ k sₖ = ∑_{i ∈ [0,k)} (−1)ⁱ sᵢ pₖ₋ᵢ.
As an application, this is then used to solve a generalised
form of a puzzle given as an exercise in Dummit and Foote's
Abstract Algebra: For k
complex unknowns x1,
…,
xk,
define pj :=
x1j
+ … +
xkj.
Then for each vector a ∈ ℂᵏ, show that
there is exactly one solution to the system
p₁ = a₁, …, pₖ = aₖ
up to permutation of the xᵢ,
and determine the value of pᵢ for i > k.
\ No newline at end of file
diff --git a/web/entries/Prim_Dijkstra_Simple.html b/web/entries/Prim_Dijkstra_Simple.html
--- a/web/entries/Prim_Dijkstra_Simple.html
+++ b/web/entries/Prim_Dijkstra_Simple.html
@@ -1,185 +1,185 @@
Purely Functional, Simple, and Efficient Implementation of Prim and Dijkstra - Archive of Formal Proofs
Purely Functional, Simple, and Efficient Implementation of Prim and Dijkstra
We verify purely functional, simple and efficient implementations of
Prim's and Dijkstra's algorithms. This constitutes the first
verification of an executable and even efficient version of
Prim's algorithm. This entry formalizes the second part of our
ITP-2019 proof pearl Purely Functional, Simple and Efficient
Priority Search Trees and Applications to Prim and Dijkstra.
@article{Prim_Dijkstra_Simple-AFP,
author = {lammich and nipkow},
title = {Purely Functional, Simple, and Efficient Implementation of Prim and Dijkstra},
journal = {Archive of Formal Proofs},
month = June,
year = 2019,
note = {\url{https://isa-afp.org/entries/Prim_Dijkstra_Simple.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/Prime_Distribution_Elementary.html b/web/entries/Prime_Distribution_Elementary.html
--- a/web/entries/Prime_Distribution_Elementary.html
+++ b/web/entries/Prime_Distribution_Elementary.html
@@ -1,191 +1,191 @@
Elementary Facts About the Distribution of Primes - Archive of Formal Proofs
This entry is a formalisation of Chapter 4 (and parts of
Chapter 3) of Apostol's Introduction
to Analytic Number Theory. The main topics that
are addressed are properties of the distribution of prime numbers that
can be shown in an elementary way (i. e. without the Prime
Number Theorem), the various equivalent forms of the PNT (which imply
each other in elementary ways), and consequences that follow from the
PNT in elementary ways. The latter include, most notably, asymptotic
bounds for the number of distinct prime factors of
n, the divisor function
d(n), Euler's totient function
φ(n), and
lcm(1,…,n).
@article{Prime_Distribution_Elementary-AFP,
author = {eberl},
title = {Elementary Facts About the Distribution of Primes},
journal = {Archive of Formal Proofs},
month = February,
year = 2019,
note = {\url{https://isa-afp.org/entries/Prime_Distribution_Elementary.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/Prime_Harmonic_Series.html b/web/entries/Prime_Harmonic_Series.html
--- a/web/entries/Prime_Harmonic_Series.html
+++ b/web/entries/Prime_Harmonic_Series.html
@@ -1,179 +1,179 @@
The Divergence of the Prime Harmonic Series - Archive of Formal Proofs
In this work, we prove the lower bound ln(Hₙ) − ln(5/3) for the
partial sum of the Prime Harmonic series and, based on this, the divergence of
the Prime Harmonic Series
∑[p prime] 1/p.
The proof relies on the unique squarefree decomposition of natural numbers. This
is similar to Euler's original proof (which was highly informal and morally
questionable). Its advantage over proofs by contradiction, like the famous one
by Paul Erdős, is that it provides a relatively good lower bound for the partial
sums.
\ No newline at end of file
diff --git a/web/entries/Prime_Number_Theorem.html b/web/entries/Prime_Number_Theorem.html
--- a/web/entries/Prime_Number_Theorem.html
+++ b/web/entries/Prime_Number_Theorem.html
@@ -1,207 +1,207 @@
The Prime Number Theorem - Archive of Formal Proofs
This article provides a short proof of the Prime Number
Theorem in several equivalent forms, most notably
π(x) ~ x/ln
x where π(x) is the
number of primes no larger than x. It also
defines other basic number-theoretic functions related to primes like
Chebyshev's functions ϑ and ψ and the
“n-th prime number” function
pₙ. We also show various
bounds and relationships between these functions. Lastly, we
derive Mertens' First and Second Theorem, i. e.
∑p≤x
ln p/p = ln
x + O(1) and
∑p≤x
1/p = ln ln x + M +
O(1/ln x). We also give
explicit bounds for the remainder terms.
The proof
of the Prime Number Theorem builds on a library of Dirichlet series
and analytic combinatorics. We essentially follow the presentation by
Newman. The core part of the proof is a Tauberian theorem for
Dirichlet series, which is proven using complex analysis and then used
to strengthen Mertens' First Theorem to
∑p≤x
ln p/p = ln
x + c + o(1).
A variant of this proof has been formalised before by
Harrison in HOL Light, and formalisations of Selberg's elementary
proof exist both by Avigad et al. in Isabelle and
by Carneiro in Metamath. The advantage of the analytic proof is that,
while it requires more powerful mathematical tools, it is considerably
shorter and clearer. This article attempts to provide a short and
clear formalisation of all components of that proof using the full
range of mathematical machinery available in Isabelle, staying as
close as possible to Newman's simple paper proof.
\ No newline at end of file
diff --git a/web/entries/Priority_Queue_Braun.html b/web/entries/Priority_Queue_Braun.html
--- a/web/entries/Priority_Queue_Braun.html
+++ b/web/entries/Priority_Queue_Braun.html
@@ -1,160 +1,160 @@
Priority Queues Based on Braun Trees - Archive of Formal Proofs
This entry verifies priority queues based on Braun trees. Insertion
and deletion take logarithmic time and preserve the balanced nature
of Braun trees. Two implementations of deletion are provided.
BSD License
Change history
[2019-12-16] Added theory Priority_Queue_Braun2 with second version of del_min
\ No newline at end of file
diff --git a/web/entries/Priority_Search_Trees.html b/web/entries/Priority_Search_Trees.html
--- a/web/entries/Priority_Search_Trees.html
+++ b/web/entries/Priority_Search_Trees.html
@@ -1,179 +1,179 @@
Priority Search Trees - Archive of Formal Proofs
We present a new, purely functional, simple and efficient data
structure combining a search tree and a priority queue, which we call
a priority search tree. The salient feature of priority search
trees is that they offer a decrease-key operation, something that is
missing from other simple, purely functional priority queue
implementations. Priority search trees can be implemented on top of
any search tree. This entry does the implementation for red-black
trees. This entry formalizes the first part of our ITP-2019 proof
pearl Purely Functional, Simple and Efficient Priority
Search Trees and Applications to Prim and Dijkstra.
We formalize probabilistic noninterference for a multi-threaded language with uniform scheduling, where probabilistic behaviour comes from both the scheduler and the individual threads. We define notions of probabilistic noninterference in two variants: resumption-based and trace-based. For the resumption-based notions, we prove compositionality w.r.t. the language constructs and establish sound type-system-like syntactic criteria. This is a formalization of the mathematical development presented at CPP 2013 and CALCO 2013. It is the probabilistic variant of the Possibilistic Noninterference AFP entry.
The most efficient known primality tests are
probabilistic in the sense that they use
randomness and may, with some probability, mistakenly classify a
composite number as prime – but never a prime number as
composite. Examples of this are the Miller–Rabin test, the
Solovay–Strassen test, and (in most cases) Fermat's
test.
This entry defines these three tests and
proves their correctness. It also develops some of the
number-theoretic foundations, such as Carmichael numbers and the
Jacobi symbol with an efficient executable algorithm to compute
it.
\ No newline at end of file
diff --git a/web/entries/Probabilistic_System_Zoo.html b/web/entries/Probabilistic_System_Zoo.html
--- a/web/entries/Probabilistic_System_Zoo.html
+++ b/web/entries/Probabilistic_System_Zoo.html
@@ -1,164 +1,164 @@
A Zoo of Probabilistic Systems - Archive of Formal Proofs
Numerous models of probabilistic systems are studied in the literature.
Coalgebra has been used to classify them into system types and compare their
expressiveness. We formalize the resulting hierarchy of probabilistic system
types by modeling the semantics of the different systems as codatatypes.
This approach yields simple and concise proofs, as bisimilarity coincides
with equality for codatatypes.
This work is described in detail in the ITP 2015 publication by the authors.
We present a formalization of probabilistic timed automata (PTA) for
which we try to follow the formula MDP + TA = PTA as far as possible:
our work starts from our existing formalizations of Markov decision
processes (MDP) and timed automata (TA) and combines them modularly.
We prove the fundamental result for probabilistic timed automata: the
region construction that is known from timed automata carries over to
the probabilistic setting. In particular, this allows us to prove that
minimum and maximum reachability probabilities can be computed via a
reduction to MDP model checking, including the case where one wants to
disregard unrealizable behavior. Further information can be found in
our ITP paper [2].
\ No newline at end of file
diff --git a/web/entries/Probabilistic_While.html b/web/entries/Probabilistic_While.html
--- a/web/entries/Probabilistic_While.html
+++ b/web/entries/Probabilistic_While.html
@@ -1,169 +1,169 @@
Probabilistic while loop - Archive of Formal Proofs
This AFP entry defines a probabilistic while operator based on
sub-probability mass functions and formalises zero-one laws and variant
rules for probabilistic loop termination. As applications, we
implement probabilistic algorithms for the Bernoulli, geometric and
arbitrary uniform distributions that only use fair coin flips, and
prove them correct and terminating with probability 1.
\ No newline at end of file
diff --git a/web/entries/Propositional_Proof_Systems.html b/web/entries/Propositional_Proof_Systems.html
--- a/web/entries/Propositional_Proof_Systems.html
+++ b/web/entries/Propositional_Proof_Systems.html
@@ -1,221 +1,221 @@
Propositional Proof Systems - Archive of Formal Proofs
We formalize a range of proof systems for classical propositional
logic (sequent calculus, natural deduction, Hilbert systems,
resolution) and prove the most important meta-theoretic results about
semantics and proofs: compactness, soundness, completeness,
translations between proof systems, cut-elimination, interpolation and
model existence.
\ No newline at end of file
diff --git a/web/entries/QR_Decomposition.html b/web/entries/QR_Decomposition.html
--- a/web/entries/QR_Decomposition.html
+++ b/web/entries/QR_Decomposition.html
@@ -1,184 +1,184 @@
QR Decomposition - Archive of Formal Proofs
QR decomposition is an algorithm to decompose a real matrix A into the product of two other matrices Q and R, where Q is orthogonal and R is invertible and upper triangular. The algorithm is useful for the least squares problem; i.e., the computation of the best approximation of an unsolvable system of linear equations. As a side-product, the Gram-Schmidt process has also been formalized. A refinement using immutable arrays is presented as well. The development relies, among others, on the AFP entry "Implementing field extensions of the form Q[sqrt(b)]" by René Thiemann, which allows execution of the algorithm using symbolic computations. Verified code can be generated and executed using floats as well.
BSD License
Change history
[2015-06-18] The second part of the Fundamental Theorem of Linear Algebra has been generalized to more general inner product spaces.
\ No newline at end of file
diff --git a/web/entries/Quick_Sort_Cost.html b/web/entries/Quick_Sort_Cost.html
--- a/web/entries/Quick_Sort_Cost.html
+++ b/web/entries/Quick_Sort_Cost.html
@@ -1,180 +1,180 @@
The number of comparisons in QuickSort - Archive of Formal Proofs
We give a formal proof of the well-known results about the
number of comparisons performed by two variants of QuickSort: first,
the expected number of comparisons of randomised QuickSort
(i. e. QuickSort with random pivot choice) is
2(n+1)Hₙ − 4n, which is asymptotically equivalent to
2 n ln n; second, the number of
comparisons performed by the classic non-randomised QuickSort has the
same distribution in the average case as the randomised one.
We present a verified and executable implementation of ROBDDs in
Isabelle/HOL. Our implementation relates pointer-based computation in
the Heap monad to operations on an abstract definition of boolean
functions. Internally, we implemented the if-then-else combinator in a
recursive fashion, following the Shannon decomposition of the argument
functions. The implementation mixes and adapts known techniques and is
built with efficiency in mind.
\ No newline at end of file
diff --git a/web/entries/Random_BSTs.html b/web/entries/Random_BSTs.html
--- a/web/entries/Random_BSTs.html
+++ b/web/entries/Random_BSTs.html
@@ -1,179 +1,179 @@
Expected Shape of Random Binary Search Trees - Archive of Formal Proofs
This entry contains proofs for the textbook results about the
distributions of the height and internal path length of random binary
search trees (BSTs), i. e. BSTs that are formed by taking
an empty BST and inserting elements from a fixed set in random
order.
In particular, we prove a logarithmic upper
bound on the expected height and the Θ(n log n)
closed-form solution for the expected internal path length in terms of
the harmonic numbers. We also show how the internal path length
relates to the average-case cost of a lookup in a BST.
\ No newline at end of file
diff --git a/web/entries/Randomised_BSTs.html b/web/entries/Randomised_BSTs.html
--- a/web/entries/Randomised_BSTs.html
+++ b/web/entries/Randomised_BSTs.html
@@ -1,171 +1,171 @@
Randomised Binary Search Trees - Archive of Formal Proofs
This work is a formalisation of the Randomised Binary Search
Trees introduced by Martínez and Roura, including definitions and
correctness proofs.
Like randomised treaps, they
are a probabilistic data structure that behaves exactly as if elements
were inserted into a non-balancing BST in random order. However,
unlike treaps, they only use discrete probability distributions, but
their use of randomness is more complicated.
\ No newline at end of file
diff --git a/web/entries/Rank_Nullity_Theorem.html b/web/entries/Rank_Nullity_Theorem.html
--- a/web/entries/Rank_Nullity_Theorem.html
+++ b/web/entries/Rank_Nullity_Theorem.html
@@ -1,172 +1,172 @@
Rank-Nullity Theorem in Linear Algebra - Archive of Formal Proofs
In this contribution, we present some formalizations based on the HOL-Multivariate-Analysis session of Isabelle. Firstly, a generalization of several theorems of such library are presented. Secondly, some definitions and proofs involving Linear Algebra and the four fundamental subspaces of a matrix are shown. Finally, we present a proof of the result known in Linear Algebra as the ``Rank-Nullity Theorem'', which states that, given any linear map f from a finite dimensional vector space V to a vector space W, then the dimension of V is equal to the dimension of the kernel of f (which is a subspace of V) and the dimension of the range of f (which is a subspace of W). The proof presented here is based on the one given by Sheldon Axler in his book Linear Algebra Done Right. As a corollary of the previous theorem, and taking advantage of the relationship between linear maps and matrices, we prove that, for every matrix A (which has associated a linear map between finite dimensional vector spaces), the sum of its null space and its column space (which is equal to the range of the linear map) is equal to the number of columns of A.
BSD License
Change history
[2014-07-14] Added some generalizations that allow us to formalize the Rank-Nullity Theorem over finite dimensional vector spaces, instead of over the more particular euclidean spaces. Updated abstract.
We formalize a unified framework for verified decision procedures for regular
expression equivalence. Five recently published formalizations of such
decision procedures (three based on derivatives, two on marked regular
expressions) can be obtained as instances of the framework. We discover that
the two approaches based on marked regular expressions, which were previously
thought to be the same, are different, and one seems to produce uniformly
smaller automata. The common framework makes it possible to compare the
performance of the different decision procedures in a meaningful way.
The formalization is described in a paper of the same name presented at
Interactive Theorem Proving 2014.
\ No newline at end of file
diff --git a/web/entries/Regular-Sets.html b/web/entries/Regular-Sets.html
--- a/web/entries/Regular-Sets.html
+++ b/web/entries/Regular-Sets.html
@@ -1,183 +1,183 @@
Regular Sets and Expressions - Archive of Formal Proofs
This is a library of constructions on regular expressions and languages. It provides the operations of concatenation, Kleene star and derivative on languages. Regular expressions and their meaning are defined. An executable equivalence checker for regular expressions is verified; it does not need automata but works directly on regular expressions. By mapping regular expressions to binary relations, an automatic and complete proof method for (in)equalities of binary relations over union, concatenation and (reflexive) transitive closure is obtained.
Extended regular expressions with complement and intersection are also defined and an equivalence checker is provided.
BSD License
Change history
[2011-08-26] Christian Urban added a theory about derivatives and partial derivatives of regular expressions
[2012-05-10] Tobias Nipkow added equivalence checking with partial derivatives
Regular algebras axiomatise the equational theory of regular expressions as induced by
regular language identity. We use Isabelle/HOL for a detailed systematic study of regular
algebras given by Boffa, Conway, Kozen and Salomaa. We investigate the relationships between
these classes, formalise a soundness proof for the smallest class (Salomaa's) and obtain
completeness of the largest one (Boffa's) relative to a deep result by Krob. In addition
we provide a large collection of regular identities in the general setting of Boffa's axiom.
Our regular algebra hierarchy is orthogonal to the Kleene algebra hierarchy in the Archive
of Formal Proofs; we have not aimed at an integration for pragmatic reasons.
We give a simple relation-algebraic semantics of read and write
operations on associative arrays. The array operations seamlessly
integrate with assignments in the Hoare-logic library. Using relation
algebras and Kleene algebras we verify the correctness of an
array-based implementation of disjoint-set forests with a naive union
operation and a find operation with path compression.
\ No newline at end of file
diff --git a/web/entries/Relational_Minimum_Spanning_Trees.html b/web/entries/Relational_Minimum_Spanning_Trees.html
--- a/web/entries/Relational_Minimum_Spanning_Trees.html
+++ b/web/entries/Relational_Minimum_Spanning_Trees.html
@@ -1,161 +1,161 @@
Relational Minimum Spanning Tree Algorithms - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/entries/Relational_Paths.html b/web/entries/Relational_Paths.html
--- a/web/entries/Relational_Paths.html
+++ b/web/entries/Relational_Paths.html
@@ -1,185 +1,185 @@
Relational Characterisations of Paths - Archive of Formal Proofs
Binary relations are one of the standard ways to encode, characterise
and reason about graphs. Relation algebras provide equational axioms
for a large fragment of the calculus of binary relations. Although
relations are standard tools in many areas of mathematics and
computing, researchers usually fall back to point-wise reasoning when
it comes to arguments about paths in a graph. We present a purely
algebraic way to specify different kinds of paths in Kleene relation
algebras, which are relation algebras equipped with an operation for
reflexive transitive closure. We study the relationship between paths
with a designated root vertex and paths without such a vertex. Since
we stay in first-order logic this development helps with mechanising
proofs. To demonstrate the applicability of the algebraic framework we
verify the correctness of three basic graph algorithms.
\ No newline at end of file
diff --git a/web/entries/Root_Balanced_Tree.html b/web/entries/Root_Balanced_Tree.html
--- a/web/entries/Root_Balanced_Tree.html
+++ b/web/entries/Root_Balanced_Tree.html
@@ -1,188 +1,188 @@
Root-Balanced Tree - Archive of Formal Proofs
Andersson introduced general balanced trees,
search trees based on the design principle of partial rebuilding:
perform update operations naively until the tree becomes too
unbalanced, at which point a whole subtree is rebalanced. This article
defines and analyzes a functional version of general balanced trees,
which we call root-balanced trees. Using a lightweight model
of execution time, amortized logarithmic complexity is verified in
the theorem prover Isabelle.
This is the Isabelle formalization of the material described in the APLAS 2017 article
Verified Root-Balanced Trees
by the same author, which also presents experimental results that show
competitiveness of root-balanced with AVL and red-black trees.
\ No newline at end of file
diff --git a/web/entries/Routing.html b/web/entries/Routing.html
--- a/web/entries/Routing.html
+++ b/web/entries/Routing.html
@@ -1,170 +1,170 @@
Routing - Archive of Formal Proofs
This entry contains definitions for routing with routing
tables/longest prefix matching. A routing table entry is modelled as
a record of a prefix match, a metric, an output port, and an optional
next hop. A routing table is a list of entries, sorted by prefix
length and metric. Additionally, a parser and serializer for the
output of the ip-route command, a function to create a relation from
output port to corresponding destination IP space, and a model of a
Linux-style router are included.
\ No newline at end of file
diff --git a/web/entries/SATSolverVerification.html b/web/entries/SATSolverVerification.html
--- a/web/entries/SATSolverVerification.html
+++ b/web/entries/SATSolverVerification.html
@@ -1,179 +1,179 @@
Formal Verification of Modern SAT Solvers - Archive of Formal Proofs
This document contains formal correctness proofs of modern SAT solvers. Following (Krstic et al, 2007) and (Nieuwenhuis et al., 2006), solvers are described using state-transition systems. Several different SAT solver descriptions are given and their partial correctness and termination is proved. These include:
a solver based on classical DPLL procedure (using only a backtrack-search with unit propagation),
a very general solver with backjumping and learning (similar to the description given in (Nieuwenhuis et al., 2006)), and
a solver with a specific conflict analysis algorithm (similar to the description given in (Krstic et al., 2007)).
Within the SAT solver correctness proofs, a large number of lemmas about propositional logic and CNF formulae are proved. This theory is self-contained and could be used for further exploring of properties of CNF based SAT algorithms.
\ No newline at end of file
diff --git a/web/entries/SC_DOM_Components.html b/web/entries/SC_DOM_Components.html
--- a/web/entries/SC_DOM_Components.html
+++ b/web/entries/SC_DOM_Components.html
@@ -1,188 +1,188 @@
A Formalization of Safely Composable Web Components - Archive of Formal Proofs
A Formalization of Safely Composable Web Components
While the (safely composable) DOM with shadow trees provide the
technical basis for defining web components, it neither defines
the concept of web components nor specifies the safety properties that
web components should guarantee. Consequently, the standard also does
not discuss how or even if the methods for modifying the DOM respect
component boundaries. In this AFP entry, we present a formally verified
model of safely composable web components and define safety properties
which ensure that different web components can only interact with each
other using well-defined interfaces. Moreover, our verification of the
application programming interface (API) of the DOM revealed numerous
invariants that implementations of the DOM API need to preserve to
ensure the integrity of components. In comparison to the strict
standard compliance formalization of Web Components in the AFP entry
"DOM_Components", the notion of components in this entry
(based on "SC_DOM" and "Shadow_SC_DOM") provides
much stronger safety guarantees.
\ No newline at end of file
diff --git a/web/entries/Safe_Distance.html b/web/entries/Safe_Distance.html
--- a/web/entries/Safe_Distance.html
+++ b/web/entries/Safe_Distance.html
@@ -1,167 +1,167 @@
A Formally Verified Checker of the Safe Distance Traffic Rules for Autonomous Vehicles - Archive of Formal Proofs
A Formally Verified Checker of the Safe Distance Traffic Rules for Autonomous Vehicles
The Vienna Convention on Road Traffic defines the safe distance
traffic rules informally. This could make autonomous vehicles liable
for safe-distance-related accidents because there is no clear
definition of how large a safe distance is. We provide a formally
proven prescriptive definition of a safe distance, and checkers which
can decide whether an autonomous vehicle is obeying the safe distance
rule. Not only does our work apply to the domain of law, but it also
serves as a specification for autonomous vehicle manufacturers and for
online verification of path planners.
\ No newline at end of file
diff --git a/web/entries/Saturation_Framework_Extensions.html b/web/entries/Saturation_Framework_Extensions.html
--- a/web/entries/Saturation_Framework_Extensions.html
+++ b/web/entries/Saturation_Framework_Extensions.html
@@ -1,171 +1,171 @@
Extensions to the Comprehensive Framework for Saturation Theorem Proving - Archive of Formal Proofs
Extensions to the Comprehensive Framework for Saturation Theorem Proving
This Isabelle/HOL formalization extends the AFP entry
Saturation_Framework with the following
contributions:
an application of the framework
to prove Bachmair and Ganzinger's resolution prover RP
refutationally complete, which was formalized in a more ad hoc fashion
by Schlichtkrull et al. in the AFP entry
Ordered_Resolution_Prover;
generalizations of various basic concepts formalized by
Schlichtkrull et al., which were needed to verify RP and could be
useful to formalize other calculi, such as superposition;
alternative proofs of fairness (and hence saturation and
ultimately refutational completeness) for the given clause procedures
GC and LGC, based on invariance.
We present a generic type class implementation of separation algebra for Isabelle/HOL as well as lemmas and generic tactics which can be used directly for any instantiation of the type class.
The ex directory contains example instantiations that include structures such as a heap or virtual memory.
The abstract separation algebra is based upon "Abstract Separation Logic" by Calcagno et al. These theories are also the basis of the ITP 2012 rough diamond "Mechanised Separation Algebra" by the authors.
The aim of this work is to support and significantly reduce the effort for future separation logic developments in Isabelle/HOL by factoring out the part of separation logic that can be treated abstractly once and for all. This includes developing typical default rule sets for reasoning as well as automated tactic support for separation logic.
\ No newline at end of file
diff --git a/web/entries/Shadow_DOM.html b/web/entries/Shadow_DOM.html
--- a/web/entries/Shadow_DOM.html
+++ b/web/entries/Shadow_DOM.html
@@ -1,197 +1,197 @@
A Formal Model of the Document Object Model with Shadow Roots - Archive of Formal Proofs
A Formal Model of the Document Object Model With Shadow Roots
In this AFP entry, we extend our formalization of the core DOM with
Shadow Roots. Shadow roots are a recent proposal of the web community
to support a component-based development approach for client-side web
applications. Shadow roots are a significant extension to the DOM
standard and, as web standards are condemned to be backward
compatible, such extensions often result in complex specification that
may contain unwanted subtleties that can be detected by a
formalization. Our Isabelle/HOL formalization is, in the sense of
object-orientation, an extension of our formalization of the core DOM
and enjoys the same basic properties, i.e., it is extensible, i.e.,
can be extended without the need of re-proving already proven
properties and executable, i.e., we can generate executable code from
our specification. We exploit the executability to show that our
formalization complies to the official standard of the W3C,
respectively, the WHATWG.
@article{Shadow_DOM-AFP,
author = {brucker and herzberg},
title = {A Formal Model of the Document Object Model with Shadow Roots},
journal = {Archive of Formal Proofs},
month = September,
year = 2020,
note = {\url{https://isa-afp.org/entries/Shadow_DOM.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/Shadow_SC_DOM.html b/web/entries/Shadow_SC_DOM.html
--- a/web/entries/Shadow_SC_DOM.html
+++ b/web/entries/Shadow_SC_DOM.html
@@ -1,199 +1,199 @@
A Formal Model of the Safely Composable Document Object Model with Shadow Roots - Archive of Formal Proofs
A Formal Model of the Safely Composable Document Object Model With Shadow Roots
In this AFP entry, we extend our formalization of the safely
composable DOM with Shadow Roots. This is a proposal for Shadow Roots
with stricter safety guarantees than the standard compliant
formalization (see "Shadow DOM"). Shadow Roots are a recent
proposal of the web community to support a component-based development
approach for client-side web applications. Shadow roots are a
significant extension to the DOM standard and, as web standards are
condemned to be backward compatible, such extensions often result in
complex specification that may contain unwanted subtleties that can be
detected by a formalization. Our Isabelle/HOL formalization is, in
the sense of object-orientation, an extension of our formalization of
the core DOM and enjoys the same basic properties, i.e., it is
extensible, i.e., can be extended without the need of re-proving
already proven properties and executable, i.e., we can generate
executable code from our specification. We exploit the executability
to show that our formalization complies to the official standard of
the W3C, respectively, the WHATWG.
@article{Shadow_SC_DOM-AFP,
author = {brucker and herzberg},
title = {A Formal Model of the Safely Composable Document Object Model with Shadow Roots},
journal = {Archive of Formal Proofs},
month = September,
year = 2020,
note = {\url{https://isa-afp.org/entries/Shadow_SC_DOM.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/Sigma_Commit_Crypto.html b/web/entries/Sigma_Commit_Crypto.html
--- a/web/entries/Sigma_Commit_Crypto.html
+++ b/web/entries/Sigma_Commit_Crypto.html
@@ -1,186 +1,186 @@
Sigma Protocols and Commitment Schemes - Archive of Formal Proofs
We use CryptHOL to formalise commitment schemes and Sigma-protocols.
Both are widely used fundamental two party cryptographic primitives.
Security for commitment schemes is considered using game-based
definitions whereas the security of Sigma-protocols is considered
using both the game-based and simulation-based security paradigms. In
this work, we first define security for both primitives and then prove
secure multiple case studies: the Schnorr, Chaum-Pedersen and
Okamoto Sigma-protocols as well as a construction that allows for
compound (AND and OR statements) Sigma-protocols and the Pedersen and
Rivest commitment schemes. We also prove that commitment schemes can
be constructed from Sigma-protocols. We formalise this proof at an
abstract level, only assuming the existence of a Sigma-protocol;
consequently, the instantiations of this result for the concrete
Sigma-protocols we consider come for free.
We present a simple model of a firewall. The firewall can accept or
drop a packet and can match on interfaces, IP addresses, protocol, and
ports. It was designed to feature nice mathematical properties: The
type of match expressions was carefully crafted such that the
conjunction of two match expressions is only one match expression.
This model is too simplistic to mirror all aspects of the real world.
In the upcoming entry "Iptables Semantics", we will translate the
Linux firewall iptables to this model. For a fixed service (e.g. ssh,
http), we provide an algorithm to compute an overview of the
firewall's filtering behavior. The algorithm computes minimal service
matrices, i.e. graphs which partition the complete IPv4 and IPv6
address space and visualize the allowed accesses between partitions.
For a detailed description, see
Verified iptables Firewall
Analysis, IFIP Networking 2016.
\ No newline at end of file
diff --git a/web/entries/Simplex.html b/web/entries/Simplex.html
--- a/web/entries/Simplex.html
+++ b/web/entries/Simplex.html
@@ -1,181 +1,181 @@
An Incremental Simplex Algorithm with Unsatisfiable Core Generation - Archive of Formal Proofs
An Incremental Simplex Algorithm With Unsatisfiable Core Generation
We present an Isabelle/HOL formalization and total correctness proof
for the incremental version of the Simplex algorithm which is used in
most state-of-the-art SMT solvers. It supports extraction of
satisfying assignments, extraction of minimal unsatisfiable cores, incremental
assertion of constraints and backtracking. The formalization relies on
stepwise program refinement, starting from a simple specification,
going through a number of refinement steps, and ending up in a fully
executable functional implementation. Symmetries present in the
algorithm are handled with special care.
Skew heaps are an amazingly simple and lightweight implementation of
priority queues. They were invented by Sleator and Tarjan [SIAM 1986]
and have logarithmic amortized complexity. This entry provides executable
and verified functional skew heaps.
The amortized complexity of skew heaps is analyzed in the AFP entry
Amortized Complexity.
\ No newline at end of file
diff --git a/web/entries/Sliding_Window_Algorithm.html b/web/entries/Sliding_Window_Algorithm.html
--- a/web/entries/Sliding_Window_Algorithm.html
+++ b/web/entries/Sliding_Window_Algorithm.html
@@ -1,173 +1,173 @@
Formalization of an Algorithm for Greedily Computing Associative Aggregations on Sliding Windows - Archive of Formal Proofs
Formalization of an Algorithm for Greedily Computing Associative Aggregations on Sliding Windows
Basin et al.'s sliding
window algorithm (SWA) is an algorithm for combining the
elements of subsequences of a sequence with an associative operator.
It is greedy and minimizes the number of operator applications. We
formalize the algorithm and verify its functional correctness. We
extend the algorithm with additional operations and provide an
alternative interface to the slide operation that does not require the
entire input sequence.
@article{Sliding_Window_Algorithm-AFP,
author = {heimes, traytel and schneider},
title = {Formalization of an Algorithm for Greedily Computing Associative Aggregations on Sliding Windows},
journal = {Archive of Formal Proofs},
month = April,
year = 2020,
note = {\url{https://isa-afp.org/entries/Sliding_Window_Algorithm.html},
Formal proof development},
ISSN = {2150-914x},
}
We formalize the definition and basic properties of smooth manifolds
in Isabelle/HOL. Concepts covered include partition of unity, tangent
and cotangent spaces, and the fundamental theorem of path integrals.
We also examine some concrete manifolds such as spheres and projective
spaces. The formalization makes extensive use of the analysis and
linear algebra libraries in Isabelle/HOL, in particular its
“types-to-sets” mechanism.
\ No newline at end of file
diff --git a/web/entries/Sophomores_Dream.html b/web/entries/Sophomores_Dream.html
--- a/web/entries/Sophomores_Dream.html
+++ b/web/entries/Sophomores_Dream.html
@@ -1,157 +1,157 @@
The Sophomore's Dream - Archive of Formal Proofs
\ No newline at end of file
diff --git a/web/entries/Special_Function_Bounds.html b/web/entries/Special_Function_Bounds.html
--- a/web/entries/Special_Function_Bounds.html
+++ b/web/entries/Special_Function_Bounds.html
@@ -1,169 +1,169 @@
Real-Valued Special Functions: Upper and Lower Bounds - Archive of Formal Proofs
Real-Valued Special Functions: Upper and Lower Bounds
This development proves upper and lower bounds for several familiar real-valued functions. For sin, cos, exp and sqrt, it defines and verifies infinite families of upper and lower bounds, mostly based on Taylor series expansions. For arctan, ln and exp, it verifies a finite collection of upper and lower bounds, originally obtained from the functions' continued fraction expansions using the computer algebra system Maple. A common theme in these proofs is to take the difference between a function and its approximation, which should be zero at one point, and then consider the sign of the derivative. The immediate purpose of this development is to verify axioms used by MetiTarski, an automatic theorem prover for real-valued special functions. Crucial to MetiTarski's operation is the provision of upper and lower bounds for each function of interest.
\ No newline at end of file
diff --git a/web/entries/Splay_Tree.html b/web/entries/Splay_Tree.html
--- a/web/entries/Splay_Tree.html
+++ b/web/entries/Splay_Tree.html
@@ -1,177 +1,177 @@
Splay Tree - Archive of Formal Proofs
Splay trees are self-adjusting binary search trees which were invented by Sleator and Tarjan [JACM 1985].
This entry provides executable and verified functional splay trees
as well as the related splay heaps (due to Okasaki).
The amortized complexity of splay trees and heaps is analyzed in the AFP entry
Amortized Complexity.
BSD License
Change history
[2016-07-12] Moved splay heaps here from Amortized_Complexity
\ No newline at end of file
diff --git a/web/entries/Stern_Brocot.html b/web/entries/Stern_Brocot.html
--- a/web/entries/Stern_Brocot.html
+++ b/web/entries/Stern_Brocot.html
@@ -1,161 +1,161 @@
The Stern-Brocot Tree - Archive of Formal Proofs
The Stern-Brocot tree contains all rational numbers exactly once and in their lowest terms. We formalise the Stern-Brocot tree as a coinductive tree using recursive and iterative specifications, which we have proven equivalent, and show that it indeed contains all the numbers as stated. Following Hinze, we prove that the Stern-Brocot tree can be linearised looplessly into Stern's diatonic sequence (also known as Dijkstra's fusc function) and that it is a permutation of the Bird tree.
The reasoning stays at an abstract level by appealing to the uniqueness of solutions of guarded recursive equations and lifting algebraic laws point-wise to trees and streams using applicative functors.
\ No newline at end of file
diff --git a/web/entries/Stirling_Formula.html b/web/entries/Stirling_Formula.html
--- a/web/entries/Stirling_Formula.html
+++ b/web/entries/Stirling_Formula.html
@@ -1,166 +1,166 @@
Stirling's formula - Archive of Formal Proofs
This work contains a proof of Stirling's formula both for the factorial $n! \sim \sqrt{2\pi n} (n/e)^n$ on natural numbers and the real
Gamma function $\Gamma(x)\sim \sqrt{2\pi/x} (x/e)^x$. The proof is based on work by Graham Jameson.
This is then extended to the full asymptotic expansion
$$\log\Gamma(z) = \big(z - \tfrac{1}{2}\big)\log z - z + \tfrac{1}{2}\log(2\pi) + \sum_{k=1}^{n-1} \frac{B_{k+1}}{k(k+1)} z^{-k}\\
{} - \frac{1}{n} \int_0^\infty B_n([t])(t + z)^{-n}\,\text{d}t$$
uniformly for all complex $z\neq 0$ in the cone $|\text{arg}(z)|\leq \alpha$ for any $\alpha\in(0,\pi)$, with which the above asymptotic
relation for Γ is also extended to complex arguments.
\ No newline at end of file
diff --git a/web/entries/Stochastic_Matrices.html b/web/entries/Stochastic_Matrices.html
--- a/web/entries/Stochastic_Matrices.html
+++ b/web/entries/Stochastic_Matrices.html
@@ -1,177 +1,177 @@
Stochastic Matrices and the Perron-Frobenius Theorem - Archive of Formal Proofs
Stochastic Matrices and the Perron-Frobenius Theorem
Stochastic matrices are a convenient way to model discrete-time and
finite state Markov chains. The Perron–Frobenius theorem
tells us something about the existence and uniqueness of non-negative
eigenvectors of a stochastic matrix. In this entry, we formalize
stochastic matrices, link the formalization to the existing AFP-entry
on Markov chains, and apply the Perron–Frobenius theorem to
prove that stationary distributions always exist, and they are unique
if the stochastic matrix is irreducible.
\ No newline at end of file
diff --git a/web/entries/Stone_Algebras.html b/web/entries/Stone_Algebras.html
--- a/web/entries/Stone_Algebras.html
+++ b/web/entries/Stone_Algebras.html
@@ -1,167 +1,167 @@
Stone Algebras - Archive of Formal Proofs
A range of algebras between lattices and Boolean algebras generalise
the notion of a complement. We develop a hierarchy of these
pseudo-complemented algebras that includes Stone algebras.
Independently of this theory we study filters based on partial orders.
Both theories are combined to prove Chen and Grätzer's construction
theorem for Stone algebras. The latter involves extensive reasoning
about algebraic structures in addition to reasoning in algebraic
structures.
We develop Stone-Kleene relation algebras, which expand Stone relation
algebras with a Kleene star operation to describe reachability in
weighted graphs. Many properties of the Kleene star arise as a special
case of a more general theory of iteration based on Conway semirings
extended by simulation axioms. This includes several theorems
representing complex program transformations. We formally prove the
correctness of Conway's automata-based construction of the Kleene
star of a matrix. We prove numerous results useful for reasoning about
weighted graphs.
\ No newline at end of file
diff --git a/web/entries/Stone_Relation_Algebras.html b/web/entries/Stone_Relation_Algebras.html
--- a/web/entries/Stone_Relation_Algebras.html
+++ b/web/entries/Stone_Relation_Algebras.html
@@ -1,180 +1,180 @@
Stone Relation Algebras - Archive of Formal Proofs
We develop Stone relation algebras, which generalise relation algebras
by replacing the underlying Boolean algebra structure with a Stone
algebra. We show that finite matrices over extended real numbers form
an instance. As a consequence, relation-algebraic concepts and methods
can be used for reasoning about weighted graphs. We also develop a
fixpoint calculus and apply it to compare different definitions of
reflexive-transitive closures in semirings.
Stream Fusion is a system for removing intermediate list structures from Haskell programs; it consists of a Haskell library along with several compiler rewrite rules. (The library is available online.)
These theories contain a formalization of much of the Stream Fusion library in HOLCF. Lazy list and stream types are defined, along with coercions between the two types, as well as an equivalence relation for streams that generate the same list. List and stream versions of map, filter, foldr, enumFromTo, append, zipWith, and concatMap are defined, and the stream versions are shown to respect stream equivalence.
\ No newline at end of file
diff --git a/web/entries/Stream_Fusion_Code.html b/web/entries/Stream_Fusion_Code.html
--- a/web/entries/Stream_Fusion_Code.html
+++ b/web/entries/Stream_Fusion_Code.html
@@ -1,171 +1,171 @@
Stream Fusion in HOL with Code Generation - Archive of Formal Proofs
Stream Fusion is a system for removing intermediate list data structures from functional programs, in particular Haskell. This entry adapts stream fusion to Isabelle/HOL and its code generator. We define stream types for finite and possibly infinite lists and stream versions for most of the fusible list functions in the theories List and Coinductive_List, and prove them correct with respect to the conversion functions between lists and streams. The Stream Fusion transformation itself is implemented as a simproc in the preprocessor of the code generator. [Brian Huffman's AFP entry formalises stream fusion in HOLCF for the domain of lazy lists to prove the GHC compiler rewrite rules correct. In contrast, this work enables Isabelle's code generator to perform stream fusion itself. To that end, it covers both finite and coinductive lists from the HOL library and the Coinductive entry. The fusible list functions require specification and proof principles different from Huffman's.]
Sturm's Theorem states that polynomial sequences with certain
properties, so-called Sturm sequences, can be used to count the number
of real roots of a real polynomial. This work contains a proof of
Sturm's Theorem and code for constructing Sturm sequences efficiently.
It also provides the “sturm” proof method, which can decide certain
statements about the roots of real polynomials, such as “the polynomial
P has exactly n roots in the interval I” or “P(x) > Q(x) for all x
∈ ℝ”.
\ No newline at end of file
diff --git a/web/entries/Subset_Boolean_Algebras.html b/web/entries/Subset_Boolean_Algebras.html
--- a/web/entries/Subset_Boolean_Algebras.html
+++ b/web/entries/Subset_Boolean_Algebras.html
@@ -1,166 +1,166 @@
A Hierarchy of Algebras for Boolean Subsets - Archive of Formal Proofs
We present a collection of axiom systems for the construction of
Boolean subalgebras of larger overall algebras. The subalgebras are
defined as the range of a complement-like operation on a semilattice.
This technique has been used, for example, with the antidomain
operation, dynamic negation and Stone algebras. We present a common
ground for these constructions based on a new equational
axiomatisation of Boolean algebras.
A symmetric polynomial is a polynomial in variables
X1,…,Xn
that does not discriminate between its variables, i. e. it
is invariant under any permutation of them. These polynomials are
important in the study of the relationship between the coefficients of
a univariate polynomial and its roots in its algebraic
closure.
This article provides a definition of
symmetric polynomials and the elementary symmetric polynomials
e1,…,en and
proofs of their basic properties, including three notable
ones:
Vieta's formula, which
gives an explicit expression for the k-th
coefficient of a univariate monic polynomial in terms of its roots
x1,…,xn,
namely
ck = (-1)n-k en-k(x1,…,xn).
Second, the Fundamental Theorem of Symmetric Polynomials,
which states that any symmetric polynomial is itself a uniquely
determined polynomial combination of the elementary symmetric
polynomials.
Third, as a corollary of the
previous two, that given a polynomial over some ring
R, any symmetric polynomial combination of its
roots is also in R even when the roots are not.
Both the symmetry property itself and the
witness for the Fundamental Theorem are executable.
We formalize a notion of logic whose terms and formulas are kept
abstract. In particular, logical connectives, substitution, free
variables, and provability are not defined, but characterized by their
general properties as locale assumptions. Based on this abstract
characterization, we develop further reusable reasoning
infrastructure. For example, we define parallel substitution (along
with proving its characterizing theorems) from single-point
substitution. Similarly, we develop a natural deduction style proof
system starting from the abstract Hilbert-style one. These one-time
efforts benefit different concrete logics satisfying our locales'
assumptions. We instantiate the syntax-independent logic
infrastructure to Robinson arithmetic (also known as Q) in the AFP
entry Robinson_Arithmetic
and to hereditarily finite set theory in the AFP entries Goedel_HFSet_Semantic
and Goedel_HFSet_Semanticless,
which are part of our formalization of Gödel's
Incompleteness Theorems described in our CADE-27 paper A
Formally Verified Abstract Account of Gödel's Incompleteness
Theorems.
\ No newline at end of file
diff --git a/web/entries/Taylor_Models.html b/web/entries/Taylor_Models.html
--- a/web/entries/Taylor_Models.html
+++ b/web/entries/Taylor_Models.html
@@ -1,184 +1,184 @@
Taylor Models - Archive of Formal Proofs
We present a formally verified implementation of multivariate Taylor
models. Taylor models are a form of rigorous polynomial approximation,
consisting of an approximation polynomial based on Taylor expansions,
combined with a rigorous bound on the approximation error. Taylor
models were introduced as a tool to mitigate the dependency problem of
interval arithmetic. Our implementation automatically computes Taylor
models for the class of elementary functions, expressed by composition
of arithmetic operations and basic functions like exp, sin, or square
root.
Timed automata are a widely used formalism for modeling real-time
systems, which is employed in a class of successful model checkers
such as UPPAAL [LPY97], HyTech [HHWt97] or Kronos [Yov97]. This work
formalizes the theory for the subclass of diagonal-free timed
automata, which is sufficient to model many interesting problems. We
first define the basic concepts and semantics of diagonal-free timed
automata. Based on this, we prove two types of decidability results
for the language emptiness problem. The first is the classic result
of Alur and Dill [AD90, AD94], which uses a finite partitioning of
the state space into so-called `regions`. Our second result focuses
on an approach based on `Difference Bound Matrices (DBMs)`, which is
practically used by model checkers. We prove the correctness of the
basic forward analysis operations on DBMs. One of these operations is
the Floyd-Warshall algorithm for the all-pairs shortest paths problem.
To obtain a finite search space, a widening operation has to be used
for this kind of analysis. We use Patricia Bouyer's [Bou04] approach
to prove that this widening operation is correct in the sense that
DBM-based forward analysis in combination with the widening operation
also decides language emptiness. The interesting property of this
proof is that the first decidability result is reused to obtain the
second one.
\ No newline at end of file
diff --git a/web/entries/Transitive_Models.html b/web/entries/Transitive_Models.html
--- a/web/entries/Transitive_Models.html
+++ b/web/entries/Transitive_Models.html
@@ -1,206 +1,206 @@
Transitive Models of Fragments of ZFC - Archive of Formal Proofs
We extend the ZF-Constructibility library by relativizing theories of
the Isabelle/ZF and Delta System Lemma sessions to a transitive class.
We also relativize Paulson's work on Aleph and our former
treatment of the Axiom of Dependent Choices. This work is a
prerequisite to our formalization of the independence of the
Continuum Hypothesis.
\ No newline at end of file
diff --git a/web/entries/Treaps.html b/web/entries/Treaps.html
--- a/web/entries/Treaps.html
+++ b/web/entries/Treaps.html
@@ -1,187 +1,187 @@
Treaps - Archive of Formal Proofs
A Treap is a binary tree whose nodes contain pairs
consisting of some payload and an associated priority. It must have
the search-tree property w.r.t. the payloads and the heap property
w.r.t. the priorities. Treaps are an interesting data structure that
is related to binary search trees (BSTs) in the following way: if one
forgets all the priorities of a treap, the resulting BST is exactly
the same as if one had inserted the elements into an empty BST in
order of ascending priority. This means that a treap behaves like a
BST where we can pretend the elements were inserted in a different
order from the one in which they were actually inserted.
In particular, by choosing these priorities at random upon
insertion of an element, we can pretend that we inserted the elements
in random order, so that the shape of the
resulting tree is that of a random BST no matter in what order we
insert the elements. This is the main result of this
formalisation.
\ No newline at end of file
diff --git a/web/entries/Triangle.html b/web/entries/Triangle.html
--- a/web/entries/Triangle.html
+++ b/web/entries/Triangle.html
@@ -1,179 +1,179 @@
Basic Geometric Properties of Triangles - Archive of Formal Proofs
This entry contains a definition of angles between vectors and between three
points. Building on this, we prove basic geometric properties of triangles, such
as the Isosceles Triangle Theorem, the Law of Sines and the Law of Cosines, that
the sum of the angles of a triangle is π, and the congruence theorems for
triangles.
The definitions and proofs were developed following those by John Harrison in
HOL Light. However, due to Isabelle's type class system, all definitions and
theorems in the Isabelle formalisation hold for all real inner product spaces.
\ No newline at end of file
diff --git a/web/entries/Trie.html b/web/entries/Trie.html
--- a/web/entries/Trie.html
+++ b/web/entries/Trie.html
@@ -1,174 +1,174 @@
Trie - Archive of Formal Proofs
This article formalizes the ``trie'' data structure invented by
Fredkin [CACM 1960]. It also provides a specialization where the entries
in the trie are lists.
BSD License
Extra 0
Origin: This article was extracted from existing articles by the authors.
\ No newline at end of file
diff --git a/web/entries/Types_Tableaus_and_Goedels_God.html b/web/entries/Types_Tableaus_and_Goedels_God.html
--- a/web/entries/Types_Tableaus_and_Goedels_God.html
+++ b/web/entries/Types_Tableaus_and_Goedels_God.html
@@ -1,183 +1,183 @@
Types, Tableaus and Gödel’s God in Isabelle/HOL - Archive of Formal Proofs
A computer-formalisation of the essential parts of Fitting's
textbook "Types, Tableaus and Gödel's God" in
Isabelle/HOL is presented. In particular, Fitting's (and
Anderson's) variant of the ontological argument is verified and
confirmed. This variant avoids the modal collapse, which has been
criticised as an undesirable side-effect of Kurt Gödel's (and
Dana Scott's) versions of the ontological argument.
Fitting's work is employing an intensional higher-order modal
logic, which we shallowly embed here in classical higher-order logic.
We then utilize the embedded logic for the formalisation of
Fitting's argument. (See also the earlier AFP entry ``Gödel's God in Isabelle/HOL''.)
@article{Types_Tableaus_and_Goedels_God-AFP,
author = {fuenmayor and benzmueller},
title = {Types, Tableaus and Gödel’s God in Isabelle/HOL},
journal = {Archive of Formal Proofs},
month = May,
year = 2017,
note = {\url{https://isa-afp.org/entries/Types_Tableaus_and_Goedels_God.html},
Formal proof development},
ISSN = {2150-914x},
}
\ No newline at end of file
diff --git a/web/entries/UPF.html b/web/entries/UPF.html
--- a/web/entries/UPF.html
+++ b/web/entries/UPF.html
@@ -1,188 +1,188 @@
The Unified Policy Framework (UPF) - Archive of Formal Proofs
We present the Unified Policy Framework (UPF), a generic framework
for modelling security (access-control) policies. UPF emphasizes
the view that a policy is a policy decision function that grants or
denies access to resources, permissions, etc. In other words,
instead of modelling the relations of permitted or prohibited
requests directly, we model the concrete function that implements
the policy decision point in a system. In more detail, UPF is
based on the following four principles: 1) Functional representation
of policies, 2) No conflicts are possible, 3) Three-valued decision
type (allow, deny, undefined), 4) Output type containing not only
the decision.
\ No newline at end of file
diff --git a/web/entries/UPF_Firewall.html b/web/entries/UPF_Firewall.html
--- a/web/entries/UPF_Firewall.html
+++ b/web/entries/UPF_Firewall.html
@@ -1,218 +1,218 @@
Formal Network Models and Their Application to Firewall Policies - Archive of Formal Proofs
Formal Network Models and Their Application to Firewall Policies
We present a formal model of network protocols and their application
to modeling firewall policies. The formalization is based on the
Unified Policy Framework (UPF). The formalization was originally
developed for generating test cases for testing the security
configuration of actual firewalls and routers (middle-boxes) using
HOL-TestGen. Our work focuses on modeling application level protocols
on top of tcp/ip.
\ No newline at end of file
diff --git a/web/entries/UTP.html b/web/entries/UTP.html
--- a/web/entries/UTP.html
+++ b/web/entries/UTP.html
@@ -1,228 +1,228 @@
Isabelle/UTP: Mechanised Theory Engineering for Unifying Theories of Programming - Archive of Formal Proofs
Isabelle/UTP: Mechanised Theory Engineering for Unifying Theories of Programming
Isabelle/UTP is a mechanised theory engineering toolkit based on Hoare
and He’s Unifying Theories of Programming (UTP). UTP enables the
creation of denotational, algebraic, and operational semantics for
different programming languages using an alphabetised relational
calculus. We provide a semantic embedding of the alphabetised
relational calculus in Isabelle/HOL, including new type definitions,
relational constructors, automated proof tactics, and accompanying
algebraic laws. Isabelle/UTP can be used to both capture laws of
programming for different languages, and put these fundamental
theorems to work in the creation of associated verification tools,
using calculi like Hoare logics. This document describes the
relational core of the UTP in Isabelle/HOL.
VerifyThis
2018 was a program verification competition associated with
ETAPS 2018. It was the 7th event in the VerifyThis competition series.
In this entry, we present polished and completed versions of our
solutions that we created during the competition.
\ No newline at end of file
diff --git a/web/entries/Weight_Balanced_Trees.html b/web/entries/Weight_Balanced_Trees.html
--- a/web/entries/Weight_Balanced_Trees.html
+++ b/web/entries/Weight_Balanced_Trees.html
@@ -1,173 +1,173 @@
Weight-Balanced Trees - Archive of Formal Proofs
This theory provides a verified implementation of weight-balanced
trees following the work of Hirai
and Yamamoto who proved that all parameters in a certain
range are valid, i.e. guarantee that insertion and deletion preserve
weight-balance. Instead of a general theorem we provide parameterized
proofs of preservation of the invariant that work for many (all?)
valid parameters.
\ No newline at end of file
diff --git a/web/entries/Zeta_3_Irrational.html b/web/entries/Zeta_3_Irrational.html
--- a/web/entries/Zeta_3_Irrational.html
+++ b/web/entries/Zeta_3_Irrational.html
@@ -1,161 +1,161 @@
The Irrationality of ζ(3) - Archive of Formal Proofs
This article provides a formalisation of Beukers's
straightforward analytic proof that ζ(3) is irrational. This was first
proven by Apéry (which is why this result is also often called
‘Apéry's Theorem’) using a more algebraic approach. This
formalisation follows Filaseta's
presentation of Beukers's proof.
\ No newline at end of file
diff --git a/web/entries/Zeta_Function.html b/web/entries/Zeta_Function.html
--- a/web/entries/Zeta_Function.html
+++ b/web/entries/Zeta_Function.html
@@ -1,192 +1,192 @@
The Hurwitz and Riemann ζ Functions - Archive of Formal Proofs
This entry builds upon the results about formal and analytic Dirichlet
series to define the Hurwitz ζ function ζ(a,s) and,
based on that, the Riemann ζ function ζ(s).
This is done by first defining them for ℜ(z) > 1
and then successively extending the domain to the left using the
Euler–MacLaurin formula.
Apart from the most basic facts such as analyticity, the following
results are provided:
the Stieltjes constants and the Laurent expansion of
ζ(s) at s = 1
the non-vanishing of ζ(s)
for ℜ(z) ≥ 1
the relationship between ζ(a,s) and Γ
the special values at negative integers and positive even integers
Hurwitz's formula and the reflection formula for ζ(s)