diff --git a/admin/sitegen b/admin/sitegen --- a/admin/sitegen +++ b/admin/sitegen @@ -1,6 +1,6 @@ #!/bin/bash # standard invocation of sitegen.py cd $(hg root) -admin/sitegen.py --dest=web metadata $@ +admin/sitegen.py --dest=web --metadata=metadata thys $@ diff --git a/admin/sitegen.py b/admin/sitegen.py --- a/admin/sitegen.py +++ b/admin/sitegen.py @@ -1,998 +1,979 @@ #!/usr/bin/env python # vim: set fileencoding=utf-8 : ## ## Dependencies: Python 2.7 ## ## This script reads a metadata file and generates the topics.shtml, ## index.shtml and the entry pages on afp.sf.net. ## ## For meta data documentation see ../metadata/README ## For adding new entries see ../doc/editors/new-entry-checkin.html ## from collections import OrderedDict from optparse import OptionParser from sys import argv, stderr from functools import partial import codecs import ConfigParser import os import re from termcolor import colored # global config ### input templates # string which will be replaced by output of a generator # $ denotes an (optional) parameter for the generator magic_str = u"" # pattern for release tarball filename release_pattern = """^afp-(.*)-([0-9\-]{10}).tar.gz$""" # suffix of files which will be considered as template template_suffix = ".tpl" # suffix of a template file will be replaced by this suffix output_suffix = ".shtml" ### html output # wrapper for a link list of entries # {0}: link list html_topic_link_list = u"""
\n{0}
\n\n""" # template for a single link to an entry on topic page, where # filename is the same as the display name # {0}: filename (without .shtml suffix) relative to 'entries' directory html_topic_link = u"""{0}  \n""" # list of headings # {0}: display name of heading html_topic_headings = [ u"

{0}

\n\n", u"

{0}

\n\n", u"\n{0}: " ] # denotes the heading level (beginning with 0), after which no # separate html_topic_link_list will be created html_until_level = 1 # link to an author # {0}: url # {1}: display name html_author_link = u"""{1}""" # wrapper for each year's entries displayed on index page # {0}: year in YYYY format # {1}: entries html_index_year = u"""

 

{1}
{0}
""" # template for an entry displayed on index page # {0}: date in YYYY-MM-HH format # {1}: filename (without .shtml suffix) relative to 'entries' directory # {2}: display name # {3}: list of html_author_link, comma-separated html_index_entry = u"""\n{0}:\n{2}\n
Author:\n{3}\n\n\n""" # heading wrapper on entry page html_entry_heading = u"

{0}

\n

" # capitalized word in heading on entry page # {0}: first character # {1}: other characters html_entry_capitalized = u"""{0}{1}\n""" # link to license text # {0}: display title # {1}: url html_license_link = u"""{0}""" # link to another entry # {0}: display title # {1}: url html_entry_link = u"""{0}""" # wrapper for a text column in header # {0}: title (e. g. 'Change history') # {1}: text html_entry_text_wrapper = u""" {0}: {1} """ # wrapper for a pre-formatted text column in header # {0}: title (e. g. 'BibTeX') # {1}: text html_entry_pre_text_wrapper = u""" {0}:
{1}
""" # wrapper for contributors line # {0} formatted contributors list html_contributors = u""" Contributors: {0} """ # wrapper for the entry header # {0}: title # {1}: author # {2}: contributors # {3}: date # {4}: text columns (html_entry_text_wrapper) # {5}: license # {6}: entry # {7}: depends-on # {{...}} is for escaping, because Py's format syntax collides with SSI html_entry_header_wrapper = u""" {2} {4} {7}
Title: {0}
Author: {1}
Submission date: {3}
License: {5}

""" html_entry_depends_on_wrapper = u""" Depends on: {0} """ # list wrapper for older releases # {0}: list entries html_entry_older_list = u"" # list entry for older releases # {0}: isabelle release (e. g. "2009") # {1}: release date (e. g. "2009-04-29") html_entry_older_release = u"""
  • Isabelle {0}: -{1}.tar.gz">afp--{1}.tar.gz
  • \n""" ### html output # wrapper for bibtex output # {0}: key # {1}: title # {2}: author # {3}: month # {4}: year # {{...}} is for escaping, because Py's format syntax collides with SSI bibtex_wrapper = u"""@article{{{0}-AFP, author = {{{1}}}, title = {{{2}}}, journal = {{Archive of Formal Proofs}}, month = {3}, year = {4}, note = {{\\url{{http://afp.sf.net/entries/{0}.shtml}}, Formal proof development}}, ISSN = {{2150-914x}}, }}""" ### metadata format # key : (split, processor, default) # 'key' denotes the key of the key-value pair in the metadata file # if it ends with a '*', e. g. 'extra*' and you have two keys 'extra-foo' # and 'extra-bar' in your metadata file, you will get: # attributes['extra'] == { 'foo': , 'bar': } # 'split' if False, the value will be treated as a simple string, otherwise # it will be split at ',' # 'processor' if defined, the function with this name will be called with # each string (or substring if split is not None) and the result is used # (notice: must be a string, because the functions are defined later in source) # 'default' is optional and specifies a default value, which will be treated # as if it has been read from the file, i. e. is subject to splitting and # processing attribute_schema = { 'topic': (True, None, None), 'date': (False, None, None), 'author': (True, "parse_author", None), 'contributors': (True, "parse_contributors", ""), 'title': (False, None, None), 'abstract': (False, None, None), 'license': (False, "parse_license", "BSD"), 'ignore': (True, None, ""), - 'extra*': (False, "parse_extra", None), - 'depends-on': (False, "parse_depends_on", ""), + 'extra*': (False, "parse_extra", None) } ### licenses # key : (title, url) # 'key' denotes the short name of the license (e. g. 'LGPL') # 'title' denotes the display title of the license (e. g. 'GNU Lesser General Public License (LGPL)') # 'url' contains the url of the license text licenses = { 'LGPL': ("GNU Lesser General Public License (LGPL)", "http://afp.sourceforge.net/LICENSE.LGPL"), 'BSD': ("BSD License", "http://afp.sourceforge.net/LICENSE"), } ### template files # key : (path, generator, for-each) # 'key' denotes the filename of the template (without suffix) # 'path' denotes the destination sub-path for generated files # 'generator' denotes the name of the generating function for the data of the # template. The parameters this function will get is determined by 'for-each'. # You may also assume that this function never gets any ignored entry. # 'for-each' determines generation behaviour: if set to None, one file # will be generated for all entries together and the 'generator' function will # receive a list of all entries. 
Otherwise, a function expression # is expected which gets the entry name and returns a filename to store the # result in (without suffix) and the 'generator' function will get just one # entry name and its attributes templates = { 'topics': ('.', "generate_topics", None), 'index': ('.', "generate_index", None), 'entry': ('./entries', "generate_entry", lambda entry: entry) } # end global config class Tree(object): def __init__(self): self.subtopics = OrderedDict() self.entries = [] def add_topic(self, topic): if len(topic) > 0: if topic[0] not in self.subtopics: tree = Tree() self.subtopics[topic[0]] = tree else: tree = self.subtopics[topic[0]] tree.add_topic(topic[1:]) def add_to_topic(self, topic, entry): if len(topic) > 0: if topic[0] not in self.subtopics: warn(u"In entry {0}: unknown (sub)topic {1}".format(entry, topic)) else: self.subtopics[topic[0]].add_to_topic(topic[1:], entry) else: self.entries.append(entry) def __str__(self): return self._to_str() def _to_str(self, indent = 0): indent_str = ' ' * indent result = indent_str + str(self.entries) + "\n" for subtopic, tree in self.subtopics.items(): result += indent_str result += subtopic result += "\n" result += tree._to_str(indent + 2) return result class Stats(object): def __init__(self): self.tpls = 0 self.failed_tpls = 0 self.gens = 0 self.failed_gens = 0 def __str__(self): failed_tpl_str = "({0!s} failed)".format(self.failed_tpls) if self.failed_tpls > 0 else "" failed_gen_str = "({0!s} failed)".format(self.failed_gens) if self.failed_gens > 0 else "" success_tpl_str = "Successfully read {0!s} template(s)".format(self.tpls - self.failed_tpls) success_gen_str = "Successfully written {0!s} file(s)".format(self.gens - self.failed_gens) return "{0} {1}\n{2} {3}".format( colored(success_tpl_str, 'green', attrs=['bold']), colored(failed_tpl_str, 'red', attrs=['bold']), colored(success_gen_str, 'green', attrs=['bold']), colored(failed_gen_str, 'red', attrs=['bold']) ) def debug(message, indent = 0, title = ""): if options.enable_debug: if isinstance(message, list): debug(title + ": [" if title else "[", indent) for line in message: debug(line, indent + 2) debug("]", indent) elif isinstance(message, dict): debug(title + ": {" if title else "{", indent) for key, value in message.items(): debug(u"{0} -> {1}".format(key, value), indent + 2) debug("}", indent) else: if title: print >> stderr, (u"Debug: {0}{1}: {2}".format(' ' * indent, title, message)) else: print >> stderr, (u"Debug: {0}{1}".format(' ' * indent, message)) def warn(message): if options.enable_warnings: print >> stderr, (colored(u"Warning: {0}".format(message), 'yellow', attrs=['bold'])) def notice(message): if options.enable_warnings: print >> stderr, (u"Notice: {0}".format(message)) def error(message, exception = None, abort = False): print >> stderr, (colored(u"Error: {0}".format(message), 'red', attrs=['bold'])) if exception: error("*** exception message:") error(u"*** {0!s} {1!s}".format(exception, type(exception))) if abort: error("Fatal. 
Aborting") exit(1) # performs a 'diff' between metadata and the actual filesystem contents def check_fs(meta_entries, directory): fs_entries = set(e for e in os.listdir(directory) if os.path.isdir(os.path.join(directory, e))) meta_entries = set(k for k, _ in meta_entries.items()) # check for entries not existing in filesystem for fs_missing in meta_entries - fs_entries: print >> stderr, (colored(u"Check: In metadata: entry {0} doesn't exist in filesystem".format(fs_missing), 'yellow', attrs=['bold'])) for meta_missing in fs_entries - meta_entries: print >> stderr, (colored(u"Check: In filesystem: entry {0} doesn't exist in metadata".format(meta_missing), 'yellow', attrs=['bold'])) return len(fs_entries ^ meta_entries) # takes the 'raw' data from metadata file as input and performs: # * checking of data against attribute_schema # * defaults for missing keys # * elimination of extraneous keys # * splitting at ',' if an array is requested def validate(entry, attributes): sane_attributes = {} missing_keys = [] processed_keys = set() for key, (split, processor_str, default) in attribute_schema.items(): if processor_str: if processor_str in globals(): processor = globals()[processor_str] else: error(u"In metadata: For key {0}: processor function {1} doesn't exist".format(key, processor_str), abort = True) else: processor = lambda str, **kwargs: str if key.endswith("*"): shortkey = key[:len(key)-1] result = OrderedDict() process = partial(processor, entry = entry, shortkey = shortkey) for appkey, str in attributes.items(): if appkey.startswith(shortkey + "-"): processed_keys.add(appkey) app = appkey[:len(shortkey)] if not split: result[app] = process(str.strip(), appendix = app) else: result[app] = [process(s.strip(), appendix = app) for s in str.split(',')] sane_attributes[shortkey] = result else: process = partial(processor, entry = entry, key = key) if default == None and key not in attributes: missing_keys.append(key) sane_attributes[key] = process("") if not split else [] else: value = attributes[key] if key in attributes else default processed_keys.add(key) if not split: sane_attributes[key] = process(value) else: sane_attributes[key] = [process(s.strip()) for s in value.split(',')] if missing_keys: warn(u"In entry {0}: missing key(s) {1!s}".format(entry, missing_keys)) extraneous_keys = set(attributes.keys()) - processed_keys if extraneous_keys: warn(u"In entry {0}: unknown key(s) {1!s}. Have you misspelled them?".format(entry, list(extraneous_keys))) return sane_attributes # reads the metadata file and returns a dict mapping each entry to the attributes # specified. 
# reads the metadata file and returns a dict mapping each entry to the attributes
# specified. One can rely on the result conforming to the attribute_schema
def parse(filename):
    parser = ConfigParser.RawConfigParser(dict_type = OrderedDict)
    try:
        parser.readfp(codecs.open(filename, encoding='UTF-8', errors='strict'))
        return OrderedDict((sec, validate(sec, dict(parser.items(sec)))) for sec in parser.sections())
    except UnicodeDecodeError as ex:
        error(u"In file {0}: invalid UTF-8 character".format(filename), exception = ex, abort = True)

# reads the version file, composed of pairs of version number and release date
def read_versions(filename):
    versions = []
    try:
        with open(filename) as input:
            for line in input:
                try:
                    version, release_date = line.split(" = ")
                except ValueError as ex:
                    error(u"In file {0}: Malformed association {1}".format(filename, line), exception = ex)
                    error("Not processing releases")
                    return []
                else:
                    versions.append((version, release_date.strip()))
    except Exception as ex:
        error(u"In file {0}: error".format(filename), exception = ex)
        error("Not processing releases")
        return []
    else:
        versions.sort(None, lambda (v, d): d, True)
        return versions

# reads the list of entry releases (metadata/releases)
def associate_releases(entries, versions, filename):
    for _, attributes in entries.items():
        attributes['releases'] = OrderedDict()
    prog = re.compile(release_pattern)
    warnings = {}
    try:
        with open(filename) as input:
            lines = []
            for line in input:
                line = line.strip()
                result = prog.match(line)
                try:
                    entry, date = result.groups()
                except AttributeError as ex:
                    error(u"In file {0}: Malformed release {1}".format(filename, line), exception = ex)
                else:
                    if not entry in entries:
                        if not entry in warnings:
                            warnings[entry] = [line]
                        else:
                            warnings[entry].append(line)
                    else:
                        lines.append((entry, date))
        for entry, releases in warnings.items():
            warn(u"In file {0}: In release(s) {1!s}: Unknown entry {2}".format(filename, releases, entry))
        lines.sort(reverse = True)
        for line in lines:
            found = False
            entry, date = line
            for version_number, version_date in versions:
                if version_date <= date:
                    entry_releases = entries[entry]['releases']
                    if version_number not in entry_releases:
                        entry_releases[version_number] = []
                    entry_releases[version_number].append(date)
                    found = True
                    break
            if not found:
                warn(u"In file {0}: In release {1}: Release date {2} has no matching version".format(filename, line, date))
    except Exception as ex:
        error(u"In file {0}: error".format(filename), exception = ex)
        error("Not processing releases")
    finally:
        debug([(entry, attributes['releases']) for entry, attributes in entries.items()], title = "Releases")

def read_topics(filename):
    tree = Tree()
    stack = []
    with open(filename) as input:
        for line in input:
            count = 0
            while line[count] == ' ':
                count += 1
            if count % 2:
                raise Exception(u"Illegal indentation at line '{0}'".format(line))
            level = count // 2
            if level <= len(stack):
                stack = stack[0:level]
            else:
                raise Exception(u"Illegal indentation at line '{0}'".format(line))
            stack.append(line[count:len(line)-1])
            tree.add_topic(stack)
    return tree

# for topics page: group entries by topic
def collect_topics(entries):
    tree = read_topics(os.path.join(metadata_dir, "topics"))
    for entry, attributes in entries:
        for topic in attributes['topic']:
            tree.add_to_topic([str.strip() for str in topic.split('/')], entry)
    return tree
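# Editor's illustration (not part of sitegen.py): the metadata/topics
# file encodes the topic tree via two-space indentation, e.g.
#
#   Mathematics
#     Algebra
#   Computer Science
#
# read_topics() pushes one tree level per indentation step; the calls
# below build the same tree by hand (entry name invented).
def _example_topic_tree():
    tree = Tree()
    tree.add_topic(["Mathematics"])
    tree.add_topic(["Mathematics", "Algebra"])
    tree.add_topic(["Computer Science"])
    tree.add_to_topic(["Mathematics", "Algebra"], "Example_Entry")
    return tree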
# for index page: group entries by year
def collect_years(entries):
    extracted = [
        (attributes['date'] if attributes['date'] != '' else 'unknown', entry, attributes)
        for entry, attributes in entries
    ]
    extracted.sort(None, lambda (y, e, a): y, True)
    years = OrderedDict()
    for date, entry, attributes in extracted:
        key = date[0:4] if date != 'unknown' else date
        if key in years:
            years[key].append((entry, attributes))
        else:
            years[key] = [(entry, attributes)]
    return years.items()

def parse_extra(extra, **kwargs):
    k, v = extra.split(":", 1)
    return k.strip(), v.strip()

def parse_license(license, **kwargs):
    if license not in licenses:
        raise ValueError(u"Unknown license {0}".format(license))
    return licenses[license]

def parse_author(author, entry, key):
    return parse_name_url(author, entry, key)

def parse_contributors(contributor, entry, key):
    if contributor == "":
        return "", None
    else:
-        return parse_name_url(contributor, entry, key)
+        return parse_name_url(contributor, entry, key)

# extracts name and URL from 'name <URL>' as a pair
def parse_name_url(name, entry, key):
    if name.find(" and ") != -1:
        warn(u"In entry {0}: {1} field contains 'and'. Use ',' to separate names.".format(entry, key))
    url_start = name.find('<')
    url_end = name.find('>')
    if url_start != -1 and url_end != -1:
        url = name[url_start+1:url_end].strip()
        if url.startswith("mailto:"):
            url = url.replace("@", " /at/ ").replace(".", " /dot/ ")
        return name[:url_start].strip(), url
    else:
        notice(u"In entry {0}: no URL specified for {1} {2} ".format(entry, key, name))
        return name, None

# Extracts parts of a date, used in the bibtex files
def month_of_date(date):
    return "jan feb mar apr may jun jul aug sep oct nov dec".split(" ")[int(date.split("-")[1]) - 1]

def year_of_date(date):
    return date.split("-")[0]

-# splits list of dependencies. returns empty list if no dependency is
-# given
-def parse_depends_on(dependency, **kwargs):
-    deps = [x.strip() for x in dependency.split(',')]
-    return filter(lambda x: x <> '', deps)
-
def generate_link_list(entries):
    return ''.join([html_topic_link.format(e) for e in entries])

# takes a list of author-URL pairs and formats a string, either with
# or without email addresses
def generate_author_list(authors, spacer, last_spacer, ignore_mail = True, ignore_url = False):
    def _to_str(author):
        name, url = author
        if url and not ignore_url:
            if url.startswith("mailto:"):
                if ignore_mail:
                    return name
                else:
                    return u"{0} ({1})".format(name, url[7:])
            return html_author_link.format(url, name)
        else:
            return name
    authors = map(_to_str, authors)
    if len(authors) == 0:
        return ""
    elif len(authors) == 1:
        return authors[0]
    else:
        return u"{0}{1}{2}".format(
            spacer.join(authors[:len(authors)-1]),
            last_spacer,
            authors[len(authors)-1]
        )

# HTML formatting for topics page
def generate_topics(entries):
    def _gen(tree, level):
        result = ""
        if level <= html_until_level:
            if len(tree.entries) > 0:
                result += html_topic_link_list.format(generate_link_list(tree.entries))
            for topic, subtree in tree.subtopics.items():
                result += html_topic_headings[level].format(topic)
                if level < html_until_level:
                    result += _gen(subtree, level + 1)
                else:
                    result += html_topic_link_list.format(_gen(subtree, level + 1))
        else:
            result = generate_link_list(tree.entries)
            for topic, subtree in tree.subtopics.items():
                result += html_topic_headings[level].format(topic)
                result += _gen(subtree, level + 1)
        return result
    tree = collect_topics(entries)
    debug(tree, title = "Entries grouped by topic")
    return _gen(tree, 0)

# HTML formatting for index page
def generate_index(entries):
    years = collect_years(entries)
    debug(years, title = "Entries grouped by year")
    result = ""
    for year, list in years:
        rows = ""
        for entry, attributes in list:
            rows += html_index_entry.format(
                attributes['date'],
                entry,
                attributes['title'] if attributes['title'] != '' else entry,
generate_author_list(attributes['author'], ",\n", " and \n") ) result += html_index_year.format(year, rows) return result def format_entry_text(title, text): return html_entry_text_wrapper.format( title, "\n" + text ) def format_entry_pre_text(title, text): return html_entry_pre_text_wrapper.format( title, text ) def depends_on_string(deps): return ', '.join(html_entry_link.format(dep, dep + ".shtml") for dep in deps) def format_depends_on(deps): if len(deps) == 0: return '' else: return html_entry_depends_on_wrapper.format(depends_on_string(deps)) def format_opt_contributors(contributors): if contributors == [("",None)]: return "" else: return html_contributors.format(generate_author_list(contributors, ', ', ' and ', ignore_mail = False)) # HTML formatting for entry page # supports the following parameters: # 'header' for entry header (author, title, date etc.) # 'older' for list of older releases def generate_entry(entry, attributes, param): if param == "header": result = "" capitalized_title = "" for word in [str.strip() for str in attributes['title'].split(' ')]: if len(word) > 0 and word[0].isupper(): capitalized_title += html_entry_capitalized.format(word[0], word[1:]) else: capitalized_title += word + "\n" result += html_entry_heading.format(capitalized_title) text_columns = format_entry_text("Abstract", attributes['abstract']) text_columns += "".join([format_entry_text(k, v) for k, v in attributes['extra'].values()]) text_columns += format_entry_pre_text("BibTeX", bibtex_wrapper.format( entry, generate_author_list(attributes['author'], ' and ', ' and ', ignore_url = True), attributes['title'], month_of_date(attributes['date']), year_of_date(attributes['date'])) ) result += html_entry_header_wrapper.format( attributes['title'], generate_author_list(attributes['author'], ', ', ' and ', ignore_mail = False), format_opt_contributors(attributes['contributors']), attributes['date'], text_columns, html_license_link.format(attributes['license'][0], attributes['license'][1]), entry, format_depends_on(attributes['depends-on']), ) elif param == "older": if len(attributes['releases']) > 1: str = "" for version, release_dates in attributes['releases'].items()[1:]: str += "".join(html_entry_older_release.format(version, release_date) for release_date in release_dates) result = html_entry_older_list.format(str) else: result = "None" else: raise Exception("In generator 'entry': Unknown parameter "+param) return result # look for templates in the metadata directory according to the definitions # in the global configuration def scan_templates(entries): for template in templates.keys(): try: stats.tpls += 1 template_filename = os.path.join(metadata_dir, template + template_suffix) if os.path.exists(template_filename): handle_template(entries, template, read_template(template_filename)) else: raise(Exception("File not found. 
Make sure the files defined in the 'templates' dict exist ({0}).".format(template_filename)))
        except UnicodeDecodeError as ex:
            error(u"In file {0}: invalid UTF-8 character".format(template_filename), exception = ex)
            stats.failed_tpls += 1
            return
        except Exception as ex:
            error(u"In template {0}: File couldn't be processed".format(template_filename), exception = ex)
            stats.failed_tpls += 1
            return

# reads a template line by line and returns a list
# Suppose you have the following template content:
#   foo
#   bar
#   <generator marker>
#   baz
#   <generator marker with parameter 'param'>
#   qux
# then you will get the following result:
# [
#   (['foo', 'bar'], ''),
#   (['baz'], 'param'),
#   (['qux'], None)
# ]
def read_template(template_filename):
    lines = []
    result = []
    with codecs.open(template_filename, encoding='UTF-8', errors='strict') as input:
        debug(u"Reading template {0}".format(template_filename))
        found = False
        for line in input:
            stripped = line.strip()
            if stripped.startswith(magic_str_start) and stripped.endswith(magic_str_end):
                found = True
                param = stripped[len(magic_str_start):len(stripped)-len(magic_str_end)]
                if param.startswith(":"):
                    param = param[1:].strip()
                    debug(u"In file {0}: Found generator with parameter {1}".format(template_filename, param))
                else:
                    debug(u"In file {0}: Found generator without parameter".format(template_filename))
                result.append((lines, param))
                lines = []
            else:
                lines.append(line)
    if not found:
        warn(u"In file {0}: No generator found".format(template_filename))
    result.append((lines, None))
    return result

# opens a file, invokes the generator and writes the result
def write_output(filename, content, generator):
    stats.gens += 1
    debug(u"Writing result to "+filename)
    failed = False
    try:
        with codecs.open(filename, mode='w', encoding='UTF-8', errors='strict') as output:
            for lines, param in content:
                for line in lines:
                    output.write(line)
                if param != None:
                    try:
                        if param == '':
                            result = generator()
                        else:
                            result = generator(param)
                    except Exception as ex:
                        failed = True
                        error(u"For output file {0}: generator failed".format(filename), exception = ex)
                    else:
                        output.write(result)
    except Exception as ex:
        failed = True
        error(u"For output file {0}: error".format(filename), exception = ex)
    finally:
        if failed:
            stats.failed_gens += 1

# main function for handling a template
# Steps:
# * search the appropriate generator function
# * create destination directories
# * filter ignored entries
# * call write_output for each entry or for all entries together
def handle_template(entries, template, content):
    def _ignore(entry, attributes):
        if template in attributes['ignore']:
            notice(u"In template {0}: ignoring entry {1} because ignore flag is set in metadata".format(template, entry))
            return True
        else:
            return False
    dir, generator_str, for_each_func = templates[template]
    if generator_str not in globals():
        error(u"In template {0}: generator function {1} doesn't exist".format(template, generator_str))
        return
    else:
        generator = globals()[generator_str]
    dest_subdir = os.path.join(options.dest_dir, dir)
    try:
        os.makedirs(dest_subdir)
    except Exception as ex:
        if not os.path.exists(dest_subdir):
            error(u"In template {0}: directory {1} couldn't be created".format(template, dest_subdir), exception = ex)
            return
    not_ignored = [(entry, attributes) for entry, attributes in entries.items() if not _ignore(entry, attributes)]
    if for_each_func:
        for entry, attributes in not_ignored:
            output_filename = os.path.join(dest_subdir, for_each_func(entry) + output_suffix)
            write_output(output_filename, content, partial(generator, entry, attributes))
    else:
        output_filename = os.path.join(dest_subdir, template + output_suffix)
        write_output(output_filename, content, partial(generator, not_ignored))
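# Editor's illustration (not part of sitegen.py): the wiring that
# handle_template() performs for the per-entry 'entry' template.  The
# entry name is invented; read_template() and write_output() are the
# functions defined above, and metadata_dir/options/entries are the
# module globals set up in __main__.
def _example_entry_page():
    content = read_template(os.path.join(metadata_dir, "entry" + template_suffix))
    out = os.path.join(options.dest_dir, "entries", "Example_Entry" + output_suffix)
    # write_output() copies the literal template lines verbatim and calls
    # the partially-applied generator once per generator marker it found
    write_output(out, content, partial(generate_entry, "Example_Entry", entries["Example_Entry"]))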
-# checks whether all dependencies are present
-def check_deps(entries):
-    keys = set(entries.keys())
-    for key, attributes in entries.items():
-        deps = set(attributes["depends-on"])
-        if not deps.issubset(keys):
-            warn(u"In entry {0}: Missing dependencies {1}".format(key, deps - keys))
-
def normpath(path, *paths):
    """Return a normalized and absolute path (depending on current working directory)"""
    return os.path.abspath(os.path.join(path, *paths))

def theory_dict(articles):
    """Creates a dict with .thy files as key and the corresponding theory as value"""
    thys = dict()
    for a in articles:
        for thy in articles[a]['thys']:
            thys[thy] = a
    return thys

def add_theories(articles, thy_dir):
    """Add a set of paths to .thy files of an entry to articles[a]['thys']"""
    for a in articles:
        articles[a]['thys'] = set()
        for root, _dirnames, filenames in os.walk(os.path.join(thy_dir, a)):
            for f in filenames:
                if f.endswith(".thy"):
                    articles[a]['thys'].add(normpath(root, f))

def add_theory_dependencies(articles):
    """Adds dependencies by checking .thy-files"""
    thys = theory_dict(articles)
    pattern0 = re.compile("imports(.*?)begin", re.DOTALL)
    for t in thys:
        with open(t, 'r') as f:
            content = f.read()
        match0 = pattern0.search(content)
        if match0 is not None:
            # go through the imports and check if it's a file in the AFP;
            # if yes, add a dependency
            imps = [normpath(os.path.dirname(t), x.strip(' \t"') + ".thy")
-                   for x in match0.group(1).split()]
+                   for x in match0.group(1).split()]
            for i in imps:
                if i in thys:
                    articles[thys[t]]['depends-on'].add(thys[i])
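# Editor's illustration (not part of sitegen.py): what the
# "imports(.*?)begin" scan above extracts from a theory header.  The
# theory text is invented for the example.
def _example_import_scan():
    header = 'theory Example\nimports Main "../Other_Entry/Other_Thy"\nbegin\n'
    match0 = re.compile("imports(.*?)begin", re.DOTALL).search(header)
    # group(1).split() yields ['Main', '"../Other_Entry/Other_Thy"'];
    # stripping ' \t"' and appending ".thy" gives the candidate paths
    # that are looked up in theory_dict() to become 'depends-on' edges
    return [x.strip(' \t"') + ".thy" for x in match0.group(1).split()]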
(AFP) +" for line in root_content.splitlines(): if line.startswith("session"): for word in [x.strip(' \t"') for x in line.split()]: if word in articles: articles[a]['depends-on'].add(word) #run through every word in the root file and check if there's a corresponding AFP file for word in [x.strip(' \t"') for x in root_content.split()]: #catch Unicode error, since paths can only contain ASCII chars try: theory = normpath(thy_dir, a, word + ".thy") if theory in thys: articles[a]['depends-on'].add(thys[theory]) except UnicodeDecodeError: pass def remove_self_imps(articles): """Remove self imports in "depends-on"-field""" for a in articles: try: articles[a]['depends-on'].remove(a) except KeyError: pass if __name__ == "__main__": - parser = OptionParser(usage = "Usage: %prog [--no-warn] [--debug] [--check=THYS_DIR | --dest=DEST_DIR] [--depends=THYS_DIR] metadata-dir") + parser = OptionParser(usage = "Usage: %prog [--no-warn] [--debug] [--check] [--dest=DEST_DIR] --metadata=METADATA_DIR THYS_DIR") parser.add_option("--no-warn", action = "store_false", dest = "enable_warnings", default = True, help = "disable output of warnings") - parser.add_option("--check", action = "store", type = "string", dest = "thys_dir", help = "compare the contents of the metadata file with actual file system contents") + parser.add_option("--check", action = "store_true", dest = "do_check", help = "compare the contents of the metadata file with actual file system contents") parser.add_option("--dest", action = "store", type = "string", dest = "dest_dir", help = "generate files for each template in the metadata directory") parser.add_option("--debug", action = "store_true", dest = "enable_debug", default = False, help = "display debug output") - parser.add_option("--depends", action = "store", type = "string", dest = "depends_thys_dir", help = "generate 'depends-on' field from running regexes on the AFP") + parser.add_option("--metadata", action = "store", type = "string", dest = "metadata_dir", help = "metadata location") (options, args) = parser.parse_args(argv) if len(args) != 2: - parser.error("You must supply at least one parameter. For usage, supply --help.") - if not options.thys_dir and not options.dest_dir and not options.makefile_dir: - parser.error("You must supply at least one of --check or --dest or --makefile") + parser.error("You must supply the theories directory. For usage, supply --help.") - metadata_dir = args[1] + thys_dir = args[1] + metadata_dir = options.metadata_dir # parse metadata entries = parse(os.path.join(metadata_dir, "metadata")) versions = read_versions(os.path.join(metadata_dir, "release-dates")) associate_releases(entries, versions, os.path.join(metadata_dir, "releases")) debug(entries, title = "Entries from metadata file") if len(entries) == 0: warn("In metadata: No entries found") - check_deps(entries) - # generate depends-on entries - if options.depends_thys_dir: - add_theories(entries, options.depends_thys_dir) - for e in entries: - entries[e]['depends-on'] = set() - add_theory_dependencies(entries) - add_root_dependencies(entries, options.depends_thys_dir) - remove_self_imps(entries) + # generate depends-on entries + for e in entries: entries[e]['depends-on'] = set() + add_theories(entries, thys_dir) + add_theory_dependencies(entries) + add_root_dependencies(entries, thys_dir) + remove_self_imps(entries) # perform check - if options.thys_dir: - count = check_fs(entries, options.thys_dir) - output = "Checked directory {0}. 
Found {1} warnings.".format(options.thys_dir, count) + if options.do_check: + count = check_fs(entries, thys_dir) + output = "Checked directory {0}. Found {1} warnings.".format(thys_dir, count) color = 'yellow' if count > 0 else 'green' print(colored(output, color, attrs=['bold'])) # perform generation if options.dest_dir: # parse template format magic_str_pos = magic_str.index("$") magic_str_start = magic_str[:magic_str_pos] magic_str_end = magic_str[magic_str_pos+1:] stats = Stats() scan_templates(entries) print(stats) diff --git a/metadata/metadata b/metadata/metadata --- a/metadata/metadata +++ b/metadata/metadata @@ -1,3639 +1,3569 @@ [Liouville_Numbers] title = Liouville numbers author = Manuel Eberl date = 2015-12-28 topic = Mathematics/Analysis -depends-on = Algebraic_Numbers abstract = Liouville numbers are a class of transcendental numbers that can be approximated particularly well with rational numbers. Historically, they were the first numbers whose transcendence was proven. In this entry, we define the concept of Liouville numbers as well as the standard construction to obtain Liouville numbers (including Liouville's constant) and we prove their most important properties: irrationality and transcendence. The proof is very elementary and requires only standard arithmetic, the Mean Value Theorem for polynomials, and the boundedness of polynomials on compact intervals. [Triangle] title = Basic Geometric Properties of Triangles author = Manuel Eberl date = 2015-12-28 topic = Mathematics/Geometry abstract = This entry contains a definition of angles between vectors and between three points. Building on this, we prove basic geometric properties of triangles, such as the Isosceles Triangle Theorem, the Law of Sines and the Law of Cosines, that the sum of the angles of a triangle is π, and the congruence theorems for triangles. The definitions and proofs were developed following those by John Harrison in HOL Light. However, due to Isabelle's type class system, all definitions and theorems in the Isabelle formalisation hold for all real inner product spaces. [Prime_Harmonic_Series] title = The Divergence of the Prime Harmonic Series author = Manuel Eberl date = 2015-12-28 topic = Mathematics/Number Theory abstract = In this work, we prove the lower bound ln (H_n) - ln(5/3) for the partial sum of the Prime Harmonic series and, based on this, the divergence of the Prime Harmonic Series ∑ [p prime] · 1/p. The proof relies on the unique squarefree decomposition of natural numbers. This is similar to Euler's original proof (which was highly informal and morally questionable). Its advantage over proofs by contradiction, like the famous one by Paul Erdős, is that it provides a relatively good lower bound for the partial sums. [Descartes_Sign_Rule] title = Descartes' Rule of Signs author = Manuel Eberl date = 2015-12-28 topic = Mathematics/Analysis -depends-on = Sturm_Tarski abstract = Descartes' Rule of Signs relates the number of positive real roots of a polynomial with the number of sign changes in its coefficient sequence. Our proof follows the simple inductive proof given by Rob Arthan, which was also used by John Harrison in his HOL Light formalisation. We proved most of the lemmas for arbitrary linearly-ordered integrity domains (e.g. integers, rationals, reals); the main result, however, requires the intermediate value theorem and was therefore only proven for real polynomials. 
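# Editor's illustration (not part of the metadata file): stanzas like the
# ones above are exactly what sitegen.py's bibtex_wrapper consumes.  A
# hedged sketch of the BibTeX it emits for [Liouville_Numbers],
# reconstructed from the wrapper and the entry's fields:
#
#     print(bibtex_wrapper.format("Liouville_Numbers", "Manuel Eberl",
#                                 "Liouville numbers",
#                                 month_of_date("2015-12-28"),
#                                 year_of_date("2015-12-28")))
#
# prints, modulo whitespace:
#
#     @article{Liouville_Numbers-AFP,
#       author = {Manuel Eberl},
#       title = {Liouville numbers},
#       journal = {Archive of Formal Proofs},
#       month = dec,
#       year = 2015,
#       note = {\url{http://afp.sf.net/entries/Liouville_Numbers.shtml},
#               Formal proof development},
#       ISSN = {2150-914x},
#     }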
[Card_Partitions] title = Cardinality of Set Partitions author = Lukas Bulwahn date = 2015-12-12 topic = Mathematics/Combinatorics -depends-on = Discrete_Summation abstract = The theory's main theorem states that the cardinality of set partitions of size k on a carrier set of size n is expressed by Stirling numbers of the second kind. In Isabelle, Stirling numbers of the second kind are defined in the AFP entry `Discrete Summation` through their well-known recurrence relation. The main theorem relates them to the alternative definition as cardinality of set partitions. The proof follows the simple and short explanation in Richard P. Stanley's `Enumerative Combinatorics: Volume 1` and Wikipedia, and unravels the full details and implicit reasoning steps of these explanations. [Card_Number_Partitions] title = Cardinality of Number Partitions author = Lukas Bulwahn date = 2016-01-14 topic = Mathematics/Combinatorics abstract = This entry provides a basic library for number partitions, defines the two-argument partition function through its recurrence relation and relates this partition function to the cardinality of number partitions. The main proof shows that the recursively-defined partition function with arguments n and k equals the cardinality of number partitions of n with exactly k parts. The combinatorial proof follows the proof sketch of Theorem 2.4.1 in Mazur's textbook `Combinatorics: A Guided Tour`. This entry can serve as starting point for various more intrinsic properties about number partitions, the partition function and related recurrence relations. [Multirelations] title = Binary Multirelations author = Hitoshi Furusawa , Georg Struth date = 2015-06-11 topic = Mathematics/Algebra -depends-on = Kleene_Algebra abstract = Binary multirelations associate elements of a set with its subsets; hence they are binary relations from a set to its power set. Applications include alternating automata, models and logics for games, program semantics with dual demonic and angelic nondeterministic choices and concurrent dynamic logics. This proof document supports an arXiv article that formalises the basic algebra of multirelations and proposes axiom systems for them, ranging from weak bi-monoids to weak bi-quantales. [Noninterference_Generic_Unwinding] title = The Generic Unwinding Theorem for CSP Noninterference Security author = Pasquale Noce date = 2015-06-11 topic = Computer Science/Security -depends-on = Noninterference_Ipurge_Unwinding abstract =

    The classical definition of noninterference security for a deterministic state machine with outputs requires to consider the outputs produced by machine actions after any trace, i.e. any indefinitely long sequence of actions, of the machine. In order to render the verification of the security of such a machine more straightforward, there is a need of some sufficient condition for security such that just individual actions, rather than unbounded sequences of actions, have to be considered.

    By extending previous results applying to transitive noninterference policies, Rushby has proven an unwinding theorem that provides a sufficient condition of this kind in the general case of a possibly intransitive policy. This condition has to be satisfied by a generic function mapping security domains into equivalence relations over machine states.

    An analogous problem arises for CSP noninterference security, whose definition requires to consider any possible future, i.e. any indefinitely long sequence of subsequent events and any indefinitely large set of refused events associated to that sequence, for each process trace.

    This paper provides a sufficient condition for CSP noninterference security, which indeed requires to just consider individual accepted and refused events and applies to the general case of a possibly intransitive policy. This condition follows Rushby's one for classical noninterference security, and has to be satisfied by a generic function mapping security domains into equivalence relations over process traces; hence its name, Generic Unwinding Theorem. Variants of this theorem applying to deterministic processes and trace set processes are also proven. Finally, the sufficient condition for security expressed by the theorem is shown not to be a necessary condition as well, viz. there exists a secure process such that no domain-relation map satisfying the condition exists.

    [Noninterference_Ipurge_Unwinding] title = The Ipurge Unwinding Theorem for CSP Noninterference Security author = Pasquale Noce date = 2015-06-11 topic = Computer Science/Security -depends-on = Noninterference_CSP, List_Interleaving abstract =

    The definition of noninterference security for Communicating Sequential Processes requires to consider any possible future, i.e. any indefinitely long sequence of subsequent events and any indefinitely large set of refused events associated to that sequence, for each process trace. In order to render the verification of the security of a process more straightforward, there is a need of some sufficient condition for security such that just individual accepted and refused events, rather than unbounded sequences and sets of events, have to be considered.

    Of course, if such a sufficient condition were necessary as well, it would be even more valuable, since it would permit to prove not only that a process is secure by verifying that the condition holds, but also that a process is not secure by verifying that the condition fails to hold.

    This paper provides a necessary and sufficient condition for CSP noninterference security, which indeed requires to just consider individual accepted and refused events and applies to the general case of a possibly intransitive policy. This condition follows Rushby's output consistency for deterministic state machines with outputs, and has to be satisfied by a specific function mapping security domains into equivalence relations over process traces. The definition of this function makes use of an intransitive purge function following Rushby's one; hence the name given to the condition, Ipurge Unwinding Theorem.

    Furthermore, in accordance with Hoare's formal definition of deterministic processes, it is shown that a process is deterministic just in case it is a trace set process, i.e. it may be identified by means of a trace set alone, matching the set of its traces, in place of a failures-divergences pair. Then, variants of the Ipurge Unwinding Theorem are proven for deterministic processes and trace set processes.

    [List_Interleaving] title = Reasoning about Lists via List Interleaving author = Pasquale Noce date = 2015-06-11 topic = Computer Science/Data Structures abstract =

    Among the various mathematical tools introduced in his outstanding work on Communicating Sequential Processes, Hoare has defined "interleaves" as the predicate satisfied by any three lists such that the first list may be split into sublists alternately extracted from the other two ones, whatever is the criterion for extracting an item from either one list or the other in each step.

    This paper enriches Hoare's definition by identifying such criterion with the truth value of a predicate taking as inputs the head and the tail of the first list. This enhanced "interleaves" predicate turns out to permit the proof of equalities between lists without the need of an induction. Some rules that allow to infer "interleaves" statements without induction, particularly applying to the addition or removal of a prefix to the input lists, are also proven. Finally, a stronger version of the predicate, named "Interleaves", is shown to fulfil further rules applying to the addition or removal of a suffix to the input lists.

    [Residuated_Lattices] title = Residuated Lattices author = Victor B. F. Gomes , Georg Struth date = 2015-04-15 topic = Mathematics/Algebra -depends-on = Relation_Algebra abstract = The theory of residuated lattices, first proposed by Ward and Dilworth, is formalised in Isabelle/HOL. This includes concepts of residuated functions; their adjoints and conjugates. It also contains necessary and sufficient conditions for the existence of these operations in an arbitrary lattice. The mathematical components for residuated lattices are linked to the AFP entry for relation algebra. In particular, we prove Jonsson and Tsinakis conditions for a residuated boolean algebra to form a relation algebra. [ConcurrentGC] title = Relaxing Safely: Verified On-the-Fly Garbage Collection for x86-TSO author = Peter Gammie , Tony Hosking , Kai Engelhardt date = 2015-04-13 topic = Computer Science/Algorithms/Concurrent -depends-on = ConcurrentIMP abstract =

    We use ConcurrentIMP to model Schism, a state-of-the-art real-time garbage collection scheme for weak memory, and show that it is safe on x86-TSO.

    This development accompanies the PLDI 2015 paper of the same name.

    [List_Update] title = Analysis of List Update Algorithms author = Maximilian P.L. Haslbeck , Tobias Nipkow date = 2016-02-17 topic = Computer Science/Algorithms/Online -depends-on = List-Index, Regular-Sets abstract =

    These theories formalize the quantitative analysis of a number of classical algorithms for the list update problem: 2-competitiveness of move-to-front, the lower bound of 2 for the competitiveness of deterministic list update algorithms and 1.6-competitiveness of the randomized COMB algorithm, the best randomized list update algorithm known to date.

    An informal description is found in an accompanying report. The material is based on the first two chapters of Online Computation and Competitive Analysis by Borodin and El-Yaniv.

    [ConcurrentIMP] title = Concurrent IMP author = Peter Gammie date = 2015-04-13 topic = Computer Science/Programming Languages/Logics abstract = ConcurrentIMP extends the small imperative language IMP with control non-determinism and constructs for synchronous message passing. [TortoiseHare] title = The Tortoise and Hare Algorithm author = Peter Gammie date = 2015-11-18 topic = Computer Science/Algorithms abstract = We formalize the Tortoise and Hare cycle-finding algorithm ascribed to Floyd by Knuth, and an improved version due to Brent. [UPF] title = The Unified Policy Framework (UPF) author = Achim D. Brucker , Lukas Brügger , Burkhart Wolff date = 2014-11-28 topic = Computer Science/Security abstract = We present the Unified Policy Framework (UPF), a generic framework for modelling security (access-control) policies. UPF emphasizes the view that a policy is a policy decision function that grants or denies access to resources, permissions, etc. In other words, instead of modelling the relations of permitted or prohibited requests directly, we model the concrete function that implements the policy decision point in a system. In more detail, UPF is based on the following four principles: 1) Functional representation of policies, 2) No conflicts are possible, 3) Three-valued decision type (allow, deny, undefined), 4) Output type not containing the decision only. [AODV] title = Loop freedom of the (untimed) AODV routing protocol author = Timothy Bourke , Peter Höfner date = 2014-10-23 topic = Computer Science/Process Calculi abstract =

    The Ad hoc On-demand Distance Vector (AODV) routing protocol allows the nodes in a Mobile Ad hoc Network (MANET) or a Wireless Mesh Network (WMN) to know where to forward data packets. Such a protocol is ‘loop free’ if it never leads to routing decisions that forward packets in circles.

    This development mechanises an existing pen-and-paper proof of loop freedom of AODV. The protocol is modelled in the Algebra of Wireless Networks (AWN), which is the subject of an earlier paper and AFP mechanization. The proof relies on a novel compositional approach for lifting invariants to networks of nodes.

    We exploit the mechanization to analyse several variants of AODV and show that Isabelle/HOL can re-establish most proof obligations automatically and identify exactly the steps that are no longer valid.

[Show]
title = Haskell's Show Class in Isabelle/HOL
author = Christian Sternagel , René Thiemann
date = 2014-07-29
topic = Computer Science/Functional Programming
license = LGPL
-depends-on = Datatype_Order_Generator, Deriving
abstract = We implemented a type class for "to-string" functions, similar to Haskell's Show class. Moreover, we provide instantiations for Isabelle/HOL's standard types like bool, prod, sum, nats, ints, and rats. It is further possible to automatically derive show functions for arbitrary user-defined datatypes, similar to Haskell's "deriving Show".
extra-history = Change history:
    [2015-03-11]: Adapted development to new-style (BNF-based) datatypes.
    [2015-04-10]: Moved development for old-style datatypes into subdirectory "Old_Datatype".
[Certification_Monads]
title = Certification Monads
author = Christian Sternagel , René Thiemann
date = 2014-10-03
topic = Computer Science/Functional Programming
-depends-on = Show, Partial_Function_MR
abstract = This entry provides several monads intended for the development of stand-alone certifiers via code generation from Isabelle/HOL. More specifically, there are three flavors of error monads (the sum type, for the case where all monadic functions are total; an instance of the former, the so-called check monad, yielding either success without any further information or an error message; as well as a variant of the sum type that accommodates partial functions by providing an explicit bottom element) and a parser monad built on top. All of these monads are heavily used in the IsaFoR/CeTA project, which thus provides many examples of their usage.

[CISC-Kernel]
title = Formal Specification of a Generic Separation Kernel
author = Freek Verbeek , Sergey Tverdyshev , Oto Havle , Holger Blasum , Bruno Langenstein , Werner Stephan , Yakoub Nemouchi , Abderrahmane Feliachi , Burkhart Wolff , Julien Schmaltz
date = 2014-07-18
topic = Computer Science/Security
abstract =

    Intransitive noninterference has been a widely studied topic in the last few decades. Several well-established methodologies apply interactive theorem proving to formulate a noninterference theorem over abstract academic models. In joint work with several industrial and academic partners throughout Europe, we are helping in the certification process of PikeOS, an industrial separation kernel developed at SYSGO. In this process, established theories could not be applied. We present a new generic model of separation kernels and a new theory of intransitive noninterference. The model is rich in detail, making it suitable for formal verification of realistic and industrial systems such as PikeOS. Using a refinement-based theorem proving approach, we ensure that proofs remain manageable.

    This document corresponds to the deliverable D31.1 of the EURO-MILS Project http://www.euromils.eu.

    [pGCL] title = pGCL for Isabelle author = David Cock date = 2014-07-13 topic = Computer Science/Programming Languages/Language Definitions abstract =

    pGCL is both a programming language and a specification language that incorporates both probabilistic and nondeterministic choice, in a unified manner. Program verification is by refinement or annotation (or both), using either Hoare triples, or weakest-precondition entailment, in the style of GCL.

    This package provides both a shallow embedding of the language primitives, and an annotation and refinement framework. The generated document includes a brief tutorial.

    [Noninterference_CSP] title = Noninterference Security in Communicating Sequential Processes author = Pasquale Noce date = 2014-05-23 topic = Computer Science/Security abstract =

    An extension of classical noninterference security for deterministic state machines, as introduced by Goguen and Meseguer and elegantly formalized by Rushby, to nondeterministic systems should satisfy two fundamental requirements: it should be based on a mathematically precise theory of nondeterminism, and should be equivalent to (or at least not weaker than) the classical notion in the degenerate deterministic case.

    This paper proposes a definition of noninterference security applying to Hoare's Communicating Sequential Processes (CSP) in the general case of a possibly intransitive noninterference policy, and proves the equivalence of this security property to classical noninterference security for processes representing deterministic state machines.

    Furthermore, McCullough's generalized noninterference security is shown to be weaker than both the proposed notion of CSP noninterference security for a generic process, and classical noninterference security for processes representing deterministic state machines. This renders CSP noninterference security preferable as an extension of classical noninterference security to nondeterministic systems.

    [Roy_Floyd_Warshall] title = Transitive closure according to Roy-Floyd-Warshall author = Makarius Wenzel date = 2014-05-23 topic = Computer Science/Algorithms abstract = This formulation of the Roy-Floyd-Warshall algorithm for the transitive closure bypasses matrices and arrays, but uses a more direct mathematical model with adjacency functions for immediate predecessors and successors. This can be implemented efficiently in functional programming languages and is particularly adequate for sparse relations. [GPU_Kernel_PL] title = Syntax and semantics of a GPU kernel programming language author = John Wickerson date = 2014-04-03 topic = Computer Science/Programming Languages/Language Definitions abstract = This document accompanies the article "The Design and Implementation of a Verification Technique for GPU Kernels" by Adam Betts, Nathan Chong, Alastair F. Donaldson, Jeroen Ketema, Shaz Qadeer, Paul Thomson and John Wickerson. It formalises all of the definitions provided in Sections 3 and 4 of the article. [AWN] title = Mechanization of the Algebra for Wireless Networks (AWN) author = Timothy Bourke date = 2014-03-08 topic = Computer Science/Process Calculi abstract =

    AWN is a process algebra developed for modelling and analysing protocols for Mobile Ad hoc Networks (MANETs) and Wireless Mesh Networks (WMNs). AWN models comprise five distinct layers: sequential processes, local parallel compositions, nodes, partial networks, and complete networks.

    This development mechanises the original operational semantics of AWN and introduces a variant 'open' operational semantics that enables the compositional statement and proof of invariants across distinct network nodes. It supports labels (for weakening invariants) and (abstract) data state manipulations. A framework for compositional invariant proofs is developed, including a tactic (inv_cterms) for inductive invariant proofs of sequential processes, lifting rules for the open versions of the higher layers, and a rule for transferring lifted properties back to the standard semantics. A notion of 'control terms' reduces proof obligations to the subset of subterms that act directly (in contrast to operators for combining terms and joining processes).

[Selection_Heap_Sort]
title = Verification of Selection and Heap Sort Using Locales
author = Danijela Petrovic
date = 2014-02-11
topic = Computer Science/Algorithms
abstract = Stepwise program refinement techniques can be used to simplify program verification. Programs are better understood since their main properties are clearly stated, and verification of rather complex algorithms is reduced to proving simple statements connecting successive program specifications. Additionally, it is easy to analyze similar algorithms and to compare their properties within a single formalization. Usually, formal analysis is not done in an educational setting due to the complexity of verification and a lack of tools and procedures to make comparison easy. Verification of an algorithm should not only give a correctness proof, but also a better understanding of the algorithm. If the verification is based on small-step program refinement, it can become simple enough to be demonstrated within the university-level computer science curriculum. In this paper we demonstrate this and give a formal analysis of two well-known algorithms (Selection Sort and Heap Sort) using the proof assistant Isabelle/HOL and program refinement techniques.

[Real_Impl]
title = Implementing field extensions of the form Q[sqrt(b)]
author = René Thiemann
date = 2014-02-06
license = LGPL
topic = Mathematics/Analysis
abstract = We apply data refinement to implement the real numbers, where we support all numbers in the field extension Q[sqrt(b)], i.e., all numbers of the form p + q * sqrt(b) for rational numbers p and q and some fixed natural number b. To this end, we also developed algorithms to precisely compute roots of a rational number, and to perform a factorization of natural numbers which eliminates duplicate prime factors.

    Our results have been used to certify termination proofs which involve polynomial interpretations over the reals. extra-history = Change history: [2014-07-11]: Moved NthRoot_Impl to Sqrt-Babylonian. [ShortestPath] title = An Axiomatic Characterization of the Single-Source Shortest Path Problem author = Christine Rizkallah date = 2013-05-22 topic = Mathematics/Graph Theory abstract = This theory is split into two sections. In the first section, we give a formal proof that a well-known axiomatic characterization of the single-source shortest path problem is correct. Namely, we prove that in a directed graph with a non-negative cost function on the edges the single-source shortest path function is the only function that satisfies a set of four axioms. In the second section, we give a formal proof of the correctness of an axiomatic characterization of the single-source shortest path problem for directed graphs with general cost functions. The axioms here are more involved because we have to account for potential negative cycles in the graph. The axioms are summarized in three Isabelle locales. [Launchbury] title = The Correctness of Launchbury's Natural Semantics for Lazy Evaluation author = Joachim Breitner date = 2013-01-31 topic = Computer Science/Programming Languages/Lambda Calculi abstract = In his seminal paper "Natural Semantics for Lazy Evaluation", John Launchbury proves his semantics correct with respect to a denotational semantics, and outlines an adequacy proof. We have formalized both semantics and machine-checked the correctness proof, clarifying some details. Furthermore, we provide a new and more direct adequacy proof that does not require intermediate operational semantics. extra-history = Change history: [2014-05-24]: Added the proof of adequacy, as well as simplified and improved the existing proofs. Adjusted abstract accordingly. [2015-03-16]: Booleans and if-then-else added to syntax and semantics, making this entry suitable to be used by the entry "Call_Arity". [Call_Arity] title = The Safety of Call Arity author = Joachim Breitner date = 2015-02-20 topic = Computer Science/Programming Languages/Transformations abstract = We formalize the Call Arity analysis, as implemented in GHC, and prove both functional correctness and, more interestingly, safety (i.e. the transformation does not increase allocation).

    We use syntax and the denotational semantics from the entry "Launchbury", where we formalized Launchbury's natural semantics for lazy evaluation.

    The functional correctness of Call Arity is proved with regard to that denotational semantics. The operational properties are shown with regard to a small-step semantics akin to Sestoft's mark 1 machine, which we prove to be equivalent to Launchbury's semantics.

    We use Christian Urban's Nominal2 package to define our terms and make use of Brian Huffman's HOLCF package for the domain-theoretical aspects of the development. extra-history = Change history: [2015-03-16]: This entry now builds on top of the Launchbury entry, and the equivalence proof of the natural and the small-step semantics was added. [CCS] title = CCS in nominal logic author = Jesper Bengtson date = 2012-05-29 topic = Computer Science/Process Calculi abstract = We formalise a large portion of CCS as described in Milner's book 'Communication and Concurrency' using the nominal datatype package in Isabelle. Our results include many of the standard theorems of bisimulation equivalence and congruence, for both weak and strong versions. One main goal of this formalisation is to keep the machine-checked proofs as close to their pen-and-paper counterparts as possible.

    This entry is described in detail in Bengtson's thesis. [Pi_Calculus] title = The pi-calculus in nominal logic author = Jesper Bengtson date = 2012-05-29 topic = Computer Science/Process Calculi abstract = We formalise the pi-calculus using the nominal datatype package, based on ideas from the nominal logic by Pitts et al., and demonstrate an implementation in Isabelle/HOL. The purpose is to derive powerful induction rules for the semantics in order to conduct machine-checkable proofs, closely following the intuitive arguments found in manual proofs. In this way we have covered many of the standard theorems of bisimulation equivalence and congruence, both late and early, and both strong and weak in a uniform manner. We thus provide one of the most extensive formalisations of the pi-calculus ever done inside a theorem prover.

    A significant gain in our formulation is that agents are identified up to alpha-equivalence, thereby greatly reducing the arguments about bound names. This is a normal strategy for manual proofs about the pi-calculus, but that kind of hand waving has previously been difficult to incorporate smoothly in an interactive theorem prover. We show how the nominal logic formalism and its support in Isabelle accomplish this and thus significantly reduce the tedium of conducting completely formal proofs. This improves on previous work using weak higher-order abstract syntax since we do not need extra assumptions to filter out exotic terms and can keep all arguments within a familiar first-order logic.

    This entry is described in detail in Bengtson's thesis. [Psi_Calculi] title = Psi-calculi in Isabelle author = Jesper Bengtson date = 2012-05-29 topic = Computer Science/Process Calculi abstract = Psi-calculi are extensions of the pi-calculus, accommodating arbitrary nominal datatypes to represent not only data but also communication channels, assertions and conditions, giving them an expressive power beyond the applied pi-calculus and the concurrent constraint pi-calculus.

    We have formalised psi-calculi in the interactive theorem prover Isabelle using its nominal datatype package. One distinctive feature is that the framework needs to treat binding sequences, as opposed to single binders, in an efficient way. While different methods for formalising single-binder calculi have been proposed over the last decades, representations for such binding sequences are not very well explored.

    The main effort in the formalisation is to keep the machine-checked proofs as close to their pen-and-paper counterparts as possible. This includes treating all binding sequences as atomic elements, and creating custom induction and inversion rules that remove the bulk of manual alpha-conversions.

    This entry is described in detail in Bengtson's thesis. [Encodability_Process_Calculi] title = Analysing and Comparing Encodability Criteria for Process Calculi author = Kirstin Peters , Rob van Glabbeek date = 2015-08-10 topic = Computer Science/Process Calculi abstract = Encodings or the proof of their absence are the main way to compare process calculi. To analyse the quality of encodings and to rule out trivial or meaningless encodings, they are augmented with quality criteria. There exist many different criteria, and different variants of these criteria, for reasoning in different settings. This leads to incomparable results. Moreover, it is not always clear whether the criteria used to obtain a result in a particular setting indeed fit that setting. We show how to formally reason about and compare encodability criteria by mapping them to requirements on a relation between source and target terms that is induced by the encoding function. In particular we analyse the common criteria full abstraction, operational correspondence, divergence reflection, success sensitiveness, and respect of barbs; e.g. we analyse the exact nature of the simulation relation (coupled simulation versus bisimulation) that is induced by different variants of operational correspondence. This way we reduce the problem of analysing or comparing encodability criteria to the better-understood problem of comparing relations on processes. [Circus] title = Isabelle/Circus author = Abderrahmane Feliachi , Burkhart Wolff , Marie-Claude Gaudel contributors = Makarius Wenzel date = 2012-05-27 topic = Computer Science/Process Calculi, Computer Science/System Description Languages abstract = The Circus specification language combines elements for complex data and behavior specifications, using an integration of Z and CSP with a refinement calculus. Its semantics is based on Hoare and He's Unifying Theories of Programming (UTP). Isabelle/Circus is a formalization of the UTP and the Circus language in Isabelle/HOL. It contains proof rules and tactic support that allows for proofs of refinement for Circus processes (involving both data and behavioral aspects).

    The Isabelle/Circus environment supports a syntax for the semantic definitions which is close to textbook presentations of Circus. This article contains an extended version of the corresponding VSTTE paper together with the complete formal development of its underlying commented theories. extra-history = Change history: [2014-06-05]: More polishing, shorter proofs, added Circus syntax, added Makarius Wenzel as contributor. [Dijkstra_Shortest_Path] title = Dijkstra's Shortest Path Algorithm author = Benedikt Nordhoff , Peter Lammich topic = Computer Science/Algorithms date = 2012-01-30 -depends-on = Refine_Monadic, Collections abstract = We implement and prove correct Dijkstra's algorithm for the single-source shortest path problem, conceived in 1956 by E. Dijkstra. The algorithm is implemented using the data refinement framework for monadic, nondeterministic programs. An efficient implementation is derived using data structures from the Isabelle Collection Framework. [Refine_Monadic] title = Refinement for Monadic Programs author = Peter Lammich topic = Computer Science/Programming Languages/Logics date = 2012-01-30 -depends-on = Automatic_Refinement abstract = We provide a framework for program and data refinement in Isabelle/HOL. The framework is based on a nondeterminism-monad with assertions, i.e., the monad carries a set of results or an assertion failure. Recursion is expressed by fixed points. For convenience, we also provide while and foreach combinators.
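
    (Illustration only: a toy Python rendering of such a nondeterminism monad with assertions, where a computation is a set of results and FAIL is the absorbing assertion failure; the names ret, bind and assert_ are hypothetical and not the entry's API.)

        FAIL = object()   # assertion failure, an absorbing element

        def ret(x):                 # embed a single result
            return {x}

        def bind(m, f):             # sequencing: union over all results, FAIL absorbs
            if m is FAIL:
                return FAIL
            out = set()
            for x in m:
                r = f(x)
                if r is FAIL:
                    return FAIL
                out |= r
            return out

        def assert_(b):             # succeeds trivially or fails the whole program
            return {()} if b else FAIL

        # choose x from {1,2,3}; the assertion holds on every branch, so no failure
        prog = bind({1, 2, 3}, lambda x: bind(assert_(x > 0), lambda _: ret(x * x)))
        print(prog)   # {1, 4, 9}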

    The framework provides tools to automate canonical tasks, such as verification condition generation, finding appropriate data refinement relations, and refining an executable program to a form that is accepted by the Isabelle/HOL code generator.

    This submission comes with a collection of examples and a user-guide, illustrating the usage of the framework. extra-history = Change history: [2012-04-23] Introduced ordered FOREACH loops
    [2012-06] New features: REC_rule_arb and RECT_rule_arb allow for generalizing over variables. prepare_code_thms - command extracts code equations for recursion combinators.
    [2012-07] New example: Nested DFS for emptiness check of Buchi-automata with witness.
    New feature: fo_rule method to apply resolution using first-order matching. Useful for arg_cong, fun_cong.
    [2012-08] Adaptation to ICF v2.
    [2012-10-05] Adaptations to include support for Automatic Refinement Framework.
    [2013-09] This entry now depends on Automatic Refinement
    [2014-06] New feature: vc_solve method to solve verification conditions. Maintenance changes: VCG-rules for nfoldli, improved setup for FOREACH-loops.
    [2014-07] Now defining recursion via flat domain. Dropped many single-valued prerequisites. Changed notion of data refinement. In single-valued case, this matches the old notion. In non-single valued case, the new notion allows for more convenient rules. In particular, the new definitions allow for projecting away ghost variables as a refinement step.
    [2014-11] New features: le-or-fail relation (leof), modular reasoning about loop invariants. [Automatic_Refinement] title = Automatic Data Refinement author = Peter Lammich topic = Computer Science/Programming Languages/Logics date = 2013-10-02 abstract = We present the Autoref tool for Isabelle/HOL, which automatically refines algorithms specified over abstract concepts like maps and sets to algorithms over concrete implementations like red-black-trees, and produces a refinement theorem. It is based on ideas borrowed from relational parametricity due to Reynolds and Wadler. The tool allows for rapid prototyping of verified, executable algorithms. Moreover, it can be configured to fine-tune the result to the user's needs. Our tool is able to automatically instantiate generic algorithms, which greatly simplifies the implementation of executable data structures.
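
    (Illustration only: the data-refinement idea underlying the tool, namely that concrete operations must commute with an abstraction function, in a minimal Python sketch with hypothetical names; Autoref automates finding and proving such correspondences inside Isabelle/HOL.)

        import bisect

        def alpha(xs):              # abstraction function: sorted distinct list -> set
            return set(xs)

        def ins_abs(s, x):          # abstract operation on sets
            return s | {x}

        def ins_conc(xs, x):        # concrete operation on the implementation type
            i = bisect.bisect_left(xs, x)
            if i < len(xs) and xs[i] == x:
                return xs
            return xs[:i] + [x] + xs[i:]

        # refinement condition: the concrete operation implements the abstract one
        assert alpha(ins_conc([1, 3], 2)) == ins_abs(alpha([1, 3]), 2)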

    This AFP entry provides the basic tool, which is then used by the Refinement and Collection Framework to provide automatic data refinement for the nondeterminism monad and various collection data structures. [PseudoHoops] title = Pseudo Hoops author = George Georgescu <>, Laurentiu Leustean <>, Viorel Preoteasa topic = Mathematics/Algebra date = 2011-09-22 abstract = Pseudo-hoops are algebraic structures introduced by B. Bosbach under the name of complementary semigroups. In this formalization we prove some properties of pseudo-hoops and we define the basic concepts of filter and normal filter. The lattice of normal filters is isomorphic with the lattice of congruences of a pseudo-hoop. We also study some important classes of pseudo-hoops. Bounded Wajsberg pseudo-hoops are equivalent to pseudo-Wajsberg algebras and bounded basic pseudo-hoops are equivalent to pseudo-BL algebras. Some examples of pseudo-hoops are given in the last section of the formalization. [MonoBoolTranAlgebra] title = Algebra of Monotonic Boolean Transformers author = Viorel Preoteasa topic = Computer Science/Programming Languages/Logics date = 2011-09-22 abstract = Algebras of imperative programming languages have been successful in reasoning about programs. In general an algebra of programs is an algebraic structure with programs as elements and with program compositions (sequential composition, choice, skip) as algebra operations. Various versions of these algebras were introduced to model partial correctness, total correctness, refinement, demonic choice, and other aspects. We formalize here an algebra which can be used to model total correctness, refinement, demonic and angelic choice. The basic model of this algebra is given by monotonic Boolean transformers (monotonic functions from a Boolean algebra to itself). [LatticeProperties] title = Lattice Properties author = Viorel Preoteasa topic = Mathematics/Algebra date = 2011-09-22 abstract = This formalization introduces and collects some algebraic structures based on lattices and complete lattices for use in other developments. The structures introduced are modular lattices and lattice-ordered groups. In addition to the results proved for the new lattices, this formalization also introduces theorems about lattices and complete lattices in general. extra-history = Change history: [2012-01-05]: Removed the theory about distributive complete lattices which is in the standard library now. Added a theory about well founded and transitive relations and a result about fixpoints in complete lattices and well founded relations. Moved the results about conjunctive and disjunctive functions to a new theory. Removed the syntactic classes for inf and sup which are in the standard library now. [Impossible_Geometry] title = Proving the Impossibility of Trisecting an Angle and Doubling the Cube author = Ralph Romanos , Lawrence C. Paulson topic = Mathematics/Algebra, Mathematics/Geometry date = 2012-08-05 abstract = Squaring the circle, doubling the cube and trisecting an angle, using a compass and straightedge alone, are classic unsolved problems first posed by the ancient Greeks. All three problems were proved to be impossible in the 19th century. The following document presents the proof of the impossibility of solving the latter two problems using Isabelle/HOL, following a proof by Carrega. The proof uses elementary methods: no Galois theory or field extensions. The set of points constructible using a compass and straightedge is defined inductively. 
Radical expressions, which involve only square roots and arithmetic of rational numbers, are defined, and we find that all constructible points have radical coordinates. Finally, doubling the cube and trisecting certain angles require solving certain cubic equations that can be proved to have no rational roots. The Isabelle proofs require a great many detailed calculations. [KBPs] title = Knowledge-based programs author = Peter Gammie topic = Computer Science/Automata and Formal Languages date = 2011-05-17 abstract = Knowledge-based programs (KBPs) are a formalism for directly relating agents' knowledge and behaviour. Here we present a general scheme for compiling KBPs to executable automata with a proof of correctness in Isabelle/HOL. We develop the algorithm top-down, using Isabelle's locale mechanism to structure these proofs, and show that two classic examples can be synthesised using Isabelle's code generator. extra-history = Change history: [2012-03-06]: Add some more views and revive the code generation. [Tarskis_Geometry] title = The independence of Tarski's Euclidean axiom author = T. J. M. Makarios topic = Mathematics/Geometry date = 2012-10-30 abstract = Tarski's axioms of plane geometry are formalized and, using the standard real Cartesian model, shown to be consistent. A substantial theory of the projective plane is developed. Building on this theory, the Klein-Beltrami model of the hyperbolic plane is defined and shown to satisfy all of Tarski's axioms except his Euclidean axiom; thus Tarski's Euclidean axiom is shown to be independent of his other axioms of plane geometry.

    An earlier version of this work was the subject of the author's MSc thesis, which contains natural-language explanations of some of the more interesting proofs. [General-Triangle] title = The General Triangle Is Unique author = Joachim Breitner topic = Mathematics/Geometry date = 2011-04-01 abstract = Some acute-angled triangles are special, e.g. right-angled or isosceles triangles. Some are not of this kind, but, without measuring angles, look as if they were. In that sense, there is exactly one general triangle. This well-known fact is proven here formally. [LightweightJava] title = Lightweight Java author = Rok Strniša , Matthew Parkinson topic = Computer Science/Programming Languages/Language Definitions date = 2011-02-07 abstract = A fully-formalized and extensible minimal imperative fragment of Java. [Lower_Semicontinuous] title = Lower Semicontinuous Functions author = Bogdan Grechuk topic = Mathematics/Analysis date = 2011-01-08 abstract = We define the notions of lower and upper semicontinuity for functions from a metric space to the extended real line. We prove that a function is both lower and upper semicontinuous if and only if it is continuous. We also give several equivalent characterizations of lower semicontinuity. In particular, we prove that a function is lower semicontinuous if and only if its epigraph is a closed set. Also, we introduce the notion of the lower semicontinuous hull of an arbitrary function and prove its basic properties. [RIPEMD-160-SPARK] title = RIPEMD-160 author = Fabian Immler topic = Computer Science/Programming Languages/Static Analysis date = 2011-01-10 abstract = This work presents a verification of an implementation in SPARK/ADA of the cryptographic hash-function RIPEMD-160. A functional specification of RIPEMD-160 is given in Isabelle/HOL. Proofs for the verification conditions generated by the static-analysis toolset of SPARK certify the functional correctness of the implementation. extra-history = Change history: [2015-11-09]: Entry is now obsolete, moved to Isabelle distribution. [Regular-Sets] title = Regular Sets and Expressions author = Alexander Krauss , Tobias Nipkow topic = Computer Science/Automata and Formal Languages date = 2010-05-12 abstract = This is a library of constructions on regular expressions and languages. It provides the operations of concatenation, Kleene star and derivative on languages. Regular expressions and their meaning are defined. An executable equivalence checker for regular expressions is verified; it does not need automata but works directly on regular expressions. By mapping regular expressions to binary relations, an automatic and complete proof method for (in)equalities of binary relations over union, concatenation and (reflexive) transitive closure is obtained.
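
    (Illustration only: a minimal Python matcher based on Brzozowski derivatives, the operation underlying such checkers; all names are hypothetical, and the verified equivalence checker of this entry does considerably more.)

        # regexes as tuples: ('0',) empty language, ('1',) empty word, ('c', a) letter,
        # ('+', r, s) union, ('.', r, s) concatenation, ('*', r) Kleene star
        def nullable(r):
            t = r[0]
            if t == '1': return True
            if t in ('0', 'c'): return False
            if t == '+': return nullable(r[1]) or nullable(r[2])
            if t == '.': return nullable(r[1]) and nullable(r[2])
            return True   # star

        def deriv(r, a):
            t = r[0]
            if t in ('0', '1'): return ('0',)
            if t == 'c': return ('1',) if r[1] == a else ('0',)
            if t == '+': return ('+', deriv(r[1], a), deriv(r[2], a))
            if t == '.':
                d = ('.', deriv(r[1], a), r[2])
                return ('+', d, deriv(r[2], a)) if nullable(r[1]) else d
            return ('.', deriv(r[1], a), r)   # star: d(r*) = d(r).r*

        def matches(r, word):
            for a in word:
                r = deriv(r, a)
            return nullable(r)

        ab_star = ('*', ('.', ('c', 'a'), ('c', 'b')))
        print(matches(ab_star, "abab"), matches(ab_star, "aba"))   # True False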

    Extended regular expressions with complement and intersection are also defined and an equivalence checker is provided. extra-history = Change history: [2011-08-26]: Christian Urban added a theory about derivatives and partial derivatives of regular expressions
    [2012-05-10]: Tobias Nipkow added extended regular expressions
    [2012-05-10]: Tobias Nipkow added equivalence checking with partial derivatives [Regex_Equivalence] title = Unified Decision Procedures for Regular Expression Equivalence author = Tobias Nipkow , Dmitriy Traytel topic = Computer Science/Automata and Formal Languages date = 2014-01-30 abstract = We formalize a unified framework for verified decision procedures for regular expression equivalence. Five recently published formalizations of such decision procedures (three based on derivatives, two on marked regular expressions) can be obtained as instances of the framework. We discover that the two approaches based on marked regular expressions, which were previously thought to be the same, are different, and one seems to produce uniformly smaller automata. The common framework makes it possible to compare the performance of the different decision procedures in a meaningful way. The formalization is described in a paper of the same name presented at Interactive Theorem Proving 2014. [MSO_Regex_Equivalence] title = Decision Procedures for MSO on Words Based on Derivatives of Regular Expressions author = Dmitriy Traytel , Tobias Nipkow topic = Computer Science/Automata and Formal Languages, Logic date = 2014-06-12 abstract = Monadic second-order logic on finite words (MSO) is a decidable yet expressive logic into which many decision problems can be encoded. Since MSO formulas correspond to regular languages, equivalence of MSO formulas can be reduced to the equivalence of some regular structures (e.g. automata). We verify an executable decision procedure for MSO formulas that is not based on automata but on regular expressions.

    Decision procedures for regular expression equivalence have been formalized before, usually based on Brzozowski derivatives. Yet, for a straightforward embedding of MSO formulas into regular expressions an extension of regular expressions with a projection operation is required. We prove total correctness and completeness of an equivalence checker for regular expressions extended in that way. We also define a language-preserving translation of formulas into regular expressions with respect to two different semantics of MSO.

    The formalization is described in this ICFP 2013 functional pearl. [Formula_Derivatives] title = Derivatives of Logical Formulas author = Dmitriy Traytel topic = Computer Science/Automata and Formal Languages, Logic date = 2015-05-28 abstract = We formalize new decision procedures for WS1S, M2L(Str), and Presburger arithmetic. Formulas of these logics denote regular languages. Unlike traditional decision procedures, we do not translate formulas into automata (nor into regular expressions), at least not explicitly. Instead we devise notions of derivatives (inspired by Brzozowski derivatives for regular expressions) that operate on formulas directly and compute a syntactic bisimulation using these derivatives. The treatment of Boolean connectives and quantifiers is uniform for all mentioned logics and is abstracted into a locale. This locale is then instantiated by different atomic formulas and their derivatives (which may differ even for the same logic under different encodings of interpretations as formal words).

    The WS1S instance is described in the draft paper A Coalgebraic Decision Procedure for WS1S by the author. -depends-on = Coinductive_Languages, Deriving, Show, List-Index [Myhill-Nerode] title = The Myhill-Nerode Theorem Based on Regular Expressions author = Chunhan Wu <>, Xingyuan Zhang <>, Christian Urban topic = Computer Science/Automata and Formal Languages date = 2011-08-26 abstract = There are many proofs of the Myhill-Nerode theorem using automata. In this library we give a proof entirely based on regular expressions, since regularity of languages can be conveniently defined using regular expressions (it is more painful in HOL to define regularity in terms of automata). We prove the first direction of the Myhill-Nerode theorem by solving equational systems that involve regular expressions. For the second direction we give two proofs: one using tagging-functions and another using partial derivatives. We also establish various closure properties of regular languages. Most details of the theories are described in our ITP 2011 paper. [Boolean_Expression_Checkers] title = Boolean Expression Checkers author = Tobias Nipkow date = 2014-06-08 topic = Computer Science/Algorithms, Logic abstract = This entry provides executable checkers for the following properties of boolean expressions: satisfiability, tautology and equivalence. Internally, the checkers operate on binary decision trees and are reasonably efficient (for purely functional algorithms). extra-history = Change history: [2015-09-23]: Salomon Sickert added an interface that does not require the usage of the Boolean formula datatype. Furthermore the general Mapping type is used instead of an associative list. [Presburger-Automata] title = Formalizing the Logic-Automaton Connection author = Stefan Berghofer , Markus Reiter <> date = 2009-12-03 topic = Computer Science/Automata and Formal Languages, Logic abstract = This work presents a formalization of a library for automata on bit strings. It forms the basis of a reflection-based decision procedure for Presburger arithmetic, which is efficiently executable thanks to Isabelle's code generator. With this work, we therefore provide a mechanized proof of a well-known connection between logic and automata theory. The formalization is also described in a publication [TPHOLs 2009]. [Functional-Automata] title = Functional Automata author = Tobias Nipkow date = 2004-03-30 topic = Computer Science/Automata and Formal Languages abstract = This theory defines deterministic and nondeterministic automata in a functional representation: the transition function/relation and the finality predicate are just functions. Hence the state space may be infinite. It is shown how to convert regular expressions into such automata. A scanner (generator) is implemented with the help of functional automata: the scanner chops the input up into longest recognized substrings. Finally we also show how to convert a certain subclass of functional automata (essentially the finite deterministic ones) into regular sets. -depends-on = Regular-Sets [Statecharts] title = Formalizing Statecharts using Hierarchical Automata author = Steffen Helke , Florian Kammüller topic = Computer Science/Automata and Formal Languages date = 2010-08-08 abstract = We formalize in Isabelle/HOL the abstract syntax and a synchronous step semantics for the specification language Statecharts. The formalization is based on Hierarchical Automata which allow a structural decomposition of Statecharts into Sequential Automata. 
To support the composition of Statecharts, we introduce calculating operators to construct a Hierarchical Automaton in a stepwise manner. Furthermore, we present a complete semantics of Statecharts including a theory of data spaces, which enables the modelling of racing effects. We also adapt CTL for Statecharts to build a bridge for future combinations with model checking. However, the main motivation of this work is to provide a sound and complete basis for reasoning about Statecharts. As a central meta theorem we prove that the well-formedness of a Statechart is preserved by the semantics. [Stuttering_Equivalence] title = Stuttering Equivalence author = Stephan Merz topic = Computer Science/Automata and Formal Languages date = 2012-05-07 abstract =

    Two omega-sequences are stuttering equivalent if they differ only by finite repetitions of elements. Stuttering equivalence is a fundamental concept in the theory of concurrent and distributed systems. Notably, Lamport argues that refinement notions for such systems should be insensitive to finite stuttering. Peled and Wilke showed that all PLTL (propositional linear-time temporal logic) properties that are insensitive to stuttering equivalence can be expressed without the next-time operator. Stuttering equivalence is also important for certain verification techniques such as partial-order reduction for model checking.
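
    (Illustration only: for finite sequences the notion collapses to a simple test, since two finite sequences are stuttering equivalent exactly when removing consecutive repetitions yields the same sequence; a Python sketch with hypothetical names, whereas the entry treats omega-sequences via sampling functions.)

        def destutter(xs):
            out = []
            for x in xs:
                if not out or out[-1] != x:   # keep x only if it changes the value
                    out.append(x)
            return out

        def stutter_equiv(xs, ys):            # finite-sequence analogue only
            return destutter(list(xs)) == destutter(list(ys))

        print(stutter_equiv("aabbbc", "abcc"))   # True: both collapse to a, b, c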

    We formalize stuttering equivalence in Isabelle/HOL. Our development relies on the notion of stuttering sampling functions that may skip blocks of identical sequence elements. We also encode PLTL and prove the theorem due to Peled and Wilke.

    extra-history = Change history: [2013-01-31]: Added encoding of PLTL and proved Peled and Wilke's theorem. Adjusted abstract accordingly. [Coinductive_Languages] title = A Codatatype of Formal Languages author = Dmitriy Traytel topic = Computer Science/Automata and Formal Languages date = 2013-11-15 abstract =

    We define formal languages as a codatatype of infinite trees branching over the alphabet. Each node in such a tree indicates whether the path to this node constitutes a word inside or outside of the language. This codatatype is isomorphic to the set-of-lists representation of languages, but caters for definitions by corecursion and proofs by coinduction.
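
    (Illustration only: a rough Python rendering of this tree view, with lazy branching via closures and hypothetical names; a node carries a membership bit and a function from letters to subtrees.)

        def lang(p, path=()):
            # unfold the tree of predicate p lazily: the node at path w stores p(w)
            return (p(path), lambda a: lang(p, path + (a,)))

        def member(word, node):
            for a in word:
                node = node[1](a)   # descend along the letters of the word
            return node[0]

        # the language of 0/1-words containing an even number of 1s
        even_ones = lang(lambda w: w.count(1) % 2 == 0)
        print(member((1, 0, 1), even_ones), member((1,), even_ones))   # True False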

    Regular operations on languages are then defined by primitive corecursion. A difficulty arises here, since the standard definitions of concatenation and iteration from the coalgebraic literature are not primitively corecursive: they require guardedness up to union/concatenation. Without support for up-to corecursion, these operations must be defined as a composition of primitive ones (and proved equal to the standard definitions). As an exercise in coinduction we also prove the axioms of Kleene algebra for the defined regular operations.

    Furthermore, a language for context-free grammars given by productions in Greibach normal form and an initial nonterminal is constructed by primitive corecursion, yielding an executable decision procedure for the word problem without further ado.

    [Tree-Automata] title = Tree Automata author = Peter Lammich date = 2009-11-25 topic = Computer Science/Automata and Formal Languages abstract = This work presents a machine-checked tree automata library for Standard-ML, OCaml and Haskell. The algorithms are efficient by using appropriate data structures like RB-trees. The available algorithms for non-deterministic automata include membership query, reduction, intersection, union, and emptiness check with computation of a witness for non-emptiness. The executable algorithms are derived from less-concrete, non-executable algorithms using data-refinement techniques. The concrete data structures are from the Isabelle Collections Framework. Moreover, this work contains a formalization of the class of tree-regular languages and its closure properties under set operations. -depends-on = Collections [Depth-First-Search] title = Depth First Search author = Toshiaki Nishihara <>, Yasuhiko Minamide date = 2004-06-24 topic = Computer Science/Algorithms abstract = Depth-first search of a graph is formalized with recdef. It is shown that it visits all of the reachable nodes from a given list of nodes. Executable ML code of depth-first search is obtained using the code generation feature of Isabelle/HOL. [FFT] title = Fast Fourier Transform author = Clemens Ballarin date = 2005-10-12 topic = Computer Science/Algorithms abstract = We formalise a functional implementation of the FFT algorithm over the complex numbers, and its inverse. Both are shown equivalent to the usual definitions of these operations through Vandermonde matrices. They are also shown to be inverse to each other, more precisely, that composition of the inverse and the transformation yields the identity up to a scalar. [Gauss-Jordan-Elim-Fun] title = Gauss-Jordan Elimination for Matrices Represented as Functions author = Tobias Nipkow date = 2011-08-19 topic = Computer Science/Algorithms, Mathematics/Algebra abstract = This theory provides a compact formulation of Gauss-Jordan elimination for matrices represented as functions. Its distinctive feature is succinctness. It is not meant for large computations. [UpDown_Scheme] title = Verification of the UpDown Scheme author = Johannes Hölzl date = 2015-01-28 topic = Computer Science/Algorithms abstract = The UpDown scheme is a recursive scheme used to compute the stiffness matrix on a special form of sparse grids. Usually, when discretizing a Euclidean space of dimension d we need O(n^d) points, for n points along each dimension. Sparse grids are a hierarchical representation where the number of points is reduced to O(n * log(n)^d). One disadvantage of such sparse grids is that the algorithms now operate recursively in the dimensions and levels of the sparse grid.

    The UpDown scheme allows us to compute the stiffness matrix on such a sparse grid. The stiffness matrix represents the influence of each representation function on the L^2 scalar product. For a detailed description see Dirk Pflüger's PhD thesis. This formalization was developed as an interdisciplinary project (IDP) at the Technische Universität München. [GraphMarkingIBP] title = Verification of the Deutsch-Schorr-Waite Graph Marking Algorithm using Data Refinement author = Viorel Preoteasa , Ralph-Johan Back date = 2010-05-28 topic = Computer Science/Algorithms abstract = The verification of the Deutsch-Schorr-Waite graph marking algorithm is used as a benchmark in many formalizations of pointer programs. The main purpose of this mechanization is to show how data refinement of invariant-based programs can be used in verifying practical algorithms. The verification starts with an abstract algorithm working on a graph given by a relation next on nodes. Gradually the abstract program is refined into the Deutsch-Schorr-Waite graph marking algorithm where only one bit per graph node of additional memory is used for marking. -depends-on = DataRefinementIBP extra-history = Change history: [2012-01-05]: Updated for the new definition of data refinement and the new syntax for demonic and angelic update statements [Efficient-Mergesort] title = Efficient Mergesort topic = Computer Science/Algorithms date = 2011-11-09 author = Christian Sternagel abstract = We provide a formalization of the mergesort algorithm as used in GHC's Data.List module, proving correctness and stability. Furthermore, experimental data suggests that generated (Haskell-)code for this algorithm is much faster than for previous algorithms available in the Isabelle distribution. extra-history = Change history: [2012-10-24]: Added reference to journal article [SATSolverVerification] title = Formal Verification of Modern SAT Solvers author = Filip Maric date = 2008-07-23 topic = Computer Science/Algorithms abstract = This document contains formal correctness proofs of modern SAT solvers. Following (Krstic et al, 2007) and (Nieuwenhuis et al., 2006), solvers are described using state-transition systems. Several different SAT solver descriptions are given and their partial correctness and termination are proved. These include:

    • a solver based on the classical DPLL procedure (using only backtrack search with unit propagation; a minimal Python sketch follows this list),
    • a very general solver with backjumping and learning (similar to the description given in (Nieuwenhuis et al., 2006)), and
    • a solver with a specific conflict analysis algorithm (similar to the description given in (Krstic et al., 2007)).
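
    (Illustration only: a minimal Python sketch of the first variant above, a backtracking search with unit propagation on a CNF given as a list of integer-literal lists; the names are hypothetical and the sketch is unrelated to the entry's Isabelle/HOL state-transition descriptions.)

        def unit_propagate(clauses, assignment):
            changed = True
            while changed:
                changed = False
                for c in clauses:
                    if any(l in assignment for l in c):
                        continue        # clause already satisfied
                    free = [l for l in c if -l not in assignment]
                    if not free:
                        return None     # conflict: every literal falsified
                    if len(free) == 1:  # unit clause: its literal is forced
                        assignment = assignment | {free[0]}
                        changed = True
            return assignment

        def dpll(clauses, assignment=frozenset()):
            assignment = unit_propagate(clauses, assignment)
            if assignment is None:
                return None             # backtrack
            variables = {abs(l) for c in clauses for l in c}
            free = variables - {abs(l) for l in assignment}
            if not free:
                return assignment       # satisfying assignment found
            v = min(free)               # naive decision heuristic
            return dpll(clauses, assignment | {v}) or dpll(clauses, assignment | {-v})

        # (x1 or x2) and (not x1 or x2) and (not x2 or x3)
        print(dpll([[1, 2], [-1, 2], [-2, 3]]))   # frozenset({1, 2, 3})
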
    Within the SAT solver correctness proofs, a large number of lemmas about propositional logic and CNF formulae are proved. This theory is self-contained and could be used for further exploration of properties of CNF-based SAT algorithms. [Transitive-Closure] title = Executable Transitive Closures of Finite Relations topic = Computer Science/Algorithms date = 2011-03-14 author = Christian Sternagel , René Thiemann license = LGPL abstract = We provide a generic work-list algorithm to compute the transitive closure of finite relations where only successors of newly detected states are generated. This algorithm is then instantiated for lists over arbitrary carriers and red-black trees (which are faster but require a linear order on the carrier), respectively. Our formalization was performed as part of the IsaFoR/CeTA project where reflexive transitive closures of large tree automata have to be computed. -depends-on = Collections, Abstract-Rewriting extra-history = Change history: [2014-09-04] added example simprocs in Finite_Transitive_Closure_Simprocs [Transitive-Closure-II] title = Executable Transitive Closures topic = Computer Science/Algorithms date = 2012-02-29 author = René Thiemann license = LGPL abstract = We provide a generic work-list algorithm to compute the (reflexive-)transitive closure of relations where only successors of newly detected states are generated. In contrast to our previous work, the relations do not have to be finite, but each element must only have finitely many (indirect) successors. Moreover, a subsumption relation can be used instead of pure equality. An executable variant of the algorithm is available where the generic operations are instantiated with list operations. This formalization was performed as part of the IsaFoR/CeTA project, and it has been used to certify size-change termination proofs where large transitive closures have to be computed. -depends-on = Regular-Sets [MuchAdoAboutTwo] title = Much Ado About Two author = Sascha Böhme date = 2007-11-06 topic = Computer Science/Algorithms abstract = This article is an Isabelle formalisation of a paper with the same title. In a similar way as Knuth's 0-1-principle for sorting algorithms, that paper develops a 0-1-2-principle for parallel prefix computations. [DiskPaxos] title = Proving the Correctness of Disk Paxos date = 2005-06-22 author = Mauro Jaskelioff , Stephan Merz topic = Computer Science/Algorithms/Distributed abstract = Disk Paxos is an algorithm for building arbitrary fault-tolerant distributed systems. The specification of Disk Paxos has been proved correct informally and tested using the TLC model checker, but up to now, it has never been fully formally verified. In this work we have formally verified its correctness using the Isabelle theorem prover and the HOL logic system, showing that Isabelle is a practical tool for verifying properties of TLA+ specifications. [GenClock] title = Formalization of a Generalized Protocol for Clock Synchronization author = Alwen Tiu date = 2005-06-24 topic = Computer Science/Algorithms/Distributed abstract = We formalize the generalized Byzantine fault-tolerant clock synchronization protocol of Schneider. This protocol abstracts from particular algorithms or implementations for clock synchronization. This abstraction includes several assumptions on the behaviors of physical clocks and on general properties of concrete algorithms/implementations. Based on these assumptions the correctness of the protocol is proved by Schneider. 
His proof was later verified by Shankar using the theorem prover EHDM (precursor to PVS). Our formalization in Isabelle/HOL is based on Shankar's formalization. [ClockSynchInst] title = Instances of Schneider's generalized protocol of clock synchronization author = Damián Barsotti date = 2006-03-15 topic = Computer Science/Algorithms/Distributed abstract = F. B. Schneider ("Understanding protocols for Byzantine clock synchronization") generalizes a number of protocols for Byzantine fault-tolerant clock synchronization and presents a uniform proof for their correctness. In Schneider's schema, each processor maintains a local clock by periodically adjusting each value to one computed by a convergence function applied to the readings of all the clocks. Then, correctness of an algorithm, i.e. that the readings of two clocks at any time are within a fixed bound of each other, is based upon some conditions on the convergence function. To prove that a particular clock synchronization algorithm is correct it suffices to show that the convergence function used by the algorithm meets Schneider's conditions. Using the theorem prover Isabelle, we formalize the proofs that the convergence functions of two algorithms, namely, the Interactive Convergence Algorithm (ICA) of Lamport and Melliar-Smith and the Fault-tolerant Midpoint algorithm of Lundelius-Lynch, meet Schneider's conditions. Furthermore, we experiment with handling some parts of the proofs with fully automatic tools like ICS and CVC-lite. These theories are part of a joint work with Alwen Tiu and Leonor P. Nieto "Verification of Clock Synchronization Algorithms: Experiments on a combination of deductive tools" in proceedings of AVOCS 2005. In this work the correctness of Schneider's schema was also verified using Isabelle (entry GenClock in AFP).

    Indeed, there have been numerous misunderstandings about what exactly a given algorithm is supposed to realize in what kinds of environments. Moreover, the abundance of subtly different computational models complicates comparisons between different algorithms. Charron-Bost and Schiper introduced the Heard-Of model for representing algorithms and failure assumptions in a uniform framework, simplifying comparisons between algorithms.

    In this contribution, we represent the Heard-Of model in Isabelle/HOL. We define two semantics of runs of algorithms with different units of atomicity and relate these through a reduction theorem that allows us to verify algorithms in the coarse-grained semantics (where proofs are easier) and infer their correctness for the fine-grained one (which corresponds to actual executions). We instantiate the framework by verifying six Consensus algorithms that differ in the underlying algorithmic mechanisms and the kinds of faults they tolerate. [Consensus_Refined] title = Consensus Refined date = 2015-03-18 author = Ognjen Maric , Christoph Sprenger topic = Computer Science/Algorithms/Distributed abstract = Algorithms for solving the consensus problem are fundamental to distributed computing. Despite their brevity, their ability to operate in concurrent, asynchronous and failure-prone environments comes at the cost of complex and subtle behaviors. Accordingly, understanding how they work and proving their correctness is a non-trivial endeavor where abstraction is immensely helpful. Moreover, research on consensus has yielded a large number of algorithms, many of which appear to share common algorithmic ideas. A natural question is whether and how these similarities can be distilled and described in a precise, unified way. In this work, we combine stepwise refinement and lockstep models to provide an abstract and unified view of a sizeable family of consensus algorithms. Our models provide insights into the design choices underlying the different algorithms, and classify them based on those choices. [Abortable_Linearizable_Modules] title = Abortable Linearizable Modules author = Rachid Guerraoui , Viktor Kuncak , Giuliano Losa date = 2012-03-01 topic = Computer Science/Algorithms/Distributed abstract = We define the Abortable Linearizable Module automaton (ALM for short) and prove its key composition property using the IOA theory of HOLCF. The ALM is at the heart of the Speculative Linearizability framework. This framework simplifies devising correct speculative algorithms by enabling their decomposition into independent modules that can be analyzed and proved correct in isolation. It is particularly useful when working in a distributed environment, where the need to tolerate faults and asynchrony has made current monolithic protocols so intricate that it is no longer tractable to check their correctness. Our theory contains a typical example of a refinement proof in the I/O-automata framework of Lynch and Tuttle. [Amortized_Complexity] title = Amortized Complexity Verified author = Tobias Nipkow date = 2014-07-07 topic = Computer Science/Data Structures -depends-on = Splay_Tree, Skew_Heap abstract = A framework for the analysis of the amortized complexity of (functional) data structures is formalized in Isabelle/HOL and applied to a number of standard examples and to the following non-trivial ones: skew heaps, splay trees, splay heaps and pairing heaps. This work is described in a paper published in the proceedings of the conference on Interactive Theorem Proving ITP 2015. extra-history = Change history: [2015-03-17]: Added pairing heaps by Hauke Brinkop.
    [Dynamic_Tables] title = Parameterized Dynamic Tables author = Tobias Nipkow date = 2015-06-07 topic = Computer Science/Data Structures -depends-on = Amortized_Complexity abstract = This article formalizes the amortized analysis of dynamic tables parameterized with their minimal and maximal load factors and the expansion and contraction factors.

    A full description is found in a companion paper. [AVL-Trees] title = AVL Trees author = Tobias Nipkow , Cornelia Pusch <> date = 2004-03-19 topic = Computer Science/Data Structures abstract = Two formalizations of AVL trees with room for extensions. The first formalization is monolithic and shorter, the second one in two stages, longer and a bit simpler. The final implementation is the same. If you are interested in developing this further, please contact gerwin.klein@nicta.com.au. extra-history = Change history: [2011-04-11]: Ondrej Kuncar added delete function [BDD] title = BDD Normalisation author = Veronika Ortner <>, Norbert Schirmer <> date = 2008-02-29 topic = Computer Science/Data Structures abstract = We present the verification of the normalisation of a binary decision diagram (BDD). The normalisation follows the original algorithm presented by Bryant in 1986 and transforms an ordered BDD into a reduced, ordered and shared BDD. The verification is based on Hoare logics. -depends-on = Simpl [BinarySearchTree] title = Binary Search Trees author = Viktor Kuncak date = 2004-04-05 topic = Computer Science/Data Structures abstract = The correctness of binary search tree operations (lookup, insert and remove) implementing a set is shown. Two versions are given, for both structured and linear (tactic-style) proofs. An implementation of integer-indexed maps is also verified. [Splay_Tree] title = Splay Tree author = Tobias Nipkow date = 2014-08-12 topic = Computer Science/Data Structures abstract = Splay trees are self-adjusting binary search trees which were invented by Sleator and Tarjan [JACM 1985]. This entry provides executable and verified functional splay trees.
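
    (Illustration only: the plain unbalanced binary search tree that the tree entries above refine, as a Python sketch with hypothetical names; AVL trees add balancing, and splay trees restructure the tree on every access.)

        # a tree is None (empty) or a tuple (left, key, right)
        def bst_insert(t, k):
            if t is None:
                return (None, k, None)
            l, x, r = t
            if k < x:
                return (bst_insert(l, k), x, r)
            if k > x:
                return (l, x, bst_insert(r, k))
            return t                    # key already present

        def bst_member(t, k):
            while t is not None:
                l, x, r = t
                if k == x:
                    return True
                t = l if k < x else r
            return False

        t = None
        for k in [3, 1, 4, 1, 5]:
            t = bst_insert(t, k)
        print(bst_member(t, 4), bst_member(t, 2))   # True False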

    The amortized complexity of splay trees is analyzed in the AFP entry Amortized Complexity. [Skew_Heap] title = Skew Heap author = Tobias Nipkow date = 2014-08-13 topic = Computer Science/Data Structures abstract = Skew heaps are an amazingly simple and lightweight implementation of priority queues. They were invented by Sleator and Tarjan [SIAM 1986] and have logarithmic amortized complexity. This entry provides executable and verified functional skew heaps.
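
    (Illustration only: the simplicity claim is easy to appreciate, since meld is the whole algorithm and insert and delete-min are one-liners on top of it; a Python sketch of a min-heap variant with hypothetical names.)

        # a heap is None (empty) or a tuple (value, left, right)
        def meld(h1, h2):
            if h1 is None: return h2
            if h2 is None: return h1
            if h2[0] < h1[0]:
                h1, h2 = h2, h1          # keep the smaller root on top
            v, l, r = h1
            return (v, meld(h2, r), l)   # always swap the children: the skew step

        def insert(x, h):
            return meld((x, None, None), h)

        def delete_min(h):
            return h[0], meld(h[1], h[2])

        h = None
        for x in [5, 1, 4, 2]:
            h = insert(x, h)
        print(delete_min(h)[0])   # 1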

    The amortized complexity of skew heaps is analyzed in the AFP entry Amortized Complexity. [Priority_Queue_Braun] title = Priority Queues Based on Braun Trees author = Tobias Nipkow date = 2014-09-04 topic = Computer Science/Data Structures abstract = This theory implements priority queues via Braun trees. Insertion and deletion take logarithmic time and preserve the balanced nature of Braun trees. [Binomial-Queues] title = Functional Binomial Queues author = René Neumann date = 2010-10-28 topic = Computer Science/Data Structures abstract = Priority queues are an important data structure and efficient implementations of them are crucial. We implement a functional variant of binomial queues in Isabelle/HOL and show its functional correctness. A verification against an abstract reference specification of priority queues has also been attempted, but could not be achieved to the full extent. [Binomial-Heaps] title = Binomial Heaps and Skew Binomial Heaps author = Rene Meis , Finn Nielsen , Peter Lammich date = 2010-10-28 topic = Computer Science/Data Structures abstract = We implement and prove correct binomial heaps and skew binomial heaps. Both are data structures for priority queues. While binomial heaps have logarithmic findMin, deleteMin, insert, and meld operations, skew binomial heaps have constant-time findMin, insert, and meld operations, and only the deleteMin-operation is logarithmic. This is achieved by using skew links to avoid cascading linking on insert-operations, and data-structural bootstrapping to get constant-time findMin and meld operations. Our implementation follows the paper by Brodal and Okasaki. [Finger-Trees] title = Finger Trees author = Benedikt Nordhoff , Stefan Körner , Peter Lammich date = 2010-10-28 topic = Computer Science/Data Structures abstract = We implement and prove correct 2-3 finger trees. Finger trees are a general-purpose data structure that can be used to efficiently implement other data structures, such as priority queues. Intuitively, a finger tree is an annotated sequence, where the annotations are elements of a monoid. Apart from operations to access the ends of the sequence, the main operation is to split the sequence at the point where a monotone predicate over the sum of the left part of the sequence becomes true for the first time. The implementation follows the paper of Hinze and Paterson. The code generator can be used to get efficient, verified code. [Trie] title = Trie author = Andreas Lochbihler , Tobias Nipkow date = 2015-03-30 topic = Computer Science/Data Structures abstract = This article formalizes the ``trie'' data structure invented by Fredkin [CACM 1960]. It also provides a specialization where the entries in the trie are lists. extra-0 = Origin: This article was extracted from existing articles by the authors. [FinFun] title = Code Generation for Functions as Data author = Andreas Lochbihler date = 2009-05-06 topic = Computer Science/Data Structures abstract = FinFun is now part of the Isabelle distribution. The entry is kept for archival; maintained but not developed further. Use the Isabelle distribution version instead.

    FinFuns are total functions that are constant except for a finite set of points, i.e. a generalisation of finite maps. They are formalised as a new type in Isabelle/HOL such that the code generator can handle equality tests and quantification on FinFuns. On the code output level, FinFuns are explicitly represented by constant functions and pointwise updates, similarly to associative lists. Inside the logic, they behave like ordinary functions with extensionality. Via the update/constant pattern, a recursion combinator and an induction rule for FinFuns allow for defining and reasoning about operators on FinFuns that are also executable.
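
    (Illustration only: a rough Python analogue of the constant-plus-pointwise-updates representation with hypothetical names; keeping the update table canonical is what makes the equality test decidable.)

        class FinFun:
            """A total function: a default value plus finitely many pointwise updates."""
            def __init__(self, default, updates=None):
                self.default = default
                # canonical representation: never store an update equal to the default
                self.updates = {k: v for k, v in (updates or {}).items() if v != default}
            def __call__(self, x):
                return self.updates.get(x, self.default)
            def update(self, x, y):
                return FinFun(self.default, {**self.updates, x: y})
            def __eq__(self, other):
                # decidable extensional equality, thanks to canonicity
                return self.default == other.default and self.updates == other.updates

        f = FinFun(0).update(3, 7)
        print(f(3), f(42), f.update(3, 0) == FinFun(0))   # 7 0 True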

    extra-history = Change history: [2010-08-13]: new concept domain of a FinFun as a FinFun (revision 34b3517cbc09)
    [2010-11-04]: new conversion function from FinFun to list of elements in the domain (revision 0c167102e6ed)
    [2012-03-07]: replace sets as FinFuns by predicates as FinFuns because the set type constructor has been reintroduced (revision b7aa87989f3a) [Collections] title = Collections Framework author = Peter Lammich contributors = Andreas Lochbihler , Thomas Tuerk <> date = 2009-11-25 topic = Computer Science/Data Structures abstract = This development provides an efficient, extensible, machine-checked collections framework. The library adopts the concepts of interface, implementation and generic algorithm from object-oriented programming and implements them in Isabelle/HOL. The framework features the use of data refinement techniques to refine an abstract specification (using high-level concepts like sets) to a more concrete implementation (using collection data structures, like red-black trees). The code-generator of Isabelle/HOL can be used to generate efficient code. -depends-on = Binomial-Heaps, Finger-Trees, Refine_Monadic extra-history = Change history: [2010-10-08]: New Interfaces: OrderedSet, OrderedMap, List. Fifo now implements list-interface: Function names changed: put/get --> enqueue/dequeue. New Implementations: ArrayList, ArrayHashMap, ArrayHashSet, TrieMap, TrieSet. Invariant-free datastructures: Invariant implicitly hidden in typedef. Record-interfaces: All operations of an interface encapsulated as record. Examples moved to examples subdirectory.
    [2010-12-01]: New Interfaces: Priority Queues, Annotated Lists. Implemented by finger trees, (skew) binomial queues.
    [2011-10-10]: SetSpec: Added operations: sng, isSng, bexists, size_abort, diff, filter, iterate_rule_insertP MapSpec: Added operations: sng, isSng, iterate_rule_insertP, bexists, size, size_abort, restrict, map_image_filter, map_value_image_filter Some maintenance changes
    [2012-04-25]: New iterator foundation by Tuerk. Various maintenance changes.
    [2012-08]: Collections V2. New features: Polymorphic iterators. Generic algorithm instantiation where required. Naming scheme changed from xx_opname to xx.opname. A compatibility file CollectionsV1 tries to simplify porting of existing theories, by providing old naming scheme and the old monomorphic iterator locales.
    [2013-09]: Added Generic Collection Framework based on Autoref. The GenCF provides: Arbitrary nesting, full integration with Autoref.
    [2014-06]: Maintenance changes to GenCF: Optimized inj_image on list_set. op_set_cart (Cartesian product). big-Union operation. atLeastLessThan - operation ({a..<b})
    [Containers] title = Light-weight Containers author = Andreas Lochbihler contributors = René Thiemann date = 2013-04-15 topic = Computer Science/Data Structures abstract = This development provides a framework for container types like sets and maps such that generated code implements these containers with different (efficient) data structures. Thanks to type classes and refinement during code generation, this light-weight approach can seamlessly replace Isabelle's default setup for code generation. Heuristics automatically pick one of the available data structures depending on the type of elements to be stored, but users can also choose on their own. The extensible design permits adding more implementations at any time.

    To support arbitrary nesting of sets, we define a linear order on sets based on a linear order of the elements and provide efficient implementations. It even allows comparing complements with non-complements. -depends-on = Deriving extra-history = Change history: [2013-07-11]: add pretty printing for sets (revision 7f3f52c5f5fa)
    [2013-09-20]: provide generators for canonical type class instantiations (revision 159f4401f4a8 by René Thiemann)
    [2014-07-08]: add support for going from partial functions to mappings (revision 7a6fc957e8ed) [FileRefinement] title = File Refinement author = Karen Zee , Viktor Kuncak date = 2004-12-09 topic = Computer Science/Data Structures abstract = These theories illustrate the verification of basic file operations (file creation, file read and file write) in the Isabelle theorem prover. We describe a file at two levels of abstraction: an abstract file represented as a resizable array, and a concrete file represented using data blocks. [Datatype_Order_Generator] title = Generating linear orders for datatypes author = René Thiemann date = 2012-08-07 topic = Computer Science/Data Structures abstract = We provide a framework for registering automatic methods to derive class instances of datatypes, as it is possible using Haskell's ``deriving Ord, Show, ...'' feature.

    We further implemented such automatic methods to derive (linear) orders or hash-functions which are required in the Isabelle Collection Framework. Moreover, for the tactic of Huffman and Krauss to show that a datatype is countable, we implemented a wrapper so that this tactic becomes accessible in our framework.
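
    (Illustration only: Python's dataclasses give a loose analogue of such a deriving mechanism, generating order and hashability from the field structure; an analogy only, not the entry's mechanism.)

        from dataclasses import dataclass

        @dataclass(order=True, frozen=True)
        class Point:
            x: int
            y: int

        # __lt__ (lexicographic over the fields) and __hash__ come for free
        print(Point(1, 2) < Point(1, 3), hash(Point(1, 2)) == hash(Point(1, 2)))   # True True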

    Our formalization was performed as part of the IsaFoR/CeTA project. With our new tactic we could completely remove tedious proofs for linear orders of two datatypes.

    This development is aimed at datatypes generated by the "old_datatype" command. -depends-on = Collections, Deriving [Deriving] title = Deriving class instances for datatypes author = Christian Sternagel , René Thiemann date = 2015-03-11 topic = Computer Science/Data Structures abstract = We provide a framework for registering automatic methods to derive class instances of datatypes, as it is possible using Haskell's ``deriving Ord, Show, ...'' feature. We further implemented such automatic methods to derive comparators, linear orders, parametrizable equality functions, and hash-functions which are required in the Isabelle Collection Framework and the Container Framework. Moreover, for the tactic of Blanchette to show that a datatype is countable, we implemented a wrapper so that this tactic becomes accessible in our framework. All of the generators are based on the infrastructure that is provided by the BNF-based datatype package. Our formalization was performed as part of the IsaFoR/CeTA project. With our new tactics we could remove several tedious proofs for (conditional) linear orders, and conditional equality operators within IsaFoR and the Container Framework. -depends-on = Collections [List-Index] title = List Index date = 2010-02-20 author = Tobias Nipkow topic = Computer Science/Data Structures abstract = This theory provides functions for finding the index of an element in a list, by predicate and by value. [List-Infinite] title = Infinite Lists date = 2011-02-23 author = David Trachtenherz topic = Computer Science/Data Structures abstract = We introduce a theory of infinite lists in HOL formalized as functions over naturals (folder ListInf, theories ListInf and ListInf_Prefix). It also provides additional results for finite lists (theory ListInf/List2), natural numbers (folder CommonArith, esp. division/modulo, naturals with infinity), sets (folder CommonSet, esp. cutting/truncating sets, traversing sets of naturals). [Matrix] title = Executable Matrix Operations on Matrices of Arbitrary Dimensions topic = Computer Science/Data Structures date = 2010-06-17 author = Christian Sternagel , René Thiemann license = LGPL abstract = We provide the operations of matrix addition, multiplication, transposition, and matrix comparisons as executable functions over ordered semirings. Moreover, it is proven that strongly normalizing (monotone) orders can be lifted to strongly normalizing (monotone) orders over matrices. We further show that the standard semirings over the naturals, integers, and rationals, as well as the arctic semirings satisfy the axioms that are required by our matrix theory. Our formalization is part of the CeTA system which contains several termination techniques. The provided theories have been essential to formalize matrix-interpretations and arctic interpretations. -depends-on = Abstract-Rewriting extra-history = Change history: [2010-09-17]: Moved theory on arbitrary (ordered) semirings to Abstract Rewriting. [Matrix_Tensor] title = Tensor Product of Matrices topic = Computer Science/Data Structures, Mathematics/Algebra date = 2016-01-18 author = T.V.H. Prathamesh abstract = In this work, the Kronecker tensor product of matrices and the proofs of some of its properties are formalized. Properties which have been formalized include associativity of the tensor product and the mixed-product property. 
-depends-on = Matrix

[Huffman]
title = The Textbook Proof of Huffman's Algorithm
author = Jasmin Christian Blanchette
date = 2008-10-15
topic = Computer Science/Data Structures
abstract = Huffman's algorithm is a procedure for constructing a binary tree with minimum weighted path length. This report presents a formal proof of the correctness of Huffman's algorithm written using Isabelle/HOL. Our proof closely follows the sketches found in standard algorithms textbooks, uncovering a few snags in the process. Another distinguishing feature of our formalization is the use of custom induction rules to help Isabelle's automatic tactics, leading to very short proofs for most of the lemmas.

[Partial_Function_MR]
title = Mutually Recursive Partial Functions
author = René Thiemann
topic = Computer Science/Functional Programming
date = 2014-02-18
license = LGPL
abstract = We provide a wrapper around the partial-function command that supports mutual recursion.

[Lifting_Definition_Option]
title = Lifting Definition Option
author = René Thiemann
topic = Computer Science/Functional Programming
date = 2014-10-13
license = LGPL
abstract = We implemented a command that can be used to easily generate elements of a restricted type {x :: 'a. P x}, provided the definition is of the form f ys = (if check ys then Some(generate ys :: 'a) else None) where ys is a list of variables y1 ... yn and check ys ==> P(generate ys) can be proved.

    In principle, such a definition is also directly possible using the lift_definition command. However, then this definition will not be suitable for code-generation. To this end, we automated a more complex construction of Joachim Breitner which is amenable for code-generation, and where the test check ys will only be performed once. In the automation, one auxiliary type is created, and Isabelle's lifting- and transfer-package is invoked several times.

[Coinductive]
title = Coinductive
topic = Computer Science/Functional Programming
author = Andreas Lochbihler
contributors = Johannes Hölzl
date = 2010-02-12
abstract = This article collects formalisations of general-purpose coinductive data types and sets. Currently, it contains coinductive natural numbers, coinductive lists, i.e. lazy lists or streams, infinite streams, coinductive terminated lists, coinductive resumptions, a library of operations on coinductive lists, and a version of König's lemma as an application for coinductive lists.
    The initial theory was contributed by Paulson and Wenzel. Extensions and other coinductive formalisations of general interest are welcome.
extra-history = Change history: [2010-06-10]: coinductive lists: setup for quotient package (revision 015574f3bf3c)
    [2010-06-28]: new codatatype terminated lazy lists (revision e12de475c558)
    [2010-08-04]: terminated lazy lists: setup for quotient package; more lemmas (revision 6ead626f1d01)
    [2010-08-17]: Koenig's lemma as an example application for coinductive lists (revision f81ce373fa96)
    [2011-02-01]: lazy implementation of coinductive (terminated) lists for the code generator (revision 6034973dce83)
    [2011-07-20]: new codatatype resumption (revision 811364c776c7)
    [2012-06-27]: new codatatype stream with operations (with contributions by Peter Gammie) (revision dd789a56473c)
    [2013-03-13]: construct codatatypes with the BNF package and adjust the definitions and proofs, setup for lifting and transfer packages (revision f593eda5b2c0)
    [2013-09-20]: stream theory uses type and operations from HOL/BNF/Examples/Stream (revision 692809b2b262)
    [2014-04-03]: ccpo structure on codatatypes used to define ldrop, ldropWhile, lfilter, lconcat as least fixpoint; ccpo topology on coinductive lists contributed by Johannes Hölzl; added examples (revision 23cd8156bd42)
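
A rough Python sketch may help readers place the coinductive list operations mentioned above; it is illustrative only (the names merely echo the Isabelle operations and are not part of the entry), with generators standing in for possibly infinite lazy lists.

    from itertools import islice

    def lmap(f, xs):
        # image of a (possibly infinite) lazy list under f
        for x in xs:
            yield f(x)

    def lfilter(p, xs):
        # keep only elements satisfying p; may diverge if p never holds again
        for x in xs:
            if p(x):
                yield x

    def naturals():
        # a genuinely infinite "coinductive list" 0, 1, 2, ...
        n = 0
        while True:
            yield n
            n += 1

    # first five even naturals, computed lazily:
    print(list(islice(lfilter(lambda n: n % 2 == 0, naturals()), 5)))
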
[Stream-Fusion]
title = Stream Fusion
author = Brian Huffman
topic = Computer Science/Functional Programming
date = 2009-04-29
abstract = Stream Fusion is a system for removing intermediate list structures from Haskell programs; it consists of a Haskell library along with several compiler rewrite rules. (The library is available online.)

    These theories contain a formalization of much of the Stream Fusion library in HOLCF. Lazy list and stream types are defined, along with coercions between the two types, as well as an equivalence relation for streams that generate the same list. List and stream versions of map, filter, foldr, enumFromTo, append, zipWith, and concatMap are defined, and the stream versions are shown to respect stream equivalence.

[Tycon]
title = Type Constructor Classes and Monad Transformers
author = Brian Huffman
date = 2012-06-26
topic = Computer Science/Functional Programming
abstract = These theories contain a formalization of first class type constructors and axiomatic constructor classes for HOLCF. This work is described in detail in the ICFP 2012 paper Formal Verification of Monad Transformers by the author. The formalization is a revised and updated version of earlier joint work with Matthews and White.

    Based on the hierarchy of type classes in Haskell, we define classes for functors, monads, monad-plus, etc. Each one includes all the standard laws as axioms. We also provide a new user command, tycondef, for defining new type constructors in HOLCF. Using tycondef, we instantiate the type class hierarchy with various monads and monad transformers.

[CoreC++]
title = CoreC++
author = Daniel Wasserrab
date = 2006-05-15
topic = Computer Science/Programming Languages/Language Definitions
abstract = We present an operational semantics and type safety proof for multiple inheritance in C++. The semantics models the behavior of method calls, field accesses, and two forms of casts in C++ class hierarchies. For explanations see the OOPSLA 2006 paper by Wasserrab, Nipkow, Snelting and Tip.

[FeatherweightJava]
title = A Theory of Featherweight Java in Isabelle/HOL
author = J. Nathan Foster, Dimitrios Vytiniotis
date = 2006-03-31
topic = Computer Science/Programming Languages/Language Definitions
abstract = We formalize the type system, small-step operational semantics, and type soundness proof for Featherweight Java, a simple object calculus, in Isabelle/HOL.

[Jinja]
title = Jinja is not Java
author = Gerwin Klein, Tobias Nipkow
date = 2005-06-01
topic = Computer Science/Programming Languages/Language Definitions
abstract = We introduce Jinja, a Java-like programming language with a formal semantics designed to exhibit core features of the Java language architecture. Jinja is a compromise between realism of the language and tractability and clarity of the formal semantics. The following aspects are formalised: a big and a small step operational semantics for Jinja and a proof of their equivalence; a type system and a definite initialisation analysis; a type safety proof of the small step semantics; a virtual machine (JVM), its operational semantics and its type system; a type safety proof for the JVM; a bytecode verifier, i.e. data flow analyser for the JVM; a correctness proof of the bytecode verifier w.r.t. the type system; a compiler and a proof that it preserves semantics and well-typedness. The emphasis of this work is not on particular language features but on providing a unified model of the source language, the virtual machine and the compiler. The whole development has been carried out in the theorem prover Isabelle/HOL.

[JinjaThreads]
title = Jinja with Threads
author = Andreas Lochbihler
date = 2007-12-03
topic = Computer Science/Programming Languages/Language Definitions
abstract = We extend the Jinja source code semantics by Klein and Nipkow with Java-style arrays and threads. Concurrency is captured in a generic framework semantics for adding concurrency through interleaving to a sequential semantics, which features dynamic thread creation, inter-thread communication via shared memory, lock synchronisation and joins. Also, threads can suspend themselves and be notified by others. We instantiate the framework with the adapted versions of both Jinja source and byte code and show type safety for the multithreaded case. Equally, the compiler from source to byte code is extended, for which we prove weak bisimilarity between the source code small step semantics and the defensive Jinja virtual machine. On top of this, we formalise the JMM and show the DRF guarantee and consistency. For description of the different parts, see Lochbihler's papers at FOOL 2008, ESOP 2010, ITP 2011, and ESOP 2012.
-depends-on = Collections, Coinductive
extra-history = Change history: [2008-04-23]: added bytecode formalisation with arrays and threads, added thread joins (revision f74a8be156a7)
    [2009-04-27]: added verified compiler from source code to bytecode; encapsulate native methods in separate semantics (revision e4f26541e58a)
    [2009-11-30]: extended compiler correctness proof to infinite and deadlocking computations (revision e50282397435)
    [2010-06-08]: added thread interruption; new abstract memory model with sequential consistency as implementation (revision 0cb9e8dbd78d)
    [2010-06-28]: new thread interruption model (revision c0440d0a1177)
    [2010-10-15]: preliminary version of the Java memory model for source code (revision 02fee0ef3ca2)
    [2010-12-16]: improved version of the Java memory model, also for bytecode; executable scheduler for source code semantics (revision 1f41c1842f5a)
    [2011-02-02]: simplified code generator setup; new random scheduler (revision 3059dafd013f)
    [2011-07-21]: new interruption model, generalized JMM proof of DRF guarantee, allow class Object to declare methods and fields, simplified subtyping relation, corrected division and modulo implementation (revision 46e4181ed142)
    [2012-02-16]: added example programs (revision bf0b06c8913d)
    [2012-11-21]: type safety proof for the Java memory model, allow spurious wake-ups (revision 76063d860ae0)
    [2013-05-16]: support for non-deterministic memory allocators (revision cc3344a49ced)

[Locally-Nameless-Sigma]
title = Locally Nameless Sigma Calculus
author = Ludovic Henrio, Florian Kammüller, Bianca Lutz, Henry Sudhof
date = 2010-04-30
topic = Computer Science/Programming Languages/Language Definitions
abstract = We present a Theory of Objects based on the original functional sigma-calculus by Abadi and Cardelli but with an additional parameter to methods. We prove confluence of the operational semantics following the outline of Nipkow's proof of confluence for the lambda-calculus reusing his theory Commutation, a generic diamond lemma reduction. We furthermore formalize a simple type system for our sigma-calculus including a proof of type safety. The entire development uses the concept of Locally Nameless representation for binders. We reuse an earlier proof of confluence for a simpler sigma-calculus based on de Bruijn indices and lists to represent objects.

[AutoFocus-Stream]
title = AutoFocus Stream Processing for Single-Clocking and Multi-Clocking Semantics
author = David Trachtenherz
date = 2011-02-23
topic = Computer Science/Programming Languages/Language Definitions
abstract = We formalize the AutoFocus Semantics (a time-synchronous subset of the Focus formalism) as stream processing functions on finite and infinite message streams represented as finite/infinite lists. The formalization comprises both the conventional single-clocking semantics (uniform global clock for all components and communications channels) and its extension to multi-clocking semantics (internal execution clocking of a component may be a multiple of the external communication clocking). The semantics is defined by generic stream processing functions making it suitable for simulation/code generation in Isabelle/HOL. Furthermore, a number of AutoFocus semantics properties are formalized using definitions from the IntervalLogic theories.

[FocusStreamsCaseStudies]
title = Stream Processing Components: Isabelle/HOL Formalisation and Case Studies
author = Maria Spichkova
date = 2013-11-14
topic = Computer Science/Programming Languages/Language Definitions
abstract = This set of theories presents an Isabelle/HOL formalisation of stream processing components introduced in Focus, a framework for formal specification and development of interactive systems. This is an extended and updated version of the formalisation, which was elaborated within the methodology "Focus on Isabelle". In addition, we also applied the formalisation on three case studies that cover different application areas: process control (Steam Boiler System), data transmission (FlexRay communication protocol), memory and processing components (Automotive-Gateway System).

[Isabelle_Meta_Model]
title = A Meta-Model for the Isabelle API
author = Frédéric Tuong, Burkhart Wolff
date = 2015-09-16
topic = Computer Science/Programming Languages/Language Definitions
abstract = We represent a theory of (a fragment of) Isabelle/HOL in Isabelle/HOL. The purpose of this exercise is to write packages for domain-specific specifications such as class models, B-machines, ..., and generally speaking, any domain-specific languages whose abstract syntax can be defined by a HOL "datatype". On this basis, the Isabelle code-generator can then be used to generate code for global context transformations as well as tactic code.

    Consequently the package is geared towards parsing, printing and code-generation to the Isabelle API. It is at the moment not sufficiently rich for doing meta theory on Isabelle itself. Extensions in this direction are possible though.

    Moreover, the chosen fragment is fairly rudimentary. However it should be easily adapted to one's needs if a package is written on top of it. The supported API contains types, terms, transformation of global context like definitions and data-type declarations as well as infrastructure for Isar-setups.

    This theory is drawn from the Featherweight OCL project where it is used to construct a package for object-oriented data-type theories generated from UML class diagrams. Featherweight OCL, for example, allows both the direct execution of compiled tactic code by the Isabelle API and the generation of ".thy"-files for debugging purposes.

    Experience gained from this project shows that the compiled code is sufficiently efficient for practical purposes while being based on a formal model on which properties of the package can be proven, such as termination of certain transformations, correctness, etc.

[PCF]
title = Logical Relations for PCF
author = Peter Gammie
date = 2012-07-01
topic = Computer Science/Programming Languages/Lambda Calculi
abstract = We apply Andy Pitts's methods of defining relations over domains to several classical results in the literature. We show that the Y combinator coincides with the domain-theoretic fixpoint operator, that parallel-or and the Plotkin existential are not definable in PCF, that the continuation semantics for PCF coincides with the direct semantics, and that our domain-theoretic semantics for PCF is adequate for reasoning about contextual equivalence in an operational semantics. Our version of PCF is untyped and has both strict and non-strict function abstractions. The development is carried out in HOLCF.

[POPLmark-deBruijn]
title = POPLmark Challenge Via de Bruijn Indices
author = Stefan Berghofer
date = 2007-08-02
topic = Computer Science/Programming Languages/Lambda Calculi
abstract = We present a solution to the POPLmark challenge designed by Aydemir et al., which has as a goal the formalization of the meta-theory of System F<:. The formalization is carried out in the theorem prover Isabelle/HOL using an encoding based on de Bruijn indices. We start with a relatively simple formalization covering only the basic features of System F<:, and explain how it can be extended to also cover records and more advanced binding constructs.

[Lam-ml-Normalization]
title = Strong Normalization of Moggi's Computational Metalanguage
author = Christian Doczkal
date = 2010-08-29
topic = Computer Science/Programming Languages/Lambda Calculi
abstract = Handling variable binding is one of the main difficulties in formal proofs. In this context, Moggi's computational metalanguage serves as an interesting case study. It features monadic types and a commuting conversion rule that rearranges the binding structure. Lindley and Stark have given an elegant proof of strong normalization for this calculus. The key construction in their proof is a notion of relational TT-lifting, using stacks of elimination contexts to obtain a Girard-Tait style logical relation. I give a formalization of their proof in Isabelle/HOL-Nominal with a particular emphasis on the treatment of bound variables.

[MiniML]
title = Mini ML
author = Wolfgang Naraschewski <>, Tobias Nipkow
date = 2004-03-19
topic = Computer Science/Programming Languages/Type Systems
abstract = This theory defines the type inference rules and the type inference algorithm W for MiniML (simply-typed lambda terms with let) due to Milner. It proves the soundness and completeness of W w.r.t. the rules.

[Simpl]
title = A Sequential Imperative Programming Language Syntax, Semantics, Hoare Logics and Verification Environment
author = Norbert Schirmer <>
date = 2008-02-29
topic = Computer Science/Programming Languages/Language Definitions, Computer Science/Programming Languages/Logics
license = LGPL
abstract = We present the theory of Simpl, a sequential imperative programming language. We introduce its syntax, its semantics (big and small-step operational semantics) and Hoare logics for both partial as well as total correctness. We prove soundness and completeness of the Hoare logic.
    We integrate and automate the Hoare logic in Isabelle/HOL to obtain a practically usable verification environment for imperative programs. Simpl is independent of a concrete programming language but expressive enough to cover all common language features: mutually recursive procedures, abrupt termination and exceptions, runtime faults, local and global variables, pointers and heap, expressions with side effects, pointers to procedures, partial application and closures, dynamic method invocation and also unbounded nondeterminism.

[Separation_Algebra]
title = Separation Algebra
author = Gerwin Klein, Rafal Kolanski, Andrew Boyton
date = 2012-05-11
topic = Computer Science/Programming Languages/Logics
license = BSD
abstract = We present a generic type class implementation of separation algebra for Isabelle/HOL as well as lemmas and generic tactics which can be used directly for any instantiation of the type class.

    The ex directory contains example instantiations that include structures such as a heap or virtual memory.

    The abstract separation algebra is based upon "Abstract Separation Logic" by Calcagno et al. These theories are also the basis of the ITP 2012 rough diamond "Mechanised Separation Algebra" by the authors.

    The aim of this work is to support and significantly reduce the effort for future separation logic developments in Isabelle/HOL by factoring out the part of separation logic that can be treated abstractly once and for all. This includes developing typical default rule sets for reasoning as well as automated tactic support for separation logic.

[Separation_Logic_Imperative_HOL]
title = A Separation Logic Framework for Imperative HOL
author = Peter Lammich, Rene Meis
date = 2012-11-14
topic = Computer Science/Programming Languages/Logics
license = BSD
abstract = We provide a framework for separation-logic based correctness proofs of Imperative HOL programs. Our framework comes with a set of proof methods to automate canonical tasks such as verification condition generation and frame inference. Moreover, we provide a set of examples that show the applicability of our framework. The examples include algorithms on lists, hash-tables, and union-find trees. We also provide abstract interfaces for lists, maps, and sets, that allow us to develop generic imperative algorithms and use data-refinement techniques.
    As we target Imperative HOL, our programs can be translated to efficiently executable code in various target languages, including ML, OCaml, Haskell, and Scala.

[Inductive_Confidentiality]
title = Inductive Study of Confidentiality
author = Giampaolo Bella
date = 2012-05-02
topic = Computer Science/Security
abstract = This document contains the full theory files accompanying article Inductive Study of Confidentiality --- for Everyone in Formal Aspects of Computing. They aim at an illustrative and didactic presentation of the Inductive Method of protocol analysis, focusing on the treatment of one of the main goals of security protocols: confidentiality against a threat model. The treatment of confidentiality, which in fact forms a key aspect of all protocol analysis tools, has been found cryptic by many learners of the Inductive Method, hence the motivation for this work. The theory files in this document guide the reader step by step towards design and proof of significant confidentiality theorems. These are developed against two threat models, the standard Dolev-Yao and a more audacious one, the General Attacker, which turns out to be particularly useful also for teaching purposes.

[Possibilistic_Noninterference]
title = Possibilistic Noninterference
author = Andrei Popescu, Johannes Hölzl
date = 2012-09-10
topic = Computer Science/Security, Computer Science/Programming Languages/Type Systems
abstract = We formalize a wide variety of Volpano/Smith-style noninterference notions for a while language with parallel composition. We systematize and classify these notions according to compositionality w.r.t. the language constructs. Compositionality yields sound syntactic criteria (a.k.a. type systems) in a uniform way.

    An article about these proofs is published in the proceedings of the conference Certified Programs and Proofs 2012.

[SIFUM_Type_Systems]
title = A Formalization of Assumptions and Guarantees for Compositional Noninterference
author = Sylvia Grewe, Heiko Mantel, Daniel Schoepe
date = 2014-04-23
topic = Computer Science/Security, Computer Science/Programming Languages/Type Systems
abstract = Research in information-flow security aims at developing methods to identify undesired information leaks within programs from private (high) sources to public (low) sinks. For a concurrent system, it is desirable to have compositional analysis methods that allow for analyzing each thread independently and that nevertheless guarantee that the parallel composition of successfully analyzed threads satisfies a global security guarantee. However, such a compositional analysis should not be overly pessimistic about what an environment might do with shared resources. Otherwise, the analysis will reject many intuitively secure programs.

    The paper "Assumptions and Guarantees for Compositional Noninterference" by Mantel et al. presents one solution for this problem: an approach for compositionally reasoning about non-interference in concurrent programs via rely-guarantee-style reasoning. We present an Isabelle/HOL formalization of the concepts and proofs of this approach.

[Strong_Security]
title = A Formalization of Strong Security
author = Sylvia Grewe, Alexander Lux, Heiko Mantel, Jens Sauer
date = 2014-04-23
topic = Computer Science/Security, Computer Science/Programming Languages/Type Systems
abstract = Research in information-flow security aims at developing methods to identify undesired information leaks within programs from private sources to public sinks. Noninterference captures this intuition. Strong security from Sabelfeld and Sands formalizes noninterference for concurrent systems.

    We present an Isabelle/HOL formalization of strong security for arbitrary security lattices (Sabelfeld and Sands use a two-element security lattice in the original publication). The formalization includes compositionality proofs for strong security and a soundness proof for a security type system that checks strong security for programs in a simple while language with dynamic thread creation.

    Our formalization of the security type system is abstract in the language for expressions and in the semantic side conditions for expressions. It can easily be instantiated with different syntactic approximations for these side conditions. The soundness proof of such an instantiation boils down to showing that these syntactic approximations imply the semantic side conditions.

[WHATandWHERE_Security]
title = A Formalization of Declassification with WHAT-and-WHERE-Security
author = Sylvia Grewe, Alexander Lux, Heiko Mantel, Jens Sauer
date = 2014-04-23
topic = Computer Science/Security, Computer Science/Programming Languages/Type Systems
abstract = Research in information-flow security aims at developing methods to identify undesired information leaks within programs from private sources to public sinks. Noninterference captures this intuition by requiring that no information whatsoever flows from private sources to public sinks. However, in practice this definition is often too strict: Depending on the intuitive desired security policy, the controlled declassification of certain private information (WHAT) at certain points in the program (WHERE) might not result in an undesired information leak.

    We present an Isabelle/HOL formalization of such a security property for controlled declassification, namely WHAT&WHERE-security from "Scheduler-Independent Declassification" by Lux, Mantel, and Perner. The formalization includes compositionality proofs for WHAT&WHERE-security and a soundness proof for a security type system that checks WHAT&WHERE-security for programs in a simple while language with dynamic thread creation.

    Our formalization of the security type system is abstract in the language for expressions and in the semantic side conditions for expressions. It can easily be instantiated with different syntactic approximations for these side conditions. The soundness proof of such an instantiation boils down to showing that these syntactic approximations imply the semantic side conditions.

    This Isabelle/HOL formalization uses theories from the entry Strong Security.

[VolpanoSmith]
title = A Correctness Proof for the Volpano/Smith Security Typing System
author = Gregor Snelting, Daniel Wasserrab
date = 2008-09-02
topic = Computer Science/Programming Languages/Type Systems, Computer Science/Security
abstract = The Volpano/Smith/Irvine security type system requires that variables are annotated as high (secret) or low (public), and provides typing rules which guarantee that secret values cannot leak to public output ports. This property of a program is called confidentiality. For a simple while-language without threads, our proof shows that typeability in the Volpano/Smith system guarantees noninterference. Noninterference means that if two initial states for program execution are low-equivalent, then the final states are low-equivalent as well. This indeed implies that secret values cannot leak to public ports. The proof defines an abstract syntax and operational semantics for programs, formalizes noninterference, and then proceeds by rule induction on the operational semantics. The mathematically most intricate part is the treatment of implicit flows. Note that the Volpano/Smith system is not flow-sensitive and thus quite imprecise, resulting in false alarms. However, due to the correctness property, all potential breaches of confidentiality are discovered.

[Abstract-Hoare-Logics]
title = Abstract Hoare Logics
author = Tobias Nipkow
date = 2006-08-08
topic = Computer Science/Programming Languages/Logics
abstract = These theories describe Hoare logics for a number of imperative language constructs, from while-loops to mutually recursive procedures. Both partial and total correctness are treated. In particular a proof system for total correctness of recursive procedures in the presence of unbounded nondeterminism is presented.

[Kleene_Algebra]
title = Kleene Algebra
author = Alasdair Armstrong, Georg Struth, Tjark Weber
date = 2013-01-15
topic = Computer Science/Programming Languages/Logics, Computer Science/Automata and Formal Languages, Mathematics/Algebra
abstract = These files contain a formalisation of variants of Kleene algebras and their most important models as axiomatic type classes in Isabelle/HOL. Kleene algebras are foundational structures in computing with applications ranging from automata and language theory to computational modeling, program construction and verification.

    We start with formalising dioids, which are additively idempotent semirings, and expand them by axiomatisations of the Kleene star for finite iteration and an omega operation for infinite iteration. We show that powersets over a given monoid, (regular) languages, sets of paths in a graph, sets of computation traces, binary relations and formal power series form Kleene algebras, and consider further models based on lattices, max-plus semirings and min-plus semirings. We also demonstrate that dioids are closed under the formation of matrices (proofs for Kleene algebras remain to be completed).

    On the one hand, we have aimed at a reference formalisation of variants of Kleene algebras that covers a wide range of variants and the core theorems in a structured and modular way and provides readable proofs at textbook level. On the other hand, we intend to use this algebraic hierarchy and its models as a generic algebraic middle-layer from which programming applications can quickly be explored, implemented and verified.

[KAT_and_DRA]
title = Kleene Algebra with Tests and Demonic Refinement Algebras
author = Alasdair Armstrong, Victor B. F. Gomes, Georg Struth
date = 2014-01-23
-depends-on = Kleene_Algebra
topic = Computer Science/Programming Languages/Logics, Computer Science/Automata and Formal Languages, Mathematics/Algebra
abstract = We formalise Kleene algebra with tests (KAT) and demonic refinement algebra (DRA) in Isabelle/HOL. KAT is relevant for program verification and correctness proofs in the partial correctness setting, while DRA targets similar applications in the context of total correctness. Our formalisation contains the two most important models of these algebras: binary relations in the case of KAT and predicate transformers in the case of DRA. In addition, we derive the inference rules for Hoare logic in KAT and its relational model and present a simple formally verified program verification tool prototype based on the algebraic approach.

[KAD]
title = Kleene Algebras with Domain
author = Victor B. F. Gomes, Walter Guttmann, Peter Höfner, Georg Struth, Tjark Weber
date = 2016-04-12
-depends-on = Kleene_Algebra
topic = Computer Science/Programming Languages/Logics, Computer Science/Automata and Formal Languages, Mathematics/Algebra
abstract = Kleene algebras with domain are Kleene algebras endowed with an operation that maps each element of the algebra to its domain of definition (or its complement) in abstract fashion. They form a simple algebraic basis for Hoare logics, dynamic logics or predicate transformer semantics. We formalise a modular hierarchy of algebras with domain and antidomain (domain complement) operations in Isabelle/HOL that ranges from domain and antidomain semigroups to modal Kleene algebras and divergence Kleene algebras. We link these algebras with models of binary relations and program traces. We include some examples from modal logics, termination and program analysis.

[Regular_Algebras]
title = Regular Algebras
author = Simon Foster, Georg Struth
date = 2014-05-21
-depends-on = Kleene_Algebra
topic = Computer Science/Automata and Formal Languages, Mathematics/Algebra
abstract = Regular algebras axiomatise the equational theory of regular expressions as induced by regular language identity. We use Isabelle/HOL for a detailed systematic study of regular algebras given by Boffa, Conway, Kozen and Salomaa. We investigate the relationships between these classes, formalise a soundness proof for the smallest class (Salomaa's) and obtain completeness of the largest one (Boffa's) relative to a deep result by Krob. In addition we provide a large collection of regular identities in the general setting of Boffa's axiom. Our regular algebra hierarchy is orthogonal to the Kleene algebra hierarchy in the Archive of Formal Proofs; we have not aimed at an integration for pragmatic reasons.
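
Since the Kleene algebra entries above repeatedly appeal to the relational model, a minimal sketch of that model may be useful; the following Python fragment is an illustration under our own naming, not code from the entries: + is union, ; is relational composition, and * is reflexive-transitive closure, computed as a least fixpoint over a finite carrier.

    def compose(r, s):
        # relational composition r ; s
        return {(a, d) for (a, b) in r for (c, d) in s if b == c}

    def star(r, carrier):
        # reflexive-transitive closure r* as a least fixpoint
        closure = {(a, a) for a in carrier}  # start from the identity relation
        while True:
            step = closure | compose(closure, r)
            if step == closure:
                return closure
            closure = step

    r = {(1, 2), (2, 3)}
    assert star(r, {1, 2, 3}) == {(1, 1), (2, 2), (3, 3), (1, 2), (2, 3), (1, 3)}
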
[BytecodeLogicJmlTypes]
title = A Bytecode Logic for JML and Types
author = Lennart Beringer, Martin Hofmann
date = 2008-12-12
topic = Computer Science/Programming Languages/Logics
abstract = This document contains the Isabelle/HOL sources underlying the paper A bytecode logic for JML and types by Beringer and Hofmann, updated to Isabelle 2008. We present a program logic for a subset of sequential Java bytecode that is suitable for representing both features found in the high-level specification language JML and interpretations of high-level type systems. To this end, we introduce a fine-grained collection of assertions, including strong invariants, local annotations and VDM-reminiscent partial-correctness specifications. Thanks to a goal-oriented structure and interpretation of judgements, verification may proceed without recourse to an additional control flow analysis. The suitability for interpreting intensional type systems is illustrated by the proof-carrying-code style encoding of a type system for a first-order functional language which guarantees a constant upper bound on the number of objects allocated throughout an execution, be the execution terminating or non-terminating. Like the published paper, the formal development is restricted to a comparatively small subset of the JVML, lacking (among other features) exceptions, arrays, virtual methods, and static fields. This shortcoming has been overcome meanwhile, as our paper has formed the basis of the Mobius base logic, a program logic for the full sequential fragment of the JVML. Indeed, the present formalisation formed the basis of a subsequent formalisation of the Mobius base logic in the proof assistant Coq, which includes a proof of soundness with respect to the Bicolano operational semantics by Pichardie.

[DataRefinementIBP]
title = Semantics and Data Refinement of Invariant Based Programs
author = Viorel Preoteasa, Ralph-Johan Back
date = 2010-05-28
topic = Computer Science/Programming Languages/Logics
abstract = Invariant based programming is a technique of constructing correct programs by first identifying the basic situations (pre- and post-conditions and invariants) that can occur during the execution of the program, and then defining the transitions and proving that they preserve the invariants. Data refinement is a technique of building correct programs working on concrete datatypes as refinements of more abstract programs. In the theories presented here we formalize the predicate transformer semantics for invariant based programs and their data refinement.
-depends-on = LatticeProperties
extra-history = Change history: [2012-01-05]: Moved some general complete lattice properties to the AFP entry Lattice Properties. Changed the definition of the data refinement relation to be more general and updated all corresponding theorems. Added new syntax for demonic and angelic update statements.

[RefinementReactive]
title = Formalization of Refinement Calculus for Reactive Systems
author = Viorel Preoteasa
date = 2014-10-08
topic = Computer Science/Programming Languages/Logics
abstract = We present a formalization of refinement calculus for reactive systems. Refinement calculus is based on monotonic predicate transformers (monotonic functions from sets of post-states to sets of pre-states), and it is a powerful formalism for reasoning about imperative programs. We model reactive systems as monotonic property transformers that transform sets of output infinite sequences into sets of input infinite sequences.
    Within this semantics we can model refinement of reactive systems, (unbounded) angelic and demonic nondeterminism, sequential composition, and other semantic properties. We can model systems that may fail for some inputs, and we can model compatibility of systems. We can specify systems that have liveness properties using linear temporal logic, and we can refine system specifications into systems based on symbolic transition systems, suitable for implementations.

[SIFPL]
title = Secure information flow and program logics
author = Lennart Beringer, Martin Hofmann
date = 2008-11-10
topic = Computer Science/Programming Languages/Logics, Computer Science/Security
abstract = We present interpretations of type systems for secure information flow in Hoare logic, complementing previous encodings in relational program logics. We first treat the imperative language IMP, extended by a simple procedure call mechanism. For this language we consider base-line non-interference in the style of Volpano et al. and the flow-sensitive type system by Hunt and Sands. In both cases, we show how typing derivations may be used to automatically generate proofs in the program logic that certify the absence of illicit flows. We then add instructions for object creation and manipulation, and derive appropriate proof rules for base-line non-interference. As a consequence of our work, standard verification technology may be used for verifying that a concrete program satisfies the non-interference property.

    The present proof development represents an update of the formalisation underlying our paper [CSF 2007] and is intended to resolve any ambiguities that may be present in the paper.

[TLA]
title = A Definitional Encoding of TLA* in Isabelle/HOL
author = Gudmund Grov, Stephan Merz
date = 2011-11-19
topic = Computer Science/Programming Languages/Logics
abstract = We mechanise the logic TLA* [Merz 1999], an extension of Lamport's Temporal Logic of Actions (TLA) [Lamport 1994] for specifying and reasoning about concurrent and reactive systems. Aiming at a framework for mechanising the verification of TLA (or TLA*) specifications, this contribution reuses some elements from a previous axiomatic encoding of TLA in Isabelle/HOL by the second author [Merz 1998], which has been part of the Isabelle distribution. In contrast to that previous work, we give here a shallow, definitional embedding, with the following highlights:

    • a theory of infinite sequences, including a formalisation of the concepts of stuttering invariance central to TLA and TLA*;
    • a definition of the semantics of TLA*, which extends TLA by a mutually-recursive definition of formulas and pre-formulas, generalising TLA action formulas;
    • a substantial set of derived proof rules, including the TLA* axioms and Lamport's proof rules for system verification;
    • a set of examples illustrating the usage of Isabelle/TLA* for reasoning about systems.
    Note that this work is unrelated to the ongoing development of a proof system for the specification language TLA+, which includes an encoding of TLA+ as a new Isabelle object logic [Chaudhuri et al 2010].

[Compiling-Exceptions-Correctly]
title = Compiling Exceptions Correctly
author = Tobias Nipkow
date = 2004-07-09
topic = Computer Science/Programming Languages/Compiling
abstract = An exception compilation scheme that dynamically creates and removes exception handler entries on the stack. A formalization of an article of the same name by Hutton and Wright.

[NormByEval]
title = Normalization by Evaluation
author = Klaus Aehlig, Tobias Nipkow
date = 2008-02-18
topic = Computer Science/Programming Languages/Compiling
abstract = This article formalizes normalization by evaluation as implemented in Isabelle. Lambda calculus plus term rewriting is compiled into a functional program with pattern matching. It is proved that the result of a successful evaluation is a) correct, i.e. equivalent to the input, and b) in normal form.

[Program-Conflict-Analysis]
title = Formalization of Conflict Analysis of Programs with Procedures, Thread Creation, and Monitors
topic = Computer Science/Programming Languages/Static Analysis
author = Peter Lammich, Markus Müller-Olm
date = 2007-12-14
abstract = In this work we formally verify the soundness and precision of a static program analysis that detects conflicts (e. g. data races) in programs with procedures, thread creation and monitors with the Isabelle theorem prover. As common in static program analysis, our program model abstracts guarded branching by nondeterministic branching, but completely interprets the call-/return behavior of procedures, synchronization by monitors, and thread creation. The analysis is based on the observation that all conflicts already occur in a class of particularly restricted schedules. These restricted schedules are suited to constraint-system-based program analysis. The formalization is based upon a flowgraph-based program model with an operational semantics as reference point.

[Shivers-CFA]
title = Shivers' Control Flow Analysis
topic = Computer Science/Programming Languages/Static Analysis
author = Joachim Breitner
date = 2010-11-16
abstract = In his dissertation, Olin Shivers introduces a concept of control flow graphs for functional languages, provides an algorithm to statically derive a safe approximation of the control flow graph and proves this algorithm correct. In this research project, Shivers' algorithms and proofs are formalized in the HOLCF extension of HOL.

[Slicing]
title = Towards Certified Slicing
author = Daniel Wasserrab
date = 2008-09-16
topic = Computer Science/Programming Languages/Static Analysis
abstract = Slicing is a widely-used technique with applications in e.g. compiler technology and software security. Thus verification of algorithms in these areas is often based on the correctness of slicing, which should ideally be proven independent of concrete programming languages and with the help of well-known verification techniques such as proof assistants. As a first step in this direction, this contribution presents a framework for dynamic and static intraprocedural slicing based on control flow and program dependence graphs. Abstracting from concrete syntax we base the framework on a graph representation of the program fulfilling certain structural and well-formedness properties.

    The formalization consists of the basic framework (in subdirectory Basic/), the correctness proof for dynamic slicing (in subdirectory Dynamic/), the correctness proof for static intraprocedural slicing (in subdirectory StaticIntra/) and instantiations of the framework with a simple While language (in subdirectory While/) and the sophisticated object-oriented bytecode language of Jinja (in subdirectory JinjaVM/). For more information on the framework, see the TPHOLS 2008 paper by Wasserrab and Lochbihler and the PLAS 2009 paper by Wasserrab et al.
-depends-on = Jinja

[HRB-Slicing]
title = Backing up Slicing: Verifying the Interprocedural Two-Phase Horwitz-Reps-Binkley Slicer
author = Daniel Wasserrab
date = 2009-11-13
topic = Computer Science/Programming Languages/Static Analysis
abstract = After verifying dynamic and static interprocedural slicing, we present a modular framework for static interprocedural slicing. To this end, we formalized the standard two-phase slicer from Horwitz, Reps and Binkley (see their TOPLAS 12(1) 1990 paper) together with summary edges as presented by Reps et al. (see FSE 1994). The framework is again modular in the programming language by using an abstract CFG, defined via structural and well-formedness properties. Using a weak simulation between the original and sliced graph, we were able to prove the correctness of static interprocedural slicing. We also instantiate our framework with a simple While language with procedures. This shows that the chosen abstractions are indeed valid.
-depends-on = Jinja

[WorkerWrapper]
title = The Worker/Wrapper Transformation
author = Peter Gammie
date = 2009-10-30
topic = Computer Science/Programming Languages/Transformations
abstract = Gill and Hutton formalise the worker/wrapper transformation, building on the work of Launchbury and Peyton-Jones who developed it as a way of changing the type at which a recursive function operates. This development establishes the soundness of the technique and several examples of its use.

[JiveDataStoreModel]
title = Jive Data and Store Model
author = Nicole Rauch, Norbert Schirmer <>
date = 2005-06-20
license = LGPL
topic = Computer Science/Programming Languages/Misc
abstract = This document presents the formalization of an object-oriented data and store model in Isabelle/HOL. This model is being used in the Java Interactive Verification Environment, Jive.

[HotelKeyCards]
title = Hotel Key Card System
author = Tobias Nipkow
date = 2006-09-09
topic = Computer Science/Security
abstract = Two models of an electronic hotel key card system are contrasted: a state based and a trace based one. Both are defined, verified, and proved equivalent in the theorem prover Isabelle/HOL. It is shown that if a guest follows a certain safety policy regarding her key cards, she can be sure that nobody but her can enter her room.

[RSAPSS]
title = SHA1, RSA, PSS and more
author = Christina Lindenberg <>, Kai Wirt <>
date = 2005-05-02
topic = Computer Science/Security
abstract = Formal verification is getting more and more important in computer science. However the state of the art formal verification methods in cryptography are very rudimentary. These theories are one step to provide a tool box allowing the use of formal methods in every aspect of cryptography. Moreover we present a proof of concept for the feasibility of applying verification techniques to a standard signature algorithm.
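
To make the subject matter of the RSAPSS entry concrete, here is a toy textbook-RSA signature check in Python. This is only an illustration: real RSA-PSS additionally hashes and pads the message (e.g. with SHA1), and these parameters are far too small to be secure.

    p, q = 61, 53
    n = p * q                          # public modulus
    e = 17                             # public exponent
    d = pow(e, -1, (p - 1) * (q - 1))  # private exponent (Python >= 3.8)
    message = 42                       # message already encoded as a number
    signature = pow(message, d, n)     # sign with the private key
    assert pow(signature, e, n) == message  # verify with the public key
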
; todo: multiple entries with different naming scheme

[InformationFlowSlicing]
title = Information Flow Noninterference via Slicing
author = Daniel Wasserrab
date = 2010-03-23
topic = Computer Science/Security
ignore = entry
abstract = In this contribution, we show how correctness proofs for intra- and interprocedural slicing can be used to prove that slicing is able to guarantee information flow noninterference. Moreover, we also illustrate how to lift the control flow graphs of the respective frameworks such that they fulfil the additional assumptions needed in the noninterference proofs. A detailed description of the intraprocedural proof and its interplay with the slicing framework can be found in the PLAS'09 paper by Wasserrab et al.
-depends-on = Slicing, HRB-Slicing

[ComponentDependencies]
title = Formalisation and Analysis of Component Dependencies
author = Maria Spichkova
date = 2014-04-28
topic = Computer Science/System Description Languages
abstract = This set of theories presents a formalisation in Isabelle/HOL of data dependencies between components. The approach allows one to analyse the system structure with a view to efficient checking of the system: it aims at elaborating, for a concrete system, which parts of the system are necessary to check a given property.

[Verified-Prover]
title = A Mechanically Verified, Efficient, Sound and Complete Theorem Prover For First Order Logic
author = Tom Ridge <>
date = 2004-09-28
topic = Logic
abstract = Soundness and completeness for a system of first order logic are formally proved, building on James Margetson's formalization of work by Wainer and Wallen. The completeness proofs naturally suggest an algorithm to derive proofs. This algorithm, which can be implemented tail recursively, is formalized in Isabelle/HOL. The algorithm can be executed via the rewriting tactics of Isabelle. Alternatively, the definitions can be exported to OCaml, yielding a directly executable program.

[Completeness]
title = Completeness theorem
author = James Margetson <>, Tom Ridge <>
date = 2004-09-20
topic = Logic
abstract = The completeness of first-order logic is proved, following the first five pages of Wainer and Wallen's chapter of the book Proof Theory by Aczel et al., CUP, 1992. Their presentation of formulas allows the proofs to use symmetry arguments. Margetson formalized this theorem by early 2000. The Isar conversion is thanks to Tom Ridge. A paper describing the formalization is available [pdf].

[Ordinal]
title = Countable Ordinals
author = Brian Huffman
date = 2005-11-11
topic = Logic
abstract = This development defines a well-ordered type of countable ordinals. It includes notions of continuous and normal functions, recursively defined functions over ordinals, least fixed-points, and derivatives. Much of ordinal arithmetic is formalized, including exponentials and logarithms. The development concludes with formalizations of Cantor Normal Form and Veblen hierarchies over normal functions.

[Ordinals_and_Cardinals]
title = Ordinals and Cardinals
author = Andrei Popescu <>
date = 2009-09-01
topic = Logic
abstract = We develop a basic theory of ordinals and cardinals in Isabelle/HOL, up to the point where some cardinality facts relevant for the ``working mathematician" become available. Unlike in set theory, here we do not have at hand canonical notions of ordinal and cardinal. Therefore, here an ordinal is merely a well-order relation and a cardinal is an ordinal minimal w.r.t. order embedding on its field.
extra-history = Change history: [2012-09-25]: This entry has been discontinued because it is now part of the Isabelle distribution.

[FOL-Fitting]
title = First-Order Logic According to Fitting
author = Stefan Berghofer
date = 2007-08-02
topic = Logic
abstract = We present a formalization of parts of Melvin Fitting's book ``First-Order Logic and Automated Theorem Proving''. The formalization covers the syntax of first-order logic, its semantics, the model existence theorem, a natural deduction proof calculus together with a proof of correctness and completeness, as well as the Löwenheim-Skolem theorem.

[SequentInvertibility]
title = Invertibility in Sequent Calculi
author = Peter Chapman
date = 2009-08-28
topic = Logic
license = LGPL
abstract = The invertibility of the rules of a sequent calculus is important for guiding proof search and can be used in some formalised proofs of Cut admissibility. We present sufficient conditions for when a rule is invertible with respect to a calculus. We illustrate the conditions with examples. It must be noted that we give purely syntactic criteria; no guarantees are given as to the suitability of the rules.

[LinearQuantifierElim]
title = Quantifier Elimination for Linear Arithmetic
author = Tobias Nipkow
date = 2008-01-11
topic = Logic
abstract = This article formalizes quantifier elimination procedures for dense linear orders, linear real arithmetic and Presburger arithmetic. In each case both a DNF-based non-elementary algorithm and one or more (doubly) exponential NNF-based algorithms are formalized, including the well-known algorithms by Ferrante and Rackoff and by Cooper. The NNF-based algorithms for dense linear orders are new but based on Ferrante and Rackoff and on an algorithm by Loos and Weisspfenning which simulates infinitesimals. All algorithms are directly executable. In particular, they yield reflective quantifier elimination procedures for HOL itself. The formalization makes heavy use of locales and is therefore highly modular.

[Nat-Interval-Logic]
title = Interval Temporal Logic on Natural Numbers
author = David Trachtenherz
date = 2011-02-23
topic = Logic
abstract = We introduce a theory of temporal logic operators using sets of natural numbers as time domain, formalized in a shallow embedding manner. The theory comprises special natural intervals (theory IL_Interval: open and closed intervals, continuous and modulo intervals, interval traversing results), operators for shifting intervals to left/right on the number axis as well as expanding/contracting intervals by constant factors (theory IL_IntervalOperators.thy), and ultimately definitions and results for unary and binary temporal operators on arbitrary natural sets (theory IL_TemporalOperators).
-depends-on = List-Infinite

[Recursion-Theory-I]
title = Recursion Theory I
author = Michael Nedzelsky <>
date = 2008-04-05
topic = Logic
abstract = This document presents the formalization of introductory material from recursion theory --- definitions and basic properties of primitive recursive functions, the Cantor pairing function and computably enumerable sets (including a proof of existence of a one-complete computably enumerable set and a proof of Rice's theorem).

[Free-Boolean-Algebra]
topic = Logic
title = Free Boolean Algebra
author = Brian Huffman
date = 2010-03-29
abstract = This theory defines a type constructor representing the free Boolean algebra over a set of generators.
    Values of type (α)formula represent propositional formulas with uninterpreted variables from type α, ordered by implication. In addition to all the standard Boolean algebra operations, the library also provides a function for building homomorphisms to any other Boolean algebra type.

[Sort_Encodings]
title = Sound and Complete Sort Encodings for First-Order Logic
author = Jasmin Christian Blanchette, Andrei Popescu
date = 2013-06-27
topic = Logic
abstract = This is a formalization of the soundness and completeness properties for various efficient encodings of sorts in unsorted first-order logic used by Isabelle's Sledgehammer tool.

    Essentially, the encodings proceed as follows: a many-sorted problem is decorated with (as few as possible) tags or guards that make the problem monotonic; then sorts can be soundly erased.

    The development employs a formalization of many-sorted first-order logic in clausal form (clauses, structures and the basic properties of the satisfaction relation), which could be of interest as the starting point for other formalizations of first-order logic metatheory.

[Abstract-Rewriting]
title = Abstract Rewriting
topic = Logic/Rewriting
date = 2010-06-14
author = Christian Sternagel, René Thiemann
license = LGPL
abstract = We present an Isabelle formalization of abstract rewriting (see, e.g., the book by Baader and Nipkow). First, we define standard relations like joinability, meetability, conversion, etc. Then, we formalize important properties of abstract rewrite systems, e.g., confluence and strong normalization. Our main concern is strong normalization, since this formalization is the basis of CeTA (which is mainly about strong normalization of term rewrite systems). Hence lemmas involving strong normalization constitute by far the biggest part of this theory. One of those is Newman's lemma.
extra-history = Change history: [2010-09-17]: Added theories defining several (ordered) semirings related to strong normalization and giving some standard instances.
    [2013-10-16]: Generalized delta-orders from rationals to Archimedean fields.

[Free-Groups]
title = Free Groups
author = Joachim Breitner
date = 2010-06-24
topic = Mathematics/Algebra
abstract = Free Groups are, in a sense, the most generic kind of group. They are defined over a set of generators with no additional relations in between them. They play an important role in the definition of group presentations and in other fields. This theory provides the definition of Free Group as the set of fully canceled words in the generators. The universal property is proven, as well as some isomorphism results about Free Groups.
-depends-on = Ordinals_and_Cardinals
extra-history = Change history: [2011-12-11]: Added the Ping Pong Lemma.

[CofGroups]
title = An Example of a Cofinitary Group in Isabelle/HOL
author = Bart Kastermans
date = 2009-08-04
topic = Mathematics/Algebra
abstract = We formalize the usual proof that the group generated by the function k -> k + 1 on the integers gives rise to a cofinitary group.

[Group-Ring-Module]
title = Groups, Rings and Modules
author = Hidetsune Kobayashi <>, L. Chen <>, H. Murao <>
date = 2004-05-18
topic = Mathematics/Algebra
abstract = The theory of groups, rings and modules is developed to a great depth. Group theory results include Zassenhaus's theorem and the Jordan-Hoelder theorem. The ring theory development includes ideals, quotient rings and the Chinese remainder theorem. The module development includes the Nakayama lemma, exact sequences and tensor products.

[Robbins-Conjecture]
title = A Complete Proof of the Robbins Conjecture
author = Matthew Wampler-Doty <>
date = 2010-05-22
topic = Mathematics/Algebra
abstract = This document gives a formalization of the proof of the Robbins conjecture, following A. Mann, A Complete Proof of the Robbins Conjecture, 2003.

[Valuation]
title = Fundamental Properties of Valuation Theory and Hensel's Lemma
author = Hidetsune Kobayashi <>
date = 2007-08-08
topic = Mathematics/Algebra
abstract = Convergence with respect to a valuation is discussed as convergence of a Cauchy sequence. Cauchy sequences of polynomials are defined. They are used to formalize Hensel's lemma.
-depends-on = Group-Ring-Module

[Rank_Nullity_Theorem]
title = Rank-Nullity Theorem in Linear Algebra
author = Jose Divasón, Jesús Aransay
topic = Mathematics/Algebra
date = 2013-01-16
abstract = In this contribution, we present some formalizations based on the HOL-Multivariate-Analysis session of Isabelle. Firstly, a generalization of several theorems of such library is presented. Secondly, some definitions and proofs involving Linear Algebra and the four fundamental subspaces of a matrix are shown. Finally, we present a proof of the result known in Linear Algebra as the ``Rank-Nullity Theorem'', which states that, given any linear map f from a finite dimensional vector space V to a vector space W, the dimension of V is equal to the sum of the dimension of the kernel of f (which is a subspace of V) and the dimension of the range of f (which is a subspace of W). The proof presented here is based on the one given by Sheldon Axler in his book Linear Algebra Done Right. As a corollary of the previous theorem, and taking advantage of the relationship between linear maps and matrices, we prove that, for every matrix A (which has associated a linear map between finite dimensional vector spaces), the sum of the dimensions of its null space and its column space (the latter being equal to the range of the linear map) is equal to the number of columns of A.
extra-history = Change history: [2014-07-14]: Added some generalizations that allow us to formalize the Rank-Nullity Theorem over finite dimensional vector spaces, instead of over the more particular euclidean spaces. Updated abstract. [Affine_Arithmetic] title = Affine Arithmetic author = Fabian Immler date = 2014-02-07 topic = Mathematics/Analysis abstract = We give a formalization of affine forms as abstract representations of zonotopes. We provide affine operations as well as overapproximations of some non-affine operations like multiplication and division. Expressions involving those operations can automatically be turned into (executable) functions approximating the original expression in affine arithmetic. extra-history = Change history: [2015-01-31]: added algorithm for zonotope/hyperplane intersection [Cauchy] title = Cauchy's Mean Theorem and the Cauchy-Schwarz Inequality author = Benjamin Porter <> date = 2006-03-14 topic = Mathematics/Analysis abstract = This document presents the mechanised proofs of two popular theorems attributed to Augustin Louis Cauchy - Cauchy's Mean Theorem and the Cauchy-Schwarz Inequality. [Integration] title = Integration theory and random variables author = Stefan Richter date = 2004-11-19 topic = Mathematics/Analysis abstract = Lebesgue-style integration plays a major role in advanced probability. We formalize concepts of elementary measure theory, real-valued random variables as Borel-measurable functions, and a stepwise inductive definition of the integral itself. All proofs are carried out in human readable style using the Isar language. extra-note = Note: This article is of historical interest only. Lebesgue-style integration and probability theory are now available as part of the Isabelle/HOL distribution (directory Probability). [Ordinary_Differential_Equations] title = Ordinary Differential Equations author = Fabian Immler , Johannes Hölzl topic = Mathematics/Analysis date = 2012-04-26 abstract = We formalize initial value problems (IVPs) of ODEs and prove the existence of a unique solution, i.e. the Picard-Lindelöf theorem. We introduce general one-step methods for numerical approximation of the solution and provide an analysis regarding the local and global error of one-step methods. We give an executable specification of the Euler method to approximate the solution of IVPs and prove an explicit bound for the global error. We use arbitrary-precision floating-point numbers and also handle rounding errors when we truncate the numbers for efficiency reasons. extra-history = Change history: [2014-02-13]: added an implementation of the Euler method based on affine arithmetic [Polynomials] title = Executable Multivariate Polynomials author = Christian Sternagel , René Thiemann date = 2010-08-10 topic = Mathematics/Analysis license = LGPL abstract = We define multivariate polynomials over arbitrary (ordered) semirings in combination with (executable) operations like addition, multiplication, and substitution. We also define (weak) monotonicity of polynomials and comparison of polynomials where we provide standard estimations like absolute positiveness or the more recent approach of Neurauter, Zankl, and Middeldorp. Moreover, it is proven that strongly normalizing (monotone) orders can be lifted to strongly normalizing (monotone) orders over polynomials. Our formalization was performed as part of the IsaFoR/CeTA-system which contains several termination techniques. The provided theories have been essential to formalize polynomial interpretations. 
-depends-on = Abstract-Rewriting extra-history = Change history: [2010-09-17]: Moved theories on arbitrary (ordered) semirings to Abstract Rewriting. [Sqrt_Babylonian] title = Computing N-th Roots using the Babylonian Method author = René Thiemann date = 2013-01-03 topic = Mathematics/Analysis license = LGPL abstract = We implement the Babylonian method to compute n-th roots of numbers. We provide precise algorithms for naturals, integers and rationals, and offer an approximation algorithm for square roots over linear ordered fields. Moreover, there are precise algorithms to compute the floor and the ceiling of n-th roots. extra-history = Change history: [2013-10-16]: Added algorithms to compute floor and ceiling of sqrt of integers. [2014-07-11]: Moved NthRoot_Impl from Real-Impl to this entry. [Sturm_Sequences] title = Sturm's Theorem author = Manuel Eberl date = 2014-01-11 topic = Mathematics/Analysis abstract = Sturm's Theorem states that polynomial sequences with certain properties, so-called Sturm sequences, can be used to count the number of real roots of a real polynomial. This work contains a proof of Sturm's Theorem and code for constructing Sturm sequences efficiently. It also provides the “sturm” proof method, which can decide certain statements about the roots of real polynomials, such as “the polynomial P has exactly n roots in the interval I” or “P(x) > Q(x) for all x ∈ ℝ”. [Sturm_Tarski] title = The Sturm-Tarski Theorem author = Wenda Li date = 2014-09-19 topic = Mathematics/Analysis abstract = We have formalized the Sturm-Tarski theorem (also referred to as the Tarski theorem), which generalizes Sturm's theorem. Sturm's theorem is usually used as a way to count distinct real roots, while the Sturm-Tarski theorem forms the basis for Tarski's classic quantifier elimination for real closed fields. [Markov_Models] title = Markov Models author = Johannes Hölzl , Tobias Nipkow date = 2012-01-03 topic = Mathematics/Probability Theory, Computer Science/Automata and Formal Languages abstract = This is a formalization of Markov models in Isabelle/HOL. It builds on Isabelle's probability theory. The available models are currently Discrete-Time Markov Chains and extensions of them with rewards.
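    To give a rough feel for the Sqrt_Babylonian entry above, here is an unverified plain-Python sketch of the underlying Babylonian (Newton) iteration for integer square roots; the function name isqrt and the starting value are choices of this sketch, not of the entry:

    # Sketch of the Babylonian (Newton) iteration for the integer square
    # root; the AFP entry proves a verified version of this scheme.
    def isqrt(n):
        """Largest x with x*x <= n, for a non-negative integer n."""
        if n < 0:
            raise ValueError("negative input")
        if n == 0:
            return 0
        x = n  # any starting value >= sqrt(n) works
        while True:
            y = (x + n // x) // 2  # Babylonian step: average x and n/x
            if y >= x:             # no further decrease: x is floor(sqrt(n))
                return x
            x = y

    assert isqrt(99) == 9 and isqrt(100) == 10 and isqrt(101) == 10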

    As applications of these models, we formalize probabilistic model checking of pCTL formulas, an analysis of IPv4 address allocation in ZeroConf and an analysis of the anonymity of the Crowds protocol. See here for the corresponding paper. [Probabilistic_System_Zoo] title = A Zoo of Probabilistic Systems author = Johannes Hölzl , Andreas Lochbihler , Dmitriy Traytel date = 2015-05-27 topic = Computer Science/Automata and Formal Languages abstract = Numerous models of probabilistic systems are studied in the literature. Coalgebra has been used to classify them into system types and compare their expressiveness. We formalize the resulting hierarchy of probabilistic system types by modeling the semantics of the different systems as codatatypes. This approach yields simple and concise proofs, as bisimilarity coincides with equality for codatatypes.

    This work is described in detail in the ITP 2015 publication by the authors. [Density_Compiler] title = A Verified Compiler for Probability Density Functions author = Manuel Eberl , Johannes Hölzl , Tobias Nipkow date = 2014-10-09 topic = Mathematics/Probability Theory, Computer Science/Programming Languages/Compiling abstract = Bhat et al. [TACAS 2013] developed an inductive compiler that computes density functions for probability spaces described by programs in a probabilistic functional language. In this work, we implement such a compiler for a modified version of this language within the theorem prover Isabelle and give a formal proof of its soundness w.r.t. the semantics of the source and target language. Together with Isabelle's code generation for inductive predicates, this yields a fully verified, executable density compiler. The proof is done in two steps: First, an abstract compiler working with abstract functions modelled directly in the theorem prover's logic is defined and proved sound. Then, this compiler is refined to a concrete version that returns a target-language expression.

    An article with the same title and authors is published in the proceedings of ESOP 2015. A detailed presentation of this work can be found in the first author's master's thesis. [CAVA_Automata] title = The CAVA Automata Library author = Peter Lammich date = 2014-05-28 topic = Computer Science/Automata and Formal Languages abstract = We report on the graph and automata library that is used in the fully verified LTL model checker CAVA. As most components of CAVA use some type of graphs or automata, a common automata library simplifies assembly of the components and reduces redundancy.

    The CAVA Automata Library provides a hierarchy of graph and automata classes, together with some standard algorithms. Its object-oriented design allows for sharing of algorithms, theorems, and implementations between its classes, and also simplifies extensions of the library. Moreover, it is integrated into the Automatic Refinement Framework, supporting automatic refinement of the abstract automata types to efficient data structures.

    Note that the CAVA Automata Library is work in progress. Currently, it is very specifically tailored towards the requirements of the CAVA model checker. Nevertheless, the formalization techniques presented here allow an extension of the library to a wider scope. Moreover, they are not limited to graph libraries, but apply to class hierarchies in general.

    The CAVA Automata Library is described in the paper: Peter Lammich, The CAVA Automata Library, Isabelle Workshop 2014. -depends-on = Collections [LTL] title = Linear Temporal Logic author = Salomon Sickert date = 2016-03-01 topic = Logic, Computer Science/Automata and Formal Languages abstract = This theory provides a formalisation of linear temporal logic (LTL) and unifies previous formalisations within the AFP. This entry establishes syntax and semantics for this logic and decouples it from existing entries, yielding a common environment for theories reasoning about LTL. Furthermore, a parser written in SML and an executable simplifier are provided. [LTL_to_GBA] title = Converting Linear-Time Temporal Logic to Generalized Büchi Automata author = Alexander Schimpf , Peter Lammich date = 2014-05-28 topic = Computer Science/Automata and Formal Languages abstract = We formalize linear-time temporal logic (LTL) and the algorithm by Gerth et al. to convert LTL formulas to generalized Büchi automata. We also formalize some syntactic rewrite rules that can be applied to optimize the LTL formula before conversion. Moreover, we integrate the Stuttering Equivalence AFP-Entry by Stefan Merz, adapting the lemma that next-free LTL formulas cannot distinguish between stuttering-equivalent runs to our setting.

    We use the Isabelle Refinement and Collection framework, as well as the Autoref tool, to obtain a refined version of our algorithm, from which efficiently executable code can be extracted. -depends-on = CAVA_Automata [Gabow_SCC] title = Verified Efficient Implementation of Gabow's Strongly Connected Components Algorithm author = Peter Lammich date = 2014-05-28 topic = Computer Science/Algorithms, Mathematics/Graph Theory abstract = We present an Isabelle/HOL formalization of Gabow's algorithm for finding the strongly connected components of a directed graph. Using data refinement techniques, we extract efficient code that performs comparably to a reference implementation in Java. Our style of formalization allows for re-using large parts of the proofs when defining variants of the algorithm. We demonstrate this by verifying an algorithm for the emptiness check of generalized Büchi automata, re-using most of the existing proofs. -depends-on = CAVA_Automata [Promela] title = Promela Formalization author = René Neumann date = 2014-05-28 topic = Computer Science/System Description Languages abstract = We present an executable formalization of the language Promela, the description language for models of the model checker SPIN. This formalization is part of the work for a completely verified model checker (CAVA), but also serves as a useful (and executable!) description of the semantics of the language itself, something that is currently missing. The formalization uses three steps: It takes an abstract syntax tree generated from an SML parser, removes syntactic sugar and enriches it with type information. This is then translated into a transition system, on which the semantic engine (read: successor function) operates. -depends-on = CAVA_Automata [CAVA_LTL_Modelchecker] title = A Fully Verified Executable LTL Model Checker author = Javier Esparza , Peter Lammich , René Neumann , Tobias Nipkow , Alexander Schimpf , Jan-Georg Smaus date = 2014-05-28 topic = Computer Science/Automata and Formal Languages abstract = We present an LTL model checker whose code has been completely verified using the Isabelle theorem prover. The checker consists of over 4000 lines of ML code. The code is produced using the Isabelle Refinement Framework, which allows us to split its correctness proof into (1) the proof of an abstract version of the checker, consisting of a few hundred lines of ``formalized pseudocode'', and (2) a verified refinement step in which mathematical sets and other abstract structures are replaced by implementations of efficient structures like red-black trees and functional arrays. This leads to a checker that, while still slower than unverified checkers, can already be used as a trusted reference implementation against which advanced implementations can be tested.

    An early version of this model checker is described in the CAV 2013 paper with the same title. -depends-on = Gabow_SCC, Promela, LTL_to_GBA [Fermat3_4] title = Fermat's Last Theorem for Exponents 3 and 4 and the Parametrisation of Pythagorean Triples author = Roelof Oosterhuis date = 2007-08-12 topic = Mathematics/Number Theory abstract = This document presents the mechanised proofs of

    • Fermat's Last Theorem for exponents 3 and 4 and
    • the parametrisation of Pythagorean Triples.
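    To make the parametrisation of Pythagorean Triples concrete, the following unverified Python sketch enumerates primitive triples from Euclid's parametrisation; the helper primitive_triples is hypothetical, not part of the entry:

    from math import gcd  # math.gcd needs Python >= 3.5; on 2.7 use fractions.gcd

    # Euclid's parametrisation: every primitive Pythagorean triple (a, b, c)
    # arises as (m*m - n*n, 2*m*n, m*m + n*n) for coprime m > n >= 1
    # of opposite parity.
    def primitive_triples(bound):
        for m in range(2, bound):
            for n in range(1, m):
                if (m - n) % 2 == 1 and gcd(m, n) == 1:
                    a, b, c = m*m - n*n, 2*m*n, m*m + n*n
                    assert a*a + b*b == c*c
                    yield (a, b, c)

    print(list(primitive_triples(5)))
    # [(3, 4, 5), (5, 12, 13), (15, 8, 17), (7, 24, 25)]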
    [Perfect-Number-Thm] title = Perfect Number Theorem author = Mark Ijbema date = 2009-11-22 topic = Mathematics/Number Theory abstract = These theories present the mechanised proof of the Perfect Number Theorem. [SumSquares] title = Sums of Two and Four Squares author = Roelof Oosterhuis date = 2007-08-12 topic = Mathematics/Number Theory abstract = This document presents the mechanised proofs of the following results:
    • any prime number of the form 4m+1 can be written as the sum of two squares;
    • any natural number can be written as the sum of four squares
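    The two statements above can be sanity-checked by brute force on small instances; a minimal Python sketch (illustration only, the entry proves the full theorems):

    def two_squares(p):
        # returns (a, b) with a*a + b*b == p, or None
        a = 0
        while a * a <= p:
            b = a
            while a * a + b * b <= p:
                if a * a + b * b == p:
                    return (a, b)
                b += 1
            a += 1
        return None

    def four_squares(n):
        # exhaustive search for a*a + b*b + c*c + d*d == n
        m = 0
        while m * m <= n:
            m += 1
        for a in range(m):
            for b in range(m):
                for c in range(m):
                    for d in range(m):
                        if a*a + b*b + c*c + d*d == n:
                            return (a, b, c, d)
        return None

    # primes of the form 4m+1 below 100, and all naturals below 50:
    for p in [5, 13, 17, 29, 37, 41, 53, 61, 73, 89, 97]:
        assert two_squares(p) is not None
    for n in range(50):
        assert four_squares(n) is not None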
    -depends-on = Fermat3_4 [Lehmer] title = Lehmer's Theorem author = Simon Wimmer , Lars Noschinski date = 2013-07-22 topic = Mathematics/Number Theory abstract = In 1927, Lehmer presented criteria for primality, based on the converse of Fermat's little theorem. This work formalizes the second criterion from Lehmer's paper, a necessary and sufficient condition for primality.

    As a side product, we formalize some properties of Euler's phi-function, the notion of the order of an element of a group, and the cyclicity of the multiplicative group of a finite field. [Pratt_Certificate] title = Pratt's Primality Certificates author = Simon Wimmer , Lars Noschinski date = 2013-07-22 topic = Mathematics/Number Theory abstract = In 1975, Pratt introduced a proof system for certifying primes. He showed that a number p is prime iff a primality certificate for p exists. By showing a logarithmic upper bound on the length of the certificates in the size of the prime number, he concluded that the decision problem for prime numbers is in NP. This work formalizes soundness and completeness of Pratt's proof system as well as an upper bound for the size of the certificate. -depends-on = Lehmer [ArrowImpossibilityGS] title = Arrow and Gibbard-Satterthwaite author = Tobias Nipkow date = 2008-09-01 topic = Mathematics/Economics abstract = This article formalizes two proofs of Arrow's impossibility theorem due to Geanakoplos and derives the Gibbard-Satterthwaite theorem as a corollary. One formalization is based on utility functions, the other one on strict partial orders.
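    A certificate in Pratt's system (see the Pratt_Certificate entry above) can be checked by the classic Lucas-style conditions; the following unverified Python sketch uses a hypothetical nested-tuple certificate format, which is this sketch's choice rather than the entry's data representation:

    # A certificate for p is (p, a, [certificate for each prime factor q
    # of p - 1]); a is a witness of a primitive root modulo p.
    def check(cert):
        p, a, subcerts = cert
        if p == 2:
            return True
        # each listed factor must itself come with a valid certificate
        if not all(check(c) for c in subcerts):
            return False
        qs = [c[0] for c in subcerts]
        m = p - 1
        for q in qs:                  # the factors must cover all of p - 1
            while m % q == 0:
                m //= q
        if m != 1:
            return False
        if pow(a, p - 1, p) != 1:     # Fermat condition for the witness
            return False
        return all(pow(a, (p - 1) // q, p) != 1 for q in set(qs))

    # 7 - 1 = 2 * 3 with witness 3; 3 - 1 = 2 with witness 2
    assert check((7, 3, [(2, 1, []), (3, 2, [(2, 1, [])])]))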

    An article about these proofs is found here. [SenSocialChoice] title = Some classical results in Social Choice Theory author = Peter Gammie date = 2008-11-09 topic = Mathematics/Economics abstract = Drawing on Sen's landmark work "Collective Choice and Social Welfare" (1970), this development proves Arrow's General Possibility Theorem, Sen's Liberal Paradox and May's Theorem in a general setting. The goal was to make precise the classical statements and proofs of these results, and to provide a foundation for more recent results such as the Gibbard-Satterthwaite and Duggan-Schwartz theorems. [Vickrey_Clarke_Groves] title = VCG - Combinatorial Vickrey-Clarke-Groves Auctions author = Marco B. Caminati , Manfred Kerber , Christoph Lange, Colin Rowat date = 2015-04-30 topic = Mathematics/Economics abstract = A VCG auction (named after its inventors Vickrey, Clarke, and Groves) is a generalization of the single-good, second price Vickrey auction to the case of a combinatorial auction (multiple goods, from which any participant can bid on each possible combination). We formalize in this entry VCG auctions, including tie-breaking, and prove that the functions for the allocation and the price determination are well-defined. Furthermore we show that the allocation function allocates goods only to participants, only goods in the auction are allocated, and no good is allocated twice. We also show that the price function is non-negative. These properties also hold for the automatically extracted Scala code. [Topology] title = Topology author = Stefan Friedrich <> date = 2004-04-26 topic = Mathematics/Topology abstract = This entry contains two theories. The first, Topology, develops the basic notions of general topology. The second, which can be viewed as a demonstration of the first, is called LList_Topology. It develops the topology of lazy lists. -depends-on = Lazy-Lists-II [Knot_Theory] title = Knot Theory author = T.V.H. Prathamesh date = 2016-01-20 topic = Mathematics/Topology abstract = This work contains a formalization of some topics in knot theory. The concepts that were formalized include definitions of tangles, links, framed links and link/tangle equivalence. The formalization is based on a formulation of links in terms of tangles. We further construct the Bracket polynomial and prove its invariance. The Bracket polynomial is an invariant of framed links, closely related to the Jones polynomial. This is perhaps the first attempt to formalize any aspect of knot theory in an interactive proof assistant. -depends-on = Matrix_Tensor [Graph_Theory] title = Graph Theory author = Lars Noschinski date = 2013-04-28 topic = Mathematics/Graph Theory abstract = This development provides a formalization of directed graphs, supporting (labelled) multi-edges and infinite graphs. A polymorphic edge type allows edges to be treated as pairs of vertices, if multi-edges are not required. Formalized properties include walks (and related concepts), connectedness, subgraphs, and basic properties of isomorphisms.

    This formalization is used to prove characterizations of Euler Trails, Shortest Paths and Kuratowski subgraphs. [Planarity_Certificates] title = Planarity Certificates author = Lars Noschinski date = 2015-11-11 topic = Mathematics/Graph Theory -depends-on = Simpl, Case_Labeling, Graph_Theory, List-Index, Transitive-Closure abstract = This development provides a formalization of planarity based on combinatorial maps and proves that Kuratowski's theorem implies combinatorial planarity. Moreover, it contains verified implementations of programs checking certificates for planarity (i.e., a combinatorial map) or non-planarity (i.e., a Kuratowski subgraph). [Max-Card-Matching] title = Maximum Cardinality Matching author = Christine Rizkallah date = 2011-07-21 topic = Mathematics/Graph Theory abstract = A matching in a graph G is a subset M of the edges of G such that no two share an endpoint. A matching has maximum cardinality if its cardinality is at least as large as that of any other matching. An odd-set cover OSC of a graph G is a labeling of the nodes of G with integers such that every edge of G is either incident to a node labeled 1 or connects two nodes labeled with the same number i ≥ 2. This article proves Edmonds' theorem: Let M be a matching in a graph G and let OSC be an odd-set cover of G. For any i ≥ 0, let n(i) be the number of nodes labeled i. If |M| = n(1) + ∑<sub>i ≥ 2</sub> (n(i) div 2), then M is a maximum cardinality matching. [Girth_Chromatic] title = A Probabilistic Proof of the Girth-Chromatic Number Theorem author = Lars Noschinski date = 2012-02-06 topic = Mathematics/Graph Theory abstract = This work presents a formalization of the Girth-Chromatic number theorem in graph theory, stating that graphs with arbitrarily large girth and chromatic number exist. The proof uses the theory of Random Graphs to prove the existence with probabilistic arguments. [Random_Graph_Subgraph_Threshold] title = Properties of Random Graphs -- Subgraph Containment author = Lars Hupel date = 2014-02-13 topic = Mathematics/Graph Theory, Mathematics/Probability Theory abstract = Random graphs are graphs with a fixed number of vertices, where each edge is present with a fixed probability. We are interested in the probability that a random graph contains a certain pattern, for example a cycle or a clique. A very high edge probability gives rise to perhaps too many edges (which degrades performance for many algorithms), whereas a low edge probability might result in a disconnected graph. We prove a theorem about a threshold probability such that a higher edge probability will asymptotically almost surely produce a random graph with the desired subgraph. [Flyspeck-Tame] title = Flyspeck I: Tame Graphs author = Gertrud Bauer <>, Tobias Nipkow date = 2006-05-22 topic = Mathematics/Graph Theory abstract = These theories present the verified enumeration of tame plane graphs as defined by Thomas C. Hales in his proof of the Kepler Conjecture in his book Dense Sphere Packings. A Blueprint for Formal Proofs. [CUP 2012]. The values of the constants in the definition of tameness are identical to those in the Flyspeck project. The IJCAR 2006 paper by Nipkow, Bauer and Schultz refers to the original version of Hales' proof, the ITP 2011 paper by Nipkow refers to the Blueprint version of the proof. extra-history = Change history: [2010-11-02]: modified theories to reflect the modified definition of tameness in Hales' revised proof.
    [2014-07-03]: modified constants in def of tameness and Archive according to the final state of the Flyspeck proof. [Well_Quasi_Orders] title = Well-Quasi-Orders author = Christian Sternagel date = 2012-04-13 topic = Mathematics/Combinatorics abstract = Based on Isabelle/HOL's type class for preorders, we introduce a type class for well-quasi-orders (wqo) which is characterized by the absence of "bad" sequences (our proofs are along the lines of the proof of Nash-Williams, from which we also borrow terminology). Our main results are instantiations for the product type, the list type, and a type of finite trees, which (almost) directly follow from our proofs of (1) Dickson's Lemma, (2) Higman's Lemma, and (3) Kruskal's Tree Theorem. More concretely:

    • If the sets A and B are wqo then their Cartesian product is wqo.
    • If the set A is wqo then the set of finite lists over A is wqo.
    • If the set A is wqo then the set of finite trees over A is wqo.
    The research was funded by the Austrian Science Fund (FWF): J3202. extra-history = Change history: [2012-06-11]: Added Kruskal's Tree Theorem.
    [2012-12-19]: New variant of Kruskal's tree theorem for terms (as opposed to variadic terms, i.e., trees), plus finite version of the tree theorem as corollary.
    [2013-05-16]: Simplified construction of minimal bad sequences.
    [2014-07-09]: Simplified proofs of Higman's lemma and Kruskal's tree theorem, based on homogeneous sequences. [Marriage] title = Hall's Marriage Theorem author = Dongchen Jiang , Tobias Nipkow date = 2010-12-17 topic = Mathematics/Combinatorics abstract = Two proofs of Hall's Marriage Theorem: one due to Halmos and Vaughan, one due to Rado. extra-history = Change history: [2011-09-09]: Added Rado's proof [Bondy] title = Bondy's Theorem author = Jeremy Avigad , Stefan Hetzl date = 2012-10-27 topic = Mathematics/Combinatorics abstract = A proof of Bondy's theorem following B. Bollobás, Combinatorics, 1986, Cambridge University Press. [Ramsey-Infinite] title = Ramsey's theorem, infinitary version author = Tom Ridge <> date = 2004-09-20 topic = Mathematics/Combinatorics abstract = This formalization of Ramsey's theorem (infinitary version) is taken from Boolos and Jeffrey, Computability and Logic, 3rd edition, Chapter 26. It differs from the text by assuming a slightly stronger hypothesis. In particular, the induction hypothesis is stronger, holding for any infinite subset of the naturals. This avoids the rather peculiar mapping argument between k<sub>j</sub> and a<sub>i<sub>k<sub>j</sub></sub></sub> on p.263, which is unnecessary and slightly mars this really beautiful result. [Derangements] title = Derangements Formula author = Lukas Bulwahn date = 2015-06-27 topic = Mathematics/Combinatorics abstract = The Derangements Formula describes the number of fixpoint-free permutations as a closed formula. This theorem is the 88th theorem in a list of the ``Top 100 Mathematical Theorems''. [Euler_Partition] title = Euler's Partition Theorem author = Lukas Bulwahn date = 2015-11-19 topic = Mathematics/Combinatorics abstract = Euler's Partition Theorem states that the number of partitions with only distinct parts is equal to the number of partitions with only odd parts. The combinatorial proof follows John Harrison's HOL Light formalization. This theorem is the 45th theorem of the Top 100 Theorems list. [Discrete_Summation] title = Discrete Summation author = Florian Haftmann contributors = Amine Chaieb <> date = 2014-04-13 topic = Mathematics/Combinatorics abstract = These theories introduce basic concepts and proofs about discrete summation: shifts, formal summation, falling factorials and Stirling numbers. As proof of concept, a simple summation conversion is provided. [Open_Induction] title = Open Induction author = Mizuhito Ogawa <>, Christian Sternagel date = 2012-11-02 topic = Mathematics/Combinatorics abstract = A proof of the open induction schema based on J.-C. Raoult, Proving open properties by induction, Information Processing Letters 29, 1988, pp.19-23.
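    For the Derangements Formula entry above, a quick unverified cross-check in Python of the standard recurrence D(n) = (n-1)(D(n-1) + D(n-2)) against a brute-force count:

    from itertools import permutations

    def subfactorial(n):
        # D(0) = 1, D(1) = 0, D(n) = (n - 1) * (D(n - 1) + D(n - 2))
        d = [1, 0]
        for k in range(2, n + 1):
            d.append((k - 1) * (d[k - 1] + d[k - 2]))
        return d[n]

    def count_derangements(n):
        # count permutations of {0, ..., n-1} without a fixpoint
        return sum(1 for p in permutations(range(n))
                   if all(p[i] != i for i in range(n)))

    assert all(subfactorial(n) == count_derangements(n) for n in range(8))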

    This research was supported by the Austrian Science Fund (FWF): J3202.
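    Euler's Partition Theorem (see the Euler_Partition entry above) is likewise easy to test by brute force for small n; an unverified Python sketch:

    # Compare partitions of n into distinct parts with partitions into
    # odd parts (the entry proves their counts equal for all n).
    def partitions(n, min_part=1):
        if n == 0:
            yield ()
        for k in range(min_part, n + 1):
            for rest in partitions(n - k, k):
                yield (k,) + rest

    for n in range(1, 15):
        distinct = [p for p in partitions(n) if len(set(p)) == len(p)]
        odd = [p for p in partitions(n) if all(k % 2 == 1 for k in p)]
        assert len(distinct) == len(odd)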

    [Category] title = Category Theory to Yoneda's Lemma author = Greg O'Keefe date = 2005-04-21 topic = Mathematics/Category Theory license = LGPL abstract = This development proves Yoneda's lemma and aims to be readable by humans. It only defines what is needed for the lemma: categories, functors and natural transformations. Limits, adjunctions and other important concepts are not included. extra-history = Change history: [2010-04-23]: The definition of the constant equinumerous was slightly too weak in the original submission and has been fixed in revision 8c2b5b3c995f. [Category2] title = Category Theory author = Alexander Katovsky date = 2010-06-20 topic = Mathematics/Category Theory abstract = This article presents a development of Category Theory in Isabelle/HOL. A Category is defined using records and locales. Functors and Natural Transformations are also defined. The main result that has been formalized is that the Yoneda functor is a full and faithful embedding. We also formalize the completeness of many sorted monadic equational logic. Extensive use is made of the HOLZF theory in both cases. For an informal description see here [pdf]. [FunWithFunctions] title = Fun With Functions author = Tobias Nipkow date = 2008-08-26 topic = Mathematics/Misc abstract = This is a collection of cute puzzles of the form ``Show that if a function satisfies the following constraints, it must be ...'' Please add further examples to this collection! [FunWithTilings] title = Fun With Tilings author = Tobias Nipkow , Lawrence C. Paulson date = 2008-11-07 topic = Mathematics/Misc abstract = Tilings are defined inductively. It is shown that one form of mutilated chess board cannot be tiled with dominoes, while another one can be tiled with L-shaped tiles. Please add further fun examples of this kind! [Lazy-Lists-II] title = Lazy Lists II author = Stefan Friedrich <> date = 2004-04-26 topic = Computer Science/Data Structures abstract = This theory contains some useful extensions to the LList (lazy list) theory by Larry Paulson, including finite, infinite, and positive llists over an alphabet, as well as the new constants take and drop and the prefix order of llists. Finally, the notions of safety and liveness in the sense of Alpern and Schneider (1985) are defined. -depends-on = Coinductive [Ribbon_Proofs] title = Ribbon Proofs author = John Wickerson date = 2013-01-19 topic = Computer Science/Programming Languages/Logics abstract = This document concerns the theory of ribbon proofs: a diagrammatic proof system, based on separation logic, for verifying program correctness. We include the syntax, proof rules, and soundness results for two alternative formalisations of ribbon proofs.

    Compared to traditional proof outlines, ribbon proofs emphasise the structure of a proof, so are intelligible and pedagogical. Because they contain less redundancy than proof outlines, and allow each proof step to be checked locally, they may be more scalable. Where proof outlines are cumbersome to modify, ribbon proofs can be visually manoeuvred to yield proofs of variant programs. [Koenigsberg_Friendship] title = The Königsberg Bridge Problem and the Friendship Theorem author = Wenda Li date = 2013-07-19 topic = Mathematics/Graph Theory -depends-on = Dijkstra_Shortest_Path abstract = This development provides a formalization of undirected graphs and simple graphs, which are based on Benedikt Nordhoff and Peter Lammich's simple formalization of labelled directed graphs in the archive. Then, with our formalization of graphs, we show both necessary and sufficient conditions for Eulerian trails and circuits as well as the fact that the Königsberg Bridge Problem does not have a solution. In addition, we show the Friendship Theorem in simple graphs. [IEEE_Floating_Point] title = A Formal Model of IEEE Floating Point Arithmetic author = Lei Yu date = 2013-07-27 topic = Computer Science/Data Structures abstract = This development provides a formal model of IEEE-754 floating-point arithmetic. This formalization, including formal specification of the standard and proofs of important properties of floating-point arithmetic, forms the foundation for verifying programs with floating-point computation. There is also a code generation setup for floats so that we can execute programs using this formalization in functional programming languages. [Native_Word] title = Native Word author = Andreas Lochbihler contributors = Peter Lammich date = 2013-09-17 topic = Computer Science/Data Structures abstract = This entry makes machine words and machine arithmetic available for code generation from Isabelle/HOL. It provides a common abstraction that hides the differences between the different target languages. The code generator maps these operations to the APIs of the target languages. Apart from that, we extend the available bit operations on types int and integer, and map them to the operations in the target languages. extra-history = Change history: [2013-11-06]: added conversion function between native words and characters (revision fd23d9a7fe3a) [2014-03-31]: added words of default size in the target language (by Peter Lammich) (revision 25caf5065833) [2014-10-06]: proper test setup with compilation and execution of tests in all target languages (revision 5d7a1c9ae047) [XML] title = XML author = Christian Sternagel , René Thiemann date = 2014-10-03 topic = Computer Science/Functional Programming, Computer Science/Data Structures -depends-on = Show, Partial_Function_MR, Certification_Monads abstract = This entry provides an XML library for Isabelle/HOL. This includes parsing and pretty printing of XML trees as well as combinators for transforming XML trees into arbitrary user-defined data. The main contribution of this entry is an interface (fit for code generation) that allows for communication between verified programs formalized in Isabelle/HOL and the outside world via XML. This library was developed as part of the IsaFoR/CeTA project to which we refer for examples of its usage. [HereditarilyFinite] title = The Hereditarily Finite Sets author = Lawrence C. 
Paulson date = 2013-11-17 topic = Logic abstract = The theory of hereditarily finite sets is formalised, following the development of Swierczkowski. An HF set is a finite collection of other HF sets; they enjoy an induction principle and satisfy all the axioms of ZF set theory apart from the axiom of infinity, which is negated. All constructions that are possible in ZF set theory (Cartesian products, disjoint sums, natural numbers, functions) without using infinite sets are possible here. The definition of addition for the HF sets follows Kirby. This development forms the foundation for the Isabelle proof of Gödel's incompleteness theorems, which has been formalised separately. extra-history = Change history: [2015-02-23]: Added the theory "Finitary" defining the class of types that can be embedded in hf, including int, char, option, list, etc. [Incompleteness] title = Gödel's Incompleteness Theorems author = Lawrence C. Paulson -depends-on = HereditarilyFinite date = 2013-11-17 topic = Logic abstract = Gödel's two incompleteness theorems are formalised, following a careful presentation by Swierczkowski, in the theory of hereditarily finite sets. This represents the first ever machine-assisted proof of the second incompleteness theorem. Compared with traditional formalisations using Peano arithmetic (see e.g. Boolos), coding is simpler, with no need to formalise the notion of multiplication (let alone that of a prime number) in the formalised calculus upon which the theorem is based. However, other technical problems had to be solved in order to complete the argument. [Finite_Automata_HF] title = Finite Automata in Hereditarily Finite Set Theory author = Lawrence C. Paulson date = 2015-02-05 topic = Computer Science/Automata and Formal Languages -depends-on = HereditarilyFinite abstract = Finite Automata, both deterministic and non-deterministic, for regular languages. The Myhill-Nerode Theorem. Closure under intersection, concatenation, etc. Regular expressions define regular languages. Closure under reversal; the powerset construction mapping NFAs to DFAs. Left and right languages; minimal DFAs. Brzozowski's minimization algorithm. Uniqueness up to isomorphism of minimal DFAs. [Decreasing-Diagrams] title = Decreasing Diagrams author = Harald Zankl license = LGPL -depends-on = Abstract-Rewriting date = 2013-11-01 topic = Logic/Rewriting abstract = This theory contains a formalization of decreasing diagrams showing that any locally decreasing abstract rewrite system is confluent. We consider the valley (van Oostrom, TCS 1994) and the conversion version (van Oostrom, RTA 2008) and closely follow the original proofs. As an application we prove Newman's lemma. [Decreasing-Diagrams-II] title = Decreasing Diagrams II author = Bertram Felgenhauer license = LGPL -depends-on = Abstract-Rewriting date = 2015-08-20 topic = Logic/Rewriting abstract = This theory formalizes the commutation version of decreasing diagrams for Church-Rosser modulo. The proof follows Felgenhauer and van Oostrom (RTA 2013). The theory also provides important specializations, in particular van Oostrom’s conversion version (TCS 2008) of decreasing diagrams. [GoedelGod] title = Gödel's God in Isabelle/HOL author = Christoph Benzmüller , Bruno Woltzenlogel Paleo date = 2013-11-12 topic = Logic abstract = Dana Scott's version of Gödel's proof of God's existence is formalized in quantified modal logic KB (QML KB). 
QML KB is modeled as a fragment of classical higher-order logic (HOL); thus, the formalization is essentially a formalization in HOL. [Tail_Recursive_Functions] title = A General Method for the Proof of Theorems on Tail-recursive Functions author = Pasquale Noce date = 2013-12-01 topic = Computer Science/Functional Programming abstract =

    Tail-recursive function definitions are sometimes more straightforward than alternatives, but proving theorems on them may be roundabout because of the peculiar form of the resulting recursion induction rules.

    This paper describes a proof method that provides a general solution to this problem by means of suitable invariants over inductive sets, and illustrates the application of this method by examining two case studies.

    [CryptoBasedCompositionalProperties] title = Compositional Properties of Crypto-Based Components author = Maria Spichkova date = 2014-01-11 topic = Computer Science/Security abstract = This paper presents an Isabelle/HOL set of theories which allows the specification of crypto-based components and the verification of their composition properties w.r.t. cryptographic aspects. We introduce a formalisation of the security property of data secrecy, the corresponding definitions and proofs. Please note that here we import the Isabelle/HOL theory ListExtras.thy, presented in the AFP entry FocusStreamsCaseStudies-AFP. [Featherweight_OCL] title = Featherweight OCL: A Proposal for a Machine-Checked Formal Semantics for OCL 2.5 author = Achim D. Brucker , Frédéric Tuong , Burkhart Wolff date = 2014-01-16 topic = Computer Science/System Description Languages abstract = The Unified Modeling Language (UML) is one of the few modeling languages that is widely used in industry. While UML is mostly known as a diagrammatic modeling language (e.g., visualizing class models), it is complemented by a textual language, called Object Constraint Language (OCL). The current version of OCL is based on a four-valued logic that turns UML into a formal language. Any type comprises the elements "invalid" and "null" which are propagated as strict and non-strict, respectively. Unfortunately, the former semi-formal semantics of this specification language, captured in the "Annex A" of the OCL standard, leads to different interpretations of corner cases. We formalize the core of OCL: denotational definitions, a logical calculus and operational rules that allow for the execution of OCL expressions by a mixture of term rewriting and code compilation. Our formalization reveals several inconsistencies and contradictions in the current version of the OCL standard. Overall, this document is intended to provide the basis for a machine-checked text "Annex A" of the OCL standard targeted at tool implementors. extra-history = Change history: [2015-10-13]: afp-devel@ea3b38fc54d6 and hol-testgen@12148
       Update of Featherweight OCL including a change in the abstract.
    [2014-01-16]: afp-devel@9091ce05cb20 and hol-testgen@10241
    New Entry: Featherweight OCL [Relation_Algebra] title = Relation Algebra author = Alasdair Armstrong , Simon Foster , Georg Struth , Tjark Weber date = 2014-01-25 topic = Mathematics/Algebra -depends-on = Kleene_Algebra abstract = Tarski's algebra of binary relations is formalised along the lines of the standard textbooks of Maddux and Schmidt and Ströhlein. This includes relation-algebraic concepts such as subidentities, vectors and a domain operation as well as various notions associated to functions. Relation algebras are also expanded by a reflexive transitive closure operation, and they are linked with Kleene algebras and models of binary relations and Boolean matrices. [Secondary_Sylow] title = Secondary Sylow Theorems author = Jakob von Raumer date = 2014-01-28 topic = Mathematics/Algebra abstract = These theories extend the existing proof of the first Sylow theorem (written by Florian Kammueller and L. C. Paulson) by what are often called the second, third and fourth Sylow theorems. These theorems state propositions about the number of Sylow p-subgroups of a group and the fact that they are conjugate to each other. The proofs make use of an implementation of group actions and their properties. [Jordan_Hoelder] title = The Jordan-Hölder Theorem author = Jakob von Raumer date = 2014-09-09 topic = Mathematics/Algebra -depends-on = Secondary_Sylow abstract = This submission contains theories that lead to a formalization of the proof of the Jordan-Hölder theorem about composition series of finite groups. The theories formalize the notions of isomorphism classes of groups, simple groups, normal series, composition series, and maximal normal subgroups. Furthermore, they provide proofs of the second isomorphism theorem for groups, the characterization theorem for maximal normal subgroups as well as many useful lemmas about normal subgroups and factor groups. The proof is inspired by course notes of Stuart Rankin. [Cayley_Hamilton] title = The Cayley-Hamilton Theorem author = Stephan Adelsberger , Stefan Hetzl , Florian Pollak date = 2014-09-15 topic = Mathematics/Algebra abstract = This document contains a proof of the Cayley-Hamilton theorem based on the development of matrices in HOL/Multivariate Analysis. [Probabilistic_Noninterference] title = Probabilistic Noninterference author = Andrei Popescu , Johannes Hölzl date = 2014-03-11 topic = Computer Science/Security abstract = We formalize probabilistic noninterference for a multi-threaded language with uniform scheduling, where probabilistic behaviour comes from both the scheduler and the individual threads. We define notions of probabilistic noninterference in two variants: resumption-based and trace-based. For the resumption-based notions, we prove compositionality w.r.t. the language constructs and establish sound type-system-like syntactic criteria. This is a formalization of the mathematical development presented at CPP 2013 and CALCO 2013. It is the probabilistic variant of the Possibilistic Noninterference AFP entry. [HyperCTL] title = A shallow embedding of HyperCTL* author = Markus N. Rabe , Peter Lammich , Andrei Popescu date = 2014-04-16 topic = Computer Science/Security, Logic abstract = We formalize HyperCTL*, a temporal logic for expressing security properties. We first define a shallow embedding of HyperCTL*, within which we prove inductive and coinductive rules for the operators. Then we show that a HyperCTL* formula captures Goguen-Meseguer noninterference, a landmark information flow property. 
    We also define a deep embedding and connect it to the shallow embedding by a denotational semantics, for which we prove sanity w.r.t. dependence on the free variables. Finally, we show that under some finiteness assumptions about the model, noninterference is given by a (finitary) syntactic formula. [Bounded_Deducibility_Security] title = Bounded-Deducibility Security author = Andrei Popescu , Peter Lammich date = 2014-04-22 topic = Computer Science/Security abstract = This is a formalization of bounded-deducibility security (BD security), a flexible notion of information-flow security applicable to arbitrary input-output automata. It generalizes Sutherland's classic notion of nondeducibility by factoring in declassification bounds and triggers. Whereas nondeducibility states that, in a system, information cannot flow between specified sources and sinks, BD security indicates upper bounds for the flow and triggers under which these upper bounds are no longer guaranteed. [Network_Security_Policy_Verification] title = Network Security Policy Verification author = Cornelius Diekmann date = 2014-07-04 topic = Computer Science/Security abstract = We present a unified theory for verifying network security policies. A security policy is represented as a directed graph. To check high-level security goals, security invariants over the policy are expressed. We cover monotonic security invariants, i.e. prohibiting more does not harm security. We provide the following contributions for the security invariant theory.
    • Secure auto-completion of scenario-specific knowledge, which eases usability.
    • Security violations can be repaired by tightening the policy iff the security invariants hold for the deny-all policy.
    • An algorithm to compute a security policy.
    • A formalization of stateful connection semantics in network security mechanisms.
    • An algorithm to compute a secure stateful implementation of a policy.
    • An executable implementation of all the theory.
    • Examples, ranging from an aircraft cabin data network to the analysis of a large real-world firewall.
    • More examples: A fully automated translation of high-level security goals to both firewall and SDN configurations (see Examples/Distributed_WebApp.thy).
    For a detailed description, see extra-history = Change history: [2015-04-14]: Added Distributed WebApp example and improved graphviz visualization (revision 4dde08ca2ab8)
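    To illustrate the "policy as directed graph" view of the Network_Security_Policy_Verification entry above, here is a toy, unverified Python sketch with an example monotone invariant; all host names and the invariant itself are invented for this sketch and are not taken from the entry:

    # Toy model: a security policy is a directed graph over hosts; the
    # example invariant forbids any flow from an untrusted host to 'db'.
    policy = {
        'web': {'app'},
        'app': {'db'},
        'guest': {'web'},   # offending edge for the invariant below
    }
    untrusted = {'guest'}

    def reachable(g, start):
        seen, todo = set(), [start]
        while todo:
            v = todo.pop()
            if v not in seen:
                seen.add(v)
                todo.extend(g.get(v, ()))
        return seen

    def invariant_holds(g):
        return all('db' not in reachable(g, u) for u in untrusted)

    print(invariant_holds(policy))  # False: guest -> web -> app -> db
    # Monotonicity: removing edges (prohibiting more) cannot break it.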
    [Abstract_Completeness] title = Abstract Completeness author = Jasmin Christian Blanchette , Andrei Popescu , Dmitriy Traytel date = 2014-04-16 topic = Logic abstract = A formalization of an abstract property of possibly infinite derivation trees (modeled by a codatatype), representing the core of a proof (in Beth/Hintikka style) of the first-order logic completeness theorem, independent of the concrete syntax or inference rules. This work is described in detail in the IJCAR 2014 publication by the authors. The abstract proof can be instantiated for a wide range of Gentzen and tableau systems as well as various flavors of FOL---e.g., with or without predicates, equality, or sorts. Here, we give only a toy example instantiation with classical propositional logic. A more serious instance---many-sorted FOL with equality---is described elsewhere [Blanchette and Popescu, FroCoS 2013]. [Pop_Refinement] title = Pop-Refinement author = Alessandro Coglio date = 2014-07-03 topic = Computer Science/Refinement abstract = Pop-refinement is an approach to stepwise refinement, carried out inside an interactive theorem prover by constructing a monotonically decreasing sequence of predicates over deeply embedded target programs. The sequence starts with a predicate that characterizes the possible implementations, and ends with a predicate that characterizes a unique program in explicit syntactic form. Pop-refinement enables more requirements (e.g. program-level and non-functional) to be captured in the initial specification and preserved through refinement. Security requirements expressed as hyperproperties (i.e. predicates over sets of traces) are always preserved by pop-refinement, unlike the popular notion of refinement as trace set inclusion. Two simple examples in Isabelle/HOL are presented, featuring program-level requirements, non-functional requirements, and hyperproperties. [VectorSpace] title = Vector Spaces author = Holden Lee date = 2014-08-29 topic = Mathematics/Algebra abstract = This formalisation of basic linear algebra is based completely on locales, building off HOL-Algebra. It includes basic definitions: linear combinations, span, linear independence; linear transformations; interpretation of function spaces as vector spaces; the direct sum of vector spaces, sum of subspaces; the replacement theorem; existence of bases in finite-dimensional vector spaces; definition of dimension; the rank-nullity theorem. Some concepts are actually defined and proved for modules as they also apply there. Infinite-dimensional vector spaces are supported, but dimension is only supported for finite-dimensional vector spaces. The proofs are standard; the proofs of the replacement theorem and rank-nullity theorem roughly follow the presentation in Linear Algebra by Friedberg, Insel, and Spence. The rank-nullity theorem generalises the existing development in the Archive of Formal Proofs (originally using type classes, now using a mix of type classes and locales). [Special_Function_Bounds] title = Real-Valued Special Functions: Upper and Lower Bounds author = Lawrence C. Paulson date = 2014-08-29 topic = Mathematics/Analysis abstract = This development proves upper and lower bounds for several familiar real-valued functions. For sin, cos, exp and sqrt, it defines and verifies infinite families of upper and lower bounds, mostly based on Taylor series expansions. 
For arctan, ln and exp, it verifies a finite collection of upper and lower bounds, originally obtained from the functions' continued fraction expansions using the computer algebra system Maple. A common theme in these proofs is to take the difference between a function and its approximation, which should be zero at one point, and then consider the sign of the derivative. The immediate purpose of this development is to verify axioms used by MetiTarski, an automatic theorem prover for real-valued special functions. Crucial to MetiTarski's operation is the provision of upper and lower bounds for each function of interest. [Landau_Symbols] title = Landau Symbols author = Manuel Eberl date = 2015-07-14 topic = Mathematics/Analysis abstract = This entry provides Landau symbols to describe and reason about the asymptotic growth of functions for sufficiently large inputs. A number of simplification procedures are provided for additional convenience: cancelling of dominated terms in sums under a Landau symbol, cancelling of common factors in products, and a decision procedure for Landau expressions containing products of powers of functions like x, ln(x), ln(ln(x)) etc. [Akra_Bazzi] title = The Akra-Bazzi theorem and the Master theorem author = Manuel Eberl date = 2015-07-14 topic = Mathematics/Analysis abstract = This article contains a formalisation of the Akra-Bazzi method based on a proof by Leighton. It is a generalisation of the well-known Master Theorem for analysing the complexity of Divide & Conquer algorithms. We also include a generalised version of the Master theorem based on the Akra-Bazzi theorem, which is easier to apply than the Akra-Bazzi theorem itself.
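    For orientation, the recurrences treated by the Master theorem and by Akra-Bazzi have the standard shapes below (schematic LaTeX in the notation of the general literature, not the entry's exact Isabelle statement):

    % Master theorem: divide-and-conquer recurrence with equal subproblems
    T(n) = a\,T\!\left(\tfrac{n}{b}\right) + f(n), \qquad a \ge 1,\; b > 1
    % Akra-Bazzi generalization: unequal splits with small perturbations h_i
    T(x) = g(x) + \sum_{i=1}^{k} a_i\,T(b_i x + h_i(x)), \qquad 0 < b_i < 1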

    Some proof methods that facilitate applying the Master theorem are also included. For a more detailed explanation of the formalisation and the proof methods, see the accompanying paper (publication forthcoming). -depends-on = Landau_Symbols [Cartan_FP] title = The Cartan Fixed Point Theorems author = Lawrence Paulson date = 2016-03-08 topic = Mathematics/Analysis abstract = The Cartan fixed point theorems concern the group of holomorphic automorphisms on a connected open set of Cn. Ciolli et al. have formalised the one-dimensional case of these theorems in HOL Light. This entry contains their proofs, ported to Isabelle/HOL. Thus it addresses the authors' remark that "it would be important to write a formal proof in a language that can be read by both humans and machines". [Gauss_Jordan] title = Gauss-Jordan Algorithm and Its Applications author = Jose Divasón , Jesús Aransay topic = Computer Science/Algorithms date = 2014-09-03 abstract = The Gauss-Jordan algorithm states that any matrix over a field can be transformed by means of elementary row operations to a matrix in reduced row echelon form. The formalization is based on the Rank Nullity Theorem entry of the AFP and on the HOL-Multivariate-Analysis session of Isabelle, where matrices are represented as functions over finite types. We have set up the code generator to make this representation executable. In order to improve the performance, a refinement to immutable arrays has been carried out. We have formalized some of the applications of the Gauss-Jordan algorithm. Thanks to this development, the following facts can be computed over matrices whose elements belong to a field: Ranks, Determinants, Inverses, Bases and dimensions and Solutions of systems of linear equations. Code can be exported to SML and Haskell. [Echelon_Form] title = Echelon Form author = Jose Divasón , Jesús Aransay topic = Computer Science/Algorithms, Mathematics/Algebra date = 2015-02-12 abstract = We formalize an algorithm to compute the Echelon Form of a matrix. We have proved its existence over Bézout domains and made it executable over Euclidean domains, such as the integer ring and the univariate polynomials over a field. This allows us to compute determinants, inverses and characteristic polynomials of matrices. The work is based on the HOL-Multivariate Analysis library, and on both the Gauss-Jordan and Cayley-Hamilton AFP entries. As a by-product, some algebraic structures have been implemented (principal ideal domains, Bézout domains...). The algorithm has been refined to immutable arrays and code can be generated to functional languages as well. [QR_Decomposition] title = QR Decomposition author = Jose Divasón , Jesús Aransay topic = Computer Science/Algorithms, Mathematics/Algebra date = 2015-02-12 abstract = QR decomposition is an algorithm to decompose a real matrix A into the product of two other matrices Q and R, where Q is orthogonal and R is invertible and upper triangular. The algorithm is useful for the least squares problem; i.e., the computation of the best approximation of an unsolvable system of linear equations. As a side-product, the Gram-Schmidt process has also been formalized. A refinement using immutable arrays is presented as well. The development relies, among others, on the AFP entry "Implementing field extensions of the form Q[sqrt(b)]" by René Thiemann, which allows execution of the algorithm using symbolic computations. Verified code can be generated and executed using floats as well. 
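    The reduced row echelon form computed by the Gauss_Jordan entry above can be sketched in a few lines of unverified Python over exact rationals; this mirrors the textbook algorithm, not the entry's refined, code-generated implementation:

    from fractions import Fraction

    def rref(rows):
        # Gauss-Jordan elimination to reduced row echelon form.
        m = [[Fraction(x) for x in row] for row in rows]
        lead = 0
        for r in range(len(m)):
            if lead >= len(m[0]):
                return m
            i = r
            while m[i][lead] == 0:          # find a pivot row
                i += 1
                if i == len(m):
                    i, lead = r, lead + 1
                    if lead == len(m[0]):
                        return m
            m[i], m[r] = m[r], m[i]
            m[r] = [x / m[r][lead] for x in m[r]]   # normalize pivot to 1
            for j in range(len(m)):                  # clear the pivot column
                if j != r and m[j][lead] != 0:
                    m[j] = [a - m[j][lead] * b for a, b in zip(m[j], m[r])]
            lead += 1
        return m

    print(rref([[1, 2, 1], [2, 4, 0], [1, 1, 1]]))  # yields the identity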
extra-history = Change history: [2015-06-18]: The second part of the Fundamental Theorem of Linear Algebra has been generalized to more general inner product spaces. [Hermite] title = Hermite Normal Form author = Jose Divasón , Jesús Aransay topic = Computer Science/Algorithms, Mathematics/Algebra date = 2015-07-07 abstract = Hermite Normal Form is a canonical matrix analogue of Reduced Echelon Form, but involving matrices over more general rings. In this work we formalise an algorithm to compute the Hermite Normal Form of a matrix by means of elementary row operations, taking advantage of the Echelon Form AFP entry. We have proven the correctness of such an algorithm and refined it to immutable arrays. Furthermore, we have also formalised the uniqueness of the Hermite Normal Form of a matrix. Code can be exported and some examples of execution involving integer matrices and polynomial matrices are presented as well. [Imperative_Insertion_Sort] title = Imperative Insertion Sort author = Christian Sternagel date = 2014-09-25 topic = Computer Science/Algorithms abstract = The insertion sort algorithm of Cormen et al. (Introduction to Algorithms) is expressed in Imperative HOL and proved to be correct and terminating. For this purpose we also provide a theory about imperative loop constructs with accompanying induction/invariant rules for proving partial and total correctness. Furthermore, the formalized algorithm is fit for code generation. [Stream_Fusion_Code] title = Stream Fusion in HOL with Code Generation author = Andreas Lochbihler , Alexandra Maximova date = 2014-10-10 topic = Computer Science/Functional Programming abstract = Stream Fusion is a system for removing intermediate list data structures from functional programs, in particular Haskell. This entry adapts stream fusion to Isabelle/HOL and its code generator. We define stream types for finite and possibly infinite lists and stream versions for most of the fusible list functions in the theories List and Coinductive_List, and prove them correct with respect to the conversion functions between lists and streams. The Stream Fusion transformation itself is implemented as a simproc in the preprocessor of the code generator. [Brian Huffman's AFP entry formalises stream fusion in HOLCF for the domain of lazy lists to prove the GHC compiler rewrite rules correct. In contrast, this work enables Isabelle's code generator to perform stream fusion itself. To that end, it covers both finite and coinductive lists from the HOL library and the Coinductive entry. The fusible list functions require specification and proof principles different from Huffman's.] [Case_Labeling] title = Generating Cases from Labeled Subgoals author = Lars Noschinski date = 2015-07-21 topic = Tools, Computer Science/Programming Languages/Misc abstract = Isabelle/Isar provides named cases to structure proofs. This article contains an implementation of a proof method casify, which can be used to easily extend proof tools with support for named cases. Such a proof tool must produce labeled subgoals, which are then interpreted by casify.

    As examples, this work contains verification condition generators producing named cases for three languages: The Hoare language from HOL/Library, a monadic language for computations with failure (inspired by the AutoCorres tool), and a language of conditional expressions. These VCGs are demonstrated by a number of example programs. [DPT-SAT-Solver] title = A Fast SAT Solver for Isabelle in Standard ML topic = Tools author = Armin Heller <> date = 2009-12-09 abstract = This contribution contains a fast SAT solver for Isabelle written in Standard ML. By loading the theory DPT_SAT_Solver, the SAT solver installs itself (under the name ``dptsat'') and certain Isabelle tools like Refute will start using it automatically. This is a port of the DPT (Decision Procedure Toolkit) SAT Solver written in OCaml. [Rep_Fin_Groups] title = Representations of Finite Groups topic = Mathematics/Algebra author = Jeremy Sylvestre date = 2015-08-12 abstract = We provide a formal framework for the theory of representations of finite groups, as modules over the group ring. Along the way, we develop the general theory of groups (relying on the group_add class for the basics), modules, and vector spaces, to the extent required for theory of group representations. We then provide formal proofs of several important introductory theorems in the subject, including Maschke's theorem, Schur's lemma, and Frobenius reciprocity. We also prove that every irreducible representation is isomorphic to a submodule of the group ring, leading to the fact that for a finite group there are only finitely many isomorphism classes of irreducible representations. In all of this, no restriction is made on the characteristic of the ring or field of scalars until the definition of a group representation, and then the only restriction made is that the characteristic must not divide the order of the group. [Noninterference_Inductive_Unwinding] title = The Inductive Unwinding Theorem for CSP Noninterference Security topic = Computer Science/Security author = Pasquale Noce date = 2015-08-18 -depends-on = Noninterference_Ipurge_Unwinding abstract =

    The necessary and sufficient condition for CSP noninterference security stated by the Ipurge Unwinding Theorem is expressed in terms of a pair of event lists varying over the set of process traces. This does not make it suitable for the subsequent application of rule induction in the case of a process defined inductively, since rule induction is instead applied to a single variable ranging over an inductively defined set.

    Starting from the Ipurge Unwinding Theorem, this paper derives a necessary and sufficient condition for CSP noninterference security that involves a single event list varying over the set of process traces, and is thus suitable for rule induction; hence its name, Inductive Unwinding Theorem. Similarly to the Ipurge Unwinding Theorem, the new theorem only requires considering individual accepted and refused events for each process trace, and it applies to the general case of a possibly intransitive noninterference policy. Specific variants of this theorem are additionally proven for deterministic processes and trace set processes.

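The trace-based security condition above can be pictured, very roughly, with the classical transitive purge construction. The following Python sketch is only a toy rendition of that general idea, not of the CSP formalisation itself; the policy, the event naming scheme, and all function names are invented for this illustration.

    # Toy purge-based noninterference check (illustrative only; all names
    # are hypothetical and unrelated to the Isabelle sources).
    policy = {("Low", "Low"): True, ("High", "High"): True,
              ("High", "Low"): False, ("Low", "High"): True}

    def dom(event):
        """Assign a security domain to an event by its name prefix."""
        return "High" if event.startswith("h_") else "Low"

    def purge(trace, domain):
        """Drop events whose domain may not interfere with `domain`."""
        return [e for e in trace if policy[(dom(e), domain)]]

    def secure(traces):
        """Low observations must be unchanged by purging High events."""
        low = lambda t: [e for e in t if dom(e) == "Low"]
        return all(low(t) == low(purge(t, "Low")) for t in traces)

    print(secure([["l_in", "h_secret", "l_out"]]))  # True

Note that this sketch covers only the transitive case; the possibly intransitive policies handled by the Ipurge and Inductive Unwinding Theorems require a considerably more involved purge.
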
[Jordan_Normal_Form]
title = Matrices, Jordan Normal Forms, and Spectral Radius Theory
topic = Mathematics/Algebra
author = René Thiemann, Akihisa Yamada
date = 2015-08-21
-depends-on = Containers, Gauss_Jordan, Matrix, Abstract-Rewriting, Show, Polynomial_Interpolation, VectorSpace
abstract =

    Matrix interpretations are useful as measure functions in termination proving. In order to use these interpretations also for complexity analysis, the growth rate of matrix powers has to be examined. Here, we formalized a central result of spectral radius theory, namely that the growth rate is polynomially bounded if and only if the spectral radius of a matrix is at most one.

    To formally prove this result we first studied the growth rates of matrices in Jordan normal form, and partially proved the result that every complex matrix has a Jordan normal form: we are restricted to upper-triangular matrices, since we had not yet formalized the Schur decomposition.

    The whole development is based on a new abstract type for matrices, which is also executable by a suitable setup of the code generator. It completely subsumes our former AFP entry on executable matrices, and its main advantage is its close connection to the HMA representation, which allowed us to easily adapt existing proofs on determinants.

    All the results have been applied to improve CeTA, our certifier for validating termination and complexity proof certificates.

extra-history =
    Change history:
    [2016-01-07]: Added Schur decomposition, Gram-Schmidt orthogonalization, uniqueness of Jordan normal forms.

[LTL_to_DRA]
title = Converting Linear Temporal Logic to Deterministic (Generalized) Rabin Automata
topic = Computer Science/Automata and Formal Languages
author = Salomon Sickert
date = 2015-09-04
-depends-on = Boolean_Expression_Checkers, List-Index, KBPs
abstract =
    Recently, Javier Esparza and Jan Kretinsky proposed a new method for directly translating linear temporal logic (LTL) formulas to deterministic (generalized) Rabin automata. Compared to the existing approaches, which construct a non-deterministic Büchi automaton in a first step and then apply a determinization procedure (e.g. some variant of Safra's construction) in a second step, this new approach preserves a relation between the formula and the states of the resulting automaton. While the old approach produced a monolithic structure, the new method is compositional. Furthermore, in some cases the resulting automata are much smaller than the automata generated by existing approaches. In order to ensure the correctness of the construction, this entry contains a complete formalisation and verification of the translation. Furthermore, executable code is generated from this basis.
extra-history =
    Change history:
    [2015-09-23]: Enable code export for the eager unfolding optimisation and reduce the running time of the generated tool. Moreover, add support for the mlton SML compiler.

[Timed_Automata]
title = Timed Automata
author = Simon Wimmer
date = 2016-03-08
topic = Computer Science/Automata and Formal Languages
abstract =
    Timed automata are a widely used formalism for modeling real-time systems, which is employed in successful model checkers such as UPPAAL [LPY97], HyTech [HHWt97] or Kronos [Yov97]. This work formalizes the theory for the subclass of diagonal-free timed automata, which is sufficient to model many interesting problems. We first define the basic concepts and semantics of diagonal-free timed automata. Based on this, we prove two types of decidability results for the language emptiness problem. The first is the classic result of Alur and Dill [AD90, AD94], which uses a finite partitioning of the state space into so-called `regions`. Our second result focuses on an approach based on `Difference Bound Matrices (DBMs)`, which is practically used by model checkers. We prove the correctness of the basic forward analysis operations on DBMs. One of these operations is the Floyd-Warshall algorithm for the all-pairs shortest paths problem (a small sketch follows these entries). To obtain a finite search space, a widening operation has to be used for this kind of analysis. We use Patricia Bouyer's [Bou04] approach to prove that this widening operation is correct in the sense that DBM-based forward analysis in combination with the widening operation also decides language emptiness. The interesting property of this proof is that the first decidability result is reused to obtain the second one.

[Parity_Game]
title = Positional Determinacy of Parity Games
author = Christoph Dittmann
date = 2015-11-02
topic = Computer Science/Games
-depends-on = Coinductive
abstract =
    We present a formalization of parity games (a two-player game on directed graphs) and a proof of their positional determinacy in Isabelle/HOL. This proof works for both finite and infinite games.
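The Floyd-Warshall operation on DBMs referenced in the Timed_Automata abstract above can be pictured with a short sketch. This is a minimal, non-authoritative Python rendition under simplifying assumptions (bounds are plain numbers, whereas real DBMs also record whether each bound is strict), not the verified Isabelle code.

    # Canonicalise a Difference Bound Matrix: entry m[i][j] bounds the
    # clock difference x_i - x_j; float("inf") means "unbounded".
    INF = float("inf")

    def canonicalize(m):
        """Floyd-Warshall all-pairs shortest paths; tightens all bounds."""
        n = len(m)
        for k in range(n):
            for i in range(n):
                for j in range(n):
                    if m[i][k] + m[k][j] < m[i][j]:
                        m[i][j] = m[i][k] + m[k][j]
        return m

    # x1 - x0 <= 3 and x2 - x1 <= 2 together imply x2 - x0 <= 5.
    dbm = [[0, INF, INF],
           [3, 0, INF],
           [INF, 2, 0]]
    assert canonicalize(dbm)[2][0] == 5
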
[Ergodic_Theory]
title = Ergodic Theory
author = Sebastien Gouezel
date = 2015-12-01
topic = Mathematics/Probability Theory
abstract =
    Ergodic theory is the branch of mathematics that studies the behaviour of measure preserving transformations, in finite or infinite measure. It interacts both with probability theory (mainly through measure theory) and with geometry, as a lot of interesting examples are of geometric origin. We implement the first definitions and theorems of ergodic theory, including notably the Poincaré recurrence theorem for finite measure preserving systems (together with the notion of conservativity in general), induced maps, Kac's theorem, the Birkhoff theorem (arguably the most important theorem in ergodic theory), and variations around it such as conservativity of the corresponding skew product, or the Atkinson lemma.

[Latin_Square]
title = Latin Square
author = Alexander Bentkamp
date = 2015-12-02
topic = Mathematics/Combinatorics
-depends-on = Marriage
abstract =
    A Latin Square is an n x n table filled with integers from 1 to n, where each number appears exactly once in each row and each column. A Latin Rectangle is a partially filled n x n table with r filled rows and n-r empty rows, such that each number appears at most once in each row and each column. The main result of this theory is that any Latin Rectangle can be completed to a Latin Square.

[Applicative_Lifting]
title = Applicative Lifting
author = Andreas Lochbihler, Joshua Schneider <>
date = 2015-12-22
topic = Computer Science/Functional Programming
abstract =
    Applicative functors augment computations with effects by lifting function application to types which model the effects. As the structure of the computation cannot depend on the effects, applicative expressions can be analysed statically. This allows us to lift universally quantified equations to the effectful types, as observed by Hinze. Thus, equational reasoning over effectful computations can be reduced to pure types.

    This entry provides a package for registering applicative functors and two proof methods for lifting of equations over applicative functors. The first method normalises applicative expressions according to the laws of applicative functors. This way, equations whose two sides contain the same list of variables can be lifted to every applicative functor.

    To lift larger classes of equations, the second method exploits a number of additional properties (e.g., commutativity of effects), provided that the properties have been declared for the concrete applicative functor at hand upon registration.

    We declare several types from the Isabelle library as applicative functors and illustrate the use of the methods with two examples: the lifting of the arithmetic type class hierarchy to streams and the verification of a relabelling function on binary trees. We also formalise and verify the normalisation algorithm used by the first proof method.

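The lifting idea can be conveyed by a small Python sketch over an option-style functor. The names pure, ap and lift2 and the tuple encoding are invented for this illustration; the actual entry provides Isabelle proof methods rather than executable code of this kind.

    # Applicative lifting over an option/"Maybe" functor: pure embeds a
    # value, ap applies a lifted function, and pure equations lift.
    import operator

    def pure(x):
        return ("Some", x)

    def ap(mf, mx):
        """Lifted function application: absence ("None") propagates."""
        if mf[0] == "None" or mx[0] == "None":
            return ("None",)
        return ("Some", mf[1](mx[1]))

    def lift2(f, mx, my):
        """Lift a binary function through pure/ap."""
        return ap(ap(pure(lambda a: lambda b: f(a, b)), mx), my)

    # The pure equation x + y = y + x still holds after lifting:
    assert lift2(operator.add, pure(1), pure(2)) == lift2(operator.add, pure(2), pure(1))
    assert lift2(operator.add, ("None",), pure(2)) == ("None",)
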
[Stern_Brocot]
title = The Stern-Brocot Tree
author = Peter Gammie, Andreas Lochbihler
date = 2015-12-22
topic = Mathematics/Number Theory
abstract =
    The Stern-Brocot tree contains all rational numbers exactly once and in their lowest terms. We formalise the Stern-Brocot tree as a coinductive tree using recursive and iterative specifications, which we have proven equivalent, and show that it indeed contains all the numbers as stated. Following Hinze, we prove that the Stern-Brocot tree can be linearised looplessly into Stern's diatomic sequence (also known as Dijkstra's fusc function) and that it is a permutation of the Bird tree.

    The reasoning stays at an abstract level by appealing to the uniqueness of solutions of guarded recursive equations and lifting algebraic laws point-wise to trees and streams using applicative functors.

-depends-on = Applicative_Lifting

[Algebraic_Numbers]
title = Algebraic Numbers in Isabelle/HOL
topic = Mathematics/Algebra
author = René Thiemann, Akihisa Yamada
date = 2015-12-22
-depends-on = Sturm_Sequences, Partial_Function_MR, Sqrt_Babylonian, Show, Jordan_Normal_Form, Deriving, Polynomial_Factorization
abstract =
    Based on existing libraries for matrices, factorization of rational polynomials, and Sturm's theorem, we formalized algebraic numbers in Isabelle/HOL. Our development serves as an implementation for real and complex numbers, and it makes it possible to compute roots of and completely factorize real and complex polynomials, provided that all coefficients are rational numbers. Moreover, we provide two implementations to display algebraic numbers: an injective but expensive one, and a faster but approximate version.

    To this end, we mechanized several results on resultants, which also required us to prove that polynomials over a unique factorization domain form again a unique factorization domain.

extra-history =
    Change history:
    [2016-01-29]: Split off Polynomial Interpolation and Polynomial Factorization.

[Polynomial_Interpolation]
title = Polynomial Interpolation
topic = Mathematics/Algebra
author = René Thiemann, Akihisa Yamada
date = 2016-01-29
-depends-on = Sqrt_Babylonian
abstract =
    We formalized three algorithms for polynomial interpolation over arbitrary fields: Lagrange's explicit expression, the recursive algorithm of Neville and Aitken, and Newton interpolation in combination with an efficient implementation of divided differences. Variants of these algorithms for integer polynomials are also available, where the interpolation can sometimes fail; e.g., there is no linear integer polynomial p such that p(0) = 0 and p(2) = 1. Moreover, for Newton interpolation for integer polynomials, we proved that all intermediate results that are computed during the algorithm must be integers. This admits an early failure detection in the implementation. Finally, we proved the uniqueness of polynomial interpolation.

    The development also contains improved code equations to speed up the division of integers in target languages.

[Polynomial_Factorization]
title = Polynomial Factorization
topic = Mathematics/Algebra
author = René Thiemann, Akihisa Yamada
date = 2016-01-29
-depends-on = Polynomial_Interpolation, Partial_Function_MR, Sqrt_Babylonian, Show, Jordan_Normal_Form
abstract =
    Based on existing libraries for polynomial interpolation and matrices, we formalized several factorization algorithms for polynomials, including Kronecker's algorithm for integer polynomials, Yun's square-free factorization algorithm for field polynomials, and Berlekamp's algorithm for polynomials over finite fields. By combining the last one with Hensel's lifting, we derive an efficient factorization algorithm for integer polynomials, which is then lifted to rational polynomials by mechanizing Gauss' lemma. Finally, we assembled a combined factorization algorithm for rational polynomials, which combines all the mentioned algorithms and additionally uses the explicit formula for the roots of quadratic polynomials and a rational root test.

    As side products, we developed division algorithms for polynomials over integral domains, as well as primality-testing and prime-factorization algorithms for integers.

[Formal_SSA]
title = Verified Construction of Static Single Assignment Form
author = Sebastian Ullrich, Denis Lohner
date = 2016-02-05
topic = Computer Science/Algorithms, Computer Science/Programming Languages/Transformations
abstract =
    We define a functional variant of the static single assignment (SSA) form construction algorithm described by Braun et al., which combines simplicity and efficiency. The definition is based on a general, abstract control flow graph representation using Isabelle locales.

    We prove that the algorithm's output is semantically equivalent to the input according to a small-step semantics, and that it is in minimal SSA form for the common special case of reducible inputs. We then show the satisfiability of the locale assumptions by giving instantiations for a simple While language.

    Furthermore, we use a generic instantiation based on typedefs in order to extract OCaml code and replace the unverified SSA construction algorithm of the CompCertSSA project with it.
-depends-on = Slicing, Dijkstra_Shortest_Path

[PropResPI]
title = Propositional Resolution and Prime Implicates Generation
author = Nicolas Peltier
date = 2016-03-11
topic = Logic
abstract =
    We provide formal proofs in Isabelle/HOL (using mostly structured Isar proofs) of the soundness and completeness of the Resolution rule in propositional logic. The completeness proofs take into account the usual redundancy elimination rules (tautology elimination and subsumption), and several refinements of the Resolution rule are considered: ordered resolution (with selection functions), positive and negative resolution, semantic resolution and unit resolution (the latter refinement is complete only for clause sets that are Horn-renamable). We also define a concrete procedure for computing saturated sets and establish its soundness and completeness. The clause sets are not assumed to be finite, so that the results can be applied to formulas obtained by grounding sets of first-order clauses (however, a total ordering among atoms is assumed to be given). Next, we show that the unrestricted Resolution rule is deductively complete, in the sense that it is able to generate all (prime) implicates of any set of propositional clauses (i.e., all entailment-minimal, non-valid, clausal consequences of the considered set). The generation of prime implicates is an important problem, with many applications in artificial intelligence and verification (for abductive reasoning, knowledge compilation, diagnosis, debugging, etc.). We also show that implicates can be computed in an incremental way, by fixing an ordering among all the atoms in the considered sets and resolving upon these atoms one by one in the considered order (with no backtracking). This feature is critical for the efficient computation of prime implicates. Building on these results, we provide a procedure for computing such implicates and establish its soundness and completeness.
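As a rough, non-authoritative illustration of generating implicates by saturation (a Python sketch with invented names; the entry itself is a structured Isar formalisation whose procedure is more refined, e.g. it resolves upon atoms incrementally in a fixed order), the following code saturates a tiny clause set under Resolution with tautology and subsumption elimination, then reads off the subsumption-minimal clauses:

    # Clauses are frozensets of literals; a literal is (atom, polarity).
    def resolve(c1, c2, atom):
        """Resolve c1 and c2 upon atom, if complementary literals occur."""
        if (atom, True) in c1 and (atom, False) in c2:
            return (c1 - {(atom, True)}) | (c2 - {(atom, False)})
        return None

    def tautology(c):
        return any((a, True) in c and (a, False) in c for a, _ in c)

    def prime_implicates(clauses):
        cs = {frozenset(c) for c in clauses if not tautology(c)}
        changed = True
        while changed:
            changed = False
            for c1 in list(cs):
                for c2 in list(cs):
                    for atom, _ in c1:
                        r = resolve(c1, c2, atom)
                        if r is None or tautology(r) or any(d <= r for d in cs):
                            continue
                        # keep r, drop clauses it subsumes
                        cs = {d for d in cs if not r <= d} | {frozenset(r)}
                        changed = True
        return {c for c in cs if not any(d < c for d in cs)}

    # {p or q, not-p or q} has the prime implicate q:
    print(prime_implicates([{("p", True), ("q", True)},
                            {("p", False), ("q", True)}]))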