diff --git a/admin/site/themes/afp/layouts/entries/single.html b/admin/site/themes/afp/layouts/entries/single.html
--- a/admin/site/themes/afp/layouts/entries/single.html
+++ b/admin/site/themes/afp/layouts/entries/single.html
@@ -1,154 +1,155 @@
{{- define "main" -}}
{{- /* Entry page: metadata, abstract, license, dependencies, related entries,
       and per-session theory listings for one AFP entry. */ -}}
{{- $site := . -}}
{{- $path := .Site.Params.afpUrls.html -}}
{{- if isset .Site.Data "status" -}}
{{- $path = .Site.Params.afpUrls.htmlDevel -}}
{{- end -}}
{{- $entry_name := .File.BaseFileName -}}
{{- if isset .Site.Data "status" -}}
This is a development version of this entry. It might change over time and is not stable. Please refer to release versions for citations.
{{- end -}}
Abstract
{{- trim .Params.abstract "\n" | safeHTML -}}
{{- /* FIX: was `.Params.licence` (misspelled). The site generator emits the
       front-matter key "license", so the BSD branch could never match and
       always fell through to the generic branch below. */ -}}
{{- if (eq .Params.license "BSD") -}}
BSD License
{{- else if (eq .Params.license "LGPL") -}}
GNU Lesser General Public License (LGPL)
{{- else -}}
{{- printf "%s License" .Params.license -}}
{{- end -}}
{{- with .Params.extra -}}
{{- range $key, $value := . -}}
{{- humanize $key -}}
{{- $value | safeHTML -}}
{{- end -}}
{{- end -}}
{{- $Scratch := newScratch -}}
{{- with .Params.dependencies -}}
Depends On
{{- range . -}}
{{- printf "[%s](/entries/%s.html)" . . | $.Page.RenderString -}}
{{- end -}}
{{- end -}}
{{- with (index .Site.Taxonomies.dependencies (urlize $entry_name)) -}}
Used by
{{- range . -}}
{{- printf "[%s](%s)" .File.BaseFileName .RelPermalink | $.Page.RenderString -}}
{{- end -}}
{{- end -}}
{{- with .Params.topics -}}
Topics
{{- range . -}}
{{- printf "[%s](/topics/%s)" . (urlize .) | $.Page.RenderString -}}
{{- end -}}
{{- end -}}
- Theories
+
+ {{- range .Params.sessions -}}
+ {{ $session := .session }}
+ Theories of {{ $session }}
- {{ $file := printf "assets/theories/%s.html" $entry_name}}
- {{- with .Params.theories -}}
- {{- range . -}}
- {{- printf "[%s](%s/theories#%s)" . (urlize $entry_name) . | $.Page.RenderString -}}
- {{- end -}}
+ {{- range .theories }}
+ {{ . }}
{{- end -}}
+ {{- end -}}
{{ $related := .Site.RegularPages.Related . | first 3 }}
{{ with $related }}
Related Entries
{{ range . }}
{{- printf "[%s](%s)" .Title .RelPermalink | $.Page.RenderString -}}
{{ end }}
{{ end }}
{{- if isset .Site.Data "status" -}}
{{- $status := "none" -}}
{{- range where .Site.Data.status.entries "entry" $entry_name -}}
{{- $status = .status -}}
{{- end -}}
Status: {{- $status -}}
{{ .Site.Data.status.build_data.time }}
Isabelle/{{ .Site.Data.status.build_data.isabelle_id }}
AFP/{{ .Site.Data.status.build_data.afp_id }}
{{ .Site.Data.status.build_data.job }}
{{- else -}}
PDFs
Proof
outline
Proof
document
Dependencies
{{- end -}}
{{- end -}}
diff --git a/admin/site/themes/afp/layouts/partials/theory-navigation.html b/admin/site/themes/afp/layouts/partials/theory-navigation.html
--- a/admin/site/themes/afp/layouts/partials/theory-navigation.html
+++ b/admin/site/themes/afp/layouts/partials/theory-navigation.html
@@ -1,39 +1,39 @@
-{{- $entry_name := .File.BaseFileName -}}
+{{- $entry_name := .Params.entry -}}
{{- $site := . -}}
{{- $path := .Site.Params.afpUrls.html -}}
{{- if isset .Site.Data "status" -}}
{{- $path = .Site.Params.afpUrls.htmlDevel -}}
{{- end -}}
diff --git a/admin/site/themes/afp/static/js/theory.js b/admin/site/themes/afp/static/js/theory.js
--- a/admin/site/themes/afp/static/js/theory.js
+++ b/admin/site/themes/afp/static/js/theory.js
@@ -1,278 +1,278 @@
/* constants */
// DOM id of the list of theories rendered into the entry page.
const ID_THEORY_LIST = 'theories'
const CLASS_LOADER = 'loader'
const CLASS_ANIMATION = 'animation'
// Attribute on a theory container holding the URL its HTML is lazily fetched from.
const ATTRIBUTE_THEORY_SRC = 'theory-src'
const CLASS_NAVBAR_TYPE = 'theory-navbar-type'
const CLASS_THY_NAV = 'thy-nav'
// Query parameter persisting the selected navbar entity type across page loads.
const PARAM_NAVBAR_TYPE = 'theory-navbar-type'
const ID_NAVBAR_TYPE_SELECTOR = 'navbar-type-selector'
const ID_NAVBAR = 'theory-navbar'
// Entity kinds selectable in the theory navigation tree.
const NAVBAR_TYPES = ['fact', 'type', 'const']
/* routing */
// Rewrite a relative href from exported theory HTML into a site-local target:
// a bare name becomes a same-page fragment link; a '../<Session>/<ref>' path
// points at that session's theories page; anything else stays relative to base_href.
function target(base_href, rel_href) {
const href_parts = rel_href.split('/')
if (href_parts.length === 1) return `#${href_parts[0]}`
else if (href_parts.length === 3 && href_parts[0] === '..' && href_parts[1] !== '..') {
// NOTE(review): the two lines below are patch markers from the enclosing diff —
// the route moved from /entries/<name>/theories to /theories/<session>/.
- return `/entries/${href_parts[1].toLowerCase()}/theories#${href_parts[2]}`
+ return `/theories/${href_parts[1].toLowerCase()}/#${href_parts[2]}`
}
else return `${base_href}/${rel_href}`
}
// Build the element id for a theory: '<thy>.html', with an optional '#<ref>' fragment.
function to_id(thy_name, ref) {
return ref ? `${thy_name}.html#${ref}` : `${thy_name}.html`
}
// Ids of the per-theory DOM elements are the theory name plus a '#<kind>' suffix.
const suffixed = (kind) => (thy_name) => thy_name + '#' + kind
const to_svg_id = suffixed('svg')
const to_container_id = suffixed('container')
const to_collapsible_id = suffixed('collapsible')
const to_spinner_id = suffixed('spinner')
const to_nav_id = suffixed('nav')
const to_ul_id = suffixed('ul')
// Inverse of to_ul_id: drop the trailing '#<kind>' segment.
const of_ul_id = (id) => {
const segments = id.split('#')
segments.pop()
return segments.join('#')
}
const to_a_id = suffixed('a')
/* document translation */
// Namespace all span ids in a fetched theory body with the theory name and
// rewrite all anchor hrefs via target(). Returns the list of raw (pre-rename) ids.
function translate(base_href, thy_name, thy_body) {
const thy_refs = [...thy_body.getElementsByTagName('span')].map((span) => {
let ref = span.getAttribute('id')
if (ref) {
// prefix with '<thy>.html#' so ids from different theories cannot collide
span.setAttribute('id', to_id(thy_name, ref))
}
return ref
}).filter(e => e)
for (const link of thy_body.getElementsByTagName('a')) {
const rel_href = link.getAttribute('href')
link.setAttribute('href', target(base_href, rel_href))
}
return thy_refs
}
/* theory lazy-loading */
// Fetch a theory page and return its parsed <body>. On a network error, fall
// back to a full browser navigation to the href instead of rendering inline.
async function fetch_theory_body(href) {
const html_str = await fetch(href).then((http_res) => {
// non-200 responses render the status text in place of theory content
if (http_res.status !== 200) return Promise.resolve(`${http_res.statusText}`)
else return http_res.text()
}).catch((_) => {
console.log(`Could not load theory at '${href}'. Redirecting...`)
window.location.replace(href)
})
const parser = new DOMParser()
const html = parser.parseFromString(html_str, 'text/html')
return html.getElementsByTagName('body')[0]
}
// Fetch, translate, and splice a theory's body into its collapsible container.
// Returns the refs discovered by translate() for navbar construction.
async function load_theory(thy_name, href) {
const thy_body = await fetch_theory_body(href)
const refs = translate(href, thy_name, thy_body)
const collapse = document.getElementById(to_collapsible_id(thy_name))
// the first child is skipped — presumably a header already rendered on the
// page; TODO(review) confirm against the exported theory HTML
collapse.append(...Array(...thy_body.children).slice(1))
return refs
}
// Expand a theory's container, lazily fetching and rendering its body (and its
// navbar entries) on first open; subsequent opens just toggle the collapsible.
async function open_theory(thy_name) {
const container = document.getElementById(to_container_id(thy_name))
if (container) {
// already loaded once: just open it
if (document.getElementById(to_collapsible_id(thy_name))) open(container)
else {
// NOTE(review): the collapsible's markup was stripped from this patch text;
// the original template literal builds the collapsible/spinner element here.
const collapsible = parse_elem(`
`)
container.appendChild(collapsible)
open(container)
let refs = await load_theory(thy_name, container.getAttribute(ATTRIBUTE_THEORY_SRC))
await load_theory_nav(thy_name, refs)
// loading finished: remove the spinner placeholder
const spinner = document.getElementById(to_spinner_id(thy_name))
spinner.parentNode.removeChild(spinner)
}
}
}
// Build one node of the navigation tree for entity refs, recursively.
// 'ref_parts' are the dot-separated name segments below 'path'; 'type' is the
// entity kind ('fact'/'type'/'const').
// NOTE(review): the template literals in this function lost their markup when
// this patch text was stripped — the original emits anchor/list HTML around the
// escaped keys, and presumably interpolates 'children'; confirm against the repo.
function nav_tree_rec(thy_name, path, key, ref_parts, type) {
const rec_ref = ref_parts.filter(e => e.length > 0)
const id = to_id(thy_name, `${path.join('.')}.${key}|${type}`)
let res
if (rec_ref.length < ref_parts.length) {
// some ref ends exactly at this key: link to the node itself
res = `${escape_html(key)} `
} else {
// all refs continue below: link to the first descendant instead
const head_id = to_id(thy_name, `${[...path, key, ...ref_parts[0]].join('.')}|${type}`)
res = `${escape_html(key)} `
}
if (rec_ref.length > 1) {
const by_key = group_by(rec_ref)
const children = Object.keys(by_key).map((key1) => `
${nav_tree_rec(thy_name, [...path, key], key1, by_key[key1], type)} `)
return `
${res}
`
} else return res
}
// Build the full nav-tree element for one theory from its (possibly absent) refs.
function nav_tree(thy_name, refs, type) {
let trees = Object.entries(group_by(refs || [])).map(([key, parts]) =>
`${nav_tree_rec(thy_name, [thy_name], key, parts, type)} `)
return parse_elem(`
`)
}
// Cache of nav-tree ref segments: entity type -> theory name -> grouped parts.
const cached_refs = Object.fromEntries(NAVBAR_TYPES.map(t => [t, {}]))
// Populate the navbar for one theory from the refs found while loading it,
// adding any newly seen entity types to the type selector.
const load_theory_nav = (thy_name, refs) => {
let selected = get_query(PARAM_NAVBAR_TYPE) || NAVBAR_TYPES[0]
// refs have the shape '<dotted.path>|<type>'; group them by entity type
let by_type = group_by(refs.filter(ref => ref.includes('|')).map((id) => id.split('|').reverse()))
let type_selector = document.getElementById(ID_NAVBAR_TYPE_SELECTOR)
let options = [...type_selector.options].map(e => e.value)
for (let [type, elems] of Object.entries(by_type)) {
if (NAVBAR_TYPES.includes(type) && !options.includes(type)) {
// NOTE(review): the <option> markup was stripped from this patch text
type_selector.appendChild(parse_elem(`${type} `))
}
let parts_by_thy = group_by(elems.map((s) => s[0].split('.')))
if (NAVBAR_TYPES.includes(type)) cached_refs[type][thy_name] = parts_by_thy[thy_name]
}
let tree = nav_tree(thy_name, cached_refs[selected][thy_name], selected)
document.getElementById(to_nav_id(thy_name)).appendChild(tree)
ScrollSpy.instance.refresh()
}
/* state */
// Navbar subtrees opened by the last scroll-spy sync; collapsed on the next one.
let navbar_last_opened = []
/* controls */
// Open the theory referenced by the current URL hash and scroll its target
// element into view. Hash shape: '#<thy>.html' or '#<thy>.html#<ref>'.
const follow_theory_hash = async () => {
let hash = window.location.hash
if (hash.length > 1) {
const id = hash.slice(1)
const thy_name = strip_suffix(id.split('#')[0], '.html')
await open_theory(thy_name)
const elem = document.getElementById(id)
if (elem) elem.scrollIntoView()
}
}
// Collapse the theory if it is open; otherwise navigate to its hash, which
// triggers follow_theory_hash() to open it.
const toggle_theory = async (thy_name) => {
const hash = `#${to_id(thy_name)}`
const collapsible = document.getElementById(to_container_id(thy_name))
if (collapsible) {
if (!collapse(collapsible)) {
// was already collapsed: re-open in place if the hash is unchanged
// (setting an identical hash would not fire onhashchange)
if (window.location.hash === hash) open(collapsible)
else window.location.hash = hash
}
} else window.location.hash = hash
}
// Switch the navbar to another entity type and rebuild every theory's nav tree
// from the cached refs; persists the choice in the URL query string.
const change_selector = (type) => {
let old_type = get_query(PARAM_NAVBAR_TYPE)
if (!old_type || old_type !== type) {
set_query(PARAM_NAVBAR_TYPE, type)
for (const elem of document.getElementsByClassName(CLASS_NAVBAR_TYPE)) {
let thy_name = of_ul_id(elem.id)
elem.replaceWith(nav_tree(thy_name, cached_refs[type][thy_name], type))
}
ScrollSpy.instance.refresh()
}
}
// Open every collapsible ancestor of elem, recording each newly opened one in
// navbar_last_opened so the next sync can collapse them again.
const open_tree = (elem) => {
if (elem.classList.contains(CLASS_COLLAPSIBLE)) {
if (open(elem)) navbar_last_opened.push(elem)
}
if (elem.parentElement) open_tree(elem.parentElement)
}
// Collapse the previously opened subtrees, then reveal and center the active link.
// NOTE(review): navbar_last_opened is never cleared here, so the array grows
// monotonically and old entries are re-collapsed on every sync — presumably
// harmless if collapse() is idempotent, but worth confirming.
const sync_navbar = (link) => {
for (const elem of navbar_last_opened){
collapse(elem)
}
open_tree(link.parentElement)
link.scrollIntoView({block: "center"})
}
/* setup */
// Page bootstrap: replace each theory list item with a lazy collapsible,
// build the navbar (type selector plus per-theory nav anchors), hook up hash
// navigation and scroll-spy, then follow any hash already in the URL.
const init = async () => {
const theory_list = document.getElementById(ID_THEORY_LIST)
const navbar = document.getElementById(ID_NAVBAR)
if (theory_list && navbar) {
const thy_names = []
for (const theory of theory_list.children) {
thy_names.push(theory.id)
const href = theory.getAttribute('href')
const thy_name = theory.id
// NOTE(review): the template literal below was emptied when this patch text
// was stripped of markup — the original builds the collapsible element here
// (presumably using 'href' and 'thy_name').
const thy_collapsible = parse_elem(`
`)
theory.replaceWith(thy_collapsible)
}
const type = get_query(PARAM_NAVBAR_TYPE) ? get_query(PARAM_NAVBAR_TYPE) : NAVBAR_TYPES[0]
navbar.appendChild(parse_elem(`
${type}
`))
navbar.append(...thy_names.map((thy_name) => parse_elem(`
${thy_name}
`)))
navbar.insertAdjacentElement('afterend', document.createElement('hr'));
window.onhashchange = follow_theory_hash
window.addEventListener(EVENT_SPY_ACTIVATE, (e) => sync_navbar(e.relatedTarget))
new ScrollSpy(document.body, ID_NAVBAR)
await follow_theory_hash()
}
}
document.addEventListener('DOMContentLoaded', init)
diff --git a/tools/afp_site_gen.scala b/tools/afp_site_gen.scala
--- a/tools/afp_site_gen.scala
+++ b/tools/afp_site_gen.scala
@@ -1,321 +1,320 @@
/* Author: Fabian Huch, TU Muenchen
Generation and compilation of SSG project for the AFP website.
*/
package afp
import isabelle._
object AFP_Site_Gen
{
/* json */

/** Conversions from AFP metadata records to the JSON values consumed by the
    Hugo site generator. */
object JSON
{
type T = isabelle.JSON.T
object Object
{
type T = isabelle.JSON.Object.T
}

/** Map author id -> {name, emails, homepages}. */
def from_authors(authors: List[Metadata.Author]): T =
{
authors.map(author => author.id -> isabelle.JSON.Object(
"name" -> author.name,
"emails" -> author.emails.map(_.address),
"homepages" -> author.homepages.map(_.url))).toMap
}

/** Topics are serialized via the shared TOML representation. */
def from_topics(topics: List[Metadata.Topic]): T =
Metadata.TOML.from_topics(topics)

/** Map author -> {homepage?, email?}, taking the first homepage/email found
    among that author's affiliations; absent fields are omitted entirely. */
def from_affiliations(affiliations: List[Metadata.Affiliation]): Object.T =
{
Utils.group_sorted(affiliations, (a: Metadata.Affiliation) => a.author).view.mapValues(author_affiliations =>
{
val homepage = author_affiliations.collectFirst { case Metadata.Homepage(_, _, url) => url }
val email = author_affiliations.collectFirst { case Metadata.Email(_, _, address) => address }
isabelle.JSON.Object(
homepage.map(s => List("homepage" -> s)).getOrElse(Nil) :::
email.map(s => List("email" -> s)).getOrElse(Nil): _*)
}).toMap
}

/** Render a change history as a single "Change history" field with one
    "[date] text" line per change; empty history yields an empty object. */
def from_change_history(history: Metadata.Change_History): Object.T =
{
if (history.isEmpty) {
Map.empty
} else {
Map("Change history" -> history.map {
case (date, str) => "[" + date.toString + "] " + str
}.mkString("\n"))
}
}

/** Entry front matter; optional fields (contributors, releases, note, extra)
    are only emitted when non-empty. */
def from_entry(entry: Metadata.Entry): JSON.Object.T =
isabelle.JSON.Object(
"title" -> entry.title ::
"authors" -> entry.authors.map(_.author).distinct ::
"affiliations" -> from_affiliations(entry.authors ++ entry.contributors) ::
(if (entry.contributors.nonEmpty) "contributors" -> entry.contributors.map(_.author).distinct :: Nil
else Nil) :::
"date" -> entry.date.toString ::
"topics" -> entry.topics.map(_.id) ::
"abstract" -> entry.`abstract` ::
"license" -> entry.license.name ::
(if (entry.releases.nonEmpty)
"releases" -> entry.releases.map(r => r.isabelle -> r.date.toString).toMap :: Nil
else Nil) :::
(if (entry.note.nonEmpty) "note" -> entry.note :: Nil else Nil) :::
(if (entry.change_history.nonEmpty || entry.extra.nonEmpty)
"extra" -> (from_change_history(entry.change_history) ++ entry.extra) :: Nil
else Nil): _*)

/** Keywords as a list of {id, keyword}, ids being list positions. */
def from_keywords(keywords: List[String]): T =
keywords.zipWithIndex.map
{ case (keyword, i) => isabelle.JSON.Object("id" -> i, "keyword" -> keyword) }
}
/* keyword extraction */

// Text normalization before keyword extraction: strip HTML tags, replace
// disallowed characters with spaces, and squash runs of whitespace.
private val replacements = List(
"<[^>]*>".r -> "",
"[^\\w\\s()'.,;:-]".r -> " ",
"\\s+".r -> " ")

/** Extract keywords from (HTML) text by shelling out to the Python
    'keywords' module; returns one keyword per output line. */
def extract_keywords(text: String): List[String] =
{
val stripped_text =
replacements.foldLeft(text) { case (res, (regex, replacement)) => regex.replaceAllIn(res, replacement) }
// quote for embedding as a Python string literal
val arg = quote(stripped_text.replaceAll("\"", "\\\""))
val keyword_cmd = "from keywords import *; print_keywords(" + arg + ")"
Python.run(keyword_cmd).check.out_lines
}
/* site generation */

/**
  * Prepare the Hugo project for the AFP website: writes author/topic data,
  * per-entry and per-session content files, keyword and statistics data, and
  * (optionally) the development build status.
  *
  * @param layout        target Hugo project layout to write into
  * @param status_file   optional devel build status JSON; enables devel mode
  * @param afp_structure AFP repository structure to read metadata from
  * @param progress      progress/echo sink
  */
def afp_site_gen(
layout: Hugo.Layout,
status_file: Option[Path],
afp_structure: AFP_Structure,
progress: Progress = new Progress()): Unit =
{
/* add authors */
progress.echo("Preparing authors...")
val authors = afp_structure.load_authors
val authors_by_id = Utils.grouped_sorted(authors, (a: Metadata.Author) => a.id)
layout.write_data(Path.basic("authors.json"), JSON.from_authors(authors))
/* add topics */
progress.echo("Preparing topics...")
val topics = afp_structure.load_topics
// flatten the topic tree so lookups by id also find nested sub-topics
def sub_topics(topic: Metadata.Topic): List[Metadata.Topic] = topic :: topic.sub_topics.flatMap(sub_topics)
val topics_by_id = Utils.grouped_sorted(topics.flatMap(sub_topics), (t: Metadata.Topic) => t.id)
layout.write_data(Path.basic("topics.json"), JSON.from_topics(topics))
/* add licenses */
progress.echo("Preparing licenses...")
val licenses_by_id = Utils.grouped_sorted(afp_structure.load_licenses,
(l: Metadata.License) => l.id)
/* add releases */
progress.echo("Preparing releases...")
val releases_by_entry = afp_structure.load_releases.groupBy(_.entry)
/* extract keywords */
progress.echo("Extracting keywords...")
var seen_keywords = Set.empty[String]
val entry_keywords = afp_structure.entries.map(name =>
{
val entry = afp_structure.load_entry(name, authors_by_id, topics_by_id, licenses_by_id,
releases_by_entry)
// parses lines of the form ('keyword', score) from the Python extractor
val Keyword = """\('([^']*)', ([^)]*)\)""".r
val scored_keywords = extract_keywords(entry.`abstract`).map {
case Keyword(keyword, score) => keyword -> score.toDouble
case s => error("Could not parse: " + s)
}
seen_keywords ++= scored_keywords.map(_._1)
// keep only keywords scored above 1.0 for this entry
name -> scored_keywords.filter(_._2 > 1.0).map(_._1)
}).toMap
// drop plural variants when the singular form was also seen
seen_keywords = seen_keywords.filter(k => !k.endsWith("s") || !seen_keywords.contains(k.stripSuffix("s")))
layout.write_static(Path.make(List("data", "keywords.json")), JSON.from_keywords(seen_keywords.toList))
// at most 8 globally-retained keywords per entry
def get_keywords(name: Metadata.Entry.Name): List[String] =
entry_keywords(name).filter(seen_keywords.contains).take(8)
/* add entries and theory listings */
progress.echo("Preparing entries...")
val sessions_structure = afp_structure.sessions_structure
val sessions_deps = Sessions.deps(sessions_structure)
for (name <- afp_structure.entries) {
val entry = afp_structure.load_entry(name, authors_by_id, topics_by_id, licenses_by_id,
releases_by_entry)
// AFP sessions this entry's sessions import from (excluding self-imports)
val deps =
for {
session <- afp_structure.entry_sessions(name)
dep <- sessions_structure.imports_graph.imm_preds(session.name)
if session.name != dep && sessions_structure(dep).groups.contains("AFP")
} yield dep
// NOTE(review): the '+'/'-' prefixed lines below are patch markers from the
// enclosing diff: per-entry theory pages are replaced by per-session pages.
- val topo_theories =
- for {
- session <- afp_structure.entry_sessions(name)
- base = sessions_deps(session.name)
- node <- base.session_theories
- } yield node.theory_base_name
+ val theories = afp_structure.entry_sessions(name).map { session =>
+ val base = sessions_deps(session.name)
+ val theories = base.session_theories.map(_.theory_base_name)
+ val session_json = isabelle.JSON.Object(
+ "title" -> session.name,
+ "entry" -> name,
+ "url" -> ("/theories/" + session.name.toLowerCase),
+ "theories" -> theories)
+ layout.write_content(Path.make(List("theories", session.name + ".md")), session_json)
+ isabelle.JSON.Object("session" -> session.name, "theories" -> theories)
+ }
val entry_json = JSON.from_entry(entry) ++ isabelle.JSON.Object(
- "dependencies" -> deps.distinct,
- "theories" -> topo_theories,
- "url" -> ("/entries/" + name + ".html"),
- "keywords" -> get_keywords(name))
-
- val theories_json = isabelle.JSON.Object(
- "title" -> entry.title,
- "url" -> ("/entries/" + name.toLowerCase + "/theories"),
- "theories" -> topo_theories)
+ "dependencies" -> deps.distinct,
+ "sessions" -> theories,
+ "url" -> ("/entries/" + name + ".html"),
+ "keywords" -> get_keywords(name))
layout.write_content(Path.make(List("entries", name + ".md")), entry_json)
- layout.write_content(Path.make(List("theories", name + ".md")), theories_json)
}
/* add statistics */
progress.echo("Preparing statistics...")
// delegate statistics generation to the Python 'statistics' module
val statistics_cmd = "from statistics import *; add_statistics(" +
commas_quote(
List(
Path.explode("$AFP_BASE").absolute.implode,
Path.explode("$AFP").absolute.implode,
layout.data_dir.implode)) +
")"
Python.run(statistics_cmd).check
// remove the intermediate html artifact the Python script leaves behind
(layout.data_dir + Path.basic("statistics.html")).file.delete()
/* project */
progress.echo("Preparing project files")
layout.copy_project()
/* status */
status_file match {
case Some(status_file) =>
progress.echo("Preparing devel version...")
val status_json = isabelle.JSON.parse(File.read(status_file))
layout.write_data(Path.basic("status.json"), status_json)
case None =>
}
progress.echo("Finished sitegen preparation.")
}
/* build site */

/** Build the generated Hugo site into out_dir; in watch mode, serve and
    rebuild on changes instead of producing a one-shot build. */
def afp_build_site(
out_dir: Path, layout: Hugo.Layout,
do_watch: Boolean = false,
progress: Progress = new Progress()): Unit =
{
if (do_watch) {
Hugo.watch(layout, out_dir, progress).check
} else {
progress.echo("Building site...")
Hugo.build(layout, out_dir).check
progress.echo("Build in " + (out_dir + Path.basic("index.html")).absolute.implode)
}
}
/* tool wrapper */

// Isabelle command-line entry point: parses options, runs site generation
// (unless -b), then builds the site (watch mode when -d).
val isabelle_tool = Isabelle_Tool("afp_site_gen", "generates afp website source", Scala_Project.here, args =>
{
var base_dir = Path.explode("$AFP_BASE")
var status_file: Option[Path] = None
var hugo_dir = base_dir + Path.make(List("web", "hugo"))
var out_dir: Path = base_dir + Path.make(List("web", "out"))
var build_only = false
var devel_mode = false
val getopts = Getopts("""
Usage: isabelle afp_site_gen [OPTIONS]
Options are:
-B DIR afp base dir (default """" + base_dir.implode + """")
-D FILE build status file for devel version
-H DIR generated hugo project dir (default """" + hugo_dir.implode + """")
-O DIR output dir for build (default """ + out_dir.implode + """)
-b build only
-d devel mode (overrides hugo dir, builds site in watch mode)
Generates the AFP website source. HTML files of entries are dynamically loaded.
Providing a status file will build the development version of the archive.
Site will be built from generated source if output dir is specified.
""",
"B:" -> (arg => base_dir = Path.explode(arg)),
"D:" -> (arg => status_file = Some(Path.explode(arg))),
"H:" -> (arg => hugo_dir = Path.explode(arg)),
"O:" -> (arg => out_dir = Path.explode(arg)),
"b" -> (_ => build_only = true),
"d" -> (_ => devel_mode = true))
getopts(args)
// devel mode works directly on the in-repository site sources
if (devel_mode) hugo_dir = base_dir + Path.make(List("admin", "site"))
val afp_structure = AFP_Structure(base_dir)
val layout = Hugo.Layout(hugo_dir)
val progress = new Console_Progress()
if (!build_only) {
progress.echo("Preparing site generation in " + hugo_dir.implode)
afp_site_gen(layout = layout, status_file = status_file, afp_structure = afp_structure,
progress = progress)
}
afp_build_site(out_dir = out_dir, layout = layout, do_watch = devel_mode, progress = progress)
})
}
\ No newline at end of file