diff --git a/src/Pure/General/file.scala b/src/Pure/General/file.scala --- a/src/Pure/General/file.scala +++ b/src/Pure/General/file.scala @@ -1,336 +1,323 @@ /* Title: Pure/General/file.scala Author: Makarius File-system operations. */ package isabelle import java.io.{BufferedWriter, OutputStreamWriter, FileOutputStream, BufferedOutputStream, OutputStream, InputStream, FileInputStream, BufferedInputStream, BufferedReader, InputStreamReader, File => JFile, IOException} import java.nio.file.{StandardOpenOption, Path => JPath, Files, SimpleFileVisitor, FileVisitOption, FileVisitResult} import java.nio.file.attribute.BasicFileAttributes import java.net.{URI, URL, MalformedURLException} import java.util.zip.{GZIPInputStream, GZIPOutputStream} import java.util.EnumSet import org.tukaani.xz.{XZInputStream, XZOutputStream} import scala.collection.mutable object File { /* standard path (Cygwin or Posix) */ def standard_path(path: Path): String = path.expand.implode def standard_path(platform_path: String): String = isabelle.setup.Environment.standard_path(platform_path) def standard_path(file: JFile): String = standard_path(file.getPath) def standard_url(name: String): String = try { val url = new URL(name) if (url.getProtocol == "file" && Url.is_wellformed_file(name)) standard_path(Url.parse_file(name)) else name } catch { case _: MalformedURLException => standard_path(name) } /* platform path (Windows or Posix) */ def platform_path(standard_path: String): String = isabelle.setup.Environment.platform_path(standard_path) def platform_path(path: Path): String = platform_path(standard_path(path)) def platform_file(path: Path): JFile = new JFile(platform_path(path)) /* platform files */ def absolute(file: JFile): JFile = file.toPath.toAbsolutePath.normalize.toFile def absolute_name(file: JFile): String = absolute(file).getPath def canonical(file: JFile): JFile = file.getCanonicalFile def canonical_name(file: JFile): String = canonical(file).getPath def path(file: JFile): Path = Path.explode(standard_path(file)) def pwd(): Path = path(Path.current.absolute_file) def uri(file: JFile): URI = file.toURI def uri(path: Path): URI = path.file.toURI def url(file: JFile): URL = uri(file).toURL def url(path: Path): URL = url(path.file) /* relative paths */ def relative_path(base: Path, other: Path): Option[Path] = { val base_path = base.java_path val other_path = other.java_path if (other_path.startsWith(base_path)) Some(path(base_path.relativize(other_path).toFile)) else None } /* bash path */ def bash_path(path: Path): String = Bash.string(standard_path(path)) def bash_path(file: JFile): String = Bash.string(standard_path(file)) def bash_platform_path(path: Path): String = Bash.string(platform_path(path)) /* directory entries */ def check_dir(path: Path): Path = if (path.is_dir) path else error("No such directory: " + path) def check_file(path: Path): Path = if (path.is_file) path else error("No such file: " + path) /* directory content */ def read_dir(dir: Path): List[String] = { if (!dir.is_dir) error("No such directory: " + dir.toString) val files = dir.file.listFiles if (files == null) Nil else files.toList.map(_.getName).sorted } def get_dir(dir: Path): String = read_dir(dir).filter(name => (dir + Path.basic(name)).is_dir) match { case List(entry) => entry case dirs => error("Exactly one directory entry expected: " + commas_quote(dirs.sorted)) } def find_files( start: JFile, pred: JFile => Boolean = _ => true, include_dirs: Boolean = false, follow_links: Boolean = false ): List[JFile] = { val result = 
new mutable.ListBuffer[JFile] def check(file: JFile): Unit = if (pred(file)) result += file if (start.isFile) check(start) else if (start.isDirectory) { val options = if (follow_links) EnumSet.of(FileVisitOption.FOLLOW_LINKS) else EnumSet.noneOf(classOf[FileVisitOption]) Files.walkFileTree(start.toPath, options, Integer.MAX_VALUE, new SimpleFileVisitor[JPath] { override def preVisitDirectory( path: JPath, attrs: BasicFileAttributes ): FileVisitResult = { if (include_dirs) check(path.toFile) FileVisitResult.CONTINUE } override def visitFile( path: JPath, attrs: BasicFileAttributes ): FileVisitResult = { val file = path.toFile if (include_dirs || !file.isDirectory) check(file) FileVisitResult.CONTINUE } } ) } result.toList } /* read */ def read(file: JFile): String = Bytes.read(file).text def read(path: Path): String = read(path.file) def read_stream(reader: BufferedReader): String = { val output = new StringBuilder(100) var c = -1 while ({ c = reader.read; c != -1 }) output += c.toChar reader.close() output.toString } def read_stream(stream: InputStream): String = read_stream(new BufferedReader(new InputStreamReader(stream, UTF8.charset))) def read_gzip(file: JFile): String = read_stream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))) def read_gzip(path: Path): String = read_gzip(path.file) def read_xz(file: JFile): String = read_stream(new XZInputStream(new BufferedInputStream(new FileInputStream(file)))) def read_xz(path: Path): String = read_xz(path.file) /* read lines */ def read_line(reader: BufferedReader): Option[String] = { val line = try { reader.readLine} catch { case _: IOException => null } Option(line).map(Library.trim_line) } def read_lines(reader: BufferedReader, progress: String => Unit): List[String] = { val result = new mutable.ListBuffer[String] var line: Option[String] = None while ({ line = read_line(reader); line.isDefined }) { progress(line.get) result += line.get } reader.close() result.toList } /* write */ def writer(file: JFile): BufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), UTF8.charset)) def write_file( file: JFile, text: String, make_stream: OutputStream => OutputStream ): Unit = { val stream = make_stream(new FileOutputStream(file)) using(new BufferedWriter(new OutputStreamWriter(stream, UTF8.charset)))(_.append(text)) } def write(file: JFile, text: String): Unit = write_file(file, text, s => s) def write(path: Path, text: String): Unit = write(path.file, text) def write_gzip(file: JFile, text: String): Unit = write_file(file, text, (s: OutputStream) => new GZIPOutputStream(new BufferedOutputStream(s))) def write_gzip(path: Path, text: String): Unit = write_gzip(path.file, text) def write_xz(file: JFile, text: String, options: XZ.Options): Unit = File.write_file(file, text, s => new XZOutputStream(new BufferedOutputStream(s), options)) def write_xz(file: JFile, text: String): Unit = write_xz(file, text, XZ.options()) def write_xz(path: Path, text: String, options: XZ.Options): Unit = write_xz(path.file, text, options) def write_xz(path: Path, text: String): Unit = write_xz(path, text, XZ.options()) def write_backup(path: Path, text: String): Unit = { if (path.is_file) Isabelle_System.move_file(path, path.backup) write(path, text) } def write_backup2(path: Path, text: String): Unit = { if (path.is_file) Isabelle_System.move_file(path, path.backup2) write(path, text) } /* append */ def append(file: JFile, text: String): Unit = Files.write(file.toPath, UTF8.bytes(text), StandardOpenOption.APPEND, 
StandardOpenOption.CREATE) def append(path: Path, text: String): Unit = append(path.file, text) /* change */ def change( path: Path, init: Boolean = false, strict: Boolean = false )(f: String => String): Unit = { if (!path.is_file && init) write(path, "") val x = read(path) val y = f(x) if (x != y) write(path, y) else if (strict) error("Unchanged file: " + path) } def change_lines(path: Path, init: Boolean = false, strict: Boolean = false)( f: List[String] => List[String]): Unit = change(path, init = init, strict = strict)(text => cat_lines(f(split_lines(text)))) /* eq */ def eq(file1: JFile, file2: JFile): Boolean = try { Files.isSameFile(file1.toPath, file2.toPath) } catch { case ERROR(_) => false } def eq(path1: Path, path2: Path): Boolean = eq(path1.file, path2.file) /* eq_content */ def eq_content(file1: JFile, file2: JFile): Boolean = if (eq(file1, file2)) true else if (file1.length != file2.length) false else Bytes.read(file1) == Bytes.read(file2) def eq_content(path1: Path, path2: Path): Boolean = eq_content(path1.file, path2.file) /* permissions */ def is_executable(path: Path): Boolean = { if (Platform.is_windows) Isabelle_System.bash("test -x " + bash_path(path)).check.ok else path.file.canExecute } def set_executable(path: Path, flag: Boolean): Unit = { if (Platform.is_windows && flag) Isabelle_System.chmod("a+x", path) else if (Platform.is_windows) Isabelle_System.chmod("a-x", path) else path.file.setExecutable(flag, false) } /* content */ - def content(path: Path, content: Bytes): Content_Bytes = new Content_Bytes(path, content) - def content(path: Path, content: String): Content_String = new Content_String(path, content) + def content(path: Path, content: Bytes): Content = new Content(path, content) + def content(path: Path, content: String): Content = new Content(path, Bytes(content)) def content(path: Path, content: XML.Body): Content_XML = new Content_XML(path, content) - trait Content { - def path: Path - def write(dir: Path): Unit + final class Content private[File](val path: Path, val content: Bytes) { override def toString: String = path.toString - } - final class Content_Bytes private[File](val path: Path, val content: Bytes) extends Content { def write(dir: Path): Unit = { val full_path = dir + path Isabelle_System.make_directory(full_path.expand.dir) Bytes.write(full_path, content) } } - final class Content_String private[File](val path: Path, val content: String) extends Content { - def write(dir: Path): Unit = { - val full_path = dir + path - Isabelle_System.make_directory(full_path.expand.dir) - File.write(full_path, content) - } - } - final class Content_XML private[File](val path: Path, val content: XML.Body) { override def toString: String = path.toString - def output(out: XML.Body => String): Content_String = - new Content_String(path, out(content)) + def output(out: XML.Body => String): Content = new Content(path, Bytes(out(content))) } } diff --git a/src/Pure/System/classpath.scala b/src/Pure/System/classpath.scala --- a/src/Pure/System/classpath.scala +++ b/src/Pure/System/classpath.scala @@ -1,95 +1,95 @@ /* Title: Pure/System/classpath.scala Author: Makarius Java classpath and Scala services. 
*/ package isabelle import java.io.{File => JFile} import java.nio.file.Files import java.net.URLClassLoader import scala.jdk.CollectionConverters._ object Classpath { abstract class Service type Service_Class = Class[Service] def apply( jar_files: List[JFile] = Nil, - jar_contents: List[File.Content_Bytes] = Nil): Classpath = + jar_contents: List[File.Content] = Nil): Classpath = { val jar_files0 = for { s <- space_explode(JFile.pathSeparatorChar, System.getProperty("java.class.path", "")) if s.nonEmpty } yield File.absolute(new JFile(s)) val jar_files1 = jar_files.flatMap(start => File.find_files(start, _.getName.endsWith(".jar"))) .map(File.absolute) val tmp_jars = for (jar <- jar_contents) yield { val tmp_jar = Files.createTempFile("jar", "jar").toFile tmp_jar.deleteOnExit() Bytes.write(tmp_jar, jar.content) tmp_jar } new Classpath(jar_files0 ::: jar_files1, tmp_jars) } } class Classpath private(static_jars: List[JFile], dynamic_jars: List[JFile]) { def jars: List[JFile] = static_jars ::: dynamic_jars override def toString: String = jars.mkString("Classpath(", ", ", ")") def platform_path: String = jars.map(_.getPath).mkString(JFile.pathSeparator) val class_loader: ClassLoader = { val this_class_loader = this.getClass.getClassLoader if (dynamic_jars.isEmpty) this_class_loader else { new URLClassLoader(dynamic_jars.map(File.url).toArray, this_class_loader) { override def finalize(): Unit = { for (jar <- dynamic_jars) { try { jar.delete() } catch { case _: Throwable => } } } } } } private def init_services(where: String, names: List[String]): List[Classpath.Service_Class] = { for (name <- names) yield { def err(msg: String): Nothing = error("Bad Isabelle/Scala service " + quote(name) + " in " + where + "\n" + msg) try { Class.forName(name, true, class_loader).asInstanceOf[Classpath.Service_Class] } catch { case _: ClassNotFoundException => err("Class not found") case exn: Throwable => err(Exn.message(exn)) } } } val services: List[Classpath.Service_Class] = { val variable = "ISABELLE_SCALA_SERVICES" val services_env = init_services(quote(variable), space_explode(':', Isabelle_System.getenv_strict(variable))) val services_jars = jars.flatMap(jar => init_services(File.standard_path(jar), isabelle.setup.Build.get_services(jar.toPath).asScala.toList)) services_env ::: services_jars } def make_services[C](c: Class[C]): List[C] = for { c1 <- services if Library.is_subclass(c1, c) } yield c1.getDeclaredConstructor().newInstance().asInstanceOf[C] } diff --git a/src/Pure/Thy/document_build.scala b/src/Pure/Thy/document_build.scala --- a/src/Pure/Thy/document_build.scala +++ b/src/Pure/Thy/document_build.scala @@ -1,495 +1,495 @@ /* Title: Pure/Thy/document_build.scala Author: Makarius Build theory document (PDF) from session database. 
*/ package isabelle object Document_Build { /* document variants */ abstract class Document_Name { def name: String def path: Path = Path.basic(name) override def toString: String = name } object Document_Variant { def parse(opt: String): Document_Variant = Library.space_explode('=', opt) match { case List(name) => Document_Variant(name, Latex.Tags.empty) case List(name, tags) => Document_Variant(name, Latex.Tags(tags)) case _ => error("Malformed document variant: " + quote(opt)) } } sealed case class Document_Variant(name: String, tags: Latex.Tags) extends Document_Name { def print: String = if (tags.toString.isEmpty) name else name + "=" + tags.toString } sealed case class Document_Input(name: String, sources: SHA1.Digest) extends Document_Name { override def toString: String = name } sealed case class Document_Output(name: String, sources: SHA1.Digest, log_xz: Bytes, pdf: Bytes) extends Document_Name { override def toString: String = name def log: String = log_xz.uncompress().text def log_lines: List[String] = split_lines(log) def write(db: SQL.Database, session_name: String): Unit = write_document(db, session_name, this) def write(dir: Path): Path = { val path = dir + Path.basic(name).pdf Isabelle_System.make_directory(path.expand.dir) Bytes.write(path, pdf) path } } /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val sources = SQL.Column.string("sources") val log_xz = SQL.Column.bytes("log_xz") val pdf = SQL.Column.bytes("pdf") val table = SQL.Table("isabelle_documents", List(session_name, name, sources, log_xz, pdf)) def where_equal(session_name: String, name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_documents(db: SQL.Database, session_name: String): List[Document_Input] = { val select = Data.table.select(List(Data.name, Data.sources), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator({ res => val name = res.string(Data.name) val sources = res.string(Data.sources) Document_Input(name, SHA1.fake_digest(sources)) }).toList) } def read_document( db: SQL.Database, session_name: String, name: String ): Option[Document_Output] = { val select = Data.table.select(sql = Data.where_equal(session_name, name)) db.using_statement(select)({ stmt => val res = stmt.execute_query() if (res.next()) { val name = res.string(Data.name) val sources = res.string(Data.sources) val log_xz = res.bytes(Data.log_xz) val pdf = res.bytes(Data.pdf) Some(Document_Output(name, SHA1.fake_digest(sources), log_xz, pdf)) } else None }) } def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit = { db.using_statement(Data.table.insert()){ stmt => stmt.string(1) = session_name stmt.string(2) = doc.name stmt.string(3) = doc.sources.toString stmt.bytes(4) = doc.log_xz stmt.bytes(5) = doc.pdf stmt.execute() } } /* context */ val texinputs: Path = Path.explode("~~/lib/texinputs") val isabelle_styles: List[Path] = List("isabelle.sty", "isabellesym.sty", "pdfsetup.sty", "railsetup.sty"). 
map(name => texinputs + Path.basic(name)) def context( session_context: Export.Session_Context, document_session: Option[Sessions.Base] = None, progress: Progress = new Progress ): Context = new Context(session_context, document_session, progress) final class Context private[Document_Build]( session_context: Export.Session_Context, document_session: Option[Sessions.Base], val progress: Progress = new Progress ) { /* session info */ private val base = document_session getOrElse session_context.session_base private val info = session_context.sessions_structure(base.session_name) def session: String = info.name def options: Options = info.options override def toString: String = session - val classpath: List[File.Content_Bytes] = session_context.classpath() + val classpath: List[File.Content] = session_context.classpath() def document_bibliography: Boolean = options.bool("document_bibliography") def document_logo: Option[String] = options.string("document_logo") match { case "" => None case "_" => Some("") case name => Some(name) } def document_build: String = options.string("document_build") def get_engine(): Engine = { val name = document_build Classpath(jar_contents = classpath).make_services(classOf[Engine]) .find(_.name == name).getOrElse(error("Bad document_build engine " + quote(name))) } /* document content */ def documents: List[Document_Variant] = info.documents def proper_session_theories: List[Document.Node.Name] = base.proper_session_theories def document_theories: List[Document.Node.Name] = proper_session_theories ::: base.document_theories lazy val document_latex: List[File.Content_XML] = for (name <- document_theories) yield { val path = Path.basic(tex_name(name)) val entry = session_context(name.theory, Export.DOCUMENT_LATEX, permissive = true) val content = YXML.parse_body(entry.text) File.content(path, content) } lazy val session_graph: File.Content = { val path = Presentation.session_graph_path val content = graphview.Graph_File.make_pdf(options, base.session_graph_display) File.content(path, content) } lazy val session_tex: File.Content = { val path = Path.basic("session.tex") val content = Library.terminate_lines( base.proper_session_theories.map(name => "\\input{" + tex_name(name) + "}")) File.content(path, content) } lazy val isabelle_logo: Option[File.Content] = { document_logo.map(logo_name => Isabelle_System.with_tmp_file("logo", ext = "pdf") { tmp_path => Logo.create_logo(logo_name, output_file = tmp_path, quiet = true) val path = Path.basic("isabelle_logo.pdf") val content = Bytes.read(tmp_path) File.content(path, content) }) } /* document directory */ def prepare_directory( dir: Path, doc: Document_Variant, latex_output: Latex.Output ): Directory = { val doc_dir = Isabelle_System.make_directory(dir + Path.basic(doc.name)) /* actual sources: with SHA1 digest */ isabelle_styles.foreach(Isabelle_System.copy_file(_, doc_dir)) val comment_latex = options.bool("document_comment_latex") if (!comment_latex) { Isabelle_System.copy_file(texinputs + Path.basic("comment.sty"), doc_dir) } doc.tags.sty(comment_latex).write(doc_dir) for ((base_dir, src) <- info.document_files) { Isabelle_System.copy_file_base(info.dir + base_dir, src, doc_dir) } session_tex.write(doc_dir) for (content <- document_latex) { content.output(latex_output(_, file_pos = content.path.implode_symbolic)) .write(doc_dir) } val root_name1 = "root_" + doc.name val root_name = if ((doc_dir + Path.explode(root_name1).tex).is_file) root_name1 else "root" val digests1 = List(doc.print, document_logo.toString, 
document_build).map(SHA1.digest) val digests2 = File.find_files(doc_dir.file, follow_links = true).map(SHA1.digest) val sources = SHA1.digest_set(digests1 ::: digests2) /* derived material: without SHA1 digest */ isabelle_logo.foreach(_.write(doc_dir)) session_graph.write(doc_dir) Directory(doc_dir, doc, root_name, sources) } def old_document(directory: Directory): Option[Document_Output] = for { db <- session_context.session_db() old_doc <- read_document(db, session, directory.doc.name) if old_doc.sources == directory.sources } yield old_doc } sealed case class Directory( doc_dir: Path, doc: Document_Variant, root_name: String, sources: SHA1.Digest ) { def root_name_script(ext: String = ""): String = Bash.string(if (ext.isEmpty) root_name else root_name + "." + ext) def conditional_script(ext: String, exe: String, after: String = ""): String = "if [ -f " + root_name_script(ext) + " ]\n" + "then\n" + " " + exe + " " + root_name_script() + "\n" + (if (after.isEmpty) "" else " " + after) + "fi\n" def log_errors(): List[String] = Latex.latex_errors(doc_dir, root_name) ::: Bibtex.bibtex_errors(doc_dir, root_name) def make_document(log: List[String], errors: List[String]): Document_Output = { val root_pdf = Path.basic(root_name).pdf val result_pdf = doc_dir + root_pdf if (errors.nonEmpty) { val errors1 = errors ::: List("Failed to build document " + quote(doc.name)) throw new Build_Error(log, Exn.cat_message(errors1: _*)) } else if (!result_pdf.is_file) { val message = "Bad document result: expected to find " + root_pdf throw new Build_Error(log, message) } else { val log_xz = Bytes(cat_lines(log)).compress() val pdf = Bytes.read(result_pdf) Document_Output(doc.name, sources, log_xz, pdf) } } } /* build engines */ abstract class Engine(val name: String) extends Isabelle_System.Service { override def toString: String = name def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory def build_document(context: Context, directory: Directory, verbose: Boolean): Document_Output } abstract class Bash_Engine(name: String) extends Engine(name) { def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory = context.prepare_directory(dir, doc, new Latex.Output(context.options)) def use_pdflatex: Boolean = false def latex_script(context: Context, directory: Directory): String = (if (use_pdflatex) "$ISABELLE_PDFLATEX" else "$ISABELLE_LUALATEX") + " " + directory.root_name_script() + "\n" def bibtex_script(context: Context, directory: Directory, latex: Boolean = false): String = { val ext = if (context.document_bibliography) "aux" else "bib" directory.conditional_script(ext, "$ISABELLE_BIBTEX", after = if (latex) latex_script(context, directory) else "") } def makeindex_script(context: Context, directory: Directory, latex: Boolean = false): String = directory.conditional_script("idx", "$ISABELLE_MAKEINDEX", after = if (latex) latex_script(context, directory) else "") def use_build_script: Boolean = false def build_script(context: Context, directory: Directory): String = { val has_build_script = (directory.doc_dir + Path.explode("build")).is_file if (!use_build_script && has_build_script) { error("Unexpected document build script for option document_build=" + quote(context.document_build)) } else if (use_build_script && !has_build_script) error("Missing document build script") else if (has_build_script) "./build pdf " + Bash.string(directory.doc.name) else { "set -e\n" + latex_script(context, directory) + bibtex_script(context, directory, latex = true) + 
makeindex_script(context, directory) + latex_script(context, directory) + makeindex_script(context, directory, latex = true) } } def build_document( context: Context, directory: Directory, verbose: Boolean ): Document_Output = { val result = context.progress.bash( build_script(context, directory), cwd = directory.doc_dir.file, echo = verbose, watchdog = Time.seconds(0.5)) val log = result.out_lines ::: result.err_lines val errors = (if (result.ok) Nil else List(result.err)) ::: directory.log_errors() directory.make_document(log, errors) } } class LuaLaTeX_Engine extends Bash_Engine("lualatex") class PDFLaTeX_Engine extends Bash_Engine("pdflatex") { override def use_pdflatex: Boolean = true } class Build_Engine extends Bash_Engine("build") { override def use_build_script: Boolean = true } /* build documents */ def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" class Build_Error(val log_lines: List[String], val message: String) extends Exn.User_Error(message) def build_documents( context: Context, output_sources: Option[Path] = None, output_pdf: Option[Path] = None, verbose: Boolean = false ): List[Document_Output] = { val progress = context.progress val engine = context.get_engine() val documents = for (doc <- context.documents) yield { Isabelle_System.with_tmp_dir("document") { tmp_dir => progress.echo("Preparing " + context.session + "/" + doc.name + " ...") val start = Time.now() output_sources.foreach(engine.prepare_directory(context, _, doc)) val directory = engine.prepare_directory(context, tmp_dir, doc) val document = context.old_document(directory) getOrElse engine.build_document(context, directory, verbose) val stop = Time.now() val timing = stop - start progress.echo("Finished " + context.session + "/" + doc.name + " (" + timing.message_hms + " elapsed time)") document } } for (dir <- output_pdf; doc <- documents) { val path = doc.write(dir) progress.echo("Document at " + path.absolute) } documents } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", Scala_Project.here, { args => var output_sources: Option[Path] = None var output_pdf: Option[Path] = None var verbose_latex = false var dirs: List[Path] = Nil var options = Options.init() var verbose_build = false val getopts = Getopts(""" Usage: isabelle document [OPTIONS] SESSION Options are: -O DIR output directory for LaTeX sources and resulting PDF -P DIR output directory for resulting PDF -S DIR output directory for LaTeX sources -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. 
""", "O:" -> (arg => { val dir = Path.explode(arg) output_sources = Some(dir) output_pdf = Some(dir) }), "P:" -> (arg => { output_pdf = Some(Path.explode(arg)) }), "S:" -> (arg => { output_sources = Some(Path.explode(arg)) }), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) val session_base_info = deps.base_info(session) if (output_sources.isEmpty && output_pdf.isEmpty) { progress.echo_warning("No output directory") } using(Export.open_session_context(store, session_base_info)) { session_context => build_documents( context(session_context, progress = progress), output_sources = output_sources, output_pdf = output_pdf, verbose = verbose_latex) } } }) } diff --git a/src/Pure/Thy/export.scala b/src/Pure/Thy/export.scala --- a/src/Pure/Thy/export.scala +++ b/src/Pure/Thy/export.scala @@ -1,572 +1,572 @@ /* Title: Pure/Thy/export.scala Author: Makarius Manage theory exports: compressed blobs. */ package isabelle import scala.annotation.tailrec import scala.util.matching.Regex object Export { /* artefact names */ val DOCUMENT_ID = "PIDE/document_id" val FILES = "PIDE/files" val MARKUP = "PIDE/markup" val MESSAGES = "PIDE/messages" val DOCUMENT_PREFIX = "document/" val DOCUMENT_LATEX = DOCUMENT_PREFIX + "latex" val DOCUMENT_CITATIONS = DOCUMENT_PREFIX + "citations" val THEORY_PREFIX: String = "theory/" val PROOFS_PREFIX: String = "proofs/" val THEORY_PARENTS: String = THEORY_PREFIX + "parents" def explode_name(s: String): List[String] = space_explode('/', s) def implode_name(elems: Iterable[String]): String = elems.mkString("/") /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val theory_name = SQL.Column.string("theory_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val executable = SQL.Column.bool("executable") val compressed = SQL.Column.bool("compressed") val body = SQL.Column.bytes("body") val table = SQL.Table("isabelle_exports", List(session_name, theory_name, name, executable, compressed, body)) def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (theory_name == "") "" else " AND " + Data.theory_name.equal(theory_name)) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def compound_name(a: String, b: String): String = if (a.isEmpty) b else a + ":" + b sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") { val compound_name: String = Export.compound_name(theory, name) def make_path(prune: Int = 0): Path = { val elems = theory :: space_explode('/', name) if (elems.length < prune + 1) { error("Cannot prune path by " + prune + " element(s): " + Path.make(elems)) } else Path.make(elems.drop(prune)) } def readable(db: SQL.Database): Boolean = { val select = 
Data.table.select(List(Data.name), Data.where_equal(session, theory, name)) db.using_statement(select)(stmt => stmt.execute_query().next()) } def read(db: SQL.Database, cache: XML.Cache): Option[Entry] = { val select = Data.table.select(List(Data.executable, Data.compressed, Data.body), Data.where_equal(session, theory, name)) db.using_statement(select) { stmt => val res = stmt.execute_query() if (res.next()) { val executable = res.bool(Data.executable) val compressed = res.bool(Data.compressed) val bytes = res.bytes(Data.body) val body = Future.value(compressed, bytes) Some(Entry(this, executable, body, cache)) } else None } } } def read_theory_names(db: SQL.Database, session_name: String): List[String] = { val select = Data.table.select(List(Data.theory_name), Data.where_equal(session_name, name = THEORY_PARENTS)) + " ORDER BY " + Data.theory_name db.using_statement(select)(stmt => stmt.execute_query().iterator(_.string(Data.theory_name)).toList) } def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] = { val select = Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name)) + " ORDER BY " + Data.theory_name + ", " + Data.name db.using_statement(select)(stmt => stmt.execute_query().iterator(res => Entry_Name(session = session_name, theory = res.string(Data.theory_name), name = res.string(Data.name))).toList) } def message(msg: String, theory_name: String, name: String): String = msg + " " + quote(name) + " for theory " + quote(theory_name) def empty_entry(theory_name: String, name: String): Entry = Entry(Entry_Name(theory = theory_name, name = name), false, Future.value(false, Bytes.empty), XML.Cache.none) sealed case class Entry( entry_name: Entry_Name, executable: Boolean, body: Future[(Boolean, Bytes)], cache: XML.Cache ) { def session_name: String = entry_name.session def theory_name: String = entry_name.theory def name: String = entry_name.name override def toString: String = name def compound_name: String = entry_name.compound_name def name_has_prefix(s: String): Boolean = name.startsWith(s) val name_elems: List[String] = explode_name(name) def name_extends(elems: List[String]): Boolean = name_elems.startsWith(elems) && name_elems != elems def text: String = uncompressed.text def uncompressed: Bytes = { val (compressed, bytes) = body.join if (compressed) bytes.uncompress(cache = cache.xz) else bytes } def uncompressed_yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(uncompressed), cache = cache) def write(db: SQL.Database): Unit = { val (compressed, bytes) = body.join db.using_statement(Data.table.insert()) { stmt => stmt.string(1) = session_name stmt.string(2) = theory_name stmt.string(3) = name stmt.bool(4) = executable stmt.bool(5) = compressed stmt.bytes(6) = bytes stmt.execute() } } } def make_regex(pattern: String): Regex = { @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex = chs match { case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest) case '*' :: rest => make("[^:/]*" :: result, depth, rest) case '?' 
:: rest => make("[^:/]" :: result, depth, rest) case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest) case '{' :: rest => make("(" :: result, depth + 1, rest) case ',' :: rest if depth > 0 => make("|" :: result, depth, rest) case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest) case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest) case c :: rest => make(c.toString :: result, depth, rest) case Nil => result.reverse.mkString.r } make(Nil, 0, pattern.toList) } def make_matcher(pats: List[String]): Entry_Name => Boolean = { val regs = pats.map(make_regex) (entry_name: Entry_Name) => regs.exists(_.pattern.matcher(entry_name.compound_name).matches) } def make_entry( session_name: String, args: Protocol.Export.Args, bytes: Bytes, cache: XML.Cache ): Entry = { val body = if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.xz)) else Future.value((false, bytes)) val entry_name = Entry_Name(session = session_name, theory = args.theory_name, name = args.name) Entry(entry_name, args.executable, body, cache) } /* database consumer thread */ def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer = new Consumer(db, cache, progress) class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) { private val errors = Synchronized[List[String]](Nil) private val consumer = Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")( bulk = { case (entry, _) => entry.body.is_finished }, consume = { (args: List[(Entry, Boolean)]) => val results = db.transaction { for ((entry, strict) <- args) yield { if (progress.stopped) { entry.body.cancel() Exn.Res(()) } else if (entry.entry_name.readable(db)) { if (strict) { val msg = message("Duplicate export", entry.theory_name, entry.name) errors.change(msg :: _) } Exn.Res(()) } else Exn.capture { entry.write(db) } } } (results, true) }) def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = { if (!progress.stopped && !body.is_empty) { consumer.send(Export.make_entry(session_name, args, body, cache) -> args.strict) } } def shutdown(close: Boolean = false): List[String] = { consumer.shutdown() if (close) db.close() errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil) } } /* context for database access */ def open_database_context(store: Sessions.Store): Database_Context = { val database_server = if (store.database_server) Some(store.open_database_server()) else None new Database_Context(store, database_server) } def open_session_context0(store: Sessions.Store, session: String): Session_Context = open_database_context(store).open_session0(session, close_database_context = true) def open_session_context( store: Sessions.Store, session_base_info: Sessions.Base_Info, document_snapshot: Option[Document.Snapshot] = None ): Session_Context = { open_database_context(store).open_session( session_base_info, document_snapshot = document_snapshot, close_database_context = true) } class Database_Context private[Export]( val store: Sessions.Store, val database_server: Option[SQL.Database] ) extends AutoCloseable { database_context => override def toString: String = { val s = database_server match { case Some(db) => db.toString case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ") } "Database_Context(" + s + ")" } def cache: Term.Cache = store.cache def close(): Unit = database_server.foreach(_.close()) def open_output_database(session: String): Session_Database = 
database_server match { case Some(db) => new Session_Database(session, db) case None => new Session_Database(session, store.open_database(session, output = true)) { override def close(): Unit = db.close() } } def open_session0(session: String, close_database_context: Boolean = false): Session_Context = open_session(Sessions.base_info0(session), close_database_context = close_database_context) def open_session( session_base_info: Sessions.Base_Info, document_snapshot: Option[Document.Snapshot] = None, close_database_context: Boolean = false ): Session_Context = { val session_name = session_base_info.check.base.session_name val session_hierarchy = session_base_info.sessions_structure.build_hierarchy(session_name) val session_databases = database_server match { case Some(db) => session_hierarchy.map(name => new Session_Database(name, db)) case None => val attempts = session_hierarchy.map(name => name -> store.try_open_database(name, server = false)) attempts.collectFirst({ case (name, None) => name }) match { case Some(bad) => for ((_, Some(db)) <- attempts) db.close() store.error_database(bad) case None => for ((name, Some(db)) <- attempts) yield { new Session_Database(name, db) { override def close(): Unit = this.db.close() } } } } new Session_Context(database_context, session_base_info, session_databases, document_snapshot) { override def close(): Unit = { session_databases.foreach(_.close()) if (close_database_context) database_context.close() } } } } class Session_Database private[Export](val session: String, val db: SQL.Database) extends AutoCloseable { def close(): Unit = () lazy private [Export] val theory_names: List[String] = read_theory_names(db, session) lazy private [Export] val entry_names: List[Entry_Name] = read_entry_names(db, session) } class Session_Context private[Export]( val database_context: Database_Context, session_base_info: Sessions.Base_Info, db_hierarchy: List[Session_Database], document_snapshot: Option[Document.Snapshot] ) extends AutoCloseable { session_context => def close(): Unit = () def cache: Term.Cache = database_context.cache def sessions_structure: Sessions.Structure = session_base_info.sessions_structure def session_base: Sessions.Base = session_base_info.base def session_name: String = if (document_snapshot.isDefined) Sessions.DRAFT else session_base.session_name def session_database(session: String = session_name): Option[Session_Database] = db_hierarchy.find(_.session == session) def session_db(session: String = session_name): Option[SQL.Database] = session_database(session = session).map(_.db) def session_stack: List[String] = ((if (document_snapshot.isDefined) List(session_name) else Nil) ::: db_hierarchy.map(_.session)).reverse private def select[A]( session: String, select1: Entry_Name => Option[A], select2: Session_Database => List[A] ): List[A] = { def sel(name: String): List[A] = if (name == Sessions.DRAFT) { (for { snapshot <- document_snapshot.iterator entry_name <- snapshot.all_exports.keysIterator res <- select1(entry_name).iterator } yield entry_name -> res).toList.sortBy(_._1.compound_name).map(_._2) } else { session_database(name).map(select2).getOrElse(Nil) } if (session.nonEmpty) sel(session) else session_stack.flatMap(sel) } def entry_names(session: String = session_name): List[Entry_Name] = select(session, Some(_), _.entry_names) def theory_names(session: String = session_name): List[String] = select(session, a => if(a.name == THEORY_PARENTS) Some(a.theory) else None, _.theory_names) def get(theory: String, name: String): 
Option[Entry] = { def snapshot_entry: Option[Entry] = for { snapshot <- document_snapshot entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name) entry <- snapshot.all_exports.get(entry_name) } yield entry def db_entry: Option[Entry] = db_hierarchy.view.map(database => Export.Entry_Name(session = database.session, theory = theory, name = name) .read(database.db, cache)) .collectFirst({ case Some(entry) => entry }) snapshot_entry orElse db_entry } def apply(theory: String, name: String, permissive: Boolean = false): Entry = get(theory, name) match { case None if permissive => empty_entry(theory, name) case None => error("Missing export entry " + quote(compound_name(theory, name))) case Some(entry) => entry } def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context = new Theory_Context(session_context, theory, other_cache) - def classpath(): List[File.Content_Bytes] = { + def classpath(): List[File.Content] = { (for { session <- session_stack.iterator info <- sessions_structure.get(session).iterator if info.export_classpath.nonEmpty matcher = make_matcher(info.export_classpath) entry_name <- entry_names(session = session).iterator if matcher(entry_name) entry <- get(entry_name.theory, entry_name.name).iterator } yield File.content(entry.entry_name.make_path(), entry.uncompressed)).toList } override def toString: String = "Export.Session_Context(" + commas_quote(session_stack) + ")" } class Theory_Context private[Export]( val session_context: Session_Context, val theory: String, other_cache: Option[Term.Cache] ) { def cache: Term.Cache = other_cache getOrElse session_context.cache def get(name: String): Option[Entry] = session_context.get(theory, name) def apply(name: String, permissive: Boolean = false): Entry = session_context.apply(theory, name, permissive = permissive) def uncompressed_yxml(name: String): XML.Body = get(name) match { case Some(entry) => entry.uncompressed_yxml case None => Nil } def document_id(): Option[Long] = apply(DOCUMENT_ID, permissive = true).text match { case Value.Long(id) => Some(id) case _ => None } def files(): Option[(String, List[String])] = split_lines(apply(FILES, permissive = true).text) match { case Nil => None case thy_file :: blobs_files => Some((thy_file, blobs_files)) } override def toString: String = "Export.Theory_Context(" + quote(theory) + ")" } /* export to file-system */ def export_files( store: Sessions.Store, session_name: String, export_dir: Path, progress: Progress = new Progress, export_prune: Int = 0, export_list: Boolean = false, export_patterns: List[String] = Nil ): Unit = { using(store.open_database(session_name)) { db => val entry_names = read_entry_names(db, session_name) // list if (export_list) { for (entry_name <- entry_names) progress.echo(entry_name.compound_name) } // export if (export_patterns.nonEmpty) { val matcher = make_matcher(export_patterns) for { entry_name <- entry_names if matcher(entry_name) entry <- entry_name.read(db, store.cache) } { val path = export_dir + entry_name.make_path(prune = export_prune) progress.echo("export " + path + (if (entry.executable) " (executable)" else "")) Isabelle_System.make_directory(path.dir) val bytes = entry.uncompressed if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes) File.set_executable(path, entry.executable) } } } } /* Isabelle tool wrapper */ val default_export_dir: Path = Path.explode("export") val isabelle_tool = Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here, { args => /* 
arguments */ var export_dir = default_export_dir var dirs: List[Path] = Nil var export_list = false var no_build = false var options = Options.init() var export_prune = 0 var export_patterns: List[String] = Nil val getopts = Getopts(""" Usage: isabelle export [OPTIONS] SESSION Options are: -O DIR output directory for exported files (default: """ + default_export_dir + """) -d DIR include session directory -l list exports -n no build of session -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -p NUM prune path of exported files by NUM elements -x PATTERN extract files matching pattern (e.g. "*:**" for all) List or export theory exports for SESSION: named blobs produced by isabelle build. Option -l or -x is required; option -x may be repeated. The PATTERN language resembles glob patterns in the shell, with ? and * (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc], and variants {pattern1,pattern2,pattern3}. """, "O:" -> (arg => export_dir = Path.explode(arg)), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "l" -> (_ => export_list = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "p:" -> (arg => export_prune = Value.Int.parse(arg)), "x:" -> (arg => export_patterns ::= arg)) val more_args = getopts(args) val session_name = more_args match { case List(session_name) if export_list || export_patterns.nonEmpty => session_name case _ => getopts.usage() } val progress = new Console_Progress() /* build */ if (!no_build) { val rc = progress.interrupt_handler { Build.build_logic(options, session_name, progress = progress, dirs = dirs) } if (rc != Process_Result.RC.ok) sys.exit(rc) } /* export files */ val store = Sessions.store(options) export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune, export_list = export_list, export_patterns = export_patterns) }) }
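
Usage note (illustrative, not part of the patch): with Content_Bytes and Content_String unified into a single File.Content backed by Bytes, string and byte payloads are constructed and written through the same interface, and Classpath accepts that type directly for in-memory jars. A minimal sketch, assuming the Isabelle/Scala environment; the concrete paths and jar payload below are placeholders, not taken from the patch:

  // Both string and byte payloads yield the same File.Content type.
  val text_content: File.Content =
    File.content(Path.explode("notes/README"), "generated text\n")   // wraps Bytes(...) internally
  val pdf_content: File.Content =
    File.content(Path.basic("session_graph.pdf"), Bytes.read(Path.explode("graph.pdf")))

  // Contents are written uniformly; parent directories are created as needed.
  Isabelle_System.with_tmp_dir("demo") { dir =>
    text_content.write(dir)
    pdf_content.write(dir)
  }

  // Classpath takes the same type for in-memory jar payloads (placeholder jar bytes).
  val extra_jar: File.Content =
    File.content(Path.basic("extra.jar"), Bytes.read(Path.explode("extra.jar")))
  val classpath = Classpath(jar_contents = List(extra_jar))

Content_XML is unchanged apart from its output(...) method, which now yields a File.Content as well, so LaTeX bodies rendered via Latex.Output feed into the same write path as every other generated file.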