diff --git a/src/Pure/Admin/isabelle_cronjob.scala b/src/Pure/Admin/isabelle_cronjob.scala --- a/src/Pure/Admin/isabelle_cronjob.scala +++ b/src/Pure/Admin/isabelle_cronjob.scala @@ -1,644 +1,644 @@ /* Title: Pure/Admin/isabelle_cronjob.scala Author: Makarius Main entry point for administrative cronjob at TUM. */ package isabelle import java.nio.file.Files import scala.annotation.tailrec object Isabelle_Cronjob { /* global resources: owned by main cronjob */ val backup = "lxbroy10:cronjob" val main_dir: Path = Path.explode("~/cronjob") val main_state_file: Path = main_dir + Path.explode("run/main.state") val current_log: Path = main_dir + Path.explode("run/main.log") // owned by log service val cumulative_log: Path = main_dir + Path.explode("log/main.log") // owned by log service val isabelle_repos: Path = main_dir + Path.explode("isabelle") val afp_repos: Path = main_dir + Path.explode("AFP") val mailman_archives_dir = Path.explode("~/cronjob/Mailman") val build_log_dirs = List(Path.explode("~/log"), Path.explode("~/afp/log"), Path.explode("~/cronjob/log")) /** logger tasks **/ sealed case class Logger_Task(name: String = "", body: Logger => Unit) /* init and exit */ def get_rev(): String = Mercurial.repository(isabelle_repos).id() def get_afp_rev(): String = Mercurial.repository(afp_repos).id() val init: Logger_Task = Logger_Task("init", logger => { Isabelle_Devel.make_index() Mercurial.setup_repository(Isabelle_System.isabelle_repository.root, isabelle_repos) Mercurial.setup_repository(Isabelle_System.afp_repository.root, afp_repos) File.write(logger.log_dir + Build_Log.log_filename("isabelle_identify", logger.start_date), Build_Log.Identify.content(logger.start_date, Some(get_rev()), Some(get_afp_rev()))) Isabelle_System.bash( """rsync -a --include="*/" --include="plain_identify*" --exclude="*" """ + Bash.string(backup + "/log/.") + " " + File.bash_path(main_dir) + "/log/.").check if (!Isabelle_Devel.cronjob_log.is_file) - Files.createSymbolicLink(Isabelle_Devel.cronjob_log.file.toPath, current_log.file.toPath) + Files.createSymbolicLink(Isabelle_Devel.cronjob_log.java_path, current_log.java_path) }) val exit: Logger_Task = Logger_Task("exit", logger => { Isabelle_System.bash( "rsync -a " + File.bash_path(main_dir) + "/log/." 
+ " " + Bash.string(backup) + "/log/.") .check }) /* Mailman archives */ val mailman_archives: Logger_Task = Logger_Task("mailman_archives", logger => { Mailman.isabelle_users.download(mailman_archives_dir) Mailman.isabelle_dev.download(mailman_archives_dir) }) /* build release */ val build_release: Logger_Task = Logger_Task("build_release", logger => { Isabelle_Devel.release_snapshot(logger.options, get_rev(), get_afp_rev()) }) /* remote build_history */ sealed case class Item( known: Boolean, isabelle_version: String, afp_version: Option[String], pull_date: Date) { def unknown: Boolean = !known def versions: (String, Option[String]) = (isabelle_version, afp_version) def known_versions(rev: String, afp_rev: Option[String]): Boolean = known && rev != "" && isabelle_version == rev && (afp_rev.isEmpty || afp_rev.get != "" && afp_version == afp_rev) } def recent_items(db: SQL.Database, days: Int, rev: String, afp_rev: Option[String], sql: SQL.Source): List[Item] = { val afp = afp_rev.isDefined val select = Build_Log.Data.select_recent_versions( days = days, rev = rev, afp_rev = afp_rev, sql = "WHERE " + sql) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => { val known = res.bool(Build_Log.Data.known) val isabelle_version = res.string(Build_Log.Prop.isabelle_version) val afp_version = if (afp) proper_string(res.string(Build_Log.Prop.afp_version)) else None val pull_date = res.date(Build_Log.Data.pull_date(afp)) Item(known, isabelle_version, afp_version, pull_date) }).toList) } def unknown_runs(items: List[Item]): List[List[Item]] = { val (run, rest) = Library.take_prefix[Item](_.unknown, items.dropWhile(_.known)) if (run.nonEmpty) run :: unknown_runs(rest) else Nil } sealed case class Remote_Build( description: String, host: String, actual_host: String = "", user: String = "", port: Int = 0, proxy_host: String = "", proxy_user: String = "", proxy_port: Int = 0, self_update: Boolean = false, historic: Boolean = false, history: Int = 0, history_base: String = "build_history_base", java_heap: String = "", options: String = "", args: String = "", afp: Boolean = false, bulky: Boolean = false, more_hosts: List[String] = Nil, detect: SQL.Source = "", active: Boolean = true) { def ssh_session(context: SSH.Context): SSH.Session = context.open_session(host = host, user = user, port = port, actual_host = actual_host, proxy_host = proxy_host, proxy_user = proxy_user, proxy_port = proxy_port, permissive = proxy_host.nonEmpty) def sql: SQL.Source = Build_Log.Prop.build_engine.toString + " = " + SQL.string(Build_History.engine) + " AND " + SQL.member(Build_Log.Prop.build_host.ident, host :: more_hosts) + (if (detect == "") "" else " AND " + SQL.enclose(detect)) def profile: Build_Status.Profile = Build_Status.Profile(description, history = history, afp = afp, bulky = bulky, sql = sql) def pick( options: Options, rev: String = "", filter: Item => Boolean = _ => true): Option[(String, Option[String])] = { val afp_rev = if (afp) Some(get_afp_rev()) else None val store = Build_Log.store(options) using(store.open_database())(db => { def pick_days(days: Int, gap: Int): Option[(String, Option[String])] = { val items = recent_items(db, days, rev, afp_rev, sql).filter(filter) def runs = unknown_runs(items).filter(run => run.length >= gap) if (historic || items.exists(_.known_versions(rev, afp_rev))) { val longest_run = runs.foldLeft(List.empty[Item]) { case (item1, item2) => if (item1.length >= item2.length) item1 else item2 } if (longest_run.isEmpty) None else 
Some(longest_run(longest_run.length / 2).versions) } else if (rev != "") Some((rev, afp_rev)) else runs.flatten.headOption.map(_.versions) } pick_days(options.int("build_log_history") max history, 2) orElse pick_days(200, 5) orElse pick_days(2000, 1) }) } def build_history_options: String = " -h " + Bash.string(host) + " " + (java_heap match { case "" => "" case h => "-e 'ISABELLE_TOOL_JAVA_OPTIONS=\"$ISABELLE_TOOL_JAVA_OPTIONS -Xmx" + h + "\"' " }) + options } val remote_builds_old: List[Remote_Build] = List( Remote_Build("Linux A", "i21of4", user = "i21isatest", proxy_host = "lxbroy10", proxy_user = "i21isatest", self_update = true, options = "-m32 -M1x4,2,4" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_SMLNJ=sml" + " -e ISABELLE_SWIPL=swipl", args = "-a -d '~~/src/Benchmarks'"), Remote_Build("Linux A", "lxbroy9", java_heap = "2g", options = "-m32 -B -M1x2,2", args = "-N -g timing"), Remote_Build("Linux Benchmarks", "lxbroy5", historic = true, history = 90, java_heap = "2g", options = "-m32 -B -M1x2,2 -t Benchmarks" + " -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" + " -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind -e ISABELLE_SMLNJ=sml" + " -e ISABELLE_SWIPL=swipl", args = "-N -a -d '~~/src/Benchmarks'", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("Benchmarks")), Remote_Build("macOS 10.14 Mojave (Old)", "lapnipkow3", options = "-m32 -M1,2 -e ISABELLE_GHC_SETUP=true -p pide_session=false", self_update = true, args = "-a -d '~~/src/Benchmarks'"), Remote_Build("AFP old bulky", "lrzcloud1", self_update = true, proxy_host = "lxbroy10", proxy_user = "i21isatest", options = "-m64 -M6 -U30000 -s10 -t AFP", args = "-g large -g slow", afp = true, bulky = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("AFP old", "lxbroy7", args = "-N -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("Poly/ML 5.7 Linux", "lxbroy8", history_base = "37074e22e8be", options = "-m32 -B -M1x2,2 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7") + " AND " + Build_Log.Settings.ML_OPTIONS + " <> " + SQL.string("-H 500")), Remote_Build("Poly/ML 5.7.1 Linux", "lxbroy8", history_base = "a9d5b59c3e12", options = "-m32 -B -M1x2,2 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " + Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")), Remote_Build("Poly/ML 5.7 macOS", "macbroy2", history_base = "37074e22e8be", options = "-m32 -B -M1x4,4 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'", args = "-a", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7")), Remote_Build("Poly/ML 5.7.1 macOS", "macbroy2", history_base = "a9d5b59c3e12", options = "-m32 -B -M1x4,4 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'", args = "-a", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " + Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")), Remote_Build("macOS", "macbroy2", options = "-m32 -M8" + " -e ISABELLE_GHC=ghc -e 
ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" + " -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_OPAM_ROOT=\"$ISABELLE_HOME/opam\"" + " -e ISABELLE_SMLNJ=/home/isabelle/smlnj/110.85/bin/sml" + " -p pide_session=false", args = "-a", detect = Build_Log.Prop.build_tags.undefined, history_base = "2c0f24e927dd"), Remote_Build("macOS, quick_and_dirty", "macbroy2", options = "-m32 -M8 -t quick_and_dirty -p pide_session=false", args = "-a -o quick_and_dirty", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty"), history_base = "2c0f24e927dd"), Remote_Build("macOS, skip_proofs", "macbroy2", options = "-m32 -M8 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"), history_base = "2c0f24e927dd"), Remote_Build("Poly/ML test", "lxbroy8", options = "-m32 -B -M1x2,2 -t polyml-test -i 'init_component /home/isabelle/contrib/polyml-5.7-20170217'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-test")), Remote_Build("macOS 10.12 Sierra", "macbroy30", options = "-m32 -M2 -p pide_session=false", args = "-a", detect = Build_Log.Prop.build_start.toString + " > date '2017-03-03'"), Remote_Build("macOS 10.10 Yosemite", "macbroy31", options = "-m32 -M2 -p pide_session=false", args = "-a"), Remote_Build("macOS 10.8 Mountain Lion", "macbroy30", options = "-m32 -M2", args = "-a", detect = Build_Log.Prop.build_start.toString + " < date '2017-03-03'")) ::: { for { (n, hosts) <- List(1 -> List("lxbroy6"), 2 -> List("lxbroy8", "lxbroy5")) } yield { Remote_Build("AFP old", host = hosts.head, more_hosts = hosts.tail, options = "-m32 -M1x2 -t AFP -P" + n + " -e ISABELLE_GHC=ghc" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_SMLNJ=sml", args = "-N -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")) } } val remote_builds1: List[List[Remote_Build]] = { List( List(Remote_Build("Linux B", "lxbroy10", historic = true, history = 90, options = "-m32 -B -M1x4,2,4,6", args = "-N -g timing")), List(Remote_Build("macOS 10.13 High Sierra", "lapbroy68", options = "-m32 -B -M1,2,4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", self_update = true, args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("macOS 11 Big Sur", "mini1", options = "-m32 -B -M1x2,2,4 -p pide_session=false" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=/usr/local/bin/mlton" + " -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" + " -e ISABELLE_SWIPL=/usr/local/bin/swipl", self_update = true, args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("macOS 10.14 Mojave", "mini2", options = "-m32 -B -M1x2,2,4 -p pide_session=false" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=/usr/local/bin/mlton" + " -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" + " -e ISABELLE_SWIPL=/usr/local/bin/swipl", self_update = true, args = "-a -d '~~/src/Benchmarks'"), Remote_Build("macOS, quick_and_dirty", "mini2", options = "-m32 -M4 -t quick_and_dirty -p pide_session=false", self_update = true, args = "-a -o quick_and_dirty", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty")), Remote_Build("macOS, skip_proofs", "mini2", options = "-m32 -M4 -t 
skip_proofs -p pide_session=false", args = "-a -o skip_proofs", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"))), List(Remote_Build("macOS 10.15 Catalina", "laramac01", user = "makarius", proxy_host = "laraserver", proxy_user = "makarius", self_update = true, options = "-m32 -M4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("Windows", "vmnipkow9", historic = true, history = 90, self_update = true, options = "-m32 -M4" + " -C /cygdrive/d/isatest/contrib" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml", args = "-a", detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86-windows") + " OR " + Build_Log.Settings.ML_PLATFORM + " = " + SQL.string("x86_64_32-windows")), Remote_Build("Windows", "vmnipkow9", historic = true, history = 90, self_update = true, options = "-m64 -M4" + " -C /cygdrive/d/isatest/contrib" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml", args = "-a", detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86_64-windows")))) } val remote_builds2: List[List[Remote_Build]] = List( List( Remote_Build("AFP", "lrzcloud2", actual_host = "10.195.4.41", self_update = true, proxy_host = "lxbroy10", proxy_user = "i21isatest", java_heap = "8g", options = "-m32 -M1x6 -t AFP" + " -e ISABELLE_GHC=ghc" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_SMLNJ=sml", args = "-a -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("AFP", "lrzcloud2", actual_host = "10.195.4.41", self_update = true, proxy_host = "lxbroy10", proxy_user = "i21isatest", java_heap = "8g", options = "-m64 -M8 -U30000 -s10 -t AFP", args = "-g large -g slow", afp = true, bulky = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")))) def remote_build_history(rev: String, afp_rev: Option[String], i: Int, r: Remote_Build) : Logger_Task = { val task_name = "build_history-" + r.host Logger_Task(task_name, logger => { using(r.ssh_session(logger.ssh_context))(ssh => { val results = Build_History.remote_build_history(ssh, isabelle_repos, isabelle_repos.ext(r.host), isabelle_identifier = "cronjob_build_history", self_update = r.self_update, rev = rev, afp_rev = afp_rev, options = " -N " + Bash.string(task_name) + (if (i < 0) "" else "_" + (i + 1).toString) + " -R " + Bash.string(Components.default_component_repository) + " -C '$USER_HOME/.isabelle/contrib' -f " + r.build_history_options, args = "-o timeout=10800 " + r.args) for ((log_name, bytes) <- results) { logger.log(Date.now(), log_name) Bytes.write(logger.log_dir + Path.explode(log_name), bytes) } }) }) } val build_status_profiles: List[Build_Status.Profile] = (remote_builds_old :: remote_builds1 ::: remote_builds2).flatten.map(_.profile) /** task logging **/ object Log_Service { def apply(options: Options, progress: Progress = new Progress): Log_Service = new Log_Service(SSH.init_context(options), progress) } class Log_Service private(val ssh_context: SSH.Context, progress: Progress) { current_log.file.delete private val thread: Consumer_Thread[String] = Consumer_Thread.fork("cronjob: logger", daemon = true)( consume = (text: 
String) => { // critical File.append(current_log, text + "\n") File.append(cumulative_log, text + "\n") progress.echo(text) true }) def shutdown(): Unit = { thread.shutdown() } val hostname: String = Isabelle_System.hostname() def log(date: Date, task_name: String, msg: String): Unit = if (task_name != "") thread.send( "[" + Build_Log.print_date(date) + ", " + hostname + ", " + task_name + "]: " + msg) def start_logger(start_date: Date, task_name: String): Logger = new Logger(this, start_date, task_name) def run_task(start_date: Date, task: Logger_Task): Unit = { val logger = start_logger(start_date, task.name) val res = Exn.capture { task.body(logger) } val end_date = Date.now() val err = res match { case Exn.Res(_) => None case Exn.Exn(exn) => Output.writeln("Exception trace for " + quote(task.name) + ":") exn.printStackTrace() val first_line = split_lines(Exn.message(exn)).headOption getOrElse "exception" Some(first_line) } logger.log_end(end_date, err) } def fork_task(start_date: Date, task: Logger_Task): Task = new Task(task.name, run_task(start_date, task)) } class Logger private[Isabelle_Cronjob]( val log_service: Log_Service, val start_date: Date, val task_name: String) { def ssh_context: SSH.Context = log_service.ssh_context def options: Options = ssh_context.options def log(date: Date, msg: String): Unit = log_service.log(date, task_name, msg) def log_end(end_date: Date, err: Option[String]): Unit = { val elapsed_time = end_date.time - start_date.time val msg = (if (err.isEmpty) "finished" else "ERROR " + err.get) + (if (elapsed_time.seconds < 3.0) "" else " (" + elapsed_time.message_hms + " elapsed time)") log(end_date, msg) } val log_dir = Isabelle_System.make_directory(main_dir + Build_Log.log_subdir(start_date)) log(start_date, "started") } class Task private[Isabelle_Cronjob](name: String, body: => Unit) { private val future: Future[Unit] = Future.thread("cronjob: " + name) { body } def is_finished: Boolean = future.is_finished } /** cronjob **/ def cronjob(progress: Progress, exclude_task: Set[String]): Unit = { /* soft lock */ val still_running = try { Some(File.read(main_state_file)) } catch { case ERROR(_) => None } still_running match { case None | Some("") => case Some(running) => error("Isabelle cronjob appears to be still running: " + running) } /* log service */ val log_service = Log_Service(Options.init(), progress = progress) def run(start_date: Date, task: Logger_Task): Unit = log_service.run_task(start_date, task) def run_now(task: Logger_Task): Unit = run(Date.now(), task) /* structured tasks */ def SEQ(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = _ => for (task <- tasks.iterator if !exclude_task(task.name) || task.name == "") run_now(task)) def PAR(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = _ => { @tailrec def join(running: List[Task]): Unit = { running.partition(_.is_finished) match { case (Nil, Nil) => case (Nil, _ :: _) => Time.seconds(0.5).sleep(); join(running) case (_ :: _, remaining) => join(remaining) } } val start_date = Date.now() val running = for (task <- tasks if !exclude_task(task.name)) yield log_service.fork_task(start_date, task) join(running) }) /* repository structure */ val hg = Mercurial.repository(isabelle_repos) val hg_graph = hg.graph() def history_base_filter(r: Remote_Build): Item => Boolean = { val base_rev = hg.id(r.history_base) val nodes = hg_graph.all_succs(List(base_rev)).toSet (item: Item) => nodes(item.isabelle_version) } /* main */ val main_start_date = Date.now() File.write(main_state_file, 
main_start_date.toString + " " + log_service.hostname) run(main_start_date, Logger_Task("isabelle_cronjob", logger => run_now( SEQ(List( init, PAR(List(mailman_archives, build_release)), PAR( List(remote_builds1, remote_builds2).map(remote_builds => SEQ(List( PAR(remote_builds.map(_.filter(_.active)).map(seq => SEQ( for { (r, i) <- (if (seq.length <= 1) seq.map((_, -1)) else seq.zipWithIndex) (rev, afp_rev) <- r.pick(logger.options, hg.id(), history_base_filter(r)) } yield remote_build_history(rev, afp_rev, i, r)))), Logger_Task("jenkins_logs", _ => Jenkins.download_logs(logger.options, Jenkins.build_log_jobs, main_dir)), Logger_Task("build_log_database", logger => Isabelle_Devel.build_log_database(logger.options, build_log_dirs)), Logger_Task("build_status", logger => Isabelle_Devel.build_status(logger.options)))))), exit))))) log_service.shutdown() main_state_file.file.delete } /** command line entry point **/ def main(args: Array[String]): Unit = { Command_Line.tool { var force = false var verbose = false var exclude_task = Set.empty[String] val getopts = Getopts(""" Usage: Admin/cronjob/main [OPTIONS] Options are: -f apply force to do anything -v verbose -x NAME exclude tasks with this name """, "f" -> (_ => force = true), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_task += arg)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = if (verbose) new Console_Progress() else new Progress if (force) cronjob(progress, exclude_task) else error("Need to apply force to do anything") } } } diff --git a/src/Pure/General/file.scala b/src/Pure/General/file.scala --- a/src/Pure/General/file.scala +++ b/src/Pure/General/file.scala @@ -1,295 +1,295 @@ /* Title: Pure/General/file.scala Author: Makarius File-system operations. 
*/ package isabelle import java.io.{BufferedWriter, OutputStreamWriter, FileOutputStream, BufferedOutputStream, OutputStream, InputStream, FileInputStream, BufferedInputStream, BufferedReader, InputStreamReader, File => JFile, IOException} import java.nio.file.{StandardOpenOption, Path => JPath, Files, SimpleFileVisitor, FileVisitOption, FileVisitResult} import java.nio.file.attribute.BasicFileAttributes import java.net.{URL, MalformedURLException} import java.util.zip.{GZIPInputStream, GZIPOutputStream} import java.util.EnumSet import org.tukaani.xz.{XZInputStream, XZOutputStream} import scala.collection.mutable object File { /* standard path (Cygwin or Posix) */ def standard_path(path: Path): String = path.expand.implode def standard_path(platform_path: String): String = isabelle.setup.Environment.standard_path(platform_path) def standard_path(file: JFile): String = standard_path(file.getPath) def standard_url(name: String): String = try { val url = new URL(name) if (url.getProtocol == "file" && Url.is_wellformed_file(name)) standard_path(Url.parse_file(name)) else name } catch { case _: MalformedURLException => standard_path(name) } /* platform path (Windows or Posix) */ def platform_path(standard_path: String): String = isabelle.setup.Environment.platform_path(standard_path) def platform_path(path: Path): String = platform_path(standard_path(path)) def platform_file(path: Path): JFile = new JFile(platform_path(path)) /* platform files */ def absolute(file: JFile): JFile = file.toPath.toAbsolutePath.normalize.toFile def absolute_name(file: JFile): String = absolute(file).getPath def canonical(file: JFile): JFile = file.getCanonicalFile def canonical_name(file: JFile): String = canonical(file).getPath def path(file: JFile): Path = Path.explode(standard_path(file)) def pwd(): Path = path(Path.current.absolute_file) /* relative paths */ def relative_path(base: Path, other: Path): Option[Path] = { - val base_path = base.file.toPath - val other_path = other.file.toPath + val base_path = base.java_path + val other_path = other.java_path if (other_path.startsWith(base_path)) Some(path(base_path.relativize(other_path).toFile)) else None } /* bash path */ def bash_path(path: Path): String = Bash.string(standard_path(path)) def bash_path(file: JFile): String = Bash.string(standard_path(file)) def bash_platform_path(path: Path): String = Bash.string(platform_path(path)) /* directory entries */ def check_dir(path: Path): Path = if (path.is_dir) path else error("No such directory: " + path) def check_file(path: Path): Path = if (path.is_file) path else error("No such file: " + path) /* directory content */ def read_dir(dir: Path): List[String] = { if (!dir.is_dir) error("No such directory: " + dir.toString) val files = dir.file.listFiles if (files == null) Nil else files.toList.map(_.getName).sorted } def get_dir(dir: Path): String = read_dir(dir).filter(name => (dir + Path.basic(name)).is_dir) match { case List(entry) => entry case dirs => error("Exactly one directory entry expected: " + commas_quote(dirs.sorted)) } def find_files( start: JFile, pred: JFile => Boolean = _ => true, include_dirs: Boolean = false, follow_links: Boolean = false): List[JFile] = { val result = new mutable.ListBuffer[JFile] def check(file: JFile): Unit = if (pred(file)) result += file if (start.isFile) check(start) else if (start.isDirectory) { val options = if (follow_links) EnumSet.of(FileVisitOption.FOLLOW_LINKS) else EnumSet.noneOf(classOf[FileVisitOption]) Files.walkFileTree(start.toPath, options, Integer.MAX_VALUE, 
new SimpleFileVisitor[JPath] { override def preVisitDirectory(path: JPath, attrs: BasicFileAttributes): FileVisitResult = { if (include_dirs) check(path.toFile) FileVisitResult.CONTINUE } override def visitFile(path: JPath, attrs: BasicFileAttributes): FileVisitResult = { val file = path.toFile if (include_dirs || !file.isDirectory) check(file) FileVisitResult.CONTINUE } } ) } result.toList } /* read */ def read(file: JFile): String = Bytes.read(file).text def read(path: Path): String = read(path.file) def read_stream(reader: BufferedReader): String = { val output = new StringBuilder(100) var c = -1 while ({ c = reader.read; c != -1 }) output += c.toChar reader.close() output.toString } def read_stream(stream: InputStream): String = read_stream(new BufferedReader(new InputStreamReader(stream, UTF8.charset))) def read_gzip(file: JFile): String = read_stream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))) def read_gzip(path: Path): String = read_gzip(path.file) def read_xz(file: JFile): String = read_stream(new XZInputStream(new BufferedInputStream(new FileInputStream(file)))) def read_xz(path: Path): String = read_xz(path.file) /* read lines */ def read_line(reader: BufferedReader): Option[String] = { val line = try { reader.readLine} catch { case _: IOException => null } Option(line).map(Library.trim_line) } def read_lines(reader: BufferedReader, progress: String => Unit): List[String] = { val result = new mutable.ListBuffer[String] var line: Option[String] = None while ({ line = read_line(reader); line.isDefined }) { progress(line.get) result += line.get } reader.close() result.toList } /* write */ def writer(file: JFile): BufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), UTF8.charset)) def write_file( file: JFile, text: String, make_stream: OutputStream => OutputStream): Unit = { val stream = make_stream(new FileOutputStream(file)) using(new BufferedWriter(new OutputStreamWriter(stream, UTF8.charset)))(_.append(text)) } def write(file: JFile, text: String): Unit = write_file(file, text, s => s) def write(path: Path, text: String): Unit = write(path.file, text) def write_gzip(file: JFile, text: String): Unit = write_file(file, text, (s: OutputStream) => new GZIPOutputStream(new BufferedOutputStream(s))) def write_gzip(path: Path, text: String): Unit = write_gzip(path.file, text) def write_xz(file: JFile, text: String, options: XZ.Options): Unit = File.write_file(file, text, s => new XZOutputStream(new BufferedOutputStream(s), options)) def write_xz(file: JFile, text: String): Unit = write_xz(file, text, XZ.options()) def write_xz(path: Path, text: String, options: XZ.Options): Unit = write_xz(path.file, text, options) def write_xz(path: Path, text: String): Unit = write_xz(path, text, XZ.options()) def write_backup(path: Path, text: String): Unit = { if (path.is_file) Isabelle_System.move_file(path, path.backup) write(path, text) } def write_backup2(path: Path, text: String): Unit = { if (path.is_file) Isabelle_System.move_file(path, path.backup2) write(path, text) } /* change */ def change(file: JFile, f: String => String): Unit = { val x = read(file) val y = f(x) if (x != y) write(file, y) } def change_lines(file: JFile, f: List[String] => List[String]): Unit = change(file, text => cat_lines(f(split_lines(text)))) def change(path: Path, f: String => String): Unit = change(path.file, f) def change_lines(path: Path, f: List[String] => List[String]): Unit = change_lines(path.file, f) /* append */ def append(file: JFile, text: 
String): Unit = Files.write(file.toPath, UTF8.bytes(text), StandardOpenOption.APPEND, StandardOpenOption.CREATE) def append(path: Path, text: String): Unit = append(path.file, text) /* eq */ def eq(file1: JFile, file2: JFile): Boolean = try { Files.isSameFile(file1.toPath, file2.toPath) } catch { case ERROR(_) => false } def eq(path1: Path, path2: Path): Boolean = eq(path1.file, path2.file) /* eq_content */ def eq_content(file1: JFile, file2: JFile): Boolean = if (eq(file1, file2)) true else if (file1.length != file2.length) false else Bytes.read(file1) == Bytes.read(file2) def eq_content(path1: Path, path2: Path): Boolean = eq_content(path1.file, path2.file) /* permissions */ def is_executable(path: Path): Boolean = { if (Platform.is_windows) Isabelle_System.bash("test -x " + bash_path(path)).check.ok else path.file.canExecute } def set_executable(path: Path, flag: Boolean): Unit = { if (Platform.is_windows && flag) Isabelle_System.chmod("a+x", path) else if (Platform.is_windows) Isabelle_System.chmod("a-x", path) else path.file.setExecutable(flag, false) } } diff --git a/src/Pure/General/path.scala b/src/Pure/General/path.scala --- a/src/Pure/General/path.scala +++ b/src/Pure/General/path.scala @@ -1,314 +1,317 @@ /* Title: Pure/General/path.scala Author: Makarius Algebra of file-system paths: basic POSIX notation, extended by named roots (e.g. //foo) and variables (e.g. $BAR). */ package isabelle import java.util.{Map => JMap} import java.io.{File => JFile} +import java.nio.file.{Path => JPath} import scala.util.matching.Regex object Path { /* path elements */ sealed abstract class Elem private case class Root(name: String) extends Elem private case class Basic(name: String) extends Elem private case class Variable(name: String) extends Elem private case object Parent extends Elem private def err_elem(msg: String, s: String): Nothing = error(msg + " path element " + quote(s)) private val illegal_elem = Set("", "~", "~~", ".", "..") private val illegal_char = "/\\$:\"'<>|?*" private def check_elem(s: String): String = if (illegal_elem.contains(s)) err_elem("Illegal", s) else { for (c <- s) { if (c.toInt < 32) err_elem("Illegal control character " + c.toInt + " in", s) if (illegal_char.contains(c)) err_elem("Illegal character " + quote(c.toString) + " in", s) } s } private def root_elem(s: String): Elem = Root(check_elem(s)) private def basic_elem(s: String): Elem = Basic(check_elem(s)) private def variable_elem(s: String): Elem = Variable(check_elem(s)) private def apply_elem(y: Elem, xs: List[Elem]): List[Elem] = (y, xs) match { case (Root(_), _) => List(y) case (Parent, Root(_) :: _) => xs case (Parent, Basic(_) :: rest) => rest case _ => y :: xs } private def norm_elems(elems: List[Elem]): List[Elem] = elems.foldRight(List.empty[Elem])(apply_elem) private def implode_elem(elem: Elem, short: Boolean): String = elem match { case Root("") => "" case Root(s) => "//" + s case Basic(s) => s case Variable("USER_HOME") if short => "~" case Variable("ISABELLE_HOME") if short => "~~" case Variable(s) => "$" + s case Parent => ".." 
} private def squash_elem(elem: Elem): String = elem match { case Root("") => "ROOT" case Root(s) => "SERVER_" + s case Basic(s) => s case Variable(s) => s case Parent => "PARENT" } /* path constructors */ val current: Path = new Path(Nil) val root: Path = new Path(List(Root(""))) def named_root(s: String): Path = new Path(List(root_elem(s))) def make(elems: List[String]): Path = new Path(elems.reverse.map(basic_elem)) def basic(s: String): Path = new Path(List(basic_elem(s))) def variable(s: String): Path = new Path(List(variable_elem(s))) val parent: Path = new Path(List(Parent)) val USER_HOME: Path = variable("USER_HOME") val ISABELLE_HOME: Path = variable("ISABELLE_HOME") /* explode */ def explode(str: String): Path = { def explode_elem(s: String): Elem = try { if (s == "..") Parent else if (s == "~") Variable("USER_HOME") else if (s == "~~") Variable("ISABELLE_HOME") else if (s.startsWith("$")) variable_elem(s.substring(1)) else basic_elem(s) } catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred in " + quote(str)) } val ss = space_explode('/', str) val r = ss.takeWhile(_.isEmpty).length val es = ss.dropWhile(_.isEmpty) val (roots, raw_elems) = if (r == 0) (Nil, es) else if (r == 1) (List(Root("")), es) else if (es.isEmpty) (List(Root("")), Nil) else (List(root_elem(es.head)), es.tail) val elems = raw_elems.filterNot(s => s.isEmpty || s == ".").map(explode_elem) new Path(norm_elems(elems reverse_::: roots)) } def is_wellformed(str: String): Boolean = try { explode(str); true } catch { case ERROR(_) => false } def is_valid(str: String): Boolean = try { explode(str).expand; true } catch { case ERROR(_) => false } def split(str: String): List[Path] = space_explode(':', str).filterNot(_.isEmpty).map(explode) /* encode */ val encode: XML.Encode.T[Path] = (path => XML.Encode.string(path.implode)) /* reserved names */ private val reserved_windows: Set[String] = Set("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9") def is_reserved(name: String): Boolean = Long_Name.explode(name).exists(a => reserved_windows.contains(Word.uppercase(a))) /* case-insensitive names */ def check_case_insensitive(paths: List[Path]): Unit = { val table = paths.foldLeft(Multi_Map.empty[String, String]) { case (tab, path) => val name = path.expand.implode tab.insert(Word.lowercase(name), name) } val collisions = (for { (_, coll) <- table.iterator_list if coll.length > 1 } yield coll).toList.flatten if (collisions.nonEmpty) { error(("Collision of file names due case-insensitivity:" :: collisions).mkString("\n ")) } } } final class Path private(protected val elems: List[Path.Elem]) // reversed elements { override def hashCode: Int = elems.hashCode override def equals(that: Any): Boolean = that match { case other: Path => elems == other.elems case _ => false } def is_current: Boolean = elems.isEmpty def is_absolute: Boolean = elems.nonEmpty && elems.last.isInstanceOf[Path.Root] def is_root: Boolean = elems match { case List(Path.Root(_)) => true case _ => false } def is_basic: Boolean = elems match { case List(Path.Basic(_)) => true case _ => false } def starts_basic: Boolean = elems.nonEmpty && elems.last.isInstanceOf[Path.Basic] def +(other: Path): Path = new Path(other.elems.foldRight(elems)(Path.apply_elem)) /* implode */ private def gen_implode(short: Boolean): String = elems match { case Nil => "." 
case List(Path.Root("")) => "/" case _ => elems.map(Path.implode_elem(_, short)).reverse.mkString("/") } def implode: String = gen_implode(false) def implode_short: String = gen_implode(true) override def toString: String = quote(implode) /* base element */ private def split_path: (Path, String) = elems match { case Path.Basic(s) :: xs => (new Path(xs), s) case _ => error("Cannot split path into dir/base: " + toString) } def dir: Path = split_path._1 def base: Path = new Path(List(Path.Basic(split_path._2))) def ext(e: String): Path = if (e == "") this else { val (prfx, s) = split_path prfx + Path.basic(s + "." + e) } def xz: Path = ext("xz") def xml: Path = ext("xml") def html: Path = ext("html") def tex: Path = ext("tex") def pdf: Path = ext("pdf") def thy: Path = ext("thy") def tar: Path = ext("tar") def gz: Path = ext("gz") def log: Path = ext("log") def backup: Path = { val (prfx, s) = split_path prfx + Path.basic(s + "~") } def backup2: Path = { val (prfx, s) = split_path prfx + Path.basic(s + "~~") } def platform_exe: Path = if (Platform.is_windows) ext("exe") else this private val Ext = new Regex("(.*)\\.([^.]*)") def split_ext: (Path, String) = { val (prefix, base) = split_path base match { case Ext(b, e) => (prefix + Path.basic(b), e) case _ => (prefix + Path.basic(base), "") } } def drop_ext: Path = split_ext._1 def get_ext: String = split_ext._2 def squash: Path = new Path(elems.map(elem => Path.Basic(Path.squash_elem(elem)))) /* expand */ def expand_env(env: JMap[String, String]): Path = { def eval(elem: Path.Elem): List[Path.Elem] = elem match { case Path.Variable(s) => val path = Path.explode(Isabelle_System.getenv_strict(s, env)) if (path.elems.exists(_.isInstanceOf[Path.Variable])) error("Illegal path variable nesting: " + Properties.Eq(s, path.toString)) else path.elems case x => List(x) } new Path(Path.norm_elems(elems.flatMap(eval))) } def expand: Path = expand_env(Isabelle_System.settings()) def file_name: String = expand.base.implode /* implode wrt. given directories */ def implode_symbolic: String = { val directories = Library.space_explode(':', Isabelle_System.getenv("ISABELLE_DIRECTORIES")).reverse val full_name = expand.implode directories.view.flatMap(a => try { val b = Path.explode(a).expand.implode if (full_name == b) Some(a) else { Library.try_unprefix(b + "/", full_name) match { case Some(name) => Some(a + "/" + name) case None => None } } } catch { case ERROR(_) => None }).headOption.getOrElse(implode) } def position: Position.T = Position.File(implode_symbolic) /* platform files */ def file: JFile = File.platform_file(this) def is_file: Boolean = file.isFile def is_dir: Boolean = file.isDirectory + def java_path: JPath = file.toPath + def absolute_file: JFile = File.absolute(file) def canonical_file: JFile = File.canonical(file) def absolute: Path = File.path(absolute_file) def canonical: Path = File.path(canonical_file) } diff --git a/src/Pure/System/isabelle_system.scala b/src/Pure/System/isabelle_system.scala --- a/src/Pure/System/isabelle_system.scala +++ b/src/Pure/System/isabelle_system.scala @@ -1,478 +1,478 @@ /* Title: Pure/System/isabelle_system.scala Author: Makarius Miscellaneous Isabelle system operations. 
*/ package isabelle import java.util.{Map => JMap} import java.io.{File => JFile, IOException} import java.nio.file.{Path => JPath, Files, SimpleFileVisitor, FileVisitResult, StandardCopyOption, FileSystemException} import java.nio.file.attribute.BasicFileAttributes object Isabelle_System { /* settings */ def settings(): JMap[String, String] = isabelle.setup.Environment.settings() def getenv(name: String, env: JMap[String, String] = settings()): String = Option(env.get(name)).getOrElse("") def getenv_strict(name: String, env: JMap[String, String] = settings()): String = proper_string(getenv(name, env)) getOrElse error("Undefined Isabelle environment variable: " + quote(name)) /* services */ abstract class Service @volatile private var _services: Option[List[Class[Service]]] = None def services(): List[Class[Service]] = { if (_services.isEmpty) init() // unsynchronized check _services.get } def make_services[C](c: Class[C]): List[C] = for { c1 <- services() if Library.is_subclass(c1, c) } yield c1.getDeclaredConstructor().newInstance().asInstanceOf[C] /* init settings + services */ def init(isabelle_root: String = "", cygwin_root: String = ""): Unit = { isabelle.setup.Environment.init(isabelle_root, cygwin_root) synchronized { if (_services.isEmpty) { val variable = "ISABELLE_SCALA_SERVICES" val services = for (name <- space_explode(':', getenv_strict(variable))) yield { def err(msg: String): Nothing = error("Bad entry " + quote(name) + " in " + variable + "\n" + msg) try { Class.forName(name).asInstanceOf[Class[Service]] } catch { case _: ClassNotFoundException => err("Class not found") case exn: Throwable => err(Exn.message(exn)) } } _services = Some(services) } } } /* getetc -- static distribution parameters */ def getetc(name: String, root: Path = Path.ISABELLE_HOME): Option[String] = { val path = root + Path.basic("etc") + Path.basic(name) if (path.is_file) { Library.trim_split_lines(File.read(path)) match { case Nil => None case List(s) => Some(s) case _ => error("Single line expected in " + path.absolute) } } else None } /* Isabelle distribution identification */ def isabelle_id(root: Path = Path.ISABELLE_HOME): String = getetc("ISABELLE_ID", root = root) orElse Mercurial.archive_id(root) getOrElse { if (Mercurial.is_repository(root)) Mercurial.repository(root).parent() else error("Failed to identify Isabelle distribution " + root) } object Isabelle_Id extends Scala.Fun_String("isabelle_id") { val here = Scala_Project.here def apply(arg: String): String = isabelle_id() } def isabelle_tags(root: Path = Path.ISABELLE_HOME): String = getetc("ISABELLE_TAGS", root = root) orElse Mercurial.archive_tags(root) getOrElse { if (Mercurial.is_repository(root)) { val hg = Mercurial.repository(root) hg.tags(rev = hg.parent()) } else "" } def isabelle_identifier(): Option[String] = proper_string(getenv("ISABELLE_IDENTIFIER")) def isabelle_heading(): String = isabelle_identifier() match { case None => "" case Some(version) => " (" + version + ")" } def isabelle_name(): String = getenv_strict("ISABELLE_NAME") def identification(): String = "Isabelle/" + isabelle_id() + isabelle_heading() /** file-system operations **/ /* scala functions */ private def apply_paths(args: List[String], fun: List[Path] => Unit): List[String] = { fun(args.map(Path.explode)); Nil } private def apply_paths1(args: List[String], fun: Path => Unit): List[String] = apply_paths(args, { case List(path) => fun(path) }) private def apply_paths2(args: List[String], fun: (Path, Path) => Unit): List[String] = apply_paths(args, { case 
List(path1, path2) => fun(path1, path2) }) private def apply_paths3(args: List[String], fun: (Path, Path, Path) => Unit): List[String] = apply_paths(args, { case List(path1, path2, path3) => fun(path1, path2, path3) }) /* permissions */ def chmod(arg: String, path: Path): Unit = bash("chmod " + arg + " " + File.bash_path(path)).check def chown(arg: String, path: Path): Unit = bash("chown " + arg + " " + File.bash_path(path)).check /* directories */ def make_directory(path: Path): Path = { if (!path.is_dir) { - try { Files.createDirectories(path.file.toPath) } + try { Files.createDirectories(path.java_path) } catch { case ERROR(_) => error("Failed to create directory: " + path.absolute) } } path } def new_directory(path: Path): Path = if (path.is_dir) error("Directory already exists: " + path.absolute) else make_directory(path) def copy_dir(dir1: Path, dir2: Path): Unit = { val res = bash("cp -a " + File.bash_path(dir1) + " " + File.bash_path(dir2)) if (!res.ok) { cat_error("Failed to copy directory " + dir1.absolute + " to " + dir2.absolute, res.err) } } object Make_Directory extends Scala.Fun_Strings("make_directory") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths1(args, make_directory) } object Copy_Dir extends Scala.Fun_Strings("copy_dir") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths2(args, copy_dir) } /* copy files */ def copy_file(src: JFile, dst: JFile): Unit = { val target = if (dst.isDirectory) new JFile(dst, src.getName) else dst if (!File.eq(src, target)) { try { Files.copy(src.toPath, target.toPath, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING) } catch { case ERROR(msg) => cat_error("Failed to copy file " + File.path(src).absolute + " to " + File.path(dst).absolute, msg) } } } def copy_file(src: Path, dst: Path): Unit = copy_file(src.file, dst.file) def copy_file_base(base_dir: Path, src: Path, target_dir: Path): Unit = { val src1 = src.expand val src1_dir = src1.dir if (!src1.starts_basic) error("Illegal path specification " + src1 + " beyond base directory") copy_file(base_dir + src1, Isabelle_System.make_directory(target_dir + src1_dir)) } object Copy_File extends Scala.Fun_Strings("copy_file") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths2(args, copy_file) } object Copy_File_Base extends Scala.Fun_Strings("copy_file_base") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths3(args, copy_file_base) } /* move files */ def move_file(src: JFile, dst: JFile): Unit = { val target = if (dst.isDirectory) new JFile(dst, src.getName) else dst if (!File.eq(src, target)) Files.move(src.toPath, target.toPath, StandardCopyOption.REPLACE_EXISTING) } def move_file(src: Path, dst: Path): Unit = move_file(src.file, dst.file) /* symbolic link */ def symlink(src: Path, dst: Path, force: Boolean = false): Unit = { val src_file = src.file val dst_file = dst.file val target = if (dst_file.isDirectory) new JFile(dst_file, src_file.getName) else dst_file if (force) target.delete def cygwin_link(): Unit = isabelle.setup.Environment.cygwin_link(File.standard_path(src), target) try { Files.createSymbolicLink(target.toPath, src_file.toPath) } catch { case _: UnsupportedOperationException if Platform.is_windows => cygwin_link() case _: FileSystemException if Platform.is_windows => cygwin_link() } } /* tmp files */ def isabelle_tmp_prefix(): JFile = { val path = Path.explode("$ISABELLE_TMP_PREFIX") 
path.file.mkdirs // low-level mkdirs to avoid recursion via Isabelle environment File.platform_file(path) } def tmp_file(name: String, ext: String = "", base_dir: JFile = isabelle_tmp_prefix()): JFile = { val suffix = if (ext == "") "" else "." + ext val file = Files.createTempFile(base_dir.toPath, name, suffix).toFile file.deleteOnExit file } def with_tmp_file[A](name: String, ext: String = "")(body: Path => A): A = { val file = tmp_file(name, ext) try { body(File.path(file)) } finally { file.delete } } /* tmp dirs */ def rm_tree(root: JFile): Unit = { root.delete if (root.isDirectory) { Files.walkFileTree(root.toPath, new SimpleFileVisitor[JPath] { override def visitFile(file: JPath, attrs: BasicFileAttributes): FileVisitResult = { try { Files.deleteIfExists(file) } catch { case _: IOException => } FileVisitResult.CONTINUE } override def postVisitDirectory(dir: JPath, e: IOException): FileVisitResult = { if (e == null) { try { Files.deleteIfExists(dir) } catch { case _: IOException => } FileVisitResult.CONTINUE } else throw e } } ) } } def rm_tree(root: Path): Unit = rm_tree(root.file) object Rm_Tree extends Scala.Fun_Strings("rm_tree") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths1(args, rm_tree) } def tmp_dir(name: String, base_dir: JFile = isabelle_tmp_prefix()): JFile = { val dir = Files.createTempDirectory(base_dir.toPath, name).toFile dir.deleteOnExit dir } def with_tmp_dir[A](name: String)(body: Path => A): A = { val dir = tmp_dir(name) try { body(File.path(dir)) } finally { rm_tree(dir) } } /* quasi-atomic update of directory */ def update_directory(dir: Path, f: Path => Unit): Unit = { val new_dir = dir.ext("new") val old_dir = dir.ext("old") rm_tree(new_dir) rm_tree(old_dir) f(new_dir) if (dir.is_dir) move_file(dir, old_dir) move_file(new_dir, dir) rm_tree(old_dir) } /** external processes **/ /* GNU bash */ def bash(script: String, cwd: JFile = null, env: JMap[String, String] = settings(), redirect: Boolean = false, progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), watchdog: Option[Bash.Watchdog] = None, strict: Boolean = true, cleanup: () => Unit = () => ()): Process_Result = { Bash.process(script, cwd = cwd, env = env, redirect = redirect, cleanup = cleanup). 
result(progress_stdout = progress_stdout, progress_stderr = progress_stderr, watchdog = watchdog, strict = strict) } private lazy val gnutar_check: Boolean = try { bash("tar --version").check.out.containsSlice("GNU tar") || error("") } catch { case ERROR(_) => false } def gnutar( args: String, dir: Path = Path.current, original_owner: Boolean = false, strip: Int = 0, redirect: Boolean = false): Process_Result = { val options = (if (dir.is_current) "" else "-C " + File.bash_path(dir) + " ") + (if (original_owner) "" else "--owner=root --group=staff ") + (if (strip <= 0) "" else "--strip-components=" + strip + " ") if (gnutar_check) bash("tar " + options + args, redirect = redirect) else error("Expected to find GNU tar executable") } def require_command(cmd: String, test: String = "--version"): Unit = { if (!bash(Bash.string(cmd) + " " + test).ok) error("Missing system command: " + quote(cmd)) } def hostname(): String = bash("hostname -s").check.out def open(arg: String): Unit = bash("exec \"$ISABELLE_OPEN\" " + Bash.string(arg) + " >/dev/null 2>/dev/null &") def pdf_viewer(arg: Path): Unit = bash("exec \"$PDF_VIEWER\" " + File.bash_path(arg) + " >/dev/null 2>/dev/null &") def open_external_file(name: String): Boolean = { val ext = Library.take_suffix((c: Char) => c != '.', name.toList)._2.mkString val external = ext.nonEmpty && Library.space_explode(':', getenv("ISABELLE_EXTERNAL_FILES")).contains(ext) if (external) { if (ext == "pdf" && Path.is_wellformed(name)) pdf_viewer(Path.explode(name)) else open(name) } external } def export_isabelle_identifier(isabelle_identifier: String): String = if (isabelle_identifier == "") "" else "export ISABELLE_IDENTIFIER=" + Bash.string(isabelle_identifier) + "\n" /** Isabelle resources **/ /* repository clone with Admin */ def admin(): Boolean = Path.explode("~~/Admin").is_dir /* default logic */ def default_logic(args: String*): String = { args.find(_ != "") match { case Some(logic) => logic case None => getenv_strict("ISABELLE_LOGIC") } } /* download file */ def download(url_name: String, progress: Progress = new Progress): HTTP.Content = { val url = Url(url_name) progress.echo("Getting " + quote(url_name)) try { HTTP.Client.get(url) } catch { case ERROR(msg) => cat_error("Failed to download " + quote(url_name), msg) } } def download_file(url_name: String, file: Path, progress: Progress = new Progress): Unit = Bytes.write(file, download(url_name, progress = progress).bytes) object Download extends Scala.Fun("download", thread = true) { val here = Scala_Project.here override def invoke(args: List[Bytes]): List[Bytes] = args match { case List(url) => List(download(url.text).bytes) } } /* repositories */ val isabelle_repository: Mercurial.Server = Mercurial.Server("https://isabelle.sketis.net/repos/isabelle") val afp_repository: Mercurial.Server = Mercurial.Server("https://isabelle.sketis.net/repos/afp-devel") def official_releases(): List[String] = Library.trim_split_lines( isabelle_repository.read_file(Path.explode("Admin/Release/official"))) } diff --git a/src/Pure/Thy/sessions.scala b/src/Pure/Thy/sessions.scala --- a/src/Pure/Thy/sessions.scala +++ b/src/Pure/Thy/sessions.scala @@ -1,1523 +1,1523 @@ /* Title: Pure/Thy/sessions.scala Author: Makarius Cumulative session information. 
*/ package isabelle import java.io.{File => JFile} import java.nio.ByteBuffer import java.nio.channels.FileChannel import java.nio.file.StandardOpenOption import scala.collection.immutable.{SortedSet, SortedMap} import scala.collection.mutable object Sessions { /* session and theory names */ val ROOTS: Path = Path.explode("ROOTS") val ROOT: Path = Path.explode("ROOT") val roots_name: String = "ROOTS" val root_name: String = "ROOT" val theory_name: String = "Pure.Sessions" val UNSORTED = "Unsorted" val DRAFT = "Draft" def is_pure(name: String): Boolean = name == Thy_Header.PURE def exclude_session(name: String): Boolean = name == "" || name == DRAFT def exclude_theory(name: String): Boolean = name == root_name || name == "README" || name == "index" || name == "bib" /* ROOTS file format */ class File_Format extends isabelle.File_Format { val format_name: String = roots_name val file_ext = "" override def detect(name: String): Boolean = Thy_Header.split_file_name(name) match { case Some((_, file_name)) => file_name == roots_name case None => false } override def theory_suffix: String = "ROOTS_file" override def theory_content(name: String): String = """theory "ROOTS" imports Pure begin ROOTS_file """ + Outer_Syntax.quote_string(name) + """ end""" } /* base info and source dependencies */ sealed case class Base( pos: Position.T = Position.none, session_directories: Map[JFile, String] = Map.empty, global_theories: Map[String, String] = Map.empty, session_theories: List[Document.Node.Name] = Nil, document_theories: List[Document.Node.Name] = Nil, loaded_theories: Graph[String, Outer_Syntax] = Graph.string, used_theories: List[(Document.Node.Name, Options)] = Nil, load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty, known_theories: Map[String, Document.Node.Entry] = Map.empty, known_loaded_files: Map[String, List[Path]] = Map.empty, overall_syntax: Outer_Syntax = Outer_Syntax.empty, imported_sources: List[(Path, SHA1.Digest)] = Nil, sources: List[(Path, SHA1.Digest)] = Nil, session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph, errors: List[String] = Nil) { override def toString: String = "Sessions.Base(loaded_theories = " + loaded_theories.size + ", used_theories = " + used_theories.length + ")" def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory) def loaded_theory(name: String): Boolean = loaded_theories.defined(name) def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory) def loaded_theory_syntax(name: String): Option[Outer_Syntax] = if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] = loaded_theory_syntax(name.theory) def theory_syntax(name: Document.Node.Name): Outer_Syntax = loaded_theory_syntax(name) getOrElse overall_syntax def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax = nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax } sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base]) { override def toString: String = "Sessions.Deps(" + sessions_structure + ")" def is_empty: Boolean = session_bases.isEmpty def apply(name: String): Base = session_bases(name) def get(name: String): Option[Base] = session_bases.get(name) def imported_sources(name: String): List[SHA1.Digest] = 
session_bases(name).imported_sources.map(_._2) def sources(name: String): List[SHA1.Digest] = session_bases(name).sources.map(_._2) def errors: List[String] = (for { (name, base) <- session_bases.iterator if base.errors.nonEmpty } yield cat_lines(base.errors) + "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos) ).toList def check_errors: Deps = errors match { case Nil => this case errs => error(cat_lines(errs)) } } def deps(sessions_structure: Structure, progress: Progress = new Progress, inlined_files: Boolean = false, verbose: Boolean = false, list_files: Boolean = false, check_keywords: Set[String] = Set.empty): Deps = { var cache_sources = Map.empty[JFile, SHA1.Digest] def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = { for { path <- paths file = path.file if cache_sources.isDefinedAt(file) || file.isFile } yield { cache_sources.get(file) match { case Some(digest) => (path, digest) case None => val digest = SHA1.digest(file) cache_sources = cache_sources + (file -> digest) (path, digest) } } } val session_bases = sessions_structure.imports_topological_order.foldLeft(Map("" -> sessions_structure.bootstrap)) { case (session_bases, session_name) => progress.expose_interrupt() val info = sessions_structure(session_name) try { val deps_base = info.deps_base(session_bases) val resources = new Resources(sessions_structure, deps_base) if (verbose || list_files) { val groups = if (info.groups.isEmpty) "" else info.groups.mkString(" (", " ", ")") progress.echo("Session " + info.chapter_session + groups) } val dependencies = resources.session_dependencies(info) val overall_syntax = dependencies.overall_syntax val session_theories = dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name) val theory_files = dependencies.theories.map(_.path) dependencies.load_commands val (load_commands, load_commands_errors) = try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) } catch { case ERROR(msg) => (Nil, List(msg)) } val loaded_files = load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) }) val session_files = (theory_files ::: loaded_files.flatMap(_._2) ::: info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand) val imported_files = if (inlined_files) dependencies.imported_files else Nil if (list_files) progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _))) if (check_keywords.nonEmpty) { Check_Keywords.check_keywords( progress, overall_syntax.keywords, check_keywords, theory_files) } val session_graph_display: Graph_Display.Graph = { def session_node(name: String): Graph_Display.Node = Graph_Display.Node("[" + name + "]", "session." + name) def node(name: Document.Node.Name): Graph_Display.Node = { val qualifier = deps_base.theory_qualifier(name) if (qualifier == info.name) Graph_Display.Node(name.theory_base_name, "theory." 
+ name.theory) else session_node(qualifier) } val required_sessions = dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory)) .map(theory => deps_base.theory_qualifier(theory)) .filter(name => name != info.name && sessions_structure.defined(name)) val required_subgraph = sessions_structure.imports_graph .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet) .transitive_closure .restrict(required_sessions.toSet) .transitive_reduction_acyclic val graph0 = required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) { case (g, session) => val a = session_node(session) val bs = required_subgraph.imm_preds(session).toList.map(session_node) bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a)) } dependencies.entries.foldLeft(graph0) { case (g, entry) => val a = node(entry.name) val bs = entry.header.imports.map(node).filterNot(_ == a) bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a)) } } val known_theories = dependencies.entries.iterator.map(entry => entry.name.theory -> entry). foldLeft(deps_base.known_theories)(_ + _) val known_loaded_files = deps_base.known_loaded_files ++ loaded_files val import_errors = { val known_sessions = sessions_structure.imports_requirements(List(session_name)).toSet for { name <- dependencies.theories qualifier = deps_base.theory_qualifier(name) if !known_sessions(qualifier) } yield "Bad import of theory " + quote(name.toString) + ": need to include sessions " + quote(qualifier) + " in ROOT" } val document_errors = info.document_theories.flatMap( { case (thy, pos) => val parent_sessions = if (sessions_structure.build_graph.defined(session_name)) { sessions_structure.build_requirements(List(session_name)) } else Nil def err(msg: String): Option[String] = Some(msg + " " + quote(thy) + Position.here(pos)) known_theories.get(thy).map(_.name) match { case None => err("Unknown document theory") case Some(name) => val qualifier = deps_base.theory_qualifier(name) if (session_theories.contains(name)) { err("Redundant document theory from this session:") } else if (parent_sessions.contains(qualifier)) None else if (dependencies.theories.contains(name)) None else err("Document theory from other session not imported properly:") } }) val document_theories = info.document_theories.map({ case (thy, _) => known_theories(thy).name }) val dir_errors = { val ok = info.dirs.map(_.canonical_file).toSet val bad = (for { name <- session_theories.iterator path = name.master_dir_path if !ok(path.canonical_file) path1 = File.relative_path(info.dir.canonical, path).getOrElse(path) } yield (path1, name)).toList val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted val errs1 = for { (path1, name) <- bad } yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString) val errs2 = if (bad_dirs.isEmpty) Nil else List("Implicit use of session directories: " + commas(bad_dirs)) val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p val errs4 = (for { name <- session_theories.iterator name1 <- resources.find_theory_node(name.theory) if name.node != name1.node } yield "Incoherent theory file import:\n " + name.path + " vs. 
\n " + name1.path) .toList errs1 ::: errs2 ::: errs3 ::: errs4 } val sources_errors = for (p <- session_files if !p.is_file) yield "No such file: " + p val path_errors = try { Path.check_case_insensitive(session_files ::: imported_files); Nil } catch { case ERROR(msg) => List(msg) } val bibtex_errors = try { info.bibtex_entries; Nil } catch { case ERROR(msg) => List(msg) } val base = Base( pos = info.pos, session_directories = sessions_structure.session_directories, global_theories = sessions_structure.global_theories, session_theories = session_theories, document_theories = document_theories, loaded_theories = dependencies.loaded_theories, used_theories = dependencies.theories_adjunct, load_commands = load_commands.toMap, known_theories = known_theories, known_loaded_files = known_loaded_files, overall_syntax = overall_syntax, imported_sources = check_sources(imported_files), sources = check_sources(session_files), session_graph_display = session_graph_display, errors = dependencies.errors ::: load_commands_errors ::: import_errors ::: document_errors ::: dir_errors ::: sources_errors ::: path_errors ::: bibtex_errors) session_bases + (info.name -> base) } catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred in session " + quote(info.name) + Position.here(info.pos)) } } Deps(sessions_structure, session_bases) } /* base info */ sealed case class Base_Info( session: String, sessions_structure: Structure, errors: List[String], base: Base, infos: List[Info]) { def check: Base_Info = if (errors.isEmpty) this else error(cat_lines(errors)) } def base_info(options: Options, session: String, progress: Progress = new Progress, dirs: List[Path] = Nil, include_sessions: List[String] = Nil, session_ancestor: Option[String] = None, session_requirements: Boolean = false): Base_Info = { val full_sessions = load_structure(options, dirs = dirs) val selected_sessions = full_sessions.selection(Selection(sessions = session :: session_ancestor.toList)) val info = selected_sessions(session) val ancestor = session_ancestor orElse info.parent val (session1, infos1) = if (session_requirements && ancestor.isDefined) { val deps = Sessions.deps(selected_sessions, progress = progress) val base = deps(session) val ancestor_loaded = deps.get(ancestor.get) match { case Some(ancestor_base) if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) => ancestor_base.loaded_theories.defined _ case _ => error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session)) } val required_theories = for { thy <- base.loaded_theories.keys if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session } yield thy if (required_theories.isEmpty) (ancestor.get, Nil) else { val other_name = info.name + "_requirements(" + ancestor.get + ")" Isabelle_System.isabelle_tmp_prefix() (other_name, List( make_info(info.options, dir_selected = false, dir = Path.explode("$ISABELLE_TMP_PREFIX"), chapter = info.chapter, Session_Entry( pos = info.pos, name = other_name, groups = info.groups, path = ".", parent = ancestor, description = "Required theory imports from other sessions", options = Nil, imports = info.deps, directories = Nil, theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))), document_theories = Nil, document_files = Nil, export_files = Nil)))) } } else (session, Nil) val full_sessions1 = if (infos1.isEmpty) full_sessions else load_structure(options, dirs = dirs, infos = infos1) val selected_sessions1 = full_sessions1.selection(Selection(sessions = 
session1 :: session :: include_sessions)) val deps1 = Sessions.deps(selected_sessions1, progress = progress) Base_Info(session1, full_sessions1, deps1.errors, deps1(session1), infos1) } /* cumulative session info */ sealed case class Info( name: String, chapter: String, dir_selected: Boolean, pos: Position.T, groups: List[String], dir: Path, parent: Option[String], description: String, directories: List[Path], options: Options, imports: List[String], theories: List[(Options, List[(String, Position.T)])], global_theories: List[String], document_theories: List[(String, Position.T)], document_files: List[(Path, Path)], export_files: List[(Path, Int, List[String])], meta_digest: SHA1.Digest) { def chapter_session: String = chapter + "/" + name def relative_path(info1: Info): String = if (name == info1.name) "" else if (chapter == info1.chapter) "../" + info1.name + "/" else "../../" + info1.chapter_session + "/" def deps: List[String] = parent.toList ::: imports def deps_base(session_bases: String => Base): Base = { val parent_base = session_bases(parent.getOrElse("")) val imports_bases = imports.map(session_bases) parent_base.copy( known_theories = (for { base <- imports_bases.iterator (_, entry) <- base.known_theories.iterator } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _), known_loaded_files = imports_bases.iterator.map(_.known_loaded_files). foldLeft(parent_base.known_loaded_files)(_ ++ _)) } def dirs: List[Path] = dir :: directories def timeout_ignored: Boolean = !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1) def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale")) def document_enabled: Boolean = options.string("document") match { case "" | "false" => false case "pdf" | "true" => true case doc => error("Bad document specification " + quote(doc)) } def document_variants: List[Document_Build.Document_Variant] = { val variants = Library.space_explode(':', options.string("document_variants")). 
map(Document_Build.Document_Variant.parse) val dups = Library.duplicates(variants.map(_.name)) if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups)) variants } def documents: List[Document_Build.Document_Variant] = { val variants = document_variants if (!document_enabled || document_files.isEmpty) Nil else variants } def document_output: Option[Path] = options.string("document_output") match { case "" => None case s => Some(dir + Path.explode(s)) } def browser_info: Boolean = options.bool("browser_info") lazy val bibtex_entries: List[Text.Info[String]] = (for { (document_dir, file) <- document_files.iterator if Bibtex.is_bibtex(file.file_name) info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator } yield info).toList def record_proofs: Boolean = options.int("record_proofs") >= 2 def is_afp: Boolean = chapter == AFP.chapter def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains) } def make_info(options: Options, dir_selected: Boolean, dir: Path, chapter: String, entry: Session_Entry): Info = { try { val name = entry.name if (exclude_session(name)) error("Bad session name") if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session") if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session") val session_path = dir + Path.explode(entry.path) val directories = entry.directories.map(dir => session_path + Path.explode(dir)) val session_options = options ++ entry.options val theories = entry.theories.map({ case (opts, thys) => (session_options ++ opts, thys.map({ case ((thy, pos), _) => if (exclude_theory(thy)) error("Bad theory name " + quote(thy) + Position.here(pos)) else (thy, pos) })) }) val global_theories = for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global } yield { val thy_name = Path.explode(thy).file_name if (Long_Name.is_qualified(thy_name)) error("Bad qualified name for global theory " + quote(thy_name) + Position.here(pos)) else thy_name } val conditions = theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted. 
map(x => (x, Isabelle_System.getenv(x) != "")) val document_files = entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) }) val export_files = entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) }) val meta_digest = SHA1.digest( (name, chapter, entry.parent, entry.directories, entry.options, entry.imports, entry.theories_no_position, conditions, entry.document_theories_no_position, entry.document_files) .toString) Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path, entry.parent, entry.description, directories, session_options, entry.imports, theories, global_theories, entry.document_theories, document_files, export_files, meta_digest) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session entry " + quote(entry.name) + Position.here(entry.pos)) } } object Selection { val empty: Selection = Selection() val all: Selection = Selection(all_sessions = true) def session(session: String): Selection = Selection(sessions = List(session)) } sealed case class Selection( requirements: Boolean = false, all_sessions: Boolean = false, base_sessions: List[String] = Nil, exclude_session_groups: List[String] = Nil, exclude_sessions: List[String] = Nil, session_groups: List[String] = Nil, sessions: List[String] = Nil) { def ++ (other: Selection): Selection = Selection( requirements = requirements || other.requirements, all_sessions = all_sessions || other.all_sessions, base_sessions = Library.merge(base_sessions, other.base_sessions), exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups), exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions), session_groups = Library.merge(session_groups, other.session_groups), sessions = Library.merge(sessions, other.sessions)) } object Structure { val empty: Structure = make(Nil) def make(infos: List[Info]): Structure = { def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Iterable[String]) : Graph[String, Info] = { def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = { if (!g.defined(parent)) error("Bad " + kind + " session " + quote(parent) + " for " + quote(name) + Position.here(pos)) try { g.add_edge_acyclic(parent, name) } catch { case exn: Graph.Cycles[_] => error(cat_lines(exn.cycles.map(cycle => "Cyclic session dependency of " + cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos)) } } graph.iterator.foldLeft(graph) { case (g, (name, (info, _))) => edges(info).foldLeft(g)(add_edge(info.pos, name, _, _)) } } val info_graph = infos.foldLeft(Graph.string[Info]) { case (graph, info) => if (graph.defined(info.name)) error("Duplicate session " + quote(info.name) + Position.here(info.pos) + Position.here(graph.get_node(info.name).pos)) else graph.new_node(info.name, info) } val build_graph = add_edges(info_graph, "parent", _.parent) val imports_graph = add_edges(build_graph, "imports", _.imports) val session_positions: List[(String, Position.T)] = (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList val session_directories: Map[JFile, String] = (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) dir <- info.dirs.iterator } yield (info, dir)).foldLeft(Map.empty[JFile, String]) { case (dirs, (info, dir)) => val session = info.name val canonical_dir = dir.canonical_file dirs.get(canonical_dir) match { case Some(session1) => val info1 = info_graph.get_node(session1) 
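              // a canonical session directory may be claimed by at most one session:
              // report both claimants together with their declaration positions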
error("Duplicate use of directory " + dir + "\n for session " + quote(session1) + Position.here(info1.pos) + "\n vs. session " + quote(session) + Position.here(info.pos)) case None => dirs + (canonical_dir -> session) } } val global_theories: Map[String, String] = (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) thy <- info.global_theories.iterator } yield (info, thy)).foldLeft(Thy_Header.bootstrap_global_theories.toMap) { case (global, (info, thy)) => val qualifier = info.name global.get(thy) match { case Some(qualifier1) if qualifier != qualifier1 => error("Duplicate global theory " + quote(thy) + Position.here(info.pos)) case _ => global + (thy -> qualifier) } } new Structure( session_positions, session_directories, global_theories, build_graph, imports_graph) } } final class Structure private[Sessions]( val session_positions: List[(String, Position.T)], val session_directories: Map[JFile, String], val global_theories: Map[String, String], val build_graph: Graph[String, Info], val imports_graph: Graph[String, Info]) { sessions_structure => def bootstrap: Base = Base( session_directories = session_directories, global_theories = global_theories, overall_syntax = Thy_Header.bootstrap_syntax) def dest_session_directories: List[(String, String)] = for ((file, session) <- session_directories.toList) yield (File.standard_path(file), session) lazy val chapters: SortedMap[String, List[Info]] = build_graph.iterator.foldLeft(SortedMap.empty[String, List[Info]]) { case (chs, (_, (info, _))) => chs + (info.chapter -> (info :: chs.getOrElse(info.chapter, Nil))) } def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph) def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph) def defined(name: String): Boolean = imports_graph.defined(name) def apply(name: String): Info = imports_graph.get_node(name) def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def check_sessions(names: List[String]): Unit = { val bad_sessions = SortedSet(names.filterNot(defined): _*).toList if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions)) } def check_sessions(sel: Selection): Unit = check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions) private def selected(graph: Graph[String, Info], sel: Selection): List[String] = { check_sessions(sel) val select_group = sel.session_groups.toSet val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions) val selected0 = if (sel.all_sessions) graph.keys else { (for { (name, (info, _)) <- graph.iterator if info.dir_selected || select_session(name) || graph.get_node(name).groups.exists(select_group) } yield name).toList } if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList else selected0 } def selection(sel: Selection): Structure = { check_sessions(sel) val excluded = { val exclude_group = sel.exclude_session_groups.toSet val exclude_group_sessions = (for { (name, (info, _)) <- imports_graph.iterator if imports_graph.get_node(name).groups.exists(exclude_group) } yield name).toList imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet } def restrict(graph: Graph[String, Info]): Graph[String, Info] = { val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded) graph.restrict(graph.all_preds(sessions).toSet) } 
new Structure( session_positions, session_directories, global_theories, restrict(build_graph), restrict(imports_graph)) } def selection(session: String): Structure = selection(Selection.session(session)) def selection_deps( selection: Selection, progress: Progress = new Progress, loading_sessions: Boolean = false, inlined_files: Boolean = false, verbose: Boolean = false): Deps = { val deps = Sessions.deps(sessions_structure.selection(selection), progress = progress, inlined_files = inlined_files, verbose = verbose) if (loading_sessions) { val selection_size = deps.sessions_structure.build_graph.size if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...") } deps } def hierarchy(session: String): List[String] = build_graph.all_preds(List(session)) def build_selection(sel: Selection): List[String] = selected(build_graph, sel) def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss) def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss) def build_topological_order: List[String] = build_graph.topological_order def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel) def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss) def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss) def imports_topological_order: List[String] = imports_graph.topological_order def bibtex_entries: List[(String, List[String])] = build_topological_order.flatMap(name => apply(name).bibtex_entries match { case Nil => None case entries => Some(name -> entries.map(_.info)) }) def session_chapters: List[(String, String)] = imports_topological_order.map(name => name -> apply(name).chapter) override def toString: String = imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")") } /* parser */ private val CHAPTER = "chapter" private val SESSION = "session" private val IN = "in" private val DESCRIPTION = "description" private val DIRECTORIES = "directories" private val OPTIONS = "options" private val SESSIONS = "sessions" private val THEORIES = "theories" private val GLOBAL = "global" private val DOCUMENT_THEORIES = "document_theories" private val DOCUMENT_FILES = "document_files" private val EXPORT_FILES = "export_files" val root_syntax: Outer_Syntax = Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN + (CHAPTER, Keyword.THY_DECL) + (SESSION, Keyword.THY_DECL) + (DESCRIPTION, Keyword.QUASI_COMMAND) + (DIRECTORIES, Keyword.QUASI_COMMAND) + (OPTIONS, Keyword.QUASI_COMMAND) + (SESSIONS, Keyword.QUASI_COMMAND) + (THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_FILES, Keyword.QUASI_COMMAND) + (EXPORT_FILES, Keyword.QUASI_COMMAND) abstract class Entry sealed case class Chapter(name: String) extends Entry sealed case class Session_Entry( pos: Position.T, name: String, groups: List[String], path: String, parent: Option[String], description: String, options: List[Options.Spec], imports: List[String], directories: List[String], theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])], document_theories: List[(String, Position.T)], document_files: List[(String, String)], export_files: List[(String, Int, List[String])]) extends Entry { def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] = theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) }) def document_theories_no_position: List[String] = document_theories.map(_._1) } 
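  /* Illustrative ROOT entry accepted by the parser below (a sketch with hypothetical names):

       chapter Examples

       session "Example" (main) in "example" = "HOL" +
         description "A minimal example session"
         options [document = pdf, document_output = "output"]
         sessions "HOL-Library"
         directories "src"
         theories
           Example_Theory
         document_files (in "document") "root.tex"
  */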
private object Parser extends Options.Parser { private val chapter: Parser[Chapter] = { val chapter_name = atom("chapter name", _.is_name) command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) } } private val session_entry: Parser[Session_Entry] = { val option = option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) } val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]") val theory_entry = position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) } val theories = $$$(THEORIES) ~! ((options | success(Nil)) ~ rep1(theory_entry)) ^^ { case _ ~ (x ~ y) => (x, y) } val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x } val document_theories = $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x } val document_files = $$$(DOCUMENT_FILES) ~! ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) } val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0) val export_files = $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ prune ~ rep1(embedded)) ^^ { case _ ~ (x ~ y ~ z) => (x, y, z) } command(SESSION) ~! (position(session_name) ~ (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~ ($$$("=") ~! (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~ (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~ rep(theories) ~ (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~ (rep(document_files) ^^ (x => x.flatten)) ~ rep(export_files)))) ^^ { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l))) => Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l) } } def parse_root(path: Path): List[Entry] = { val toks = Token.explode(root_syntax.keywords, File.read(path)) val start = Token.Pos.file(path.implode) parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match { case Success(result, _) => result case bad => error(bad.toString) } } } def parse_root(path: Path): List[Entry] = Parser.parse_root(path) def parse_root_entries(path: Path): List[Session_Entry] = for (entry <- Parser.parse_root(path) if entry.isInstanceOf[Session_Entry]) yield entry.asInstanceOf[Session_Entry] def read_root(options: Options, select: Boolean, path: Path): List[Info] = { var entry_chapter = UNSORTED val infos = new mutable.ListBuffer[Info] parse_root(path).foreach { case Chapter(name) => entry_chapter = name case entry: Session_Entry => infos += make_info(options, select, path.dir, entry_chapter, entry) } infos.toList } def parse_roots(roots: Path): List[String] = { for { line <- split_lines(File.read(roots)) if !(line == "" || line.startsWith("#")) } yield line } /* load sessions from certain directories */ def is_session_dir(dir: Path): Boolean = (dir + ROOT).is_file || (dir + ROOTS).is_file private def check_session_dir(dir: Path): Path = if (is_session_dir(dir)) File.pwd() + dir.expand else error("Bad session root directory (missing ROOT or ROOTS): " + dir.expand.toString) def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = { val default_dirs = Components.directories().filter(is_session_dir) for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: 
select_dirs.map((true, _)) } yield (select, dir.canonical) } def load_structure(options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Info] = Nil): Structure = { def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] = load_root(select, dir) ::: load_roots(select, dir) def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] = { val root = dir + ROOT if (root.is_file) List((select, root)) else Nil } def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] = { val roots = dir + ROOTS if (roots.is_file) { for { entry <- parse_roots(roots) dir1 = try { check_session_dir(dir + Path.explode(entry)) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString) } res <- load_dir(select, dir1) } yield res } else Nil } val roots = for { (select, dir) <- directories(dirs, select_dirs) res <- load_dir(select, check_session_dir(dir)) } yield res val unique_roots = roots.foldLeft(Map.empty[JFile, (Boolean, Path)]) { case (m, (select, path)) => val file = path.canonical_file m.get(file) match { case None => m + (file -> (select, path)) case Some((select1, path1)) => m + (file -> (select1 || select, path1)) } }.toList.map(_._2) Structure.make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions", Scala_Project.here, args => { var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var requirements = false var exclude_session_groups: List[String] = Nil var all_sessions = false var dirs: List[Path] = Nil var session_groups: List[String] = Nil var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle sessions [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -R refer to requirements of selected sessions -X NAME exclude sessions from group NAME and all descendants -a select all sessions -d DIR include session directory -g NAME select session group NAME -x NAME exclude session NAME and all descendants Explore the structure of Isabelle sessions and print result names in topological order (on stdout). 
""", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "R" -> (_ => requirements = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val options = Options.init() val selection = Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions) val sessions_structure = load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection) for (name <- sessions_structure.imports_topological_order) { Output.writeln(name, stdout = true) } }) /** heap file with SHA1 digest **/ private val sha1_prefix = "SHA1:" def read_heap_digest(heap: Path): Option[String] = { if (heap.is_file) { - using(FileChannel.open(heap.file.toPath, StandardOpenOption.READ))(file => + using(FileChannel.open(heap.java_path, StandardOpenOption.READ))(file => { val len = file.size val n = sha1_prefix.length + SHA1.digest_length if (len >= n) { file.position(len - n) val buf = ByteBuffer.allocate(n) var i = 0 var m = 0 do { m = file.read(buf) if (m != -1) i += m } while (m != -1 && n > i) if (i == n) { val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset) val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset) if (prefix == sha1_prefix) Some(s) else None } else None } else None }) } else None } def write_heap_digest(heap: Path): String = read_heap_digest(heap) match { case None => val s = SHA1.digest(heap).rep File.append(heap, sha1_prefix + s) s case Some(s) => s } /** persistent store **/ object Session_Info { val session_name = SQL.Column.string("session_name").make_primary_key // Build_Log.Session_Info val session_timing = SQL.Column.bytes("session_timing") val command_timings = SQL.Column.bytes("command_timings") val theory_timings = SQL.Column.bytes("theory_timings") val ml_statistics = SQL.Column.bytes("ml_statistics") val task_statistics = SQL.Column.bytes("task_statistics") val errors = SQL.Column.bytes("errors") val build_log_columns = List(session_name, session_timing, command_timings, theory_timings, ml_statistics, task_statistics, errors) // Build.Session_Info val sources = SQL.Column.string("sources") val input_heaps = SQL.Column.string("input_heaps") val output_heap = SQL.Column.string("output_heap") val return_code = SQL.Column.int("return_code") val build_columns = List(sources, input_heaps, output_heap, return_code) val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns) } class Database_Context private[Sessions]( val store: Sessions.Store, database_server: Option[SQL.Database]) extends AutoCloseable { def cache: XML.Cache = store.cache def close(): Unit = database_server.foreach(_.close()) def output_database[A](session: String)(f: SQL.Database => A): A = database_server match { case Some(db) => f(db) case None => using(store.open_database(session, output = true))(f) } def input_database[A](session: String)(f: (SQL.Database, String) => Option[A]): Option[A] = database_server match { case Some(db) => f(db, session) case None => store.try_open_database(session) match { case 
Some(db) => using(db)(f(_, session)) case None => None } } def read_export( sessions: List[String], theory_name: String, name: String): Option[Export.Entry] = { val attempts = database_server match { case Some(db) => sessions.view.map(session_name => Export.read_entry(db, store.cache, session_name, theory_name, name)) case None => sessions.view.map(session_name => store.try_open_database(session_name) match { case Some(db) => using(db)(Export.read_entry(_, store.cache, session_name, theory_name, name)) case None => None }) } attempts.collectFirst({ case Some(entry) => entry }) } def get_export( session_hierarchy: List[String], theory_name: String, name: String): Export.Entry = read_export(session_hierarchy, theory_name, name) getOrElse Export.empty_entry(theory_name, name) override def toString: String = { val s = database_server match { case Some(db) => db.toString case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ") } "Database_Context(" + s + ")" } } def store(options: Options, cache: XML.Cache = XML.Cache.make()): Store = new Store(options, cache) class Store private[Sessions](val options: Options, val cache: XML.Cache) { store => override def toString: String = "Store(output_dir = " + output_dir.absolute + ")" /* directories */ val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER") val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER") def system_heaps: Boolean = options.bool("system_heaps") val output_dir: Path = if (system_heaps) system_output_dir else user_output_dir val input_dirs: List[Path] = if (system_heaps) List(system_output_dir) else List(user_output_dir, system_output_dir) def presentation_dir: Path = if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM") else Path.explode("$ISABELLE_BROWSER_INFO") /* file names */ def heap(name: String): Path = Path.basic(name) def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db") def log(name: String): Path = Path.basic("log") + Path.basic(name) def log_gz(name: String): Path = log(name).ext("gz") def output_heap(name: String): Path = output_dir + heap(name) def output_database(name: String): Path = output_dir + database(name) def output_log(name: String): Path = output_dir + log(name) def output_log_gz(name: String): Path = output_dir + log_gz(name) def prepare_output_dir(): Unit = Isabelle_System.make_directory(output_dir + Path.basic("log")) /* heap */ def find_heap(name: String): Option[Path] = input_dirs.map(_ + heap(name)).find(_.is_file) def find_heap_digest(name: String): Option[String] = find_heap(name).flatMap(read_heap_digest) def the_heap(name: String): Path = find_heap(name) getOrElse error("Missing heap image for session " + quote(name) + " -- expected in:\n" + cat_lines(input_dirs.map(dir => " " + dir.expand.implode))) /* database */ def database_server: Boolean = options.bool("build_database_server") def open_database_server(): SQL.Database = PostgreSQL.open_database( user = options.string("build_database_user"), password = options.string("build_database_password"), database = options.string("build_database_name"), host = options.string("build_database_host"), port = options.int("build_database_port"), ssh = options.proper_string("build_database_ssh_host").map(ssh_host => SSH.open_session(options, host = ssh_host, user = options.string("build_database_ssh_user"), port = options.int("build_database_ssh_port"))), ssh_close = true) def open_database_context(): Database_Context = new Database_Context(store, if 
(database_server) Some(open_database_server()) else None) def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] = { def check(db: SQL.Database): Option[SQL.Database] = if (output || session_info_exists(db)) Some(db) else { db.close(); None } if (database_server) check(open_database_server()) else if (output) Some(SQLite.open_database(output_database(name))) else { (for { dir <- input_dirs.view path = dir + database(name) if path.is_file db <- check(SQLite.open_database(path)) } yield db).headOption } } def open_database(name: String, output: Boolean = false): SQL.Database = try_open_database(name, output = output) getOrElse error("Missing build database for session " + quote(name)) def clean_output(name: String): (Boolean, Boolean) = { val relevant_db = database_server && { try_open_database(name) match { case Some(db) => try { db.transaction { val relevant_db = session_info_defined(db, name) init_session_info(db, name) relevant_db } } finally { db.close() } case None => false } } val del = for { dir <- (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir)) file <- List(heap(name), database(name), log(name), log_gz(name)) path = dir + file if path.is_file } yield path.file.delete val relevant = relevant_db || del.nonEmpty val ok = del.forall(b => b) (relevant, ok) } /* SQL database content */ def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes = db.using_statement(Session_Info.table.select(List(column), Session_Info.session_name.where_equal(name)))(stmt => { val res = stmt.execute_query() if (!res.next()) Bytes.empty else res.bytes(column) }) def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] = Properties.uncompress(read_bytes(db, name, column), cache = cache) /* session info */ def init_session_info(db: SQL.Database, name: String): Unit = { db.transaction { db.create_table(Session_Info.table) db.using_statement( Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute()) db.create_table(Export.Data.table) db.using_statement( Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute()) db.create_table(Document_Build.Data.table) db.using_statement( Document_Build.Data.table.delete( Document_Build.Data.session_name.where_equal(name)))(_.execute()) } } def session_info_exists(db: SQL.Database): Boolean = { val tables = db.tables tables.contains(Session_Info.table.name) && tables.contains(Export.Data.table.name) } def session_info_defined(db: SQL.Database, name: String): Boolean = db.transaction { session_info_exists(db) && { db.using_statement( Session_Info.table.select(List(Session_Info.session_name), Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next()) } } def write_session_info( db: SQL.Database, name: String, build_log: Build_Log.Session_Info, build: Build.Session_Info): Unit = { db.transaction { db.using_statement(Session_Info.table.insert())(stmt => { stmt.string(1) = name stmt.bytes(2) = Properties.encode(build_log.session_timing) stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache.xz) stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache.xz) stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = cache.xz) stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = cache.xz) stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = cache.xz) stmt.string(8) = build.sources stmt.string(9) = 
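          // input heap digests are stored as a single newline-separated string;
          // read_build below recovers the list again via split_lines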
cat_lines(build.input_heaps) stmt.string(10) = build.output_heap getOrElse "" stmt.int(11) = build.return_code stmt.execute() }) } } def read_session_timing(db: SQL.Database, name: String): Properties.T = Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache) def read_command_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.command_timings) def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.theory_timings) def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.ml_statistics) def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.task_statistics) def read_theories(db: SQL.Database, name: String): List[String] = read_theory_timings(db, name).flatMap(Markup.Name.unapply) def read_errors(db: SQL.Database, name: String): List[String] = Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache) def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] = { if (db.tables.contains(Session_Info.table.name)) { db.using_statement(Session_Info.table.select(Session_Info.build_columns, Session_Info.session_name.where_equal(name)))(stmt => { val res = stmt.execute_query() if (!res.next()) None else { Some( Build.Session_Info( res.string(Session_Info.sources), split_lines(res.string(Session_Info.input_heaps)), res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) }, res.int(Session_Info.return_code))) } }) } else None } } }
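/* Minimal usage sketch (hypothetical session name), assuming a regular Isabelle/Scala context:

     val options = Options.init()
     val structure = Sessions.load_structure(options)
     val deps = Sessions.deps(structure.selection(Sessions.Selection.session("HOL"))).check_errors
     val store = Sessions.store(options)
     val heap_digest = store.find_heap("HOL").flatMap(Sessions.read_heap_digest)
*/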