diff --git a/src/Pure/Admin/isabelle_cronjob.scala b/src/Pure/Admin/isabelle_cronjob.scala --- a/src/Pure/Admin/isabelle_cronjob.scala +++ b/src/Pure/Admin/isabelle_cronjob.scala @@ -1,634 +1,634 @@ /* Title: Pure/Admin/isabelle_cronjob.scala Author: Makarius Main entry point for administrative cronjob at TUM. */ package isabelle import java.nio.file.Files import scala.annotation.tailrec object Isabelle_Cronjob { /* global resources: owned by main cronjob */ val backup = "lxbroy10:cronjob" val main_dir: Path = Path.explode("~/cronjob") val main_state_file: Path = main_dir + Path.explode("run/main.state") val current_log: Path = main_dir + Path.explode("run/main.log") // owned by log service val cumulative_log: Path = main_dir + Path.explode("log/main.log") // owned by log service val isabelle_repos: Path = main_dir + Path.explode("isabelle") val afp_repos: Path = main_dir + Path.explode("AFP") val mailman_archives_dir = Path.explode("~/cronjob/Mailman") val build_log_dirs = List(Path.explode("~/log"), Path.explode("~/afp/log"), Path.explode("~/cronjob/log")) /** logger tasks **/ sealed case class Logger_Task(name: String = "", body: Logger => Unit) /* init and exit */ def get_rev(): String = Mercurial.repository(isabelle_repos).id() def get_afp_rev(): String = Mercurial.repository(afp_repos).id() val init: Logger_Task = Logger_Task("init", logger => { Isabelle_Devel.make_index() Mercurial.setup_repository(Isabelle_System.isabelle_repository.root, isabelle_repos) Mercurial.setup_repository(Isabelle_System.afp_repository.root, afp_repos) File.write(logger.log_dir + Build_Log.log_filename("isabelle_identify", logger.start_date), Build_Log.Identify.content(logger.start_date, Some(get_rev()), Some(get_afp_rev()))) Isabelle_System.bash( """rsync -a --include="*/" --include="plain_identify*" --exclude="*" """ + Bash.string(backup + "/log/.") + " " + File.bash_path(main_dir) + "/log/.").check if (!Isabelle_Devel.cronjob_log.is_file) Files.createSymbolicLink(Isabelle_Devel.cronjob_log.file.toPath, current_log.file.toPath) }) val exit: Logger_Task = Logger_Task("exit", logger => { Isabelle_System.bash( "rsync -a " + File.bash_path(main_dir) + "/log/." 
+ " " + Bash.string(backup) + "/log/.") .check }) /* Mailman archives */ val mailman_archives: Logger_Task = Logger_Task("mailman_archives", logger => { Mailman.isabelle_users.download(mailman_archives_dir) Mailman.isabelle_dev.download(mailman_archives_dir) }) /* build release */ val build_release: Logger_Task = Logger_Task("build_release", logger => { Isabelle_Devel.release_snapshot(logger.options, get_rev(), get_afp_rev()) }) /* remote build_history */ sealed case class Item( known: Boolean, isabelle_version: String, afp_version: Option[String], pull_date: Date) { def unknown: Boolean = !known def versions: (String, Option[String]) = (isabelle_version, afp_version) def known_versions(rev: String, afp_rev: Option[String]): Boolean = known && rev != "" && isabelle_version == rev && (afp_rev.isEmpty || afp_rev.get != "" && afp_version == afp_rev) } def recent_items(db: SQL.Database, days: Int, rev: String, afp_rev: Option[String], sql: SQL.Source): List[Item] = { val afp = afp_rev.isDefined val select = Build_Log.Data.select_recent_versions( days = days, rev = rev, afp_rev = afp_rev, sql = "WHERE " + sql) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => { val known = res.bool(Build_Log.Data.known) val isabelle_version = res.string(Build_Log.Prop.isabelle_version) val afp_version = if (afp) proper_string(res.string(Build_Log.Prop.afp_version)) else None val pull_date = res.date(Build_Log.Data.pull_date(afp)) Item(known, isabelle_version, afp_version, pull_date) }).toList) } def unknown_runs(items: List[Item]): List[List[Item]] = { val (run, rest) = Library.take_prefix[Item](_.unknown, items.dropWhile(_.known)) if (run.nonEmpty) run :: unknown_runs(rest) else Nil } sealed case class Remote_Build( description: String, host: String, actual_host: String = "", user: String = "", port: Int = 0, proxy_host: String = "", proxy_user: String = "", proxy_port: Int = 0, self_update: Boolean = false, historic: Boolean = false, history: Int = 0, history_base: String = "build_history_base", java_heap: String = "", options: String = "", args: String = "", afp: Boolean = false, bulky: Boolean = false, more_hosts: List[String] = Nil, detect: SQL.Source = "", active: Boolean = true) { def ssh_session(context: SSH.Context): SSH.Session = context.open_session(host = host, user = user, port = port, actual_host = actual_host, proxy_host = proxy_host, proxy_user = proxy_user, proxy_port = proxy_port, permissive = proxy_host.nonEmpty) def sql: SQL.Source = Build_Log.Prop.build_engine.toString + " = " + SQL.string(Build_History.engine) + " AND " + SQL.member(Build_Log.Prop.build_host.ident, host :: more_hosts) + (if (detect == "") "" else " AND " + SQL.enclose(detect)) def profile: Build_Status.Profile = Build_Status.Profile(description, history = history, afp = afp, bulky = bulky, sql = sql) def pick( options: Options, rev: String = "", filter: Item => Boolean = _ => true): Option[(String, Option[String])] = { val afp_rev = if (afp) Some(get_afp_rev()) else None val store = Build_Log.store(options) using(store.open_database())(db => { def pick_days(days: Int, gap: Int): Option[(String, Option[String])] = { val items = recent_items(db, days, rev, afp_rev, sql).filter(filter) def runs = unknown_runs(items).filter(run => run.length >= gap) if (historic || items.exists(_.known_versions(rev, afp_rev))) { val longest_run = runs.foldLeft(List.empty[Item]) { case (item1, item2) => if (item1.length >= item2.length) item1 else item2 } if (longest_run.isEmpty) None else 
Some(longest_run(longest_run.length / 2).versions) } else if (rev != "") Some((rev, afp_rev)) else runs.flatten.headOption.map(_.versions) } pick_days(options.int("build_log_history") max history, 2) orElse pick_days(200, 5) orElse pick_days(2000, 1) }) } def build_history_options: String = " -h " + Bash.string(host) + " " + (java_heap match { case "" => "" case h => "-e 'ISABELLE_TOOL_JAVA_OPTIONS=\"$ISABELLE_TOOL_JAVA_OPTIONS -Xmx" + h + "\"' " }) + options } val remote_builds_old: List[Remote_Build] = List( Remote_Build("Linux A", "lxbroy9", java_heap = "2g", options = "-m32 -B -M1x2,2", args = "-N -g timing"), Remote_Build("Linux Benchmarks", "lxbroy5", historic = true, history = 90, java_heap = "2g", options = "-m32 -B -M1x2,2 -t Benchmarks" + " -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" + " -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind -e ISABELLE_SMLNJ=sml" + " -e ISABELLE_SWIPL=swipl", args = "-N -a -d '~~/src/Benchmarks'", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("Benchmarks")), Remote_Build("macOS 10.14 Mojave (Old)", "lapnipkow3", options = "-m32 -M1,2 -e ISABELLE_GHC_SETUP=true -p pide_session=false", self_update = true, args = "-a -d '~~/src/Benchmarks'"), Remote_Build("AFP old bulky", "lrzcloud1", self_update = true, proxy_host = "lxbroy10", proxy_user = "i21isatest", options = "-m64 -M6 -U30000 -s10 -t AFP", args = "-g large -g slow", afp = true, bulky = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("AFP old", "lxbroy7", args = "-N -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("Poly/ML 5.7 Linux", "lxbroy8", history_base = "37074e22e8be", options = "-m32 -B -M1x2,2 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7") + " AND " + Build_Log.Settings.ML_OPTIONS + " <> " + SQL.string("-H 500")), Remote_Build("Poly/ML 5.7.1 Linux", "lxbroy8", history_base = "a9d5b59c3e12", options = "-m32 -B -M1x2,2 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " + Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")), Remote_Build("Poly/ML 5.7 macOS", "macbroy2", history_base = "37074e22e8be", options = "-m32 -B -M1x4,4 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'", args = "-a", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7")), Remote_Build("Poly/ML 5.7.1 macOS", "macbroy2", history_base = "a9d5b59c3e12", options = "-m32 -B -M1x4,4 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'", args = "-a", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " + Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")), Remote_Build("macOS", "macbroy2", options = "-m32 -M8" + " -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" + " -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_OPAM_ROOT=\"$ISABELLE_HOME/opam\"" + " -e ISABELLE_SMLNJ=/home/isabelle/smlnj/110.85/bin/sml" + " -p pide_session=false", args = "-a", detect = Build_Log.Prop.build_tags.undefined, history_base = "2c0f24e927dd"), Remote_Build("macOS, quick_and_dirty", "macbroy2", options = "-m32 -M8 
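// A standalone sketch (hypothetical, simplified Item type) of the selection strategy used by
// Remote_Build.pick above: split the build history into runs of consecutive unknown versions,
// keep the longest run, and build its middle element first, so repeated runs bisect the
// largest untested gap. Not part of the Isabelle sources.
object Pick_Sketch {
  final case class Item(known: Boolean, rev: String)

  def unknown_runs(items: List[Item]): List[List[Item]] =
    items.dropWhile(_.known) match {
      case Nil => Nil
      case rest =>
        val (run, tail) = rest.span(item => !item.known)
        run :: unknown_runs(tail)
    }

  def pick_middle_of_longest(items: List[Item]): Option[String] =
    unknown_runs(items) match {
      case Nil => None
      case runs =>
        val longest = runs.maxBy(_.length)
        Some(longest(longest.length / 2).rev)  // middle element of the largest gap
    }

  def main(args: Array[String]): Unit = {
    val history =
      List(Item(true, "a0"), Item(false, "b1"), Item(false, "c2"), Item(false, "d3"), Item(true, "e4"))
    println(pick_middle_of_longest(history))  // prints Some(c2)
  }
}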
-t quick_and_dirty -p pide_session=false", args = "-a -o quick_and_dirty", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty"), history_base = "2c0f24e927dd"), Remote_Build("macOS, skip_proofs", "macbroy2", options = "-m32 -M8 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"), history_base = "2c0f24e927dd"), Remote_Build("Poly/ML test", "lxbroy8", options = "-m32 -B -M1x2,2 -t polyml-test -i 'init_component /home/isabelle/contrib/polyml-5.7-20170217'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-test")), Remote_Build("macOS 10.12 Sierra", "macbroy30", options = "-m32 -M2 -p pide_session=false", args = "-a", detect = Build_Log.Prop.build_start.toString + " > date '2017-03-03'"), Remote_Build("macOS 10.10 Yosemite", "macbroy31", options = "-m32 -M2 -p pide_session=false", args = "-a"), Remote_Build("macOS 10.8 Mountain Lion", "macbroy30", options = "-m32 -M2", args = "-a", detect = Build_Log.Prop.build_start.toString + " < date '2017-03-03'")) ::: { for { (n, hosts) <- List(1 -> List("lxbroy6"), 2 -> List("lxbroy8", "lxbroy5")) } yield { Remote_Build("AFP old", host = hosts.head, more_hosts = hosts.tail, options = "-m32 -M1x2 -t AFP -P" + n + " -e ISABELLE_GHC=ghc" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_SMLNJ=sml", args = "-N -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")) } } val remote_builds1: List[List[Remote_Build]] = { List( List(Remote_Build("Linux B", "lxbroy10", historic = true, history = 90, options = "-m32 -B -M1x4,2,4,6", args = "-N -g timing")), List(Remote_Build("macOS 10.13 High Sierra", "lapbroy68", options = "-m32 -B -M1,2,4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", self_update = true, args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("macOS 11.1 Big Sur", "mini1", options = "-m32 -B -M1x2,2,4 -p pide_session=false" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=/usr/local/bin/mlton" + " -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" + " -e ISABELLE_SWIPL=/usr/local/bin/swipl", self_update = true, args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("macOS 10.14 Mojave", "mini2", options = "-m32 -B -M1x2,2,4 -p pide_session=false" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=/usr/local/bin/mlton" + " -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" + " -e ISABELLE_SWIPL=/usr/local/bin/swipl", self_update = true, args = "-a -d '~~/src/Benchmarks'"), Remote_Build("macOS, quick_and_dirty", "mini2", options = "-m32 -M4 -t quick_and_dirty -p pide_session=false", self_update = true, args = "-a -o quick_and_dirty", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty")), Remote_Build("macOS, skip_proofs", "mini2", options = "-m32 -M4 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"))), List(Remote_Build("macOS 10.15 Catalina", "laramac01", user = "makarius", proxy_host = "laraserver", proxy_user = "makarius", self_update = true, options = "-m32 -M4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", args = "-a -d '~~/src/Benchmarks'")), List( 
Remote_Build("Windows", "vmnipkow9", historic = true, history = 90, self_update = true, options = "-m32 -M4" + " -C /cygdrive/d/isatest/contrib" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml", args = "-a", detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86-windows") + " OR " + Build_Log.Settings.ML_PLATFORM + " = " + SQL.string("x86_64_32-windows")), Remote_Build("Windows", "vmnipkow9", historic = true, history = 90, self_update = true, options = "-m64 -M4" + " -C /cygdrive/d/isatest/contrib" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml", args = "-a", detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86_64-windows")))) } val remote_builds2: List[List[Remote_Build]] = List( List( Remote_Build("AFP", "lrzcloud2", actual_host = "10.195.4.41", self_update = true, proxy_host = "lxbroy10", proxy_user = "i21isatest", java_heap = "8g", options = "-m32 -M1x6 -t AFP" + " -e ISABELLE_GHC=ghc" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_SMLNJ=sml", args = "-a -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("AFP", "lrzcloud2", actual_host = "10.195.4.41", self_update = true, proxy_host = "lxbroy10", proxy_user = "i21isatest", java_heap = "8g", options = "-m64 -M8 -U30000 -s10 -t AFP", args = "-g large -g slow", afp = true, bulky = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")))) def remote_build_history(rev: String, afp_rev: Option[String], i: Int, r: Remote_Build) : Logger_Task = { val task_name = "build_history-" + r.host Logger_Task(task_name, logger => { using(r.ssh_session(logger.ssh_context))(ssh => { val results = Build_History.remote_build_history(ssh, isabelle_repos, isabelle_repos.ext(r.host), isabelle_identifier = "cronjob_build_history", self_update = r.self_update, rev = rev, afp_rev = afp_rev, options = " -N " + Bash.string(task_name) + (if (i < 0) "" else "_" + (i + 1).toString) + " -R " + Bash.string(Components.default_component_repository) + " -C '$USER_HOME/.isabelle/contrib' -f " + r.build_history_options, args = "-o timeout=10800 " + r.args) for ((log_name, bytes) <- results) { logger.log(Date.now(), log_name) Bytes.write(logger.log_dir + Path.explode(log_name), bytes) } }) }) } val build_status_profiles: List[Build_Status.Profile] = (remote_builds_old :: remote_builds1 ::: remote_builds2).flatten.map(_.profile) /** task logging **/ object Log_Service { def apply(options: Options, progress: Progress = new Progress): Log_Service = new Log_Service(SSH.init_context(options), progress) } class Log_Service private(val ssh_context: SSH.Context, progress: Progress) { current_log.file.delete private val thread: Consumer_Thread[String] = Consumer_Thread.fork("cronjob: logger", daemon = true)( consume = (text: String) => { // critical File.append(current_log, text + "\n") File.append(cumulative_log, text + "\n") progress.echo(text) true }) def shutdown(): Unit = { thread.shutdown() } val hostname: String = Isabelle_System.hostname() def log(date: Date, task_name: String, msg: String): Unit = if (task_name != "") thread.send( "[" + Build_Log.print_date(date) + ", " + hostname + ", " + task_name + "]: " + msg) def 
start_logger(start_date: Date, task_name: String): Logger = new Logger(this, start_date, task_name) def run_task(start_date: Date, task: Logger_Task): Unit = { val logger = start_logger(start_date, task.name) val res = Exn.capture { task.body(logger) } val end_date = Date.now() val err = res match { case Exn.Res(_) => None case Exn.Exn(exn) => Output.writeln("Exception trace for " + quote(task.name) + ":") exn.printStackTrace() val first_line = split_lines(Exn.message(exn)).headOption getOrElse "exception" Some(first_line) } logger.log_end(end_date, err) } def fork_task(start_date: Date, task: Logger_Task): Task = new Task(task.name, run_task(start_date, task)) } class Logger private[Isabelle_Cronjob]( val log_service: Log_Service, val start_date: Date, val task_name: String) { def ssh_context: SSH.Context = log_service.ssh_context def options: Options = ssh_context.options def log(date: Date, msg: String): Unit = log_service.log(date, task_name, msg) def log_end(end_date: Date, err: Option[String]): Unit = { val elapsed_time = end_date.time - start_date.time val msg = (if (err.isEmpty) "finished" else "ERROR " + err.get) + (if (elapsed_time.seconds < 3.0) "" else " (" + elapsed_time.message_hms + " elapsed time)") log(end_date, msg) } val log_dir = Isabelle_System.make_directory(main_dir + Build_Log.log_subdir(start_date)) log(start_date, "started") } class Task private[Isabelle_Cronjob](name: String, body: => Unit) { private val future: Future[Unit] = Future.thread("cronjob: " + name) { body } def is_finished: Boolean = future.is_finished } /** cronjob **/ def cronjob(progress: Progress, exclude_task: Set[String]): Unit = { /* soft lock */ val still_running = try { Some(File.read(main_state_file)) } catch { case ERROR(_) => None } still_running match { case None | Some("") => case Some(running) => error("Isabelle cronjob appears to be still running: " + running) } /* log service */ val log_service = Log_Service(Options.init(), progress = progress) def run(start_date: Date, task: Logger_Task): Unit = log_service.run_task(start_date, task) def run_now(task: Logger_Task): Unit = run(Date.now(), task) /* structured tasks */ def SEQ(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = _ => for (task <- tasks.iterator if !exclude_task(task.name) || task.name == "") run_now(task)) def PAR(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = _ => { @tailrec def join(running: List[Task]): Unit = { running.partition(_.is_finished) match { case (Nil, Nil) => - case (Nil, _ :: _) => Time.seconds(0.5).sleep; join(running) + case (Nil, _ :: _) => Time.seconds(0.5).sleep(); join(running) case (_ :: _, remaining) => join(remaining) } } val start_date = Date.now() val running = for (task <- tasks if !exclude_task(task.name)) yield log_service.fork_task(start_date, task) join(running) }) /* repository structure */ val hg = Mercurial.repository(isabelle_repos) val hg_graph = hg.graph() def history_base_filter(r: Remote_Build): Item => Boolean = { val base_rev = hg.id(r.history_base) val nodes = hg_graph.all_succs(List(base_rev)).toSet (item: Item) => nodes(item.isabelle_version) } /* main */ val main_start_date = Date.now() File.write(main_state_file, main_start_date.toString + " " + log_service.hostname) run(main_start_date, Logger_Task("isabelle_cronjob", logger => run_now( SEQ(List( init, PAR(List(mailman_archives, build_release)), PAR( List(remote_builds1, remote_builds2).map(remote_builds => SEQ(List( PAR(remote_builds.map(_.filter(_.active)).map(seq => SEQ( for { (r, i) <- (if 
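// Self-contained sketch of the polling join used by the PAR combinator above: all tasks are
// forked up front, then the loop repeatedly partitions them into finished and running ones,
// sleeping half a second between rounds -- the call site of the Time.seconds(0.5).sleep()
// change in this patch. Plain java.lang.Thread stands in for the cronjob's Task class; all
// names here are hypothetical.
import scala.annotation.tailrec

object Par_Join_Sketch {
  final class Task(name: String, body: => Unit) {
    private val thread = new Thread(() => body, name)
    thread.setDaemon(true)
    thread.start()
    def is_finished: Boolean = !thread.isAlive
  }

  @tailrec def join(running: List[Task]): Unit =
    running.partition(_.is_finished) match {
      case (Nil, Nil) =>
      case (Nil, still_running) => Thread.sleep(500); join(still_running)
      case (_, remaining) => join(remaining)
    }

  def main(args: Array[String]): Unit = {
    val tasks = List.tabulate(3)(i => new Task("task" + i, Thread.sleep(200L * (i + 1))))
    join(tasks)
    println("all tasks finished")
  }
}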
(seq.length <= 1) seq.map((_, -1)) else seq.zipWithIndex) (rev, afp_rev) <- r.pick(logger.options, hg.id(), history_base_filter(r)) } yield remote_build_history(rev, afp_rev, i, r)))), Logger_Task("jenkins_logs", _ => Jenkins.download_logs(logger.options, Jenkins.build_log_jobs, main_dir)), Logger_Task("build_log_database", logger => Isabelle_Devel.build_log_database(logger.options, build_log_dirs)), Logger_Task("build_status", logger => Isabelle_Devel.build_status(logger.options)))))), exit))))) log_service.shutdown() main_state_file.file.delete } /** command line entry point **/ def main(args: Array[String]): Unit = { Command_Line.tool { var force = false var verbose = false var exclude_task = Set.empty[String] val getopts = Getopts(""" Usage: Admin/cronjob/main [OPTIONS] Options are: -f apply force to do anything -v verbose -x NAME exclude tasks with this name """, "f" -> (_ => force = true), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_task += arg)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = if (verbose) new Console_Progress() else new Progress if (force) cronjob(progress, exclude_task) else error("Need to apply force to do anything") } } } diff --git a/src/Pure/General/mercurial.scala b/src/Pure/General/mercurial.scala --- a/src/Pure/General/mercurial.scala +++ b/src/Pure/General/mercurial.scala @@ -1,438 +1,438 @@ /* Title: Pure/General/mercurial.scala Author: Makarius Support for Mercurial repositories, with local or remote repository clone and working directory (via ssh connection). */ package isabelle import scala.annotation.tailrec import scala.collection.mutable object Mercurial { type Graph = isabelle.Graph[String, Unit] /* HTTP server */ object Server { def apply(root: String): Server = new Server(root) def start(root: Path): Server = { val hg = repository(root) val server_process = Future.promise[Bash.Process] val server_root = Future.promise[String] Isabelle_Thread.fork("hg") { val process = Exn.capture { Bash.process(hg.command_line("serve", options = "--port 0 --print-url")) } server_process.fulfill_result(process) Exn.release(process).result(progress_stdout = line => if (!server_root.is_finished) { server_root.fulfill(Library.try_unsuffix("/", line).getOrElse(line)) }) } server_process.join new Server(server_root.join) { override def close(): Unit = server_process.join.terminate() } } } class Server private(val root: String) extends AutoCloseable { override def toString: String = root def close(): Unit = () def changeset(rev: String = "tip", raw: Boolean = false): String = root + (if (raw) "/raw-rev/" else "/rev/") + rev def file(path: Path, rev: String = "tip", raw: Boolean = false): String = root + (if (raw) "/raw-file/" else "/file/") + rev + "/" + path.expand.implode def archive(rev: String = "tip"): String = root + "/archive/" + rev + ".tar.gz" def read_changeset(rev: String = "tip"): String = Url.read(changeset(rev = rev, raw = true)) def read_file(path: Path, rev: String = "tip"): String = Url.read(file(path, rev = rev, raw = true)) def download_archive(rev: String = "tip", progress: Progress = new Progress): HTTP.Content = Isabelle_System.download(archive(rev = rev), progress = progress) def download_dir(dir: Path, rev: String = "tip", progress: Progress = new Progress): Unit = { Isabelle_System.new_directory(dir) Isabelle_System.with_tmp_file("rev", ext = ".tar.gz")(archive_path => { val content = download_archive(rev = rev, progress = progress) Bytes.write(archive_path, content.bytes) progress.echo("Unpacking " 
+ rev + ".tar.gz") Isabelle_System.gnutar("-xzf " + File.bash_path(archive_path), dir = dir, original_owner = true, strip = 1).check }) } } /* command-line syntax */ def optional(s: String, prefix: String = ""): String = if (s == "") "" else " " + prefix + " " + Bash.string(s) def opt_flag(flag: String, b: Boolean): String = if (b) " " + flag else "" def opt_rev(s: String): String = optional(s, "--rev") def opt_template(s: String): String = optional(s, "--template") /* repository archives */ private val Archive_Node = """^node: (\S{12}).*$""".r private val Archive_Tag = """^tag: (\S+).*$""".r sealed case class Archive_Info(lines: List[String]) { def id: Option[String] = lines.collectFirst({ case Archive_Node(a) => a }) def tags: List[String] = for (Archive_Tag(tag) <- lines if tag != "tip") yield tag } def archive_info(root: Path): Option[Archive_Info] = { val path = root + Path.explode(".hg_archival.txt") if (path.is_file) Some(Archive_Info(Library.trim_split_lines(File.read(path)))) else None } def archive_id(root: Path): Option[String] = archive_info(root).flatMap(_.id) def archive_tags(root: Path): Option[String] = archive_info(root).map(info => info.tags.mkString(" ")) /* repository access */ def is_repository(root: Path, ssh: SSH.System = SSH.Local): Boolean = ssh.is_dir(root + Path.explode(".hg")) && new Repository(root, ssh).command("root").ok def repository(root: Path, ssh: SSH.System = SSH.Local): Repository = { val hg = new Repository(root, ssh) hg.command("root").check hg } def find_repository(start: Path, ssh: SSH.System = SSH.Local): Option[Repository] = { @tailrec def find(root: Path): Option[Repository] = if (is_repository(root, ssh)) Some(repository(root, ssh = ssh)) else if (root.is_root) None else find(root + Path.parent) find(ssh.expand_path(start)) } private def make_repository(root: Path, cmd: String, args: String, ssh: SSH.System = SSH.Local) : Repository = { val hg = new Repository(root, ssh) ssh.make_directory(hg.root.dir) hg.command(cmd, args, repository = false).check hg } def init_repository(root: Path, ssh: SSH.System = SSH.Local): Repository = make_repository(root, "init", ssh.bash_path(root), ssh = ssh) def clone_repository(source: String, root: Path, rev: String = "", options: String = "", ssh: SSH.System = SSH.Local): Repository = make_repository(root, "clone", options + " " + Bash.string(source) + " " + ssh.bash_path(root) + opt_rev(rev), ssh = ssh) def setup_repository(source: String, root: Path, ssh: SSH.System = SSH.Local): Repository = { if (ssh.is_dir(root)) { val hg = repository(root, ssh = ssh); hg.pull(remote = source); hg } else clone_repository(source, root, options = "--noupdate", ssh = ssh) } class Repository private[Mercurial](root_path: Path, ssh: SSH.System = SSH.Local) { hg => val root: Path = ssh.expand_path(root_path) def root_url: String = ssh.hg_url + root.implode override def toString: String = ssh.hg_url + root.implode def command_line(name: String, args: String = "", options: String = "", repository: Boolean = true): String = { "export LANG=C HGPLAIN=\n\"${HG:-hg}\" --config " + Bash.string("defaults." 
+ name + "=") + (if (repository) " --repository " + ssh.bash_path(root) else "") + " --noninteractive " + name + " " + options + " " + args } def command( name: String, args: String = "", options: String = "", repository: Boolean = true): Process_Result = { ssh.execute(command_line(name, args = args, options = options, repository = repository)) } def add(files: List[Path]): Unit = hg.command("add", files.map(ssh.bash_path).mkString(" ")) def archive(target: String, rev: String = "", options: String = ""): Unit = hg.command("archive", opt_rev(rev) + " " + Bash.string(target), options).check def heads(template: String = "{node|short}\n", options: String = ""): List[String] = hg.command("heads", opt_template(template), options).check.out_lines def identify(rev: String = "tip", options: String = ""): String = hg.command("id", opt_rev(rev), options).check.out_lines.headOption getOrElse "" def id(rev: String = "tip"): String = identify(rev, options = "-i") def tags(rev: String = "tip"): String = { val result = identify(rev, options = "-t") Library.space_explode(' ', result).filterNot(_ == "tip").mkString(" ") } def paths(args: String = "", options: String = ""): List[String] = hg.command("paths", args = args, options = options).check.out_lines def manifest(rev: String = "", options: String = ""): List[String] = hg.command("manifest", opt_rev(rev), options).check.out_lines def log(rev: String = "", template: String = "", options: String = ""): String = hg.command("log", opt_rev(rev) + opt_template(template), options).check.out def parent(): String = log(rev = "p1()", template = "{node|short}") def push( remote: String = "", rev: String = "", force: Boolean = false, options: String = ""): Unit = { hg.command("push", opt_rev(rev) + opt_flag("--force", force) + optional(remote), options). check_rc(rc => rc == 0 | rc == 1) } def pull(remote: String = "", rev: String = "", options: String = ""): Unit = hg.command("pull", opt_rev(rev) + optional(remote), options).check def update( rev: String = "", clean: Boolean = false, check: Boolean = false, options: String = ""): Unit = { hg.command("update", opt_rev(rev) + opt_flag("--clean", clean) + opt_flag("--check", check), options).check } def known_files(): List[String] = hg.command("status", options = "--modified --added --clean --no-status").check.out_lines def graph(): Graph = { val Node = """^node: (\w{12}) (\w{12}) (\w{12})""".r val log_result = log(template = """node: {node|short} {p1node|short} {p2node|short}\n""") split_lines(log_result).foldLeft(Graph.string[Unit]) { case (graph, Node(x, y, z)) => val deps = List(y, z).filterNot(s => s.forall(_ == '0')) val graph1 = (x :: deps).foldLeft(graph)(_.default_node(_, ())) deps.foldLeft(graph1)({ case (g, dep) => g.add_edge(dep, x) }) case (graph, _) => graph } } } /* check files */ def check_files(files: List[Path], ssh: SSH.System = SSH.Local): (List[Path], List[Path]) = { val outside = new mutable.ListBuffer[Path] val unknown = new mutable.ListBuffer[Path] @tailrec def check(paths: List[Path]): Unit = { paths match { case path :: rest => find_repository(path, ssh) match { case None => outside += path; check(rest) case Some(hg) => val known = hg.known_files().iterator.map(name => (hg.root + Path.explode(name)).canonical_file).toSet if (!known(path.canonical_file)) unknown += path check(rest.filterNot(p => known(p.canonical_file))) } case Nil => } } check(files) (outside.toList, unknown.toList) } /* setup remote vs. 
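// Sketch of the parsing step behind Repository.graph() above: each line produced by the
// "hg log" template "node: {node|short} {p1node|short} {p2node|short}" is matched with a
// regex, and all-zero parent hashes (Mercurial's null revision) are dropped before edges are
// recorded. This standalone version returns a plain parent map instead of isabelle.Graph.
object Hg_Graph_Sketch {
  private val Node = """^node: (\w{12}) (\w{12}) (\w{12})""".r

  def parse(log_output: String): Map[String, List[String]] =
    log_output.linesIterator.foldLeft(Map.empty[String, List[String]]) {
      case (acc, Node(x, y, z)) =>
        val parents = List(y, z).filterNot(_.forall(_ == '0'))
        acc + (x -> parents)
      case (acc, _) => acc
    }

  def main(args: Array[String]): Unit = {
    val demo =
      "node: aaaaaaaaaaaa 000000000000 000000000000\n" +
      "node: bbbbbbbbbbbb aaaaaaaaaaaa 000000000000\n"
    println(parse(demo))  // Map(aaaaaaaaaaaa -> List(), bbbbbbbbbbbb -> List(aaaaaaaaaaaa))
  }
}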
local repository */ private def edit_hgrc(local_hg: Repository, path_name: String, source: String): Unit = { val hgrc = local_hg.root + Path.explode(".hg/hgrc") def header(line: String): Boolean = line.startsWith("[paths]") val Entry = """^(\S+)\s*=\s*(.*)$""".r val new_entry = path_name + " = " + source def commit(lines: List[String]): Boolean = { File.write(hgrc, cat_lines(lines)) true } val edited = hgrc.is_file && { val lines = split_lines(File.read(hgrc)) lines.count(header) == 1 && { if (local_hg.paths(options = "-q").contains(path_name)) { val old_source = local_hg.paths(args = path_name).head val old_entry = path_name + ".old = " + old_source val lines1 = lines.map { case Entry(a, b) if a == path_name && b == old_source => new_entry + "\n" + old_entry case line => line } lines != lines1 && commit(lines1) } else { val prefix = lines.takeWhile(line => !header(line)) val n = prefix.length commit(prefix ::: List(lines(n), new_entry) ::: lines.drop(n + 1)) } } } if (!edited) File.append(hgrc, "\n[paths]\n" + new_entry + "\n") } val default_path_name = "default" def hg_setup( remote: String, local_path: Path, remote_name: String = "", path_name: String = default_path_name, remote_exists: Boolean = false, progress: Progress = new Progress): Unit = { /* local repository */ Isabelle_System.make_directory(local_path) val repos_name = proper_string(remote_name) getOrElse local_path.absolute.base.implode val local_hg = if (is_repository(local_path)) repository(local_path) else init_repository(local_path) progress.echo("Local repository " + local_hg.root.absolute) /* remote repository */ val remote_url = remote match { case _ if remote.startsWith("ssh://") => val ssh_url = remote + "/" + repos_name if (!remote_exists) { try { local_hg.command("init", ssh_url, repository = false).check } catch { case ERROR(msg) => progress.echo_warning(msg) } } ssh_url case SSH.Target(user, host) => val phabricator = Phabricator.API(user, host) var repos = phabricator.get_repositories().find(r => r.short_name == repos_name) getOrElse { if (remote_exists) { error("Remote repository " + quote(phabricator.hg_url + "/source/" + repos_name) + " expected to exist") } else phabricator.create_repository(repos_name, short_name = repos_name) } while (repos.importing) { progress.echo("Awaiting remote repository ...") - Time.seconds(0.5).sleep + Time.seconds(0.5).sleep() repos = phabricator.the_repository(repos.phid) } repos.ssh_url case _ => error("Malformed remote specification " + quote(remote)) } progress.echo("Remote repository " + quote(remote_url)) /* synchronize local and remote state */ progress.echo("Synchronizing ...") edit_hgrc(local_hg, path_name, remote_url) local_hg.pull(options = "-u") local_hg.push(remote = remote_url) } val isabelle_tool = Isabelle_Tool("hg_setup", "setup remote vs. local Mercurial repository", Scala_Project.here, args => { var remote_name = "" var path_name = default_path_name var remote_exists = false val getopts = Getopts(""" Usage: isabelle hg_setup [OPTIONS] REMOTE LOCAL_DIR Options are: -n NAME remote repository name (default: base name of LOCAL_DIR) -p PATH Mercurial path name (default: """ + quote(default_path_name) + """) -r assume that remote repository already exists Setup a remote vs. local Mercurial repository: REMOTE either refers to a Phabricator server "user@host" or SSH file server "ssh://user@host/path". 
""", "n:" -> (arg => remote_name = arg), "p:" -> (arg => path_name = arg), "r" -> (_ => remote_exists = true)) val more_args = getopts(args) val (remote, local_path) = more_args match { case List(arg1, arg2) => (arg1, Path.explode(arg2)) case _ => getopts.usage() } val progress = new Console_Progress hg_setup(remote, local_path, remote_name = remote_name, path_name = path_name, remote_exists = remote_exists, progress = progress) }) } diff --git a/src/Pure/General/ssh.scala b/src/Pure/General/ssh.scala --- a/src/Pure/General/ssh.scala +++ b/src/Pure/General/ssh.scala @@ -1,523 +1,523 @@ /* Title: Pure/General/ssh.scala Author: Makarius SSH client based on JSch (see also http://www.jcraft.com/jsch/examples). */ package isabelle import java.io.{InputStream, OutputStream, ByteArrayOutputStream} import scala.collection.mutable import scala.util.matching.Regex import com.jcraft.jsch.{JSch, Logger => JSch_Logger, Session => JSch_Session, SftpException, OpenSSHConfig, UserInfo, Channel => JSch_Channel, ChannelExec, ChannelSftp, SftpATTRS, JSchException} object SSH { /* target machine: user@host syntax */ object Target { val User_Host: Regex = "^([^@]+)@(.+)$".r def parse(s: String): (String, String) = s match { case User_Host(user, host) => (user, host) case _ => ("", s) } def unapplySeq(s: String): Option[List[String]] = parse(s) match { case (_, "") => None case (user, host) => Some(List(user, host)) } } val default_port = 22 def make_port(port: Int): Int = if (port > 0) port else default_port def port_suffix(port: Int): String = if (port == default_port) "" else ":" + port def user_prefix(user: String): String = proper_string(user) match { case None => "" case Some(name) => name + "@" } def connect_timeout(options: Options): Int = options.seconds("ssh_connect_timeout").ms.toInt def alive_interval(options: Options): Int = options.seconds("ssh_alive_interval").ms.toInt def alive_count_max(options: Options): Int = options.int("ssh_alive_count_max") /* init context */ def init_context(options: Options): Context = { val config_dir = Path.explode(options.string("ssh_config_dir")) if (!config_dir.is_dir) error("Bad ssh config directory: " + config_dir) val jsch = new JSch val config_file = Path.explode(options.string("ssh_config_file")) if (config_file.is_file) jsch.setConfigRepository(OpenSSHConfig.parseFile(File.platform_path(config_file))) val known_hosts = config_dir + Path.explode("known_hosts") if (!known_hosts.is_file) known_hosts.file.createNewFile jsch.setKnownHosts(File.platform_path(known_hosts)) val identity_files = space_explode(':', options.string("ssh_identity_files")).map(Path.explode) for (identity_file <- identity_files if identity_file.is_file) { try { jsch.addIdentity(File.platform_path(identity_file)) } catch { case exn: JSchException => error("Error in ssh identity file " + identity_file + ": " + exn.getMessage) } } new Context(options, jsch) } def open_session(options: Options, host: String, user: String = "", port: Int = 0, actual_host: String = "", proxy_host: String = "", proxy_user: String = "", proxy_port: Int = 0, permissive: Boolean = false): Session = init_context(options).open_session( host = host, user = user, port = port, actual_host = actual_host, proxy_host = proxy_host, proxy_user = proxy_user, proxy_port = proxy_port, permissive = permissive) class Context private[SSH](val options: Options, val jsch: JSch) { def update_options(new_options: Options): Context = new Context(new_options, jsch) private def connect_session(host: String, user: String = "", port: Int = 0, 
host_key_permissive: Boolean = false, nominal_host: String = "", nominal_user: String = "", on_close: () => Unit = () => ()): Session = { val session = jsch.getSession(proper_string(user).orNull, host, make_port(port)) session.setUserInfo(No_User_Info) session.setServerAliveInterval(alive_interval(options)) session.setServerAliveCountMax(alive_count_max(options)) session.setConfig("MaxAuthTries", "3") if (host_key_permissive) session.setConfig("StrictHostKeyChecking", "no") if (nominal_host != "") session.setHostKeyAlias(nominal_host) if (options.bool("ssh_compression")) { session.setConfig("compression.s2c", "zlib@openssh.com,zlib,none") session.setConfig("compression.c2s", "zlib@openssh.com,zlib,none") session.setConfig("compression_level", "9") } session.connect(connect_timeout(options)) new Session(options, session, on_close, proper_string(nominal_host) getOrElse host, proper_string(nominal_user) getOrElse user) } def open_session( host: String, user: String = "", port: Int = 0, actual_host: String = "", proxy_host: String = "", proxy_user: String = "", proxy_port: Int = 0, permissive: Boolean = false): Session = { val connect_host = proper_string(actual_host) getOrElse host if (proxy_host == "") connect_session(host = connect_host, user = user, port = port) else { val proxy = connect_session(host = proxy_host, port = proxy_port, user = proxy_user) val fw = try { proxy.port_forwarding(remote_host = connect_host, remote_port = make_port(port)) } catch { case exn: Throwable => proxy.close(); throw exn } try { connect_session(host = fw.local_host, port = fw.local_port, host_key_permissive = permissive, nominal_host = host, nominal_user = user, user = user, on_close = () => { fw.close(); proxy.close() }) } catch { case exn: Throwable => fw.close(); proxy.close(); throw exn } } } } /* logging */ def logging(verbose: Boolean = true, debug: Boolean = false): Unit = { JSch.setLogger(if (verbose) new Logger(debug) else null) } private class Logger(debug: Boolean) extends JSch_Logger { def isEnabled(level: Int): Boolean = level != JSch_Logger.DEBUG || debug def log(level: Int, msg: String): Unit = { level match { case JSch_Logger.ERROR | JSch_Logger.FATAL => Output.error_message(msg) case JSch_Logger.WARN => Output.warning(msg) case _ => Output.writeln(msg) } } } /* user info */ object No_User_Info extends UserInfo { def getPassphrase: String = null def getPassword: String = null def promptPassword(msg: String): Boolean = false def promptPassphrase(msg: String): Boolean = false def promptYesNo(msg: String): Boolean = false def showMessage(msg: String): Unit = Output.writeln(msg) } /* port forwarding */ object Port_Forwarding { def open(ssh: Session, ssh_close: Boolean, local_host: String, local_port: Int, remote_host: String, remote_port: Int): Port_Forwarding = { val port = ssh.session.setPortForwardingL(local_host, local_port, remote_host, remote_port) new Port_Forwarding(ssh, ssh_close, local_host, port, remote_host, remote_port) } } class Port_Forwarding private[SSH]( ssh: SSH.Session, ssh_close: Boolean, val local_host: String, val local_port: Int, val remote_host: String, val remote_port: Int) extends AutoCloseable { override def toString: String = local_host + ":" + local_port + ":" + remote_host + ":" + remote_port def close(): Unit = { ssh.session.delPortForwardingL(local_host, local_port) if (ssh_close) ssh.close() } } /* Sftp channel */ type Attrs = SftpATTRS sealed case class Dir_Entry(name: String, is_dir: Boolean) { def is_file: Boolean = !is_dir } /* exec channel */ private val 
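// Hedged usage sketch of the proxy support shown in Context.open_session above: when a
// proxy_host is given, a session to the proxy is opened first, a local port is forwarded to
// the target, and the main session connects through that forwarded port; closing the main
// session also closes the forwarding and the proxy session. The host and user names below are
// placeholders, not real infrastructure.
import isabelle._

object SSH_Proxy_Sketch {
  def main(args: Array[String]): Unit = {
    val options = Options.init()
    val context = SSH.init_context(options)
    using(context.open_session(
        host = "build-host.example.org",
        user = "builder",
        proxy_host = "gateway.example.org",
        proxy_user = "builder",
        permissive = true)) { session =>
      // run a trivial remote command through the proxied connection
      println(session.execute("hostname").check.out)
    }
  }
}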
exec_wait_delay = Time.seconds(0.3) class Exec private[SSH](session: Session, channel: ChannelExec) extends AutoCloseable { override def toString: String = "exec " + session.toString def close(): Unit = channel.disconnect val exit_status: Future[Int] = Future.thread("ssh_wait") { - while (!channel.isClosed) exec_wait_delay.sleep + while (!channel.isClosed) exec_wait_delay.sleep() channel.getExitStatus } val stdin: OutputStream = channel.getOutputStream val stdout: InputStream = channel.getInputStream val stderr: InputStream = channel.getErrStream // connect after preparing streams channel.connect(connect_timeout(session.options)) def result( progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), strict: Boolean = true): Process_Result = { stdin.close() def read_lines(stream: InputStream, progress: String => Unit): List[String] = { val result = new mutable.ListBuffer[String] val line_buffer = new ByteArrayOutputStream(100) def line_flush(): Unit = { val line = Library.trim_line(line_buffer.toString(UTF8.charset_name)) progress(line) result += line line_buffer.reset } var c = 0 var finished = false while (!finished) { while ({ c = stream.read; c != -1 && c != 10 }) line_buffer.write(c) if (c == 10) line_flush() else if (channel.isClosed) { if (line_buffer.size > 0) line_flush() finished = true } - else exec_wait_delay.sleep + else exec_wait_delay.sleep() } result.toList } val out_lines = Future.thread("ssh_stdout") { read_lines(stdout, progress_stdout) } val err_lines = Future.thread("ssh_stderr") { read_lines(stderr, progress_stderr) } def terminate(): Unit = { close() out_lines.join err_lines.join exit_status.join } val rc = try { exit_status.join } catch { case Exn.Interrupt() => terminate(); Exn.Interrupt.return_code } close() if (strict && rc == Exn.Interrupt.return_code) throw Exn.Interrupt() Process_Result(rc, out_lines.join, err_lines.join) } } /* session */ class Session private[SSH]( val options: Options, val session: JSch_Session, on_close: () => Unit, val nominal_host: String, val nominal_user: String) extends System { def update_options(new_options: Options): Session = new Session(new_options, session, on_close, nominal_host, nominal_user) def host: String = if (session.getHost == null) "" else session.getHost override def hg_url: String = "ssh://" + user_prefix(nominal_user) + nominal_host + "/" override def toString: String = user_prefix(session.getUserName) + host + port_suffix(session.getPort) + (if (session.isConnected) "" else " (disconnected)") /* port forwarding */ def port_forwarding( remote_port: Int, remote_host: String = "localhost", local_port: Int = 0, local_host: String = "localhost", ssh_close: Boolean = false): Port_Forwarding = Port_Forwarding.open(this, ssh_close, local_host, local_port, remote_host, remote_port) /* sftp channel */ val sftp: ChannelSftp = session.openChannel("sftp").asInstanceOf[ChannelSftp] sftp.connect(connect_timeout(options)) override def close(): Unit = { sftp.disconnect; session.disconnect; on_close() } val settings: Map[String, String] = { val home = sftp.getHome Map("HOME" -> home, "USER_HOME" -> home) } override def expand_path(path: Path): Path = path.expand_env(settings) def remote_path(path: Path): String = expand_path(path).implode override def bash_path(path: Path): String = Bash.string(remote_path(path)) def chmod(permissions: Int, path: Path): Unit = sftp.chmod(permissions, remote_path(path)) def mv(path1: Path, path2: Path): Unit = sftp.rename(remote_path(path1), 
remote_path(path2)) def rm(path: Path): Unit = sftp.rm(remote_path(path)) def mkdir(path: Path): Unit = sftp.mkdir(remote_path(path)) def rmdir(path: Path): Unit = sftp.rmdir(remote_path(path)) private def test_entry(path: Path, as_dir: Boolean): Boolean = try { val is_dir = sftp.stat(remote_path(path)).isDir if (as_dir) is_dir else !is_dir } catch { case _: SftpException => false } override def is_dir(path: Path): Boolean = test_entry(path, true) override def is_file(path: Path): Boolean = test_entry(path, false) def is_link(path: Path): Boolean = try { sftp.lstat(remote_path(path)).isLink } catch { case _: SftpException => false } override def make_directory(path: Path): Path = { if (!is_dir(path)) { execute( "perl -e \"use File::Path make_path; make_path('" + remote_path(path) + "');\"") if (!is_dir(path)) error("Failed to create directory: " + quote(remote_path(path))) } path } def read_dir(path: Path): List[Dir_Entry] = { if (!is_dir(path)) error("No such directory: " + path.toString) val dir_name = remote_path(path) val dir = sftp.ls(dir_name) (for { i <- (0 until dir.size).iterator a = dir.get(i).asInstanceOf[AnyRef] name = Untyped.get[String](a, "filename") attrs = Untyped.get[Attrs](a, "attrs") if name != "." && name != ".." } yield { Dir_Entry(name, if (attrs.isLink) { try { sftp.stat(dir_name + "/" + name).isDir } catch { case _: SftpException => false } } else attrs.isDir) }).toList.sortBy(_.name) } def find_files( start: Path, pred: Path => Boolean = _ => true, include_dirs: Boolean = false, follow_links: Boolean = false): List[Path] = { val result = new mutable.ListBuffer[Path] def check(path: Path): Unit = { if (pred(path)) result += path } def find(dir: Path): Unit = { if (include_dirs) check(dir) if (follow_links || !is_link(dir)) { for (entry <- read_dir(dir)) { val path = dir + Path.basic(entry.name) if (entry.is_file) check(path) else find(path) } } } if (is_file(start)) check(start) else find(start) result.toList } def open_input(path: Path): InputStream = sftp.get(remote_path(path)) def open_output(path: Path): OutputStream = sftp.put(remote_path(path)) override def read_file(path: Path, local_path: Path): Unit = sftp.get(remote_path(path), File.platform_path(local_path)) def read_bytes(path: Path): Bytes = using(open_input(path))(Bytes.read_stream(_)) def read(path: Path): String = using(open_input(path))(File.read_stream) override def write_file(path: Path, local_path: Path): Unit = sftp.put(File.platform_path(local_path), remote_path(path)) def write_bytes(path: Path, bytes: Bytes): Unit = using(open_output(path))(bytes.write_stream) def write(path: Path, text: String): Unit = using(open_output(path))(stream => Bytes(text).write_stream(stream)) /* exec channel */ def exec(command: String): Exec = { val channel = session.openChannel("exec").asInstanceOf[ChannelExec] channel.setCommand("export USER_HOME=\"$HOME\"\n" + command) new Exec(this, channel) } override def execute(command: String, progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), settings: Boolean = true, strict: Boolean = true): Process_Result = exec(command).result(progress_stdout, progress_stderr, strict) override def isabelle_platform: Isabelle_Platform = Isabelle_Platform(ssh = Some(this)) /* tmp dirs */ def rm_tree(dir: Path): Unit = rm_tree(remote_path(dir)) def rm_tree(remote_dir: String): Unit = execute("rm -r -f " + Bash.string(remote_dir)).check def tmp_dir(): String = execute("mktemp -d -t tmp.XXXXXXXXXX").check.out override def 
with_tmp_dir[A](body: Path => A): A = {
      val remote_dir = tmp_dir()
      try { body(Path.explode(remote_dir)) } finally { rm_tree(remote_dir) }
    }
  }


  /* system operations */

  trait System extends AutoCloseable {
    def close(): Unit = ()

    def hg_url: String = ""

    def expand_path(path: Path): Path = path.expand
    def bash_path(path: Path): String = File.bash_path(path)
    def is_dir(path: Path): Boolean = path.is_dir
    def is_file(path: Path): Boolean = path.is_file
    def make_directory(path: Path): Path = Isabelle_System.make_directory(path)

    def with_tmp_dir[A](body: Path => A): A = Isabelle_System.with_tmp_dir("tmp")(body)

    def read_file(path1: Path, path2: Path): Unit = Isabelle_System.copy_file(path1, path2)
    def write_file(path1: Path, path2: Path): Unit = Isabelle_System.copy_file(path2, path1)

    def execute(command: String,
        progress_stdout: String => Unit = (_: String) => (),
        progress_stderr: String => Unit = (_: String) => (),
        settings: Boolean = true,
        strict: Boolean = true): Process_Result =
      Isabelle_System.bash(command,
        progress_stdout = progress_stdout, progress_stderr = progress_stderr,
        env = if (settings) Isabelle_System.settings() else null, strict = strict)

    def isabelle_platform: Isabelle_Platform = Isabelle_Platform()
  }

  object Local extends System
}
diff --git a/src/Pure/General/time.scala b/src/Pure/General/time.scala
--- a/src/Pure/General/time.scala
+++ b/src/Pure/General/time.scala
@@ -1,69 +1,69 @@
 /*  Title:      Pure/General/time.scala
     Author:     Makarius

 Time based on milliseconds.
 */

 package isabelle

 import java.util.Locale
 import java.time.Instant

 object Time {
   def seconds(s: Double): Time = new Time((s * 1000.0).round)
   def minutes(s: Double): Time = new Time((s * 60000.0).round)
   def ms(m: Long): Time = new Time(m)
   def hms(h: Int, m: Int, s: Double): Time = seconds(s + 60 * m + 3600 * h)

   def now(): Time = ms(System.currentTimeMillis())

   val zero: Time = ms(0)

   def print_seconds(s: Double): String =
     String.format(Locale.ROOT, "%.3f", s.asInstanceOf[AnyRef])

   def instant(t: Instant): Time = ms(t.getEpochSecond * 1000L + t.getNano / 1000000L)
 }

 final class Time private(val ms: Long) extends AnyVal {
   def proper_ms: Option[Long] = if (ms == 0) None else Some(ms)

   def seconds: Double = ms / 1000.0
   def minutes: Double = ms / 60000.0

   def + (t: Time): Time = new Time(ms + t.ms)
   def - (t: Time): Time = new Time(ms - t.ms)

   def compare(t: Time): Int = ms compare t.ms
   def < (t: Time): Boolean = ms < t.ms
   def <= (t: Time): Boolean = ms <= t.ms
   def > (t: Time): Boolean = ms > t.ms
   def >= (t: Time): Boolean = ms >= t.ms

   def min(t: Time): Time = if (this < t) this else t
   def max(t: Time): Time = if (this > t) this else t

   def is_zero: Boolean = ms == 0
   def is_relevant: Boolean = ms >= 1

   override def toString: String = Time.print_seconds(seconds)

   def message: String = toString + "s"

   def message_hms: String = {
     val s = ms / 1000
     String.format(Locale.ROOT, "%d:%02d:%02d",
       java.lang.Long.valueOf(s / 3600),
       java.lang.Long.valueOf((s / 60) % 60),
       java.lang.Long.valueOf(s % 60))
   }

   def instant: Instant = Instant.ofEpochMilli(ms)

-  def sleep: Unit = Thread.sleep(ms)
+  def sleep(): Unit = Thread.sleep(ms)
 }
diff --git a/src/Pure/PIDE/prover.scala b/src/Pure/PIDE/prover.scala
--- a/src/Pure/PIDE/prover.scala
+++ b/src/Pure/PIDE/prover.scala
@@ -1,323 +1,323 @@
 /*  Title:      Pure/PIDE/prover.scala
     Author:     Makarius
     Options:    :folding=explicit:

 Prover process wrapping.
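// The recurring change in this patch: Time.sleep performs a side effect (Thread.sleep), so it
// is now declared and invoked as sleep() with empty parentheses, while pure accessors such as
// ms, seconds, or is_zero stay parameterless. A small self-contained illustration of that
// convention, independent of the Isabelle sources:
object Paren_Convention_Sketch {
  final class Duration(val ms: Long) {
    def seconds: Double = ms / 1000.0     // pure accessor: no parentheses
    def sleep(): Unit = Thread.sleep(ms)  // side effect: declared and called with ()
  }

  def main(args: Array[String]): Unit = {
    val d = new Duration(100)
    println(d.seconds)  // 0.1
    d.sleep()           // parentheses signal the side effect at the call site
  }
}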
*/ package isabelle import java.io.{InputStream, OutputStream, BufferedOutputStream, IOException} object Prover { /* messages */ sealed abstract class Message type Receiver = Message => Unit class Input(val name: String, val args: List[String]) extends Message { override def toString: String = XML.Elem(Markup(Markup.PROVER_COMMAND, List((Markup.NAME, name))), args.flatMap(s => List(XML.newline, XML.elem(Markup.PROVER_ARG, YXML.parse_body(s))))).toString } class Output(val message: XML.Elem) extends Message { def kind: String = message.markup.name def properties: Properties.T = message.markup.properties def body: XML.Body = message.body def is_init: Boolean = kind == Markup.INIT def is_exit: Boolean = kind == Markup.EXIT def is_stdout: Boolean = kind == Markup.STDOUT def is_stderr: Boolean = kind == Markup.STDERR def is_system: Boolean = kind == Markup.SYSTEM def is_status: Boolean = kind == Markup.STATUS def is_report: Boolean = kind == Markup.REPORT def is_syslog: Boolean = is_init || is_exit || is_system || is_stderr override def toString: String = { val res = if (is_status || is_report) message.body.map(_.toString).mkString else Pretty.string_of(message.body, metric = Symbol.Metric) if (properties.isEmpty) kind + " [[" + res + "]]" else kind + " " + (for ((x, y) <- properties) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]" } } class Malformed(msg: String) extends Exn.User_Error("Malformed prover message: " + msg) def bad_header(print: String): Nothing = throw new Malformed("bad message header\n" + print) def bad_chunks(): Nothing = throw new Malformed("bad message chunks") def the_chunk(chunks: List[Bytes], print: => String): Bytes = chunks match { case List(chunk) => chunk case _ => throw new Malformed("single chunk expected: " + print) } class Protocol_Output(props: Properties.T, val chunks: List[Bytes]) extends Output(XML.Elem(Markup(Markup.PROTOCOL, props), Nil)) { def chunk: Bytes = the_chunk(chunks, toString) lazy val text: String = chunk.text } } class Prover( receiver: Prover.Receiver, cache: XML.Cache, channel: System_Channel, process: Bash.Process) extends Protocol { /** receiver output **/ private def system_output(text: String): Unit = { receiver(new Prover.Output(XML.Elem(Markup(Markup.SYSTEM, Nil), List(XML.Text(text))))) } private def protocol_output(props: Properties.T, chunks: List[Bytes]): Unit = { receiver(new Prover.Protocol_Output(props, chunks)) } private def output(kind: String, props: Properties.T, body: XML.Body): Unit = { val main = XML.Elem(Markup(kind, props), Protocol_Message.clean_reports(body)) val reports = Protocol_Message.reports(props, body) for (msg <- main :: reports) receiver(new Prover.Output(cache.elem(msg))) } private def exit_message(result: Process_Result): Unit = { output(Markup.EXIT, Markup.Process_Result(result), List(XML.Text(result.print_return_code))) } /** process manager **/ private val process_result: Future[Process_Result] = Future.thread("process_result") { val rc = process.join val timing = process.get_timing Process_Result(rc, timing = timing) } private def terminate_process(): Unit = { try { process.terminate() } catch { case exn @ ERROR(_) => system_output("Failed to terminate prover process: " + exn.getMessage) } } private val process_manager = Isabelle_Thread.fork(name = "process_manager") { val stdout = physical_output(false) val (startup_failed, startup_errors) = { var finished: Option[Boolean] = None val result = new StringBuilder(100) while (finished.isEmpty && (process.stderr.ready || 
!process_result.is_finished)) { while (finished.isEmpty && process.stderr.ready) { try { val c = process.stderr.read if (c == 2) finished = Some(true) else result += c.toChar } catch { case _: IOException => finished = Some(false) } } - Time.seconds(0.05).sleep + Time.seconds(0.05).sleep() } (finished.isEmpty || !finished.get, result.toString.trim) } if (startup_errors != "") system_output(startup_errors) if (startup_failed) { terminate_process() process_result.join stdout.join exit_message(Process_Result(127)) } else { val (command_stream, message_stream) = channel.rendezvous() command_input_init(command_stream) val stderr = physical_output(true) val message = message_output(message_stream) val result = process_result.join system_output("process terminated") command_input_close() for (thread <- List(stdout, stderr, message)) thread.join system_output("process_manager terminated") exit_message(result) } channel.shutdown() } /* management methods */ def join(): Unit = process_manager.join() def terminate(): Unit = { system_output("Terminating prover process") command_input_close() var count = 10 while (!process_result.is_finished && count > 0) { - Time.seconds(0.1).sleep + Time.seconds(0.1).sleep() count -= 1 } if (!process_result.is_finished) terminate_process() } /** process streams **/ /* command input */ private var command_input: Option[Consumer_Thread[List[Bytes]]] = None private def command_input_close(): Unit = command_input.foreach(_.shutdown()) private def command_input_init(raw_stream: OutputStream): Unit = { val name = "command_input" val stream = new BufferedOutputStream(raw_stream) command_input = Some( Consumer_Thread.fork(name)( consume = { case chunks => try { Bytes(chunks.map(_.length).mkString("", ",", "\n")).write_stream(stream) chunks.foreach(_.write_stream(stream)) stream.flush true } catch { case e: IOException => system_output(name + ": " + e.getMessage); false } }, finish = { case () => stream.close(); system_output(name + " terminated") } ) ) } /* physical output */ private def physical_output(err: Boolean): Thread = { val (name, reader, markup) = if (err) ("standard_error", process.stderr, Markup.STDERR) else ("standard_output", process.stdout, Markup.STDOUT) Isabelle_Thread.fork(name = name) { try { var result = new StringBuilder(100) var finished = false while (!finished) { //{{{ var c = -1 var done = false while (!done && (result.isEmpty || reader.ready)) { c = reader.read if (c >= 0) result.append(c.asInstanceOf[Char]) else done = true } if (result.nonEmpty) { output(markup, Nil, List(XML.Text(Symbol.decode(result.toString)))) result.clear() } else { reader.close() finished = true } //}}} } } catch { case e: IOException => system_output(name + ": " + e.getMessage) } system_output(name + " terminated") } } /* message output */ private def message_output(stream: InputStream): Thread = { def decode_chunk(chunk: Bytes): XML.Body = Symbol.decode_yxml_failsafe(chunk.text, cache = cache) val thread_name = "message_output" Isabelle_Thread.fork(name = thread_name) { try { var finished = false while (!finished) { Byte_Message.read_message(stream) match { case None => finished = true case Some(header :: chunks) => decode_chunk(header) match { case List(XML.Elem(Markup(kind, props), Nil)) => if (kind == Markup.PROTOCOL) protocol_output(props, chunks) else output(kind, props, decode_chunk(Prover.the_chunk(chunks, kind))) case _ => Prover.bad_header(header.toString) } case Some(_) => Prover.bad_chunks() } } } catch { case e: IOException => system_output("Cannot read 
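// Generic sketch of the bounded-retry shutdown used by Prover.terminate() above: give the
// process a graceful-exit window by polling its completion flag a fixed number of times with
// short sleeps (the Time.seconds(0.1).sleep() call changed in this patch), then fall back to a
// forced stop. The worker and flag below are hypothetical stand-ins for the prover process.
import java.util.concurrent.atomic.AtomicBoolean

object Graceful_Shutdown_Sketch {
  def shutdown(is_finished: () => Boolean, force_stop: () => Unit,
      retries: Int = 10, pause_ms: Long = 100): Unit = {
    var count = retries
    while (!is_finished() && count > 0) {
      Thread.sleep(pause_ms)
      count -= 1
    }
    if (!is_finished()) force_stop()
  }

  def main(args: Array[String]): Unit = {
    val done = new AtomicBoolean(false)
    val worker = new Thread(() => { Thread.sleep(300); done.set(true) })
    worker.start()
    shutdown(() => done.get, () => worker.interrupt())
    println("worker finished: " + done.get)
  }
}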
message:\n" + e.getMessage) case e: Prover.Malformed => system_output(e.getMessage) } stream.close() system_output(thread_name + " terminated") } } /** protocol commands **/ var trace: Boolean = false def protocol_command_raw(name: String, args: List[Bytes]): Unit = command_input match { case Some(thread) if thread.is_active => if (trace) { val payload = args.foldLeft(0) { case (n, b) => n + b.length } Output.writeln( "protocol_command " + name + ", args = " + args.length + ", payload = " + payload) } thread.send(Bytes(name) :: args) case _ => error("Inactive prover input thread for command " + quote(name)) } def protocol_command_args(name: String, args: List[String]): Unit = { receiver(new Prover.Input(name, args)) protocol_command_raw(name, args.map(Bytes(_))) } def protocol_command(name: String, args: String*): Unit = protocol_command_args(name, args.toList) } diff --git a/src/Pure/PIDE/session.scala b/src/Pure/PIDE/session.scala --- a/src/Pure/PIDE/session.scala +++ b/src/Pure/PIDE/session.scala @@ -1,784 +1,784 @@ /* Title: Pure/PIDE/session.scala Author: Makarius Options: :folding=explicit: PIDE editor session, potentially with running prover process. */ package isabelle import scala.collection.immutable.Queue import scala.collection.mutable import scala.annotation.tailrec object Session { /* outlets */ object Consumer { def apply[A](name: String)(consume: A => Unit): Consumer[A] = new Consumer[A](name, consume) } final class Consumer[-A] private(val name: String, val consume: A => Unit) class Outlet[A](dispatcher: Consumer_Thread[() => Unit]) { private val consumers = Synchronized[List[Consumer[A]]](Nil) def += (c: Consumer[A]): Unit = consumers.change(Library.update(c)) def -= (c: Consumer[A]): Unit = consumers.change(Library.remove(c)) def post(a: A): Unit = { for (c <- consumers.value.iterator) { dispatcher.send(() => try { c.consume(a) } catch { case exn: Throwable => Output.error_message("Consumer failed: " + quote(c.name) + "\n" + Exn.message(exn)) }) } } } /* change */ sealed case class Change( previous: Document.Version, syntax_changed: List[Document.Node.Name], deps_changed: Boolean, doc_edits: List[Document.Edit_Command], consolidate: List[Document.Node.Name], version: Document.Version) case object Change_Flush /* events */ //{{{ case class Command_Timing(props: Properties.T) case class Theory_Timing(props: Properties.T) case class Runtime_Statistics(props: Properties.T) case class Task_Statistics(props: Properties.T) case class Global_Options(options: Options) case object Caret_Focus case class Raw_Edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]) case class Dialog_Result(id: Document_ID.Generic, serial: Long, result: String) case class Build_Theories(id: String, master_dir: Path, theories: List[(Options, List[Path])]) case class Commands_Changed( assignment: Boolean, nodes: Set[Document.Node.Name], commands: Set[Command]) sealed abstract class Phase { def print: String = this match { case Terminated(result) => if (result.ok) "finished" else "failed" case _ => Word.lowercase(this.toString) } } case object Inactive extends Phase // stable case object Startup extends Phase // transient case object Ready extends Phase // metastable case object Shutdown extends Phase // transient case class Terminated(result: Process_Result) extends Phase // stable //}}} /* syslog */ private[Session] class Syslog(limit: Int) { private var queue = Queue.empty[XML.Elem] private var length = 0 def += (msg: XML.Elem): Unit = synchronized { queue = queue.enqueue(msg) length += 1 if 
(length > limit) queue = queue.dequeue._2 } def content: String = synchronized { cat_lines(queue.iterator.map(XML.content)) + (if (length > limit) "\n(A total of " + length + " messages...)" else "") } } /* protocol handlers */ type Protocol_Function = Prover.Protocol_Output => Boolean abstract class Protocol_Handler extends Isabelle_System.Service { def init(session: Session): Unit = {} def exit(): Unit = {} def functions: List[(String, Protocol_Function)] = Nil def prover_options(options: Options): Options = options } } class Session(_session_options: => Options, val resources: Resources) extends Document.Session { session => val cache: XML.Cache = XML.Cache.make() def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = Command.Blobs_Info.none /* global flags */ @volatile var timing: Boolean = false @volatile var verbose: Boolean = false /* dynamic session options */ def session_options: Options = _session_options def output_delay: Time = session_options.seconds("editor_output_delay") def consolidate_delay: Time = session_options.seconds("editor_consolidate_delay") def prune_delay: Time = session_options.seconds("editor_prune_delay") def prune_size: Int = session_options.int("editor_prune_size") def syslog_limit: Int = session_options.int("editor_syslog_limit") def reparse_limit: Int = session_options.int("editor_reparse_limit") /* dispatcher */ private val dispatcher = Consumer_Thread.fork[() => Unit]("Session.dispatcher", daemon = true) { case e => e(); true } def assert_dispatcher[A](body: => A): A = { assert(dispatcher.check_thread) body } def require_dispatcher[A](body: => A): A = { require(dispatcher.check_thread, "not on dispatcher thread") body } def send_dispatcher(body: => Unit): Unit = { if (dispatcher.check_thread) body else dispatcher.send(() => body) } def send_wait_dispatcher(body: => Unit): Unit = { if (dispatcher.check_thread) body else dispatcher.send_wait(() => body) } /* outlets */ val finished_theories = new Session.Outlet[Document.Snapshot](dispatcher) val command_timings = new Session.Outlet[Session.Command_Timing](dispatcher) val theory_timings = new Session.Outlet[Session.Theory_Timing](dispatcher) val runtime_statistics = new Session.Outlet[Session.Runtime_Statistics](dispatcher) val task_statistics = new Session.Outlet[Session.Task_Statistics](dispatcher) val global_options = new Session.Outlet[Session.Global_Options](dispatcher) val caret_focus = new Session.Outlet[Session.Caret_Focus.type](dispatcher) val raw_edits = new Session.Outlet[Session.Raw_Edits](dispatcher) val commands_changed = new Session.Outlet[Session.Commands_Changed](dispatcher) val phase_changed = new Session.Outlet[Session.Phase](dispatcher) val syslog_messages = new Session.Outlet[Prover.Output](dispatcher) val raw_output_messages = new Session.Outlet[Prover.Output](dispatcher) val trace_events = new Session.Outlet[Simplifier_Trace.Event.type](dispatcher) val debugger_updates = new Session.Outlet[Debugger.Update.type](dispatcher) val all_messages = new Session.Outlet[Prover.Message](dispatcher) // potential bottle-neck! 
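// Note (editorial, not part of the original sources): each Outlet.post above hands its
// event to the single Session.dispatcher Consumer_Thread, so every registered consumer of
// every outlet runs sequentially on that one thread; a slow consumer therefore delays all
// other outlets, which is presumably why all_messages is marked as a potential bottle-neck.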
/** main protocol manager **/ /* internal messages */ private case class Start(start_prover: Prover.Receiver => Prover) private case object Stop private case class Get_State(promise: Promise[Document.State]) private case class Cancel_Exec(exec_id: Document_ID.Exec) private case class Protocol_Command_Raw(name: String, args: List[Bytes]) private case class Protocol_Command_Args(name: String, args: List[String]) private case class Update_Options(options: Options) private case object Consolidate_Execution private case object Prune_History /* phase */ private def post_phase(new_phase: Session.Phase): Session.Phase = { phase_changed.post(new_phase) new_phase } private val _phase = Synchronized[Session.Phase](Session.Inactive) private def phase_=(new_phase: Session.Phase): Unit = _phase.change(_ => post_phase(new_phase)) def phase: Session.Phase = _phase.value def is_ready: Boolean = phase == Session.Ready /* syslog */ private val syslog = new Session.Syslog(syslog_limit) def syslog_content(): String = syslog.content /* pipelined change parsing */ private case class Text_Edits( previous: Future[Document.Version], doc_blobs: Document.Blobs, text_edits: List[Document.Edit_Text], consolidate: List[Document.Node.Name], version_result: Promise[Document.Version]) private val change_parser = Consumer_Thread.fork[Text_Edits]("change_parser", daemon = true) { case Text_Edits(previous, doc_blobs, text_edits, consolidate, version_result) => val prev = previous.get_finished val change = Timing.timeit("parse_change", timing) { resources.parse_change(reparse_limit, prev, doc_blobs, text_edits, consolidate) } version_result.fulfill(change.version) manager.send(change) true } /* buffered changes */ private object change_buffer { private var assignment: Boolean = false private var nodes: Set[Document.Node.Name] = Set.empty private var commands: Set[Command] = Set.empty def flush(): Unit = synchronized { if (assignment || nodes.nonEmpty || commands.nonEmpty) commands_changed.post(Session.Commands_Changed(assignment, nodes, commands)) if (nodes.nonEmpty) consolidation.update(nodes) assignment = false nodes = Set.empty commands = Set.empty } private val delay_flush = Delay.first(output_delay) { flush() } def invoke(assign: Boolean, edited_nodes: List[Document.Node.Name], cmds: List[Command]): Unit = synchronized { assignment |= assign for (node <- edited_nodes) { nodes += node } for (command <- cmds) { nodes += command.node_name command.blobs_names.foreach(nodes += _) commands += command } delay_flush.invoke() } def shutdown(): Unit = { delay_flush.revoke() flush() } } /* postponed changes */ private object postponed_changes { private var postponed: List[Session.Change] = Nil def store(change: Session.Change): Unit = synchronized { postponed ::= change } def flush(state: Document.State): List[Session.Change] = synchronized { val (assigned, unassigned) = postponed.partition(change => state.is_assigned(change.previous)) postponed = unassigned assigned.reverse } } /* node consolidation */ private object consolidation { private val delay = Delay.first(consolidate_delay) { manager.send(Consolidate_Execution) } private val init_state: Option[Set[Document.Node.Name]] = Some(Set.empty) private val state = Synchronized(init_state) def exit(): Unit = { delay.revoke() state.change(_ => None) } def update(new_nodes: Set[Document.Node.Name] = Set.empty): Unit = { val active = state.change_result(st => (st.isDefined, st.map(nodes => if (nodes.isEmpty) new_nodes else nodes ++ new_nodes))) if (active) delay.invoke() } def flush(): 
Set[Document.Node.Name] = state.change_result(st => if (st.isDefined) (st.get, init_state) else (Set.empty, None)) } /* prover process */ private object prover { private val variable = Synchronized[Option[Prover]](None) def defined: Boolean = variable.value.isDefined def get: Prover = variable.value.get def set(p: Prover): Unit = variable.change(_ => Some(p)) def reset: Unit = variable.change(_ => None) def await_reset(): Unit = variable.guarded_access({ case None => Some((), None) case _ => None }) } /* file formats and protocol handlers */ private lazy val file_formats: File_Format.Session = File_Format.registry.start_session(session) private val protocol_handlers = Protocol_Handlers.init(session) def get_protocol_handler[C <: Session.Protocol_Handler](c: Class[C]): Option[C] = protocol_handlers.get(c.getName) match { case Some(h) if Library.is_subclass(h.getClass, c) => Some(h.asInstanceOf[C]) case _ => None } def init_protocol_handler(handler: Session.Protocol_Handler): Unit = protocol_handlers.init(handler) def prover_options(options: Options): Options = protocol_handlers.prover_options(file_formats.prover_options(options)) /* debugger */ private val debugger_handler = new Debugger.Handler(this) init_protocol_handler(debugger_handler) def debugger: Debugger = debugger_handler.debugger /* manager thread */ private val delay_prune = Delay.first(prune_delay) { manager.send(Prune_History) } private val manager: Consumer_Thread[Any] = { /* global state */ val global_state = Synchronized(Document.State.init) /* raw edits */ def handle_raw_edits( doc_blobs: Document.Blobs = Document.Blobs.empty, edits: List[Document.Edit_Text] = Nil, consolidate: List[Document.Node.Name] = Nil): Unit = //{{{ { require(prover.defined, "prover process not defined (handle_raw_edits)") if (edits.nonEmpty) prover.get.discontinue_execution() val previous = global_state.value.history.tip.version val version = Future.promise[Document.Version] global_state.change(_.continue_history(previous, edits, version)) raw_edits.post(Session.Raw_Edits(doc_blobs, edits)) change_parser.send(Text_Edits(previous, doc_blobs, edits, consolidate, version)) } //}}} /* resulting changes */ def handle_change(change: Session.Change): Unit = //{{{ { require(prover.defined, "prover process not defined (handle_change)") // define commands { val id_commands = new mutable.ListBuffer[Command] def id_command(command: Command): Unit = { for { (name, digest) <- command.blobs_defined if !global_state.value.defined_blob(digest) } { change.version.nodes(name).get_blob match { case Some(blob) => global_state.change(_.define_blob(digest)) prover.get.define_blob(digest, blob.bytes) case None => Output.error_message("Missing blob " + quote(name.toString)) } } if (!global_state.value.defined_command(command.id)) { global_state.change(_.define_command(command)) id_commands += command } } for { (_, edit) <- change.doc_edits } { edit.foreach({ case (c1, c2) => c1.foreach(id_command); c2.foreach(id_command) }) } if (id_commands.nonEmpty) { prover.get.define_commands_bulk(resources, id_commands.toList) } } val assignment = global_state.value.the_assignment(change.previous).check_finished global_state.change(_.define_version(change.version, assignment)) prover.get.update(change.previous.id, change.version.id, change.doc_edits, change.consolidate) resources.commit(change) } //}}} /* prover output */ def handle_output(output: Prover.Output): Unit = //{{{ { def bad_output(): Unit = { if (verbose) Output.warning("Ignoring bad prover output: " + 
output.message.toString) } def change_command(f: Document.State => (Command.State, Document.State)): Unit = { try { val st = global_state.change_result(f) if (!st.command.span.is_theory) { change_buffer.invoke(false, Nil, List(st.command)) } } catch { case _: Document.State.Fail => bad_output() } } output match { case msg: Prover.Protocol_Output => val handled = protocol_handlers.invoke(msg) if (!handled) { msg.properties match { case Protocol.Command_Timing(props, state_id, timing) if prover.defined => command_timings.post(Session.Command_Timing(props)) val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil))) change_command(_.accumulate(state_id, cache.elem(message), cache)) case Markup.Theory_Timing(props) => theory_timings.post(Session.Theory_Timing(props)) case Markup.Task_Statistics(props) => task_statistics.post(Session.Task_Statistics(props)) case Protocol.Export(args) if args.id.isDefined && Value.Long.unapply(args.id.get).isDefined => val id = Value.Long.unapply(args.id.get).get val export = Export.make_entry("", args, msg.chunk, cache) change_command(_.add_export(id, (args.serial, export))) case Protocol.Loading_Theory(node_name, id) => val blobs_info = build_blobs_info(node_name) try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) } catch { case _: Document.State.Fail => bad_output() } case List(Markup.Commands_Accepted.PROPERTY) => msg.text match { case Protocol.Commands_Accepted(ids) => ids.foreach(id => change_command(_.accumulate(id, Protocol.Commands_Accepted.message, cache))) case _ => bad_output() } case List(Markup.Assign_Update.PROPERTY) => msg.text match { case Protocol.Assign_Update(id, edited, update) => try { val (edited_nodes, cmds) = global_state.change_result(_.assign(id, edited, update)) change_buffer.invoke(true, edited_nodes, cmds) manager.send(Session.Change_Flush) } catch { case _: Document.State.Fail => bad_output() } case _ => bad_output() } delay_prune.invoke() case List(Markup.Removed_Versions.PROPERTY) => msg.text match { case Protocol.Removed(removed) => try { global_state.change(_.removed_versions(removed)) manager.send(Session.Change_Flush) } catch { case _: Document.State.Fail => bad_output() } case _ => bad_output() } case _ => bad_output() } } case _ => output.properties match { case Position.Id(state_id) => change_command(_.accumulate(state_id, output.message, cache)) case _ if output.is_init => val init_ok = try { Isabelle_System.make_services(classOf[Session.Protocol_Handler]) .foreach(init_protocol_handler) true } catch { case exn: Throwable => prover.get.protocol_command("Prover.stop", "1", Exn.message(exn)) false } if (init_ok) { prover.get.options(prover_options(session_options)) prover.get.init_session(resources) phase = Session.Ready debugger.ready() } case Markup.Process_Result(result) if output.is_exit => if (prover.defined) protocol_handlers.exit() for (id <- global_state.value.theories.keys) { val snapshot = global_state.change_result(_.end_theory(id)) finished_theories.post(snapshot) } file_formats.stop_session phase = Session.Terminated(result) prover.reset case _ => raw_output_messages.post(output) } } } //}}} /* main thread */ Consumer_Thread.fork[Any]("Session.manager", daemon = true) { case arg: Any => //{{{ arg match { case output: Prover.Output => if (output.is_syslog) { syslog += output.message syslog_messages.post(output) } if (output.is_stdout || output.is_stderr) raw_output_messages.post(output) else handle_output(output) all_messages.post(output) case input: Prover.Input 
=> all_messages.post(input) case Start(start_prover) if !prover.defined => prover.set(start_prover(manager.send(_))) case Stop => consolidation.exit() delay_prune.revoke() if (prover.defined) { global_state.change(_ => Document.State.init) prover.get.terminate() } case Get_State(promise) => promise.fulfill(global_state.value) case Consolidate_Execution => if (prover.defined) { val state = global_state.value state.stable_tip_version match { case None => consolidation.update() case Some(version) => val consolidate = consolidation.flush().iterator.filter(name => !resources.session_base.loaded_theory(name) && !state.node_consolidated(version, name) && state.node_maybe_consolidated(version, name)).toList if (consolidate.nonEmpty) handle_raw_edits(consolidate = consolidate) } } case Prune_History => if (prover.defined) { val old_versions = global_state.change_result(_.remove_versions(prune_size)) if (old_versions.nonEmpty) prover.get.remove_versions(old_versions) } case Update_Options(options) => if (prover.defined && is_ready) { prover.get.options(prover_options(options)) handle_raw_edits() } global_options.post(Session.Global_Options(options)) case Cancel_Exec(exec_id) if prover.defined => prover.get.cancel_exec(exec_id) case Session.Raw_Edits(doc_blobs, edits) if prover.defined => handle_raw_edits(doc_blobs = doc_blobs, edits = edits) case Session.Dialog_Result(id, serial, result) if prover.defined => prover.get.dialog_result(serial, result) handle_output(new Prover.Output(Protocol.Dialog_Result(id, serial, result))) case Protocol_Command_Raw(name, args) if prover.defined => prover.get.protocol_command_raw(name, args) case Protocol_Command_Args(name, args) if prover.defined => prover.get.protocol_command_args(name, args) case change: Session.Change if prover.defined => val state = global_state.value if (!state.removing_versions && state.is_assigned(change.previous)) handle_change(change) else postponed_changes.store(change) case Session.Change_Flush if prover.defined => val state = global_state.value if (!state.removing_versions) postponed_changes.flush(state).foreach(handle_change) case bad => if (verbose) Output.warning("Ignoring bad message: " + bad.toString) } true //}}} } } /* main operations */ def get_state(): Document.State = { if (manager.is_active) { val promise = Future.promise[Document.State] manager.send_wait(Get_State(promise)) promise.join } else Document.State.init } def snapshot(name: Document.Node.Name = Document.Node.Name.empty, pending_edits: List[Text.Edit] = Nil): Document.Snapshot = get_state().snapshot(name, pending_edits) def recent_syntax(name: Document.Node.Name): Outer_Syntax = get_state().recent_finished.version.get_finished.nodes(name).syntax getOrElse resources.session_base.overall_syntax @tailrec final def await_stable_snapshot(): Document.Snapshot = { val snapshot = this.snapshot() if (snapshot.is_outdated) { - output_delay.sleep + output_delay.sleep() await_stable_snapshot() } else snapshot } def start(start_prover: Prover.Receiver => Prover): Unit = { file_formats _phase.change( { case Session.Inactive => manager.send(Start(start_prover)) post_phase(Session.Startup) case phase => error("Cannot start prover in phase " + quote(phase.print)) }) } def stop(): Process_Result = { val was_ready = _phase.guarded_access( { case Session.Startup | Session.Shutdown => None case Session.Terminated(_) => Some((false, phase)) case Session.Inactive => Some((false, post_phase(Session.Terminated(Process_Result(0))))) case Session.Ready => Some((true, 
post_phase(Session.Shutdown))) }) if (was_ready) manager.send(Stop) prover.await_reset() change_parser.shutdown() change_buffer.shutdown() manager.shutdown() dispatcher.shutdown() phase match { case Session.Terminated(result) => result case phase => error("Bad session phase after shutdown: " + quote(phase.print)) } } def protocol_command_raw(name: String, args: List[Bytes]): Unit = manager.send(Protocol_Command_Raw(name, args)) def protocol_command_args(name: String, args: List[String]): Unit = manager.send(Protocol_Command_Args(name, args)) def protocol_command(name: String, args: String*): Unit = protocol_command_args(name, args.toList) def cancel_exec(exec_id: Document_ID.Exec): Unit = manager.send(Cancel_Exec(exec_id)) def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]): Unit = if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits)) def update_options(options: Options): Unit = manager.send_wait(Update_Options(options)) def dialog_result(id: Document_ID.Generic, serial: Long, result: String): Unit = manager.send(Session.Dialog_Result(id, serial, result)) } diff --git a/src/Pure/System/bash.scala b/src/Pure/System/bash.scala --- a/src/Pure/System/bash.scala +++ b/src/Pure/System/bash.scala @@ -1,264 +1,264 @@ /* Title: Pure/System/bash.scala Author: Makarius GNU bash processes, with propagation of interrupts. */ package isabelle import java.io.{File => JFile, BufferedReader, InputStreamReader, BufferedWriter, OutputStreamWriter} import scala.annotation.tailrec import scala.jdk.OptionConverters._ object Bash { /* concrete syntax */ private def bash_chr(c: Byte): String = { val ch = c.toChar ch match { case '\t' => "$'\\t'" case '\n' => "$'\\n'" case '\f' => "$'\\f'" case '\r' => "$'\\r'" case _ => if (Symbol.is_ascii_letter(ch) || Symbol.is_ascii_digit(ch) || "+,-./:_".contains(ch)) Symbol.ascii(ch) else if (c < 0) "$'\\x" + Integer.toHexString(256 + c) + "'" else if (c < 16) "$'\\x0" + Integer.toHexString(c) + "'" else if (c < 32 || c >= 127) "$'\\x" + Integer.toHexString(c) + "'" else "\\" + ch } } def string(s: String): String = if (s == "") "\"\"" else UTF8.bytes(s).iterator.map(bash_chr).mkString def strings(ss: List[String]): String = ss.iterator.map(Bash.string).mkString(" ") /* process and result */ type Watchdog = (Time, Process => Boolean) def process(script: String, cwd: JFile = null, env: Map[String, String] = Isabelle_System.settings(), redirect: Boolean = false, cleanup: () => Unit = () => ()): Process = new Process(script, cwd, env, redirect, cleanup) class Process private[Bash]( script: String, cwd: JFile, env: Map[String, String], redirect: Boolean, cleanup: () => Unit) { private val timing_file = Isabelle_System.tmp_file("bash_timing") private val timing = Synchronized[Option[Timing]](None) def get_timing: Timing = timing.value getOrElse Timing.zero private val winpid_file: Option[JFile] = if (Platform.is_windows) Some(Isabelle_System.tmp_file("bash_winpid")) else None private val winpid_script = winpid_file match { case None => script case Some(file) => "read < /proc/self/winpid > /dev/null 2> /dev/null\n" + """echo -n "$REPLY" > """ + File.bash_path(file) + "\n\n" + script } private val script_file: JFile = Isabelle_System.tmp_file("bash_script") File.write(script_file, winpid_script) private val proc = Isabelle_System.process( List(File.platform_path(Path.variable("ISABELLE_BASH_PROCESS")), File.standard_path(timing_file), "bash", File.standard_path(script_file)), cwd = cwd, env = env, redirect = redirect) // channels val stdin: 
BufferedWriter = new BufferedWriter(new OutputStreamWriter(proc.getOutputStream, UTF8.charset)) val stdout: BufferedReader = new BufferedReader(new InputStreamReader(proc.getInputStream, UTF8.charset)) val stderr: BufferedReader = new BufferedReader(new InputStreamReader(proc.getErrorStream, UTF8.charset)) // signals private val group_pid = stdout.readLine private def process_alive(pid: String): Boolean = (for { p <- Value.Long.unapply(pid) handle <- ProcessHandle.of(p).toScala } yield handle.isAlive) getOrElse false private def root_process_alive(): Boolean = winpid_file match { case None => process_alive(group_pid) case Some(file) => file.exists() && process_alive(Library.trim_line(File.read(file))) } @tailrec private def signal(s: String, count: Int = 1): Boolean = { count <= 0 || { Isabelle_System.process_signal(group_pid, signal = s) val running = root_process_alive() || Isabelle_System.process_signal(group_pid) if (running) { - Time.seconds(0.1).sleep + Time.seconds(0.1).sleep() signal(s, count - 1) } else false } } def terminate(): Unit = Isabelle_Thread.try_uninterruptible { signal("INT", count = 7) && signal("TERM", count = 3) && signal("KILL") proc.destroy() do_cleanup() } def interrupt(): Unit = Isabelle_Thread.try_uninterruptible { Isabelle_System.process_signal(group_pid, "INT") } // JVM shutdown hook private val shutdown_hook = Isabelle_Thread.create(() => terminate()) try { Runtime.getRuntime.addShutdownHook(shutdown_hook) } catch { case _: IllegalStateException => } // cleanup private def do_cleanup(): Unit = { try { Runtime.getRuntime.removeShutdownHook(shutdown_hook) } catch { case _: IllegalStateException => } script_file.delete() winpid_file.foreach(_.delete()) timing.change { case None => if (timing_file.isFile) { val t = Word.explode(File.read(timing_file)) match { case List(Value.Long(elapsed), Value.Long(cpu)) => Timing(Time.ms(elapsed), Time.ms(cpu), Time.zero) case _ => Timing.zero } timing_file.delete Some(t) } else Some(Timing.zero) case some => some } cleanup() } // join def join: Int = { val rc = proc.waitFor do_cleanup() rc } // result def result( progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), watchdog: Option[Watchdog] = None, strict: Boolean = true): Process_Result = { stdin.close() val out_lines = Future.thread("bash_stdout") { File.read_lines(stdout, progress_stdout) } val err_lines = Future.thread("bash_stderr") { File.read_lines(stderr, progress_stderr) } val watchdog_thread = for ((time, check) <- watchdog) yield { Future.thread("bash_watchdog") { while (proc.isAlive) { - time.sleep + time.sleep() if (check(this)) interrupt() } } } val rc = try { join } catch { case Exn.Interrupt() => terminate(); Exn.Interrupt.return_code } watchdog_thread.foreach(_.cancel()) if (strict && rc == Exn.Interrupt.return_code) throw Exn.Interrupt() Process_Result(rc, out_lines.join, err_lines.join, get_timing) } } /* Scala function */ object Process extends Scala.Fun_Strings("bash_process", thread = true) { val here = Scala_Project.here def apply(args: List[String]): List[String] = { val result = Exn.capture { val redirect = args.head == "true" val script = cat_lines(args.tail) Isabelle_System.bash(script, redirect = redirect) } val is_interrupt = result match { case Exn.Res(res) => res.rc == Exn.Interrupt.return_code case Exn.Exn(exn) => Exn.is_interrupt(exn) } result match { case _ if is_interrupt => Nil case Exn.Exn(exn) => List(Exn.message(exn)) case Exn.Res(res) => res.rc.toString :: 
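// Note (editorial, not part of the original sources; read off this expression): the reply
// to the ML side is a flat list of return code, elapsed ms, cpu ms, the number of stdout
// lines, then the stdout lines followed by the stderr lines -- the explicit line count
// presumably lets the receiver split stdout from stderr again.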
res.timing.elapsed.ms.toString :: res.timing.cpu.ms.toString :: res.out_lines.length.toString :: res.out_lines ::: res.err_lines } } } } diff --git a/src/Pure/System/scala.scala b/src/Pure/System/scala.scala --- a/src/Pure/System/scala.scala +++ b/src/Pure/System/scala.scala @@ -1,284 +1,284 @@ /* Title: Pure/System/scala.scala Author: Makarius Support for Scala at runtime. */ package isabelle import java.io.{File => JFile, StringWriter, PrintWriter} import scala.tools.nsc.{GenericRunnerSettings, ConsoleWriter, NewLinePrintWriter} import scala.tools.nsc.interpreter.{IMain, Results} import scala.tools.nsc.interpreter.shell.ReplReporterImpl object Scala { /** registered functions **/ abstract class Fun(val name: String, val thread: Boolean = false) { override def toString: String = name def multi: Boolean = true def position: Properties.T = here.position def here: Scala_Project.Here def invoke(args: List[Bytes]): List[Bytes] } abstract class Fun_Strings(name: String, thread: Boolean = false) extends Fun(name, thread = thread) { override def invoke(args: List[Bytes]): List[Bytes] = apply(args.map(_.text)).map(Bytes.apply) def apply(args: List[String]): List[String] } abstract class Fun_String(name: String, thread: Boolean = false) extends Fun_Strings(name, thread = thread) { override def multi: Boolean = false override def apply(args: List[String]): List[String] = List(apply(Library.the_single(args))) def apply(arg: String): String } class Functions(val functions: Fun*) extends Isabelle_System.Service lazy val functions: List[Fun] = Isabelle_System.make_services(classOf[Functions]).flatMap(_.functions) /** demo functions **/ object Echo extends Fun_String("echo") { val here = Scala_Project.here def apply(arg: String): String = arg } object Sleep extends Fun_String("sleep") { val here = Scala_Project.here def apply(seconds: String): String = { val t = seconds match { case Value.Double(s) => Time.seconds(s) case _ => error("Malformed argument: " + quote(seconds)) } val t0 = Time.now() - t.sleep + t.sleep() val t1 = Time.now() (t1 - t0).toString } } /** compiler **/ object Compiler { def context( error: String => Unit = Exn.error, jar_dirs: List[JFile] = Nil): Context = { def find_jars(dir: JFile): List[String] = File.find_files(dir, file => file.getName.endsWith(".jar")). 
map(File.absolute_name) val class_path = for { prop <- List("isabelle.scala.classpath", "java.class.path") path = System.getProperty(prop, "") if path != "\"\"" elem <- space_explode(JFile.pathSeparatorChar, path) } yield elem val settings = new GenericRunnerSettings(error) settings.classpath.value = (class_path ::: jar_dirs.flatMap(find_jars)).mkString(JFile.pathSeparator) new Context(settings) } def default_print_writer: PrintWriter = new NewLinePrintWriter(new ConsoleWriter, true) class Context private [Compiler](val settings: GenericRunnerSettings) { override def toString: String = settings.toString def interpreter( print_writer: PrintWriter = default_print_writer, class_loader: ClassLoader = null): IMain = { new IMain(settings, new ReplReporterImpl(settings, print_writer)) { override def parentClassLoader: ClassLoader = if (class_loader == null) super.parentClassLoader else class_loader } } def toplevel(interpret: Boolean, source: String): List[String] = { val out = new StringWriter val interp = interpreter(new PrintWriter(out)) val marker = '\u000b' val ok = interp.withLabel(marker.toString) { if (interpret) interp.interpret(source) == Results.Success else (new interp.ReadEvalPrint).compile(source) } out.close() val Error = """(?s)^\S* error: (.*)$""".r val errors = space_explode(marker, Library.strip_ansi_color(out.toString)). collect({ case Error(msg) => "Scala error: " + Library.trim_line(msg) }) if (!ok && errors.isEmpty) List("Error") else errors } } } object Toplevel extends Fun_String("scala_toplevel") { val here = Scala_Project.here def apply(arg: String): String = { val (interpret, source) = YXML.parse_body(arg) match { case Nil => (false, "") case List(XML.Text(source)) => (false, source) case body => import XML.Decode._; pair(bool, string)(body) } val errors = try { Compiler.context().toplevel(interpret, source) } catch { case ERROR(msg) => List(msg) } locally { import XML.Encode._; YXML.string_of_body(list(string)(errors)) } } } /** invoke Scala functions from ML **/ /* invoke function */ object Tag extends Enumeration { val NULL, OK, ERROR, FAIL, INTERRUPT = Value } def function_thread(name: String): Boolean = functions.find(fun => fun.name == name) match { case Some(fun) => fun.thread case None => false } def function_body(name: String, args: List[Bytes]): (Tag.Value, List[Bytes]) = functions.find(fun => fun.name == name) match { case Some(fun) => Exn.capture { fun.invoke(args) } match { case Exn.Res(null) => (Tag.NULL, Nil) case Exn.Res(res) => (Tag.OK, res) case Exn.Exn(Exn.Interrupt()) => (Tag.INTERRUPT, Nil) case Exn.Exn(e) => (Tag.ERROR, List(Bytes(Exn.message(e)))) } case None => (Tag.FAIL, List(Bytes("Unknown Isabelle/Scala function: " + quote(name)))) } /* protocol handler */ class Handler extends Session.Protocol_Handler { private var session: Session = null private var futures = Map.empty[String, Future[Unit]] override def init(session: Session): Unit = synchronized { this.session = session } override def exit(): Unit = synchronized { for ((id, future) <- futures) cancel(id, future) futures = Map.empty } private def result(id: String, tag: Scala.Tag.Value, res: List[Bytes]): Unit = synchronized { if (futures.isDefinedAt(id)) { session.protocol_command_raw("Scala.result", Bytes(id) :: Bytes(tag.id.toString) :: res) futures -= id } } private def cancel(id: String, future: Future[Unit]): Unit = { future.cancel() result(id, Scala.Tag.INTERRUPT, Nil) } private def invoke_scala(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case 
Markup.Invoke_Scala(name, id) => def body: Unit = { val (tag, res) = Scala.function_body(name, msg.chunks) result(id, tag, res) } val future = if (Scala.function_thread(name)) { Future.thread(name = Isabelle_Thread.make_name(base = "invoke_scala"))(body) } else Future.fork(body) futures += (id -> future) true case _ => false } } private def cancel_scala(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case Markup.Cancel_Scala(id) => futures.get(id) match { case Some(future) => cancel(id, future) case None => } true case _ => false } } override val functions = List( Markup.Invoke_Scala.name -> invoke_scala, Markup.Cancel_Scala.name -> cancel_scala) } } class Scala_Functions extends Scala.Functions( Scala.Echo, Scala.Sleep, Scala.Toplevel, Bytes.Decode_Base64, Bytes.Encode_Base64, Doc.Doc_Names, Bash.Process, Bibtex.Check_Database, Isabelle_System.Make_Directory, Isabelle_System.Copy_Dir, Isabelle_System.Copy_File, Isabelle_System.Copy_File_Base, Isabelle_System.Rm_Tree, Isabelle_System.Download, Isabelle_System.Isabelle_Id, Isabelle_Tool.Isabelle_Tools, isabelle.atp.SystemOnTPTP.List_Systems, isabelle.atp.SystemOnTPTP.Run_System) diff --git a/src/Pure/Tools/build.scala b/src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala +++ b/src/Pure/Tools/build.scala @@ -1,679 +1,679 @@ /* Title: Pure/Tools/build.scala Author: Makarius Options: :folding=explicit: Build and manage Isabelle sessions. */ package isabelle import scala.collection.immutable.SortedSet import scala.annotation.tailrec object Build { /** auxiliary **/ /* persistent build info */ sealed case class Session_Info( sources: String, input_heaps: List[String], output_heap: Option[String], return_code: Int) { def ok: Boolean = return_code == 0 } /* queue with scheduling information */ private object Queue { type Timings = (List[Properties.T], Double) def load_timings(progress: Progress, store: Sessions.Store, session_name: String): Timings = { val no_timings: Timings = (Nil, 0.0) store.try_open_database(session_name) match { case None => no_timings case Some(db) => def ignore_error(msg: String) = { progress.echo_warning("Ignoring bad database " + db + (if (msg == "") "" else "\n" + msg)) no_timings } try { val command_timings = store.read_command_timings(db, session_name) val session_timing = store.read_session_timing(db, session_name) match { case Markup.Elapsed(t) => t case _ => 0.0 } (command_timings, session_timing) } catch { case ERROR(msg) => ignore_error(msg) case exn: java.lang.Error => ignore_error(Exn.message(exn)) case _: XML.Error => ignore_error("") } finally { db.close() } } } def make_session_timing(sessions_structure: Sessions.Structure, timing: Map[String, Double]) : Map[String, Double] = { val maximals = sessions_structure.build_graph.maximals.toSet def desc_timing(session_name: String): Double = { if (maximals.contains(session_name)) timing(session_name) else { val descendants = sessions_structure.build_descendants(List(session_name)).toSet val g = sessions_structure.build_graph.restrict(descendants) (0.0 :: g.maximals.flatMap(desc => { val ps = g.all_preds(List(desc)) if (ps.exists(p => !timing.isDefinedAt(p))) None else Some(ps.map(timing(_)).sum) })).max } } timing.keySet.iterator.map(name => (name -> desc_timing(name))).toMap.withDefaultValue(0.0) } def apply(progress: Progress, sessions_structure: Sessions.Structure, store: Sessions.Store) : Queue = { val graph = sessions_structure.build_graph val names = graph.keys val timings = names.map(name => (name, load_timings(progress, 
store, name))) val command_timings = timings.map({ case (name, (ts, _)) => (name, ts) }).toMap.withDefaultValue(Nil) val session_timing = make_session_timing(sessions_structure, timings.map({ case (name, (_, t)) => (name, t) }).toMap) object Ordering extends scala.math.Ordering[String] { def compare(name1: String, name2: String): Int = session_timing(name2) compare session_timing(name1) match { case 0 => sessions_structure(name2).timeout compare sessions_structure(name1).timeout match { case 0 => name1 compare name2 case ord => ord } case ord => ord } } new Queue(graph, SortedSet(names: _*)(Ordering), command_timings) } } private class Queue( graph: Graph[String, Sessions.Info], order: SortedSet[String], val command_timings: String => List[Properties.T]) { def is_inner(name: String): Boolean = !graph.is_maximal(name) def is_empty: Boolean = graph.is_empty def - (name: String): Queue = new Queue(graph.del_node(name), order - name, command_timings) def dequeue(skip: String => Boolean): Option[(String, Sessions.Info)] = { val it = order.iterator.dropWhile(name => skip(name) || !graph.is_minimal(name)) if (it.hasNext) { val name = it.next(); Some((name, graph.get_node(name))) } else None } } /** build with results **/ class Results private[Build](results: Map[String, (Option[Process_Result], Sessions.Info)]) { def sessions: Set[String] = results.keySet def infos: List[Sessions.Info] = results.values.map(_._2).toList def cancelled(name: String): Boolean = results(name)._1.isEmpty def apply(name: String): Process_Result = results(name)._1.getOrElse(Process_Result(1)) def info(name: String): Sessions.Info = results(name)._2 val rc: Int = results.iterator.map({ case (_, (Some(r), _)) => r.rc case (_, (None, _)) => 1 }). foldLeft(0)(_ max _) def ok: Boolean = rc == 0 override def toString: String = rc.toString } def session_finished(session_name: String, process_result: Process_Result): String = "Finished " + session_name + " (" + process_result.timing.message_resources + ")" def session_timing(session_name: String, build_log: Build_Log.Session_Info): String = { val props = build_log.session_timing val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1 val timing = Markup.Timing_Properties.parse(props) "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")" } def build( options: Options, selection: Sessions.Selection = Sessions.Selection.empty, presentation: Presentation.Context = Presentation.Context.none, progress: Progress = new Progress, check_unknown_files: Boolean = false, build_heap: Boolean = false, clean_build: Boolean = false, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Sessions.Info] = Nil, numa_shuffling: Boolean = false, max_jobs: Int = 1, list_files: Boolean = false, check_keywords: Set[String] = Set.empty, fresh_build: Boolean = false, no_build: Boolean = false, soft_build: Boolean = false, verbose: Boolean = false, export_files: Boolean = false): Results = { val build_options = options + "completion_limit=0" + "editor_tracing_messages=0" + "kodkod_scala=false" + ("pide_reports=" + options.bool("build_pide_reports")) val store = Sessions.store(build_options) Isabelle_Fonts.init() /* session selection and dependencies */ val full_sessions = Sessions.load_structure(build_options, dirs = dirs, select_dirs = select_dirs, infos = infos) val full_sessions_selection = full_sessions.imports_selection(selection) def sources_stamp(deps: Sessions.Deps, session_name: String): String = { val digests = 
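// Note (editorial, not part of the original sources): the stamp computed below combines the
// session's meta_digest with the digests of its own and imported sources; the soft_build
// check and the current/outdated test in the scheduler loop compare this value against
// build.sources read back from the session database.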
full_sessions(session_name).meta_digest :: deps.sources(session_name) ::: deps.imported_sources(session_name) SHA1.digest_set(digests).toString } val deps = { val deps0 = Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true, verbose = verbose, list_files = list_files, check_keywords = check_keywords).check_errors if (soft_build && !fresh_build) { val outdated = deps0.sessions_structure.build_topological_order.flatMap(name => store.try_open_database(name) match { case Some(db) => using(db)(store.read_build(_, name)) match { case Some(build) if build.ok && build.sources == sources_stamp(deps0, name) => None case _ => Some(name) } case None => Some(name) }) Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)), progress = progress, inlined_files = true).check_errors } else deps0 } /* check unknown files */ if (check_unknown_files) { val source_files = (for { (_, base) <- deps.session_bases.iterator (path, _) <- base.sources.iterator } yield path).toList val exclude_files = List(Path.explode("$POLYML_EXE")).map(_.canonical_file) val unknown_files = Mercurial.check_files(source_files)._2. filterNot(path => exclude_files.contains(path.canonical_file)) if (unknown_files.nonEmpty) { progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" + unknown_files.map(path => path.expand.implode).sorted.mkString("\n ", "\n ", "")) } } /* main build process */ val queue = Queue(progress, deps.sessions_structure, store) store.prepare_output_dir() if (clean_build) { for (name <- full_sessions.imports_descendants(full_sessions_selection)) { val (relevant, ok) = store.clean_output(name) if (relevant) { if (ok) progress.echo("Cleaned " + name) else progress.echo(name + " FAILED to clean") } } } // scheduler loop case class Result( current: Boolean, heap_digest: Option[String], process: Option[Process_Result], info: Sessions.Info) { def ok: Boolean = process match { case None => false case Some(res) => res.rc == 0 } } def sleep(): Unit = - Isabelle_Thread.interrupt_handler(_ => progress.stop()) { Time.seconds(0.5).sleep } + Isabelle_Thread.interrupt_handler(_ => progress.stop()) { Time.seconds(0.5).sleep() } val numa_nodes = new NUMA.Nodes(numa_shuffling) @tailrec def loop( pending: Queue, running: Map[String, (List[String], Build_Job)], results: Map[String, Result]): Map[String, Result] = { def used_node(i: Int): Boolean = running.iterator.exists( { case (_, (_, job)) => job.numa_node.isDefined && job.numa_node.get == i }) if (pending.is_empty) results else { if (progress.stopped) { for ((_, (_, job)) <- running) job.terminate() } running.find({ case (_, (_, job)) => job.is_finished }) match { case Some((session_name, (input_heaps, job))) => //{{{ finish job val (process_result, heap_digest) = job.join val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test) val process_result_tail = { val tail = job.info.options.int("process_output_tail") process_result.copy( out_lines = "(see also " + store.output_log(session_name).file.toString + ")" :: (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0))) } val build_log = Build_Log.Log_File(session_name, process_result.out_lines). 
parse_session_info( command_timings = true, theory_timings = true, ml_statistics = true, task_statistics = true) // write log file if (process_result.ok) { File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines)) } else File.write(store.output_log(session_name), terminate_lines(log_lines)) // write database using(store.open_database(session_name, output = true))(db => store.write_session_info(db, session_name, build_log = if (process_result.timeout) build_log.error("Timeout") else build_log, build = Session_Info(sources_stamp(deps, session_name), input_heaps, heap_digest, process_result.rc))) // messages process_result.err_lines.foreach(progress.echo) if (process_result.ok) { if (verbose) progress.echo(session_timing(session_name, build_log)) progress.echo(session_finished(session_name, process_result)) } else { progress.echo(session_name + " FAILED") if (!process_result.interrupted) progress.echo(process_result_tail.out) } loop(pending - session_name, running - session_name, results + (session_name -> Result(false, heap_digest, Some(process_result_tail), job.info))) //}}} case None if running.size < (max_jobs max 1) => //{{{ check/start next job pending.dequeue(running.isDefinedAt) match { case Some((session_name, info)) => val ancestor_results = deps.sessions_structure.build_requirements(List(session_name)). filterNot(_ == session_name).map(results(_)) val ancestor_heaps = ancestor_results.flatMap(_.heap_digest) val do_store = build_heap || Sessions.is_pure(session_name) || queue.is_inner(session_name) val (current, heap_digest) = { store.try_open_database(session_name) match { case Some(db) => using(db)(store.read_build(_, session_name)) match { case Some(build) => val heap_digest = store.find_heap_digest(session_name) val current = !fresh_build && build.ok && build.sources == sources_stamp(deps, session_name) && build.input_heaps == ancestor_heaps && build.output_heap == heap_digest && !(do_store && heap_digest.isEmpty) (current, heap_digest) case None => (false, None) } case None => (false, None) } } val all_current = current && ancestor_results.forall(_.current) if (all_current) loop(pending - session_name, running, results + (session_name -> Result(true, heap_digest, Some(Process_Result(0)), info))) else if (no_build) { progress.echo_if(verbose, "Skipping " + session_name + " ...") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, Some(Process_Result(1)), info))) } else if (ancestor_results.forall(_.ok) && !progress.stopped) { progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...") store.clean_output(session_name) using(store.open_database(session_name, output = true))( store.init_session_info(_, session_name)) val numa_node = numa_nodes.next(used_node) val job = new Build_Job(progress, session_name, info, deps, store, do_store, verbose, numa_node, queue.command_timings(session_name)) loop(pending, running + (session_name -> (ancestor_heaps, job)), results) } else { progress.echo(session_name + " CANCELLED") loop(pending - session_name, running, results + (session_name -> Result(false, heap_digest, None, info))) } case None => sleep(); loop(pending, running, results) } ///}}} case None => sleep(); loop(pending, running, results) } } } /* build results */ val results = { val results0 = if (deps.is_empty) { progress.echo_warning("Nothing to build") Map.empty[String, Result] } else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) } new Results( (for ((name, result) <- 
results0.iterator) yield (name, (result.process, result.info))).toMap) } if (export_files) { for (name <- full_sessions.imports_selection(selection).iterator if results(name).ok) { val info = results.info(name) if (info.export_files.nonEmpty) { progress.echo("Exporting " + info.name + " ...") for ((dir, prune, pats) <- info.export_files) { Export.export_files(store, name, info.dir + dir, progress = if (verbose) progress else new Progress, export_prune = prune, export_patterns = pats) } } } } if (results.rc != 0 && (verbose || !no_build)) { val unfinished = (for { name <- results.sessions.iterator if !results(name).ok } yield name).toList.sorted progress.echo("Unfinished session(s): " + commas(unfinished)) } /* PDF/HTML presentation */ if (!no_build && !progress.stopped && results.ok) { val selected = full_sessions_selection.toSet val presentation_chapters = (for { session_name <- deps.sessions_structure.build_topological_order.iterator info = results.info(session_name) if selected(session_name) && presentation.enabled(info) && results(session_name).ok } yield (info.chapter, (session_name, info.description))).toList if (presentation_chapters.nonEmpty) { val presentation_dir = presentation.dir(store) progress.echo("Presentation in " + presentation_dir.absolute) val resources = Resources.empty val html_context = Presentation.html_context() using(store.open_database_context())(db_context => for ((_, (session_name, _)) <- presentation_chapters) { progress.expose_interrupt() progress.echo("Presenting " + session_name + " ...") Presentation.session_html( resources, session_name, deps, db_context, progress = progress, verbose = verbose, html_context = html_context, elements = Presentation.elements1, presentation = presentation) }) val browser_chapters = presentation_chapters.groupBy(_._1). map({ case (chapter, es) => (chapter, es.map(_._2)) }).filterNot(_._2.isEmpty) for ((chapter, entries) <- browser_chapters) Presentation.update_chapter_index(presentation_dir, chapter, entries) if (browser_chapters.nonEmpty) Presentation.make_global_index(presentation_dir) } } results } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build", "build and manage Isabelle sessions", Scala_Project.here, args => { val build_options = Word.explode(Isabelle_System.getenv("ISABELLE_BUILD_OPTIONS")) var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var numa_shuffling = false var presentation = Presentation.Context.none var requirements = false var soft_build = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_heap = false var clean_build = false var dirs: List[Path] = Nil var export_files = false var fresh_build = false var session_groups: List[String] = Nil var max_jobs = 1 var check_keywords: Set[String] = Set.empty var list_files = false var no_build = false var options = Options.init(opts = build_options) var verbose = false var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle build [OPTIONS] [SESSIONS ...] 
Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -N cyclic shuffling of NUMA CPU nodes (performance tuning) -P DIR enable HTML/PDF presentation in directory (":" for default) -R refer to requirements of selected sessions -S soft build: only observe changes of sources, not heap images -X NAME exclude sessions from group NAME and all descendants -a select all sessions -b build heap images -c clean build -d DIR include session directory -e export files from session specification into file-system -f fresh build -g NAME select session group NAME -j INT maximum number of parallel jobs (default 1) -k KEYWORD check theory sources for conflicts with proposed keywords -l list session source files -n no build -- test dependencies only -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose -x NAME exclude session NAME and all descendants Build and manage Isabelle sessions, depending on implicit settings: """ + Library.prefix_lines(" ", Build_Log.Settings.show()) + "\n", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "N" -> (_ => numa_shuffling = true), "P:" -> (arg => presentation = Presentation.Context.make(arg)), "R" -> (_ => requirements = true), "S" -> (_ => soft_build = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "b" -> (_ => build_heap = true), "c" -> (_ => clean_build = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "e" -> (_ => export_files = true), "f" -> (_ => fresh_build = true), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "j:" -> (arg => max_jobs = Value.Int.parse(arg)), "k:" -> (arg => check_keywords = check_keywords + arg), "l" -> (_ => list_files = true), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val progress = new Console_Progress(verbose = verbose) val start_date = Date.now() if (verbose) { progress.echo( "Started at " + Build_Log.print_date(start_date) + " (" + Isabelle_System.getenv("ML_IDENTIFIER") + " on " + Isabelle_System.hostname() +")") progress.echo(Build_Log.Settings.show() + "\n") } val results = progress.interrupt_handler { build(options, selection = Sessions.Selection( requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions), presentation = presentation, progress = progress, check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME), build_heap = build_heap, clean_build = clean_build, dirs = dirs, select_dirs = select_dirs, numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling), max_jobs = max_jobs, list_files = list_files, check_keywords = check_keywords, fresh_build = fresh_build, no_build = no_build, soft_build = soft_build, verbose = verbose, export_files = export_files) } val end_date = Date.now() val elapsed_time = end_date.time - start_date.time if (verbose) { progress.echo("\nFinished at " + Build_Log.print_date(end_date)) } val total_timing = results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _). 
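// Note (editorial, not part of the original sources): total_timing sums the per-session
// timings but then overrides the elapsed component with the wall-clock duration of the
// whole build run (end_date - start_date), so the reported "elapsed" reflects real time
// rather than the sum of individual session times.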
copy(elapsed = elapsed_time) progress.echo(total_timing.message_resources) sys.exit(results.rc) }) /* build logic image */ def build_logic(options: Options, logic: String, progress: Progress = new Progress, build_heap: Boolean = false, dirs: List[Path] = Nil, fresh: Boolean = false, strict: Boolean = false): Int = { val selection = Sessions.Selection.session(logic) val rc = if (!fresh && build(options, selection = selection, build_heap = build_heap, no_build = true, dirs = dirs).ok) 0 else { progress.echo("Build started for Isabelle/" + logic + " ...") Build.build(options, selection = selection, progress = progress, build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc } if (strict && rc != 0) error("Failed to build Isabelle/" + logic) else rc } } diff --git a/src/Pure/Tools/server.scala b/src/Pure/Tools/server.scala --- a/src/Pure/Tools/server.scala +++ b/src/Pure/Tools/server.scala @@ -1,598 +1,598 @@ /* Title: Pure/Tools/server.scala Author: Makarius Resident Isabelle servers. Message formats: - short message (single line): NAME ARGUMENT - long message (multiple lines): BYTE_LENGTH NAME ARGUMENT Argument formats: - Unit as empty string - XML.Elem in YXML notation - JSON.T in standard notation */ package isabelle import java.io.{BufferedInputStream, BufferedOutputStream, InputStreamReader, OutputStreamWriter, IOException} import java.net.{Socket, SocketException, SocketTimeoutException, ServerSocket, InetAddress} object Server { /* message argument */ object Argument { def is_name_char(c: Char): Boolean = Symbol.is_ascii_letter(c) || Symbol.is_ascii_digit(c) || c == '_' || c == '.' def split(msg: String): (String, String) = { val name = msg.takeWhile(is_name_char) val argument = msg.substring(name.length).dropWhile(Symbol.is_ascii_blank) (name, argument) } def print(arg: Any): String = arg match { case () => "" case t: XML.Elem => YXML.string_of_tree(t) case t: JSON.T => JSON.Format(t) } def parse(argument: String): Any = if (argument == "") () else if (YXML.detect_elem(argument)) YXML.parse_elem(argument) else JSON.parse(argument, strict = false) def unapply(argument: String): Option[Any] = try { Some(parse(argument)) } catch { case ERROR(_) => None } } /* input command */ type Command_Body = PartialFunction[(Context, Any), Any] abstract class Command(val command_name: String) { def command_body: Command_Body override def toString: String = command_name } class Commands(commands: Command*) extends Isabelle_System.Service { def entries: List[Command] = commands.toList } private lazy val command_table: Map[String, Command] = Isabelle_System.make_services(classOf[Commands]).flatMap(_.entries). 
foldLeft(Map.empty[String, Command]) { case (cmds, cmd) => val name = cmd.command_name if (cmds.isDefinedAt(name)) error("Duplicate Isabelle server command: " + quote(name)) else cmds + (name -> cmd) } /* output reply */ class Error(val message: String, val json: JSON.Object.T = JSON.Object.empty) extends RuntimeException(message) def json_error(exn: Throwable): JSON.Object.T = exn match { case e: Error => Reply.error_message(e.message) ++ e.json case ERROR(msg) => Reply.error_message(msg) case _ if Exn.is_interrupt(exn) => Reply.error_message(Exn.message(exn)) case _ => JSON.Object.empty } object Reply extends Enumeration { val OK, ERROR, FINISHED, FAILED, NOTE = Value def message(msg: String, kind: String = ""): JSON.Object.T = JSON.Object(Markup.KIND -> proper_string(kind).getOrElse(Markup.WRITELN), "message" -> msg) def error_message(msg: String): JSON.Object.T = message(msg, kind = Markup.ERROR) def unapply(msg: String): Option[(Reply.Value, Any)] = { if (msg == "") None else { val (name, argument) = Argument.split(msg) for { reply <- try { Some(withName(name)) } catch { case _: NoSuchElementException => None } arg <- Argument.unapply(argument) } yield (reply, arg) } } } /* handler: port, password, thread */ abstract class Handler(port0: Int) { val socket: ServerSocket = new ServerSocket(port0, 50, Server.localhost) def port: Int = socket.getLocalPort val password: String = UUID.random_string() override def toString: String = print(port, password) def handle(connection: Server.Connection): Unit private lazy val thread: Thread = Isabelle_Thread.fork(name = "server_handler") { var finished = false while (!finished) { Exn.capture(socket.accept) match { case Exn.Res(client) => Isabelle_Thread.fork(name = "client") { using(Connection(client))(connection => if (connection.read_password(password)) handle(connection)) } case Exn.Exn(_) => finished = true } } } def start(): Unit = thread def join: Unit = thread.join def stop(): Unit = { socket.close(); join } } /* socket connection */ object Connection { def apply(socket: Socket): Connection = new Connection(socket) } class Connection private(socket: Socket) extends AutoCloseable { override def toString: String = socket.toString def close(): Unit = socket.close() def set_timeout(t: Time): Unit = socket.setSoTimeout(t.ms.toInt) private val in = new BufferedInputStream(socket.getInputStream) private val out = new BufferedOutputStream(socket.getOutputStream) private val out_lock: AnyRef = new Object def tty_loop(): TTY_Loop = new TTY_Loop( new OutputStreamWriter(out), new InputStreamReader(in), writer_lock = out_lock) def read_password(password: String): Boolean = try { Byte_Message.read_line(in).map(_.text) == Some(password) } catch { case _: IOException => false } def read_message(): Option[String] = try { Byte_Message.read_line_message(in).map(_.text) } catch { case _: IOException => None } def await_close(): Unit = try { Byte_Message.read(in, 1); socket.close() } catch { case _: IOException => } def write_message(msg: String): Unit = out_lock.synchronized { Byte_Message.write_line_message(out, Bytes(UTF8.bytes(msg))) } def reply(r: Reply.Value, arg: Any): Unit = { val argument = Argument.print(arg) write_message(if (argument == "") r.toString else r.toString + " " + argument) } def reply_ok(arg: Any): Unit = reply(Reply.OK, arg) def reply_error(arg: Any): Unit = reply(Reply.ERROR, arg) def reply_error_message(message: String, more: JSON.Object.Entry*): Unit = reply_error(Reply.error_message(message) ++ more) def notify(arg: Any): Unit = 
reply(Reply.NOTE, arg) } /* context with output channels */ class Context private[Server](val server: Server, connection: Connection) extends AutoCloseable { context => def command_list: List[String] = command_table.keys.toList.sorted def reply(r: Reply.Value, arg: Any): Unit = connection.reply(r, arg) def notify(arg: Any): Unit = connection.notify(arg) def message(kind: String, msg: String, more: JSON.Object.Entry*): Unit = notify(Reply.message(msg, kind = kind) ++ more) def writeln(msg: String, more: JSON.Object.Entry*): Unit = message(Markup.WRITELN, msg, more:_*) def warning(msg: String, more: JSON.Object.Entry*): Unit = message(Markup.WARNING, msg, more:_*) def error_message(msg: String, more: JSON.Object.Entry*): Unit = message(Markup.ERROR, msg, more:_*) def progress(more: JSON.Object.Entry*): Connection_Progress = new Connection_Progress(context, more:_*) override def toString: String = connection.toString /* asynchronous tasks */ private val _tasks = Synchronized(Set.empty[Task]) def make_task(body: Task => JSON.Object.T): Task = { val task = new Task(context, body) _tasks.change(_ + task) task } def remove_task(task: Task): Unit = _tasks.change(_ - task) def cancel_task(id: UUID.T): Unit = _tasks.change(tasks => { tasks.find(task => task.id == id).foreach(_.cancel()); tasks }) def close(): Unit = { while(_tasks.change_result(tasks => { tasks.foreach(_.cancel()); (tasks.nonEmpty, tasks) })) { _tasks.value.foreach(_.join) } } } class Connection_Progress private[Server](context: Context, more: JSON.Object.Entry*) extends Progress { override def echo(msg: String): Unit = context.writeln(msg, more:_*) override def echo_warning(msg: String): Unit = context.warning(msg, more:_*) override def echo_error_message(msg: String): Unit = context.error_message(msg, more:_*) override def theory(theory: Progress.Theory): Unit = { val entries: List[JSON.Object.Entry] = List("theory" -> theory.theory, "session" -> theory.session) ::: (theory.percentage match { case None => Nil case Some(p) => List("percentage" -> p) }) context.writeln(theory.message, entries ::: more.toList:_*) } override def nodes_status(nodes_status: Document_Status.Nodes_Status): Unit = { val json = for ((name, node_status) <- nodes_status.present) yield name.json + ("status" -> node_status.json) context.notify(JSON.Object(Markup.KIND -> Markup.NODES_STATUS, Markup.NODES_STATUS -> json)) } override def toString: String = context.toString } class Task private[Server](val context: Context, body: Task => JSON.Object.T) { task => val id: UUID.T = UUID.random() val ident: JSON.Object.Entry = ("task" -> id.toString) val progress: Connection_Progress = context.progress(ident) def cancel(): Unit = progress.stop() private lazy val thread = Isabelle_Thread.fork(name = "server_task") { Exn.capture { body(task) } match { case Exn.Res(res) => context.reply(Reply.FINISHED, res + ident) case Exn.Exn(exn) => val err = json_error(exn) if (err.isEmpty) throw exn else context.reply(Reply.FAILED, err + ident) } progress.stop() context.remove_task(task) } def start: Unit = thread def join: Unit = thread.join } /* server info */ val localhost_name: String = "127.0.0.1" def localhost: InetAddress = InetAddress.getByName(localhost_name) def print_address(port: Int): String = localhost_name + ":" + port def print(port: Int, password: String): String = print_address(port) + " (password " + quote(password) + ")" object Info { private val Pattern = ("""server "([^"]*)" = \Q""" + localhost_name + """\E:(\d+) \(password "([^"]*)"\)""").r def parse(s: 
String): Option[Info] = s match { case Pattern(name, Value.Int(port), password) => Some(Info(name, port, password)) case _ => None } def apply(name: String, port: Int, password: String): Info = new Info(name, port, password) } class Info private(val name: String, val port: Int, val password: String) { def address: String = print_address(port) override def toString: String = "server " + quote(name) + " = " + print(port, password) def connection(): Connection = { val connection = Connection(new Socket(localhost, port)) connection.write_message(password) connection } def active: Boolean = try { using(connection())(connection => { connection.set_timeout(Time.seconds(2.0)) connection.read_message() match { case Some(Reply(Reply.OK, _)) => true case _ => false } }) } catch { case _: IOException => false case _: SocketException => false case _: SocketTimeoutException => false } } /* per-user servers */ val default_name = "isabelle" object Data { val database = Path.explode("$ISABELLE_HOME_USER/servers.db") val name = SQL.Column.string("name").make_primary_key val port = SQL.Column.int("port") val password = SQL.Column.string("password") val table = SQL.Table("isabelle_servers", List(name, port, password)) } def list(db: SQLite.Database): List[Info] = if (db.tables.contains(Data.table.name)) { db.using_statement(Data.table.select())(stmt => stmt.execute_query().iterator(res => Info( res.string(Data.name), res.int(Data.port), res.string(Data.password))).toList.sortBy(_.name)) } else Nil def find(db: SQLite.Database, name: String): Option[Info] = list(db).find(server_info => server_info.name == name && server_info.active) def init( name: String = default_name, port: Int = 0, existing_server: Boolean = false, log: Logger = No_Logger): (Info, Option[Server]) = { using(SQLite.open_database(Data.database))(db => { db.transaction { Isabelle_System.chmod("600", Data.database) db.create_table(Data.table) list(db).filterNot(_.active).foreach(server_info => db.using_statement(Data.table.delete(Data.name.where_equal(server_info.name)))( _.execute())) } db.transaction { find(db, name) match { case Some(server_info) => (server_info, None) case None => if (existing_server) error("Isabelle server " + quote(name) + " not running") val server = new Server(port, log) val server_info = Info(name, server.port, server.password) db.using_statement(Data.table.delete(Data.name.where_equal(name)))(_.execute()) db.using_statement(Data.table.insert())(stmt => { stmt.string(1) = server_info.name stmt.int(2) = server_info.port stmt.string(3) = server_info.password stmt.execute() }) server.start() (server_info, Some(server)) } } }) } def exit(name: String = default_name): Boolean = { using(SQLite.open_database(Data.database))(db => db.transaction { find(db, name) match { case Some(server_info) => using(server_info.connection())(_.write_message("shutdown")) - while(server_info.active) { Time.seconds(0.05).sleep } + while(server_info.active) { Time.seconds(0.05).sleep() } true case None => false } }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("server", "manage resident Isabelle servers", Scala_Project.here, args => { var console = false var log_file: Option[Path] = None var operation_list = false var operation_exit = false var name = default_name var port = 0 var existing_server = false val getopts = Getopts(""" Usage: isabelle server [OPTIONS] Options are: -L FILE logging on FILE -c console interaction with specified server -l list servers (alternative operation) -n NAME explicit server name (default: """ + 
default_name + """) -p PORT explicit server port -s assume existing server, no implicit startup -x exit specified server (alternative operation) Manage resident Isabelle servers. """, "L:" -> (arg => log_file = Some(Path.explode(File.standard_path(arg)))), "c" -> (_ => console = true), "l" -> (_ => operation_list = true), "n:" -> (arg => name = arg), "p:" -> (arg => port = Value.Int.parse(arg)), "s" -> (_ => existing_server = true), "x" -> (_ => operation_exit = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() if (operation_list) { for { server_info <- using(SQLite.open_database(Data.database))(list) if server_info.active } Output.writeln(server_info.toString, stdout = true) } else if (operation_exit) { val ok = Server.exit(name) sys.exit(if (ok) 0 else 2) } else { val log = Logger.make(log_file) val (server_info, server) = init(name, port = port, existing_server = existing_server, log = log) Output.writeln(server_info.toString, stdout = true) if (console) { using(server_info.connection())(connection => connection.tty_loop().join) } server.foreach(_.join) } }) } class Server private(port0: Int, val log: Logger) extends Server.Handler(port0) { server => private val _sessions = Synchronized(Map.empty[UUID.T, Headless.Session]) def err_session(id: UUID.T): Nothing = error("No session " + Library.single_quote(id.toString)) def the_session(id: UUID.T): Headless.Session = _sessions.value.getOrElse(id, err_session(id)) def add_session(entry: (UUID.T, Headless.Session)): Unit = _sessions.change(_ + entry) def remove_session(id: UUID.T): Headless.Session = { _sessions.change_result(sessions => sessions.get(id) match { case Some(session) => (session, sessions - id) case None => err_session(id) }) } def shutdown(): Unit = { server.socket.close() val sessions = _sessions.change_result(sessions => (sessions, Map.empty)) for ((_, session) <- sessions) { try { val result = session.stop() if (!result.ok) log("Session shutdown failed: " + result.print_rc) } catch { case ERROR(msg) => log("Session shutdown failed: " + msg) } } } override def join: Unit = { super.join; shutdown() } override def handle(connection: Server.Connection): Unit = { using(new Server.Context(server, connection))(context => { connection.reply_ok( JSON.Object( "isabelle_id" -> Isabelle_System.isabelle_id(), "isabelle_name" -> Isabelle_System.isabelle_name())) var finished = false while (!finished) { connection.read_message() match { case None => finished = true case Some("") => context.notify("Command 'help' provides list of commands") case Some(msg) => val (name, argument) = Server.Argument.split(msg) Server.command_table.get(name) match { case Some(cmd) => argument match { case Server.Argument(arg) => if (cmd.command_body.isDefinedAt((context, arg))) { Exn.capture { cmd.command_body((context, arg)) } match { case Exn.Res(task: Server.Task) => connection.reply_ok(JSON.Object(task.ident)) task.start case Exn.Res(res) => connection.reply_ok(res) case Exn.Exn(exn) => val err = Server.json_error(exn) if (err.isEmpty) throw exn else connection.reply_error(err) } } else { connection.reply_error_message( "Bad argument for command " + Library.single_quote(name), "argument" -> argument) } case _ => connection.reply_error_message( "Malformed argument for command " + Library.single_quote(name), "argument" -> argument) } case None => connection.reply_error("Bad command " + Library.single_quote(name)) } } } }) } } diff --git a/src/Tools/jEdit/src/document_model.scala b/src/Tools/jEdit/src/document_model.scala --- 
a/src/Tools/jEdit/src/document_model.scala +++ b/src/Tools/jEdit/src/document_model.scala @@ -1,700 +1,700 @@ /* Title: Tools/jEdit/src/document_model.scala Author: Fabian Immler, TU Munich Author: Makarius Document model connected to jEdit buffer or external file: content of theory node or auxiliary file (blob). */ package isabelle.jedit import isabelle._ import java.io.{File => JFile} import scala.collection.mutable import scala.annotation.tailrec import org.gjt.sp.jedit.View import org.gjt.sp.jedit.Buffer import org.gjt.sp.jedit.buffer.{BufferAdapter, BufferListener, JEditBuffer} object Document_Model { /* document models */ sealed case class State( models: Map[Document.Node.Name, Document_Model] = Map.empty, buffer_models: Map[JEditBuffer, Buffer_Model] = Map.empty, overlays: Document.Overlays = Document.Overlays.empty) { def file_models_iterator: Iterator[(Document.Node.Name, File_Model)] = for { (node_name, model) <- models.iterator if model.isInstanceOf[File_Model] } yield (node_name, model.asInstanceOf[File_Model]) def document_blobs: Document.Blobs = Document.Blobs( (for { (node_name, model) <- models.iterator blob <- model.get_blob } yield (node_name -> blob)).toMap) def open_buffer(session: Session, node_name: Document.Node.Name, buffer: Buffer) : (Buffer_Model, State) = { val old_model = models.get(node_name) match { case Some(file_model: File_Model) => Some(file_model) case Some(buffer_model: Buffer_Model) => Some(buffer_model.exit()) case _ => None } val buffer_model = Buffer_Model(session, node_name, buffer).init(old_model) (buffer_model, copy(models = models + (node_name -> buffer_model), buffer_models = buffer_models + (buffer -> buffer_model))) } def close_buffer(buffer: JEditBuffer): State = { buffer_models.get(buffer) match { case None => this case Some(buffer_model) => val file_model = buffer_model.exit() copy(models = models + (file_model.node_name -> file_model), buffer_models = buffer_models - buffer) } } def provide_file(session: Session, node_name: Document.Node.Name, text: String): State = if (models.isDefinedAt(node_name)) this else { val edit = Text.Edit.insert(0, text) val model = File_Model.init(session, node_name, text, pending_edits = List(edit)) copy(models = models + (node_name -> model)) } } private val state = Synchronized(State()) // owned by GUI thread def get_models(): Map[Document.Node.Name, Document_Model] = state.value.models def get(name: Document.Node.Name): Option[Document_Model] = get_models().get(name) def get(buffer: JEditBuffer): Option[Buffer_Model] = state.value.buffer_models.get(buffer) def document_blobs(): Document.Blobs = state.value.document_blobs /* bibtex */ def bibtex_entries_iterator(): Iterator[Text.Info[(String, Document_Model)]] = Bibtex.entries_iterator(state.value.models) def bibtex_completion(history: Completion.History, rendering: Rendering, caret: Text.Offset) : Option[Completion.Result] = Bibtex.completion(history, rendering, caret, state.value.models) /* overlays */ def node_overlays(name: Document.Node.Name): Document.Node.Overlays = state.value.overlays(name) def insert_overlay(command: Command, fn: String, args: List[String]): Unit = state.change(st => st.copy(overlays = st.overlays.insert(command, fn, args))) def remove_overlay(command: Command, fn: String, args: List[String]): Unit = state.change(st => st.copy(overlays = st.overlays.remove(command, fn, args))) /* sync external files */ def sync_files(changed_files: Set[JFile]): Boolean = { state.change_result(st => { val changed_models = (for { (node_name, model) 
<- st.file_models_iterator file <- model.file if changed_files(file) text <- PIDE.resources.read_file_content(node_name) if model.content.text != text } yield { val content = Document_Model.File_Content(text) val edits = Text.Edit.replace(0, model.content.text, text) (node_name, model.copy(content = content, pending_edits = model.pending_edits ::: edits)) }).toList if (changed_models.isEmpty) (false, st) else (true, st.copy(models = changed_models.foldLeft(st.models)(_ + _))) }) } /* syntax */ def syntax_changed(names: List[Document.Node.Name]): Unit = { GUI_Thread.require {} val models = state.value.models for (name <- names.iterator; model <- models.get(name)) { model match { case buffer_model: Buffer_Model => buffer_model.syntax_changed() case _ => } } } /* init and exit */ def init(session: Session, node_name: Document.Node.Name, buffer: Buffer): Buffer_Model = { GUI_Thread.require {} state.change_result(st => st.buffer_models.get(buffer) match { case Some(buffer_model) if buffer_model.node_name == node_name => buffer_model.init_token_marker() (buffer_model, st) case _ => val res = st.close_buffer(buffer).open_buffer(session, node_name, buffer) buffer.propertiesChanged() res }) } def exit(buffer: Buffer): Unit = { GUI_Thread.require {} state.change(st => if (st.buffer_models.isDefinedAt(buffer)) { val res = st.close_buffer(buffer) buffer.propertiesChanged() res } else st) } def provide_files(session: Session, files: List[(Document.Node.Name, String)]): Unit = { GUI_Thread.require {} state.change(st => files.foldLeft(st) { case (st1, (node_name, text)) => st1.provide_file(session, node_name, text) }) } /* required nodes */ def required_nodes(): Set[Document.Node.Name] = (for { (node_name, model) <- state.value.models.iterator if model.node_required } yield node_name).toSet def node_required( name: Document.Node.Name, toggle: Boolean = false, set: Boolean = false): Unit = { GUI_Thread.require {} val changed = state.change_result(st => st.models.get(name) match { case None => (false, st) case Some(model) => val required = if (toggle) !model.node_required else set model match { case model1: File_Model if required != model1.node_required => (true, st.copy(models = st.models + (name -> model1.copy(node_required = required)))) case model1: Buffer_Model if required != model1.node_required => model1.set_node_required(required); (true, st) case _ => (false, st) } }) if (changed) { PIDE.plugin.options_changed() PIDE.editor.flush() } } def view_node_required(view: View, toggle: Boolean = false, set: Boolean = false): Unit = Document_Model.get(view.getBuffer).foreach(model => node_required(model.node_name, toggle = toggle, set = set)) /* flushed edits */ def flush_edits(hidden: Boolean, purge: Boolean): (Document.Blobs, List[Document.Edit_Text]) = { GUI_Thread.require {} state.change_result(st => { val doc_blobs = st.document_blobs val buffer_edits = (for { (_, model) <- st.buffer_models.iterator edit <- model.flush_edits(doc_blobs, hidden).iterator } yield edit).toList val file_edits = (for { (node_name, model) <- st.file_models_iterator (edits, model1) <- model.flush_edits(doc_blobs, hidden) } yield (edits, node_name -> model1)).toList val model_edits = buffer_edits ::: file_edits.flatMap(_._1) val purge_edits = if (purge) { val purged = (for ((node_name, model) <- st.file_models_iterator) yield (node_name -> model.purge_edits(doc_blobs))).toList val imports = { val open_nodes = (for ((_, model) <- st.buffer_models.iterator) yield model.node_name).toList val touched_nodes = model_edits.map(_._1) 
val pending_nodes = for ((node_name, None) <- purged) yield node_name (open_nodes ::: touched_nodes ::: pending_nodes).map((_, Position.none)) } val retain = PIDE.resources.dependencies(imports).theories.toSet for ((node_name, Some(edits)) <- purged if !retain(node_name); edit <- edits) yield edit } else Nil val st1 = st.copy(models = st.models ++ file_edits.map(_._2) -- purge_edits.map(_._1)) PIDE.plugin.file_watcher.purge( (for { (_, model) <- st1.file_models_iterator file <- model.file } yield file.getParentFile).toSet) ((doc_blobs, model_edits ::: purge_edits), st1) }) } /* file content */ sealed case class File_Content(text: String) { lazy val bytes: Bytes = Bytes(Symbol.encode(text)) lazy val chunk: Symbol.Text_Chunk = Symbol.Text_Chunk(text) lazy val bibtex_entries: List[Text.Info[String]] = try { Bibtex.entries(text) } catch { case ERROR(_) => Nil } } /* HTTP preview */ private val plain_text_prefix = "plain_text=" def open_preview(view: View, plain_text: Boolean): Unit = { Document_Model.get(view.getBuffer) match { case Some(model) => val name = model.node_name val url = PIDE.plugin.http_server.url + PIDE.plugin.http_root + "/preview?" + (if (plain_text) plain_text_prefix else "") + Url.encode(name.node) PIDE.editor.hyperlink_url(url).follow(view) case _ => } } def http_handlers(http_root: String): List[HTTP.Handler] = { val fonts_root = http_root + "/fonts" val preview_root = http_root + "/preview" val html = HTTP.Handler.get(preview_root, arg => for { query <- Library.try_unprefix(preview_root + "?", arg.uri.toString).map(Url.decode) name = Library.perhaps_unprefix(plain_text_prefix, query) model <- get(PIDE.resources.node_name(name)) } yield { val snapshot = model.await_stable_snapshot() val html_context = Presentation.html_context(fonts_url = HTML.fonts_dir(fonts_root)) val document = Presentation.html_document( PIDE.resources, snapshot, html_context, Presentation.elements2, plain_text = query.startsWith(plain_text_prefix)) HTTP.Response.html(document.content) }) List(HTTP.welcome(http_root), HTTP.fonts(fonts_root), html) } } sealed abstract class Document_Model extends Document.Model { /* perspective */ def document_view_ranges(snapshot: Document.Snapshot): List[Text.Range] = Nil def node_perspective( doc_blobs: Document.Blobs, hidden: Boolean): (Boolean, Document.Node.Perspective_Text) = { GUI_Thread.require {} if (Isabelle.continuous_checking && is_theory) { val snapshot = this.snapshot() val reparse = snapshot.node.load_commands_changed(doc_blobs) val perspective = if (hidden) Text.Perspective.empty else { val view_ranges = document_view_ranges(snapshot) val load_ranges = snapshot.commands_loading_ranges(PIDE.editor.visible_node) Text.Perspective(view_ranges ::: load_ranges) } val overlays = PIDE.editor.node_overlays(node_name) (reparse, Document.Node.Perspective(node_required, perspective, overlays)) } else (false, Document.Node.no_perspective_text) } /* snapshot */ @tailrec final def await_stable_snapshot(): Document.Snapshot = { val snapshot = this.snapshot() if (snapshot.is_outdated) { - PIDE.options.seconds("editor_output_delay").sleep + PIDE.options.seconds("editor_output_delay").sleep() await_stable_snapshot() } else snapshot } } object File_Model { def empty(session: Session): File_Model = File_Model(session, Document.Node.Name.empty, None, Document_Model.File_Content(""), false, Document.Node.no_perspective_text, Nil) def init(session: Session, node_name: Document.Node.Name, text: String, node_required: Boolean = false, last_perspective: 
Document.Node.Perspective_Text = Document.Node.no_perspective_text, pending_edits: List[Text.Edit] = Nil): File_Model = { val file = JEdit_Lib.check_file(node_name.node) file.foreach(PIDE.plugin.file_watcher.register_parent(_)) val content = Document_Model.File_Content(text) val node_required1 = node_required || File_Format.registry.is_theory(node_name) File_Model(session, node_name, file, content, node_required1, last_perspective, pending_edits) } } case class File_Model( session: Session, node_name: Document.Node.Name, file: Option[JFile], content: Document_Model.File_Content, node_required: Boolean, last_perspective: Document.Node.Perspective_Text, pending_edits: List[Text.Edit]) extends Document_Model { /* text */ def get_text(range: Text.Range): Option[String] = range.try_substring(content.text) /* header */ def node_header: Document.Node.Header = PIDE.resources.special_header(node_name) getOrElse PIDE.resources.check_thy(node_name, Scan.char_reader(content.text), strict = false) /* content */ def node_position(offset: Text.Offset): Line.Node_Position = Line.Node_Position(node_name.node, Line.Position.zero.advance(content.text.substring(0, offset))) def get_blob: Option[Document.Blob] = if (is_theory) None else Some(Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty)) def bibtex_entries: List[Text.Info[String]] = if (Bibtex.is_bibtex(node_name.node)) content.bibtex_entries else Nil /* edits */ def update_text(text: String): Option[File_Model] = Text.Edit.replace(0, content.text, text) match { case Nil => None case edits => val content1 = Document_Model.File_Content(text) val pending_edits1 = pending_edits ::: edits Some(copy(content = content1, pending_edits = pending_edits1)) } def flush_edits(doc_blobs: Document.Blobs, hidden: Boolean) : Option[(List[Document.Edit_Text], File_Model)] = { val (reparse, perspective) = node_perspective(doc_blobs, hidden) if (reparse || pending_edits.nonEmpty || last_perspective != perspective) { val edits = node_edits(node_header, pending_edits, perspective) Some((edits, copy(last_perspective = perspective, pending_edits = Nil))) } else None } def purge_edits(doc_blobs: Document.Blobs): Option[List[Document.Edit_Text]] = if (pending_edits.nonEmpty || !File_Format.registry.is_theory(node_name) && (node_required || !Document.Node.is_no_perspective_text(last_perspective))) None else { val text_edits = List(Text.Edit.remove(0, content.text)) Some(node_edits(Document.Node.no_header, text_edits, Document.Node.no_perspective_text)) } /* snapshot */ def is_stable: Boolean = pending_edits.isEmpty def snapshot(): Document.Snapshot = session.snapshot(node_name, pending_edits) } case class Buffer_Model(session: Session, node_name: Document.Node.Name, buffer: Buffer) extends Document_Model { /* text */ def get_text(range: Text.Range): Option[String] = JEdit_Lib.get_text(buffer, range) /* header */ def node_header(): Document.Node.Header = { GUI_Thread.require {} PIDE.resources.special_header(node_name) getOrElse JEdit_Lib.buffer_lock(buffer) { PIDE.resources.check_thy(node_name, JEdit_Lib.buffer_reader(buffer), strict = false) } } /* perspective */ // owned by GUI thread private var _node_required = false def node_required: Boolean = _node_required def set_node_required(b: Boolean): Unit = GUI_Thread.require { _node_required = b } def document_view_iterator: Iterator[Document_View] = for { text_area <- JEdit_Lib.jedit_text_areas(buffer) doc_view <- Document_View.get(text_area) } yield doc_view override def 
document_view_ranges(snapshot: Document.Snapshot): List[Text.Range] = { GUI_Thread.require {} (for { doc_view <- document_view_iterator range <- doc_view.perspective(snapshot).ranges.iterator } yield range).toList } /* blob */ // owned by GUI thread private var _blob: Option[(Bytes, String, Symbol.Text_Chunk)] = None private def reset_blob(): Unit = GUI_Thread.require { _blob = None } def get_blob: Option[Document.Blob] = GUI_Thread.require { if (is_theory) None else { val (bytes, text, chunk) = _blob match { case Some(x) => x case None => val bytes = PIDE.resources.make_file_content(buffer) val text = buffer.getText(0, buffer.getLength) val chunk = Symbol.Text_Chunk(text) val x = (bytes, text, chunk) _blob = Some(x) x } val changed = pending_edits.nonEmpty Some(Document.Blob(bytes, text, chunk, changed)) } } /* bibtex entries */ // owned by GUI thread private var _bibtex_entries: Option[List[Text.Info[String]]] = None private def reset_bibtex_entries(): Unit = GUI_Thread.require { _bibtex_entries = None } def bibtex_entries: List[Text.Info[String]] = GUI_Thread.require { if (Bibtex.is_bibtex(node_name.node)) { _bibtex_entries match { case Some(entries) => entries case None => val text = JEdit_Lib.buffer_text(buffer) val entries = try { Bibtex.entries(text) } catch { case ERROR(msg) => Output.warning(msg); Nil } _bibtex_entries = Some(entries) entries } } else Nil } /* pending edits */ private object pending_edits { private val pending = new mutable.ListBuffer[Text.Edit] private var last_perspective = Document.Node.no_perspective_text def nonEmpty: Boolean = synchronized { pending.nonEmpty } def get_edits: List[Text.Edit] = synchronized { pending.toList } def get_last_perspective: Document.Node.Perspective_Text = synchronized { last_perspective } def set_last_perspective(perspective: Document.Node.Perspective_Text): Unit = synchronized { last_perspective = perspective } def flush_edits(doc_blobs: Document.Blobs, hidden: Boolean): List[Document.Edit_Text] = synchronized { GUI_Thread.require {} val edits = get_edits val (reparse, perspective) = node_perspective(doc_blobs, hidden) if (reparse || edits.nonEmpty || last_perspective != perspective) { pending.clear() last_perspective = perspective node_edits(node_header(), edits, perspective) } else Nil } def edit(edits: List[Text.Edit]): Unit = synchronized { GUI_Thread.require {} reset_blob() reset_bibtex_entries() for (doc_view <- document_view_iterator) doc_view.rich_text_area.active_reset() pending ++= edits PIDE.editor.invoke() } } def is_stable: Boolean = !pending_edits.nonEmpty def snapshot(): Document.Snapshot = session.snapshot(node_name, pending_edits.get_edits) def flush_edits(doc_blobs: Document.Blobs, hidden: Boolean): List[Document.Edit_Text] = pending_edits.flush_edits(doc_blobs, hidden) /* buffer listener */ private val buffer_listener: BufferListener = new BufferAdapter { override def contentInserted(buffer: JEditBuffer, start_line: Int, offset: Int, num_lines: Int, length: Int): Unit = { pending_edits.edit(List(Text.Edit.insert(offset, buffer.getText(offset, length)))) } override def preContentRemoved(buffer: JEditBuffer, start_line: Int, offset: Int, num_lines: Int, removed_length: Int): Unit = { pending_edits.edit(List(Text.Edit.remove(offset, buffer.getText(offset, removed_length)))) } } /* syntax */ def syntax_changed(): Unit = { JEdit_Lib.buffer_line_manager(buffer).setFirstInvalidLineContext(0) for (text_area <- JEdit_Lib.jedit_text_areas(buffer)) Untyped.method(Class.forName("org.gjt.sp.jedit.textarea.TextArea"), 
"foldStructureChanged"). invoke(text_area) buffer.invalidateCachedFoldLevels() } def init_token_marker(): Unit = { Isabelle.buffer_token_marker(buffer) match { case Some(marker) if marker != buffer.getTokenMarker => buffer.setTokenMarker(marker) syntax_changed() case _ => } } /* init */ def init(old_model: Option[File_Model]): Buffer_Model = { GUI_Thread.require {} old_model match { case None => pending_edits.edit(List(Text.Edit.insert(0, JEdit_Lib.buffer_text(buffer)))) case Some(file_model) => set_node_required(file_model.node_required) pending_edits.set_last_perspective(file_model.last_perspective) pending_edits.edit( file_model.pending_edits ::: Text.Edit.replace(0, file_model.content.text, JEdit_Lib.buffer_text(buffer))) } buffer.addBufferListener(buffer_listener) init_token_marker() this } /* exit */ def exit(): File_Model = { GUI_Thread.require {} buffer.removeBufferListener(buffer_listener) init_token_marker() File_Model.init(session, node_name, JEdit_Lib.buffer_text(buffer), node_required, pending_edits.get_last_perspective, pending_edits.get_edits) } } diff --git a/src/Tools/jEdit/src/scala_console.scala b/src/Tools/jEdit/src/scala_console.scala --- a/src/Tools/jEdit/src/scala_console.scala +++ b/src/Tools/jEdit/src/scala_console.scala @@ -1,181 +1,181 @@ /* Title: Tools/jEdit/src/scala_console.scala Author: Makarius Scala instance of Console plugin. */ package isabelle.jedit import isabelle._ import console.{Console, ConsolePane, Shell, Output} import org.gjt.sp.jedit.JARClassLoader import java.io.{OutputStream, Writer, PrintWriter} class Scala_Console extends Shell("Scala") { /* global state -- owned by GUI thread */ @volatile private var interpreters = Map.empty[Console, Interpreter] @volatile private var global_console: Console = null @volatile private var global_out: Output = null @volatile private var global_err: Output = null private val console_stream = new OutputStream { val buf = new StringBuilder(100) override def flush(): Unit = { val s = buf.synchronized { val s = buf.toString; buf.clear(); s } val str = UTF8.decode_permissive(s) GUI_Thread.later { if (global_out == null) System.out.print(str) else global_out.writeAttrs(null, str) } - Time.seconds(0.01).sleep // FIXME adhoc delay to avoid loosing output + Time.seconds(0.01).sleep() // FIXME adhoc delay to avoid loosing output } override def close(): Unit = flush() def write(byte: Int): Unit = { val c = byte.toChar buf.synchronized { buf.append(c) } if (c == '\n') flush() } } private val console_writer = new Writer { def flush(): Unit = console_stream.flush() def close(): Unit = console_stream.flush() def write(cbuf: Array[Char], off: Int, len: Int): Unit = { if (len > 0) { UTF8.bytes(new String(cbuf.slice(off, off + len))).foreach(console_stream.write(_)) } } } private def with_console[A](console: Console, out: Output, err: Output)(e: => A): A = { global_console = console global_out = out global_err = if (err == null) out else err try { scala.Console.withErr(console_stream) { scala.Console.withOut(console_stream) { e } } } finally { console_stream.flush global_console = null global_out = null global_err = null } } private def report_error(str: String): Unit = { if (global_console == null || global_err == null) isabelle.Output.writeln(str) else GUI_Thread.later { global_err.print(global_console.getErrorColor, str) } } /* interpreter thread */ private abstract class Request private case class Start(console: Console) extends Request private case class Execute(console: Console, out: Output, err: Output, command: String) 
extends Request private class Interpreter { private val running = Synchronized[Option[Thread]](None) def interrupt(): Unit = running.change(opt => { opt.foreach(_.interrupt()); opt }) private val interp = Scala.Compiler.context(error = report_error, jar_dirs = JEdit_Lib.directories). interpreter( print_writer = new PrintWriter(console_writer, true), class_loader = new JARClassLoader) val thread: Consumer_Thread[Request] = Consumer_Thread.fork("Scala_Console") { case Start(console) => interp.bind("view", "org.gjt.sp.jedit.View", console.getView) interp.bind("console", "console.Console", console) interp.interpret("import isabelle._; import isabelle.jedit._") true case Execute(console, out, err, command) => with_console(console, out, err) { try { running.change(_ => Some(Thread.currentThread())) interp.interpret(command) } finally { running.change(_ => None) Exn.Interrupt.dispose() } GUI_Thread.later { if (err != null) err.commandDone() out.commandDone() } true } } } /* jEdit console methods */ override def openConsole(console: Console): Unit = { val interp = new Interpreter interp.thread.send(Start(console)) interpreters += (console -> interp) } override def closeConsole(console: Console): Unit = { interpreters.get(console) match { case Some(interp) => interp.interrupt() interp.thread.shutdown() interpreters -= console case None => } } override def printInfoMessage(out: Output): Unit = { out.print(null, "This shell evaluates Isabelle/Scala expressions.\n\n" + "The contents of package isabelle and isabelle.jedit are imported.\n" + "The following special toplevel bindings are provided:\n" + " view -- current jEdit/Swing view (e.g. view.getBuffer, view.getTextArea)\n" + " console -- jEdit Console plugin\n" + " PIDE -- Isabelle/PIDE plugin (e.g. PIDE.session, PIDE.snapshot, PIDE.rendering)\n") } override def printPrompt(console: Console, out: Output): Unit = { out.writeAttrs(ConsolePane.colorAttributes(console.getInfoColor), "scala>") out.writeAttrs(ConsolePane.colorAttributes(console.getPlainColor), " ") } override def execute( console: Console, input: String, out: Output, err: Output, command: String): Unit = { interpreters(console).thread.send(Execute(console, out, err, command)) } override def stop(console: Console): Unit = interpreters.get(console).foreach(_.interrupt()) }
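The server.scala hunk above documents the wire protocol (single-line messages of the form NAME ARGUMENT, replies tagged OK, ERROR, NOTE, FINISHED or FAILED) together with a small client-side API: Server.init to start or locate a per-user server, Info.connection() for the password handshake, and Connection.write_message/read_message for the actual exchange. The following is a minimal client sketch against exactly that API; it assumes compilation against Isabelle/Scala (Pure.jar) and that a "help" command is registered through the usual Server.Commands services -- names such as Server_Client_Sketch are illustrative only, not part of the patch.

/* Sketch: exercise the resident-server protocol from server.scala. */

import isabelle._

object Server_Client_Sketch {
  def main(args: Array[String]): Unit = {
    // Start the per-user server "isabelle", or connect to one that is already
    // running; Info.connection() opens the socket and sends the password line.
    val (server_info, server) = Server.init(name = Server.default_name)
    val connection = server_info.connection()
    try {
      connection.set_timeout(Time.seconds(5.0))

      // Short message: NAME ARGUMENT -- here "help" with an empty (Unit) argument.
      connection.write_message("help")

      // Replies arrive as lines starting with OK / ERROR / NOTE ...; show the first.
      connection.read_message().foreach(msg => Output.writeln(msg, stdout = true))
    }
    finally connection.close()

    // Shut the server down only if this process started it: Server.exit sends
    // the "shutdown" message, as shown in the exit code above.
    if (server.isDefined) Server.exit(Server.default_name)
  }
}

This mirrors what the "isabelle server" tool does for option -c: it obtains a Connection via Info.connection() and only then hands it to tty_loop() for interactive use.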