diff --git a/etc/options b/etc/options
--- a/etc/options
+++ b/etc/options
@@ -1,381 +1,384 @@
(* :mode=isabelle-options: *)

section "Document Preparation"

option browser_info : bool = false
  -- "generate theory browser information"
option document : string = "" (standard "true")
  -- "build PDF document (enabled for \"pdf\" or \"true\", disabled for \"\" or \"false\")"
option document_output : string = "" (standard "output")
  -- "document output directory"
option document_echo : bool = false
  -- "inform about document file names during session presentation"
option document_variants : string = "document"
  -- "alternative document variants (separated by colons)"
option document_tags : string = ""
  -- "default command tags (separated by commas)"
option document_bibliography : bool = false
  -- "explicitly enable use of bibtex (default: according to presence of root.bib)"
option document_build : string = "lualatex" (standard "build")
  -- "document build engine (e.g. build, lualatex, pdflatex)"
option document_logo : string = ""
  -- "generate named instance of Isabelle logo (underscore means unnamed variant)"
option document_heading_prefix : string = "isamarkup" (standard)
  -- "prefix for LaTeX macros generated from 'chapter', 'section' etc."
option document_comment_latex : bool = false
  -- "use regular LaTeX version of comment.sty, instead of historic plain TeX version"
option thy_output_display : bool = false
  -- "indicate output as multi-line display-style material"
option thy_output_break : bool = false
  -- "control line breaks in non-display material"
option thy_output_cartouche : bool = false
  -- "indicate if the output should be delimited as cartouche"
option thy_output_quotes : bool = false
  -- "indicate if the output should be delimited via double quotes"
option thy_output_margin : int = 76
  -- "right margin / page width for printing of display material"
option thy_output_indent : int = 0
  -- "indentation for pretty printing of display material"
option thy_output_source : bool = false
  -- "print original source text rather than internal representation"
option thy_output_source_cartouche : bool = false
  -- "print original source text rather than internal representation, preserve cartouches"
option thy_output_modes : string = ""
  -- "additional print modes for document output (separated by commas)"

section "Prover Output"

option show_types : bool = false
  -- "show type constraints when printing terms"
option show_sorts : bool = false
  -- "show sort constraints when printing types"
option show_brackets : bool = false
  -- "show extra brackets when printing terms/types"
option show_question_marks : bool = true
  -- "show leading question mark of schematic variables"
option show_consts : bool = false
  -- "show constants with types when printing proof state"
option show_main_goal : bool = false
  -- "show main goal when printing proof state"
option goals_limit : int = 10
  -- "maximum number of subgoals to be printed"
option show_states : bool = false
  -- "show toplevel states even if outside of interactive mode"
option names_long : bool = false
  -- "show fully qualified names"
option names_short : bool = false
  -- "show base names only"
option names_unique : bool = true
  -- "show partially qualified names, as required for unique name resolution"
option eta_contract : bool = true
  -- "print terms in eta-contracted form"
option print_mode : string = ""
  -- "additional print modes for prover output (separated by commas)"
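These options are read from Isabelle/Scala through typed accessors that must match the declared type, just as the ssh.scala hunk further below uses options.bool, options.real and options.int. A minimal sketch, assuming the standard Options.init() entry point; only the option names are taken from this file:

  // minimal sketch: typed access to options declared above
  val options = Options.init()
  val show_types: Boolean = options.bool("show_types")    // bool option
  val goals_limit: Int = options.int("goals_limit")       // int option
  val print_mode: String = options.string("print_mode")   // string option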
section "Parallel Processing and Timing"

public option threads : int = 0
  -- "maximum number of worker threads for prover process (0 = hardware max.)"
option threads_trace : int = 0
  -- "level of tracing information for multithreading"
option threads_stack_limit : real = 0.25
  -- "maximum stack size for worker threads (in giga words, 0 = unlimited)"
public option parallel_limit : int = 0
  -- "approximative limit for parallel tasks (0 = unlimited)"
public option parallel_print : bool = true
  -- "parallel and asynchronous printing of results"
public option parallel_proofs : int = 1
  -- "level of parallel proof checking: 0, 1, 2"
option parallel_subproofs_threshold : real = 0.01
  -- "lower bound of timing estimate for forked nested proofs (seconds)"
option command_timing_threshold : real = 0.1
  -- "default threshold for persistent command timing (seconds)"
public option timeout_scale : real = 1.0
  -- "scale factor for timeout in Isabelle/ML and session build"
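The descriptions above pin down the special values: threads = 0 falls back to the number of hardware threads, and timeout_scale multiplies the session timeout declared further below. A sketch of that reading only, not the actual scheduler or build code:

  // illustration only: how "0 = hardware max." and the timeout scale factor read
  def worker_threads(options: Options): Int = {
    val n = options.int("threads")
    if (n == 0) Runtime.getRuntime.availableProcessors() else n
  }
  def scaled_timeout(options: Options): Double =
    options.real("timeout") * options.real("timeout_scale")  // e.g. a 3600.0 s timeout scaled by 1.0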
section "Detail of Proof Checking"

option record_proofs : int = -1
  -- "set level of proofterm recording: 0, 1, 2, negative means unchanged"
option quick_and_dirty : bool = false
  -- "if true then some tools will OMIT some proofs"
option skip_proofs : bool = false
  -- "skip over proofs (implicit 'sorry')"
option strict_facts : bool = false
  -- "force lazy facts when defined in context"

section "Global Session Parameters"

option condition : string = ""
  -- "required environment variables for subsequent theories (separated by commas)"
option timeout : real = 0
  -- "timeout for session build job (seconds > 0)"
option timeout_build : bool = true
  -- "observe timeout for session build"
option process_output_limit : int = 100
  -- "build process output limit (in million characters, 0 = unlimited)"
option process_output_tail : int = 40
  -- "build process output tail shown to user (in lines, 0 = unlimited)"
option profiling : string = "" (standard "time")
  -- "ML profiling (possible values: time, time_thread, allocations)"
option system_log : string = "" (standard "-")
  -- "output for system messages (log file or \"-\" for console progress)"
option system_heaps : bool = false
  -- "store session heaps in $ISABELLE_HEAPS_SYSTEM, not $ISABELLE_HEAPS"

section "ML System"

option ML_print_depth : int = 20
  -- "ML print depth for toplevel pretty-printing"
public option ML_exception_trace : bool = false
  -- "ML exception trace for toplevel command execution"
public option ML_exception_debugger : bool = false
  -- "ML debugger exception trace for toplevel command execution"
public option ML_debugger : bool = false
  -- "ML debugger instrumentation for newly compiled code"
public option ML_system_64 : bool = false
  -- "ML system for 64bit platform is used if possible (change requires restart)"
public option ML_process_policy : string = ""
  -- "ML process command prefix (process policy)"

section "PIDE Build"

option pide_reports : bool = true
  -- "report PIDE markup"
option build_pide_reports : bool = true
  -- "report PIDE markup in batch build"

section "Editor Session"

public option editor_load_delay : real = 0.5
  -- "delay for file load operations (new buffers etc.)"
public option editor_input_delay : real = 0.2
  -- "delay for user input (text edits, cursor movement etc.)"
public option editor_generated_input_delay : real = 1.0
  -- "delay for machine-generated input that may outperform user edits"
public option editor_output_delay : real = 0.1
  -- "delay for prover output (markup, common messages etc.)"
public option editor_consolidate_delay : real = 2.0
  -- "delay to consolidate status of command evaluation (execution forks)"
public option editor_prune_delay : real = 15
  -- "delay to prune history (delete old versions)"
option editor_prune_size : int = 0
  -- "retained size of pruned history (delete old versions)"
public option editor_update_delay : real = 0.5
  -- "delay for physical GUI updates"
public option editor_reparse_limit : int = 10000
  -- "maximum amount of reparsed text outside perspective"
public option editor_tracing_messages : int = 1000
  -- "initial number of tracing messages for each command transaction (0: unbounded)"
public option editor_chart_delay : real = 3.0
  -- "delay for chart repainting"
public option editor_continuous_checking : bool = true
  -- "continuous checking of proof document (visible and required parts)"
public option editor_output_state : bool = false
  -- "implicit output of proof state"
option editor_execution_delay : real = 0.02
  -- "delay for start of execution process after document update (seconds)"
option editor_syslog_limit : int = 100
  -- "maximum amount of buffered syslog messages"
public option editor_presentation : bool = false
  -- "dynamic presentation while editing"

section "Headless Session"

option headless_consolidate_delay : real = 15
  -- "delay to consolidate status of command evaluation (execution forks)"
option headless_prune_delay : real = 60
  -- "delay to prune history (delete old versions)"
option headless_check_delay : real = 0.5
  -- "delay for theory status check during PIDE processing (seconds)"
option headless_check_limit : int = 0
  -- "maximum number of theory status checks (0 = unlimited)"
option headless_nodes_status_delay : real = -1
  -- "delay for overall nodes status check during PIDE processing (seconds, disabled for < 0)"
option headless_watchdog_timeout : real = 600
  -- "watchdog timeout for PIDE processing of broken theories (seconds, 0 = disabled)"
option headless_commit_cleanup_delay : real = 60
  -- "delay for cleanup of already imported theories (seconds, 0 = disabled)"
option headless_load_limit : real = 5.0
  -- "limit in MB for loaded theory files (0 = unlimited)"

section "Miscellaneous Tools"

public option find_theorems_limit : int = 40
  -- "limit of displayed results"
public option find_theorems_tactic_limit : int = 5
  -- "limit of tactic search for 'solves' criterion"

section "Completion"

public option completion_limit : int = 40
  -- "limit for completion within the formal context"
public option completion_path_ignore : string = "*~:*.marks:*.orig:*.rej:.DS_Store"
  -- "glob patterns to ignore in file-system path completion (separated by colons)"
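Several string options above are colon- or comma-separated lists (document_variants, completion_path_ignore, the spell_checker_include/exclude sets below). A sketch of how the default completion_path_ignore decomposes; plain splitting is enough to show the format, while the real implementation may use Isabelle's own helpers:

  // sketch: decomposing a colon-separated glob list
  val ignore_patterns: List[String] =
    Options.init().string("completion_path_ignore").split(':').toList.filter(_.nonEmpty)
  // == List("*~", "*.marks", "*.orig", "*.rej", ".DS_Store")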
section "Spell Checker"

public option spell_checker : bool = true
  -- "enable spell-checker for prose words within document text, comments etc."
public option spell_checker_dictionary : string = "en"
  -- "spell-checker dictionary name"
public option spell_checker_include : string = "words,comment,comment1,comment2,comment3,ML_comment,SML_comment"
  -- "included markup elements for spell-checker (separated by commas)"
public option spell_checker_exclude : string = "document_marker,antiquoted,raw_text"
  -- "excluded markup elements for spell-checker (separated by commas)"

section "Secure Shell"

+option ssh_batch_mode : bool = true
+  -- "enable SSH batch mode (no user interaction)"
+
option ssh_multiplexing : bool = true
  -- "enable multiplexing of SSH sessions (ignored on Windows)"
option ssh_compression : bool = true
  -- "enable SSH compression"
option ssh_alive_interval : real = 30
  -- "time interval to keep SSH server connection alive (seconds, ignore value < 0)"
option ssh_alive_count_max : int = 3
  -- "maximum number of messages to keep SSH server connection alive (ignore value < 0)"

section "Phabricator"

option phabricator_version_arcanist : string = "4f70fcffa8a5393e210d64f237ffdaa256256d6a"
  -- "repository version for arcanist"
option phabricator_version_phabricator : string = "193798385bd3a7f72dca255e44f8112f4f8fc155"
  -- "repository version for phabricator"

section "Theory Export"

option export_theory : bool = false
  -- "export theory content to Isabelle/Scala"
option export_standard_proofs : bool = false
  -- "export standardized proof terms to Isabelle/Scala (not scalable)"
option export_proofs : bool = false
  -- "export proof terms to Isabelle/Scala"
option prune_proofs : bool = false
  -- "prune proof terms after export (do not store in Isabelle/ML)"

section "Theory update"

option update_inner_syntax_cartouches : bool = false
  -- "update inner syntax (types, terms, etc.) to use cartouches"
option update_mixfix_cartouches : bool = false
  -- "update mixfix templates to use cartouches instead of double quotes"
option update_control_cartouches : bool = false
  -- "update antiquotations to use control symbol with cartouche argument"
option update_path_cartouches : bool = false
  -- "update file-system paths to use cartouches"

section "Build Database"

option build_database_server : bool = false
option build_database_user : string = ""
option build_database_password : string = ""
option build_database_name : string = ""
option build_database_host : string = ""
option build_database_port : int = 0
option build_database_ssh_host : string = ""
option build_database_ssh_user : string = ""
option build_database_ssh_port : int = 0

section "Build Log Database"

option build_log_database_user : string = ""
option build_log_database_password : string = ""
option build_log_database_name : string = ""
option build_log_database_host : string = ""
option build_log_database_port : int = 0
option build_log_ssh_host : string = ""
option build_log_ssh_user : string = ""
option build_log_ssh_port : int = 0
option build_log_history : int = 30
  -- "length of relevant history (in days)"
option build_log_transaction_size : int = 1
  -- "number of log files for each db update"

section "Isabelle/Scala/ML system channel"

option system_channel_address : string = ""
option system_channel_password : string = ""

section "Bash process execution server"

option bash_process_debugging : bool = false
option bash_process_address : string = ""
option bash_process_password : string = ""
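The only change to this file is the new ssh_batch_mode option. Its default of true keeps the previously hard-coded BatchMode=yes, so password prompts and other interaction stay disabled unless a user sets ssh_batch_mode=false. The Scala change below turns Boolean options into OpenSSH yes/no entries via Config.entry, in essence:

  // mapping applied by Config.entry in the Scala change below
  def entry(x: String, y: Boolean): String = x + "=" + (if (y) "yes" else "no")
  entry("BatchMode", true)    // "BatchMode=yes", the old hard-coded behaviour
  entry("BatchMode", false)   // "BatchMode=no", now reachable via ssh_batch_mode=false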
diff --git a/src/Pure/General/ssh.scala b/src/Pure/General/ssh.scala
--- a/src/Pure/General/ssh.scala
+++ b/src/Pure/General/ssh.scala
@@ -1,364 +1,367 @@
/*  Title:      Pure/General/ssh.scala
    Author:     Makarius

SSH client on OpenSSH command-line tools, preferably with connection
multiplexing, but this does not work on Windows.
*/

package isabelle

import java.util.{Map => JMap}
import java.io.{File => JFile}

object SSH {
  /* target machine: user@host syntax */

  object Target {
    def parse(s: String): (String, String) = {
      val i = s.indexOf('@')
      if (i <= 0) ("", s) else (s.substring(0, i), s.substring(i + 1))
    }

    def unapplySeq(s: String): Option[List[String]] = {
      val (user, host) = parse(s)
      if (host.isEmpty) None else Some(List(user, host))
    }
  }

  /* OpenSSH configuration and command-line */

  // see https://linux.die.net/man/5/ssh_config
  object Config {
    def entry(x: String, y: String): String = x + "=" + y
    def entry(x: String, y: Int): String = entry(x, y.toString)
    def entry(x: String, y: Long): String = entry(x, y.toString)
    def entry(x: String, y: Boolean): String = entry(x, if (y) "yes" else "no")

    def make(options: Options,
      port: Int = 0,
      user: String = "",
      control_master: Boolean = false,
      control_path: String = ""
    ): List[String] = {
+     val ssh_batch_mode = options.bool("ssh_batch_mode")
      val ssh_compression = options.bool("ssh_compression")
      val ssh_alive_interval = options.real("ssh_alive_interval").round
      val ssh_alive_count_max = options.int("ssh_alive_count_max")

-     List("BatchMode=yes", entry("Compression", ssh_compression)) :::
+     List(
+       entry("BatchMode", ssh_batch_mode),
+       entry("Compression", ssh_compression)) :::
      (if (ssh_alive_interval >= 0) List(entry("ServerAliveInterval", ssh_alive_interval)) else Nil) :::
      (if (ssh_alive_count_max >= 0) List(entry("ServerAliveCountMax", ssh_alive_count_max)) else Nil) :::
      (if (port > 0) List(entry("Port", port)) else Nil) :::
      (if (user.nonEmpty) List(entry("User", user)) else Nil) :::
      (if (control_master) List("ControlMaster=yes", "ControlPersist=yes") else Nil) :::
      (if (control_path.nonEmpty) List("ControlPath=" + control_path) else Nil)
    }

    def option(entry: String): String = "-o " + Bash.string(entry)
    def option(x: String, y: String): String = option(entry(x, y))
    def option(x: String, y: Int): String = option(entry(x, y))
    def option(x: String, y: Long): String = option(entry(x, y))
    def option(x: String, y: Boolean): String = option(entry(x, y))

    def command(command: String, config: List[String]): String =
      Bash.string(command) + config.map(entry => " " + option(entry)).mkString
  }

  def sftp_string(str: String): String = {
    val special = "[]?*\\{} \"'"
    if (str.isEmpty) "\"\""
    else if (str.exists(special.contains)) {
      val res = new StringBuilder
      for (c <- str) {
        if (special.contains(c)) res += '\\'
        res += c
      }
      res.toString()
    }
    else str
  }
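With the default values from etc/options the new code still yields BatchMode=yes. A hypothetical use of the Config API defined above, assuming an options value from Options.init(); host, port and user are invented and the comments show roughly what results:

  // hypothetical usage of Config.make / Config.command
  val config = Config.make(options, port = 2222, user = "demo")
  // ~ List("BatchMode=yes", "Compression=yes", "ServerAliveInterval=30",
  //        "ServerAliveCountMax=3", "Port=2222", "User=demo")
  val ssh_cmd = Config.command("ssh", config)
  // ~ ssh -o 'BatchMode=yes' -o 'Compression=yes' ... -o 'User=demo'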
  /* open session */

  def open_session(
    options: Options,
    host: String,
    port: Int = 0,
    user: String = ""
  ): Session = {
    val multiplex = options.bool("ssh_multiplexing") && !Platform.is_windows
    val (control_master, control_path) =
      if (multiplex) (true, Isabelle_System.tmp_file("ssh", initialized = false).getPath)
      else (false, "")
    new Session(options, host, port, user, control_master, control_path)
  }

  class Session private[SSH](
    val options: Options,
    val host: String,
    val port: Int,
    val user: String,
    control_master: Boolean,
    val control_path: String
  ) extends System {
    ssh =>

    def port_suffix: String = if (port > 0) ":" + port else ""
    def user_prefix: String = if (user.nonEmpty) user + "@" else ""

    override def toString: String = user_prefix + host + port_suffix
    override def hg_url: String = "ssh://" + toString + "/"
    override def rsync_prefix: String = user_prefix + host + ":"

    /* local ssh commands */

    def run_command(command: String,
      master: Boolean = false,
      opts: String = "",
      args: String = "",
      cwd: JFile = null,
      progress_stdout: String => Unit = (_: String) => (),
      progress_stderr: String => Unit = (_: String) => (),
      strict: Boolean = true
    ): Process_Result = {
      val config =
        Config.make(options, port = port, user = user,
          control_master = master, control_path = control_path)
      val cmd =
        Config.command(command, config) +
        (if (opts.nonEmpty) " " + opts else "") +
        (if (args.nonEmpty) " -- " + args else "")
      Isabelle_System.bash(cmd, cwd = cwd,
        progress_stdout = progress_stdout, progress_stderr = progress_stderr,
        strict = strict)
    }

    def run_sftp(
      script: String,
      init: Path => Unit = _ => (),
      exit: Path => Unit = _ => ()
    ): Process_Result = {
      Isabelle_System.with_tmp_dir("ssh") { dir =>
        init(dir)
        File.write(dir + Path.explode("script"), script)
        val result =
          run_command("sftp", opts = "-b script", args = Bash.string(host), cwd = dir.file).check
        exit(dir)
        result
      }
    }

    def run_ssh(master: Boolean = false, opts: String = "", args: String = ""): Process_Result = {
      val args1 = Bash.string(host) + (if (args.nonEmpty) " " + args else "")
      run_command("ssh", master = master, opts = opts, args = args1)
    }

    /* init and exit */

    val user_home: String = {
      run_ssh(master = control_master, args = "printenv HOME \";\" printenv SHELL")
        .check.out_lines match {
        case List(user_home, shell) =>
          if (shell.endsWith("/bash")) user_home
          else {
            error("Bad SHELL for " + quote(toString) +
              " -- expected GNU bash, but found " + shell)
          }
        case _ => error("Malformed remote environment for " + quote(toString))
      }
    }

    val settings: JMap[String, String] =
      JMap.of("HOME", user_home, "USER_HOME", user_home)

    override def close(): Unit = {
      if (control_path.nonEmpty) run_ssh(opts = "-O exit").check
    }

    /* remote commands */

    override def execute(cmd_line: String,
      progress_stdout: String => Unit = (_: String) => (),
      progress_stderr: String => Unit = (_: String) => (),
      settings: Boolean = true,
      strict: Boolean = true
    ): Process_Result = {
      val args1 = Bash.string(host) + " " + Bash.string("export USER_HOME=\"$HOME\"\n" + cmd_line)
      run_command("ssh", args = args1,
        progress_stdout = progress_stdout, progress_stderr = progress_stderr, strict = strict)
    }

    override lazy val isabelle_platform: Isabelle_Platform = Isabelle_Platform(ssh = Some(ssh))

    /* remote file-system */

    override def expand_path(path: Path): Path = path.expand_env(settings)
    def remote_path(path: Path): String = expand_path(path).implode
    override def bash_path(path: Path): String = Bash.string(remote_path(path))
    def sftp_path(path: Path): String = sftp_string(remote_path(path))

    def rm(path: Path): Unit = run_sftp("rm " + sftp_path(path))

    override def is_dir(path: Path): Boolean = run_ssh(args = "test -d " + bash_path(path)).ok
    override def is_file(path: Path): Boolean = run_ssh(args = "test -f " + bash_path(path)).ok

    override def make_directory(path: Path): Path = {
      if (!execute("mkdir -p " + bash_path(path)).ok) {
        error("Failed to create directory: " + quote(remote_path(path)))
      }
      path
    }

    def read_dir(path: Path): List[String] =
      run_sftp("ls -1 -a " + sftp_path(path)).out_lines.flatMap(s =>
        if (s == "." || s == ".." || s.endsWith("/.") || s.endsWith("/..")) None
        else Some(Library.perhaps_unprefix("./", s)))

    private def get_file[A](path: Path, f: Path => A): A = {
      var result: Option[A] = None
      run_sftp("get -p " + sftp_path(path) + " local",
        exit = dir => result = Some(f(dir + Path.explode("local"))))
      result.get
    }

    private def put_file(path: Path, f: Path => Unit): Unit =
      run_sftp("put -p local " + sftp_path(path),
        init = dir => f(dir + Path.explode("local")))

    override def read_file(path: Path, local_path: Path): Unit =
      get_file(path, Isabelle_System.copy_file(_, local_path))
    override def read_bytes(path: Path): Bytes = get_file(path, Bytes.read)
    override def read(path: Path): String = get_file(path, File.read)

    override def write_file(path: Path, local_path: Path): Unit =
      put_file(path, Isabelle_System.copy_file(local_path, _))
    def write_bytes(path: Path, bytes: Bytes): Unit = put_file(path, Bytes.write(_, bytes))
    def write(path: Path, text: String): Unit = put_file(path, File.write(_, text))

    /* tmp dirs */

    def rm_tree(dir: Path): Unit = rm_tree(remote_path(dir))

    def rm_tree(remote_dir: String): Unit =
      execute("rm -r -f " + Bash.string(remote_dir)).check

    def tmp_dir(): String = execute("mktemp -d /tmp/ssh-XXXXXXXXXXXX").check.out

    override def with_tmp_dir[A](body: Path => A): A = {
      val remote_dir = tmp_dir()
      try { body(Path.explode(remote_dir)) } finally { rm_tree(remote_dir) }
    }

    /* port forwarding */

    def port_forwarding(
      remote_port: Int,
      remote_host: String = "localhost",
      local_port: Int = 0,
      local_host: String = "localhost",
      ssh_close: Boolean = false
    ): Port_Forwarding = {
      val port = if (local_port > 0) local_port else Isabelle_System.local_port()

      val forward = List(local_host, port, remote_host, remote_port).mkString(":")
      val forward_option = "-L " + Bash.string(forward)

      val cancel: () => Unit =
        if (control_path.nonEmpty) {
          run_ssh(opts = forward_option + " -O forward").check
          () => run_ssh(opts = forward_option + " -O cancel")  // permissive
        }
        else {
          val result = Synchronized[Exn.Result[Boolean]](Exn.Res(false))
          val thread = Isabelle_Thread.fork("port_forwarding") {
            val opts =
              forward_option +
              " " + Config.option("SessionType", "none") +
              " " + Config.option("PermitLocalCommand", true) +
              " " + Config.option("LocalCommand", "pwd")
            try {
              run_command("ssh", opts = opts, args = Bash.string(host),
                progress_stdout = _ => result.change(_ => Exn.Res(true))).check
            }
            catch { case exn: Throwable => result.change(_ => Exn.Exn(exn)) }
          }
          result.guarded_access {
            case res@Exn.Res(ok) => if (ok) Some((), res) else None
            case Exn.Exn(exn) => throw exn
          }
          () => thread.interrupt()
        }

      val shutdown_hook = Isabelle_System.create_shutdown_hook { cancel() }

      new Port_Forwarding(host, port, remote_host, remote_port) {
        override def toString: String = forward
        override def close(): Unit = {
          cancel()
          Isabelle_System.remove_shutdown_hook(shutdown_hook)
          if (ssh_close) ssh.close()
        }
      }
    }
  }
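A hypothetical use of the Session API above; host and paths are invented. Every operation is an ssh or sftp round trip, and failures surface through Process_Result.check:

  // sketch: remote file-system operations via an open session
  val ssh = SSH.open_session(Options.init(), host = "build.example.org", user = "demo")
  try {
    val dir = ssh.make_directory(Path.explode("tmp/ssh_example"))
    ssh.write(dir + Path.explode("hello.txt"), "hello\n")
    val entries = ssh.read_dir(dir)                       // e.g. List("hello.txt")
    val text = ssh.read(dir + Path.explode("hello.txt"))  // "hello\n"
  }
  finally { ssh.close() }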
  abstract class Port_Forwarding private[SSH](
    val host: String,
    val port: Int,
    val remote_host: String,
    val remote_port: Int
  ) extends AutoCloseable
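A hypothetical tunnel built on port_forwarding above, with ssh as in the previous sketch and invented port numbers; ssh_close = true ties the session lifetime to the forwarding:

  // sketch: forward local port 15432 to port 5432 on the remote side
  val forwarding = ssh.port_forwarding(remote_port = 5432, local_port = 15432, ssh_close = true)
  // ... connect to localhost:15432 locally ...
  forwarding.close()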
  /* system operations */

  trait System extends AutoCloseable {
    def close(): Unit = ()

    def hg_url: String = ""
    def rsync_prefix: String = ""
    def rsync_path(path: Path): String = rsync_prefix + expand_path(path).implode

    def expand_path(path: Path): Path = path.expand
    def bash_path(path: Path): String = File.bash_path(path)
    def is_dir(path: Path): Boolean = path.is_dir
    def is_file(path: Path): Boolean = path.is_file
    def make_directory(path: Path): Path = Isabelle_System.make_directory(path)
    def with_tmp_dir[A](body: Path => A): A = Isabelle_System.with_tmp_dir("tmp")(body)
    def read_file(path1: Path, path2: Path): Unit = Isabelle_System.copy_file(path1, path2)
    def write_file(path1: Path, path2: Path): Unit = Isabelle_System.copy_file(path2, path1)
    def read_bytes(path: Path): Bytes = Bytes.read(path)
    def read(path: Path): String = File.read(path)

    def execute(command: String,
        progress_stdout: String => Unit = (_: String) => (),
        progress_stderr: String => Unit = (_: String) => (),
        settings: Boolean = true,
        strict: Boolean = true): Process_Result =
      Isabelle_System.bash(command,
        progress_stdout = progress_stdout,
        progress_stderr = progress_stderr,
        env = if (settings) Isabelle_System.settings() else null,
        strict = strict)

    def isabelle_platform: Isabelle_Platform = Isabelle_Platform()
  }

  object Local extends System
}
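Since Session overrides the local defaults of the System trait, code written against SSH.System runs unchanged on the local machine (SSH.Local) or on a remote session. A minimal sketch with invented names:

// sketch: host-agnostic code against SSH.System
def provision(system: SSH.System = SSH.Local): Unit = {
  system.make_directory(Path.explode("provision_example"))
  system.execute("uname -a").check
}
// provision()      runs the steps locally
// provision(ssh)   runs the same steps on the remote host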