diff --git a/src/Pure/Admin/build_history.scala b/src/Pure/Admin/build_history.scala --- a/src/Pure/Admin/build_history.scala +++ b/src/Pure/Admin/build_history.scala @@ -1,594 +1,594 @@ /* Title: Pure/Admin/build_history.scala Author: Makarius Build other history versions. */ package isabelle object Build_History { /* log files */ val engine = "build_history" val log_prefix = engine + "_" /* augment settings */ def augment_settings( other_isabelle: Other_Isabelle, threads: Int, arch_64: Boolean, heap: Int, max_heap: Option[Int], more_settings: List[String] ): String = { val (ml_platform, ml_settings) = { val cygwin_32 = "x86-cygwin" val windows_32 = "x86-windows" val windows_64 = "x86_64-windows" val windows_64_32 = "x86_64_32-windows" val platform_32 = other_isabelle.getenv("ISABELLE_PLATFORM32") val platform_64 = other_isabelle.getenv("ISABELLE_PLATFORM64") val platform_64_32 = platform_64.replace("x86_64-", "x86_64_32-") val polyml_home = try { Path.explode(other_isabelle.getenv("ML_HOME")).dir } catch { case ERROR(msg) => error("Bad ML_HOME: " + msg) } def ml_home(platform: String): Path = polyml_home + Path.explode(platform) def err(platform: String): Nothing = error("Platform " + platform + " unavailable on this machine") def check_dir(platform: String): Boolean = platform != "" && ml_home(platform).is_dir val ml_platform = if (Platform.is_windows && arch_64) { if (check_dir(windows_64)) windows_64 else err(windows_64) } else if (Platform.is_windows && !arch_64) { if (check_dir(windows_64_32)) windows_64_32 else if (check_dir(cygwin_32)) cygwin_32 else if (check_dir(windows_32)) windows_32 else err(windows_32) } else if (arch_64) { if (check_dir(platform_64)) platform_64 else err(platform_64) } else if (check_dir(platform_64_32)) platform_64_32 else platform_32 val ml_options = "--minheap " + heap + (if (max_heap.isDefined) " --maxheap " + max_heap.get else "") + " --gcthreads " + threads + (if (ml_platform.endsWith("-windows")) " --codepage utf8" else "") (ml_platform, List( "ML_HOME=" + File.bash_path(ml_home(ml_platform)), "ML_PLATFORM=" + quote(ml_platform), "ML_OPTIONS=" + quote(ml_options))) } val thread_settings = List( "ISABELLE_JAVA_SYSTEM_OPTIONS=\"$ISABELLE_JAVA_SYSTEM_OPTIONS -Disabelle.threads=" + threads + "\"", "ISABELLE_BUILD_OPTIONS=\"threads=" + threads + "\"") val settings = List(ml_settings, thread_settings) ::: (if (more_settings.isEmpty) Nil else List(more_settings)) File.append(other_isabelle.etc_settings, "\n" + cat_lines(settings.map(terminate_lines(_)))) ml_platform } /** local build **/ private val default_user_home = Path.USER_HOME private val default_multicore = (1, 1) private val default_heap = 1500 private val default_isabelle_identifier = "build_history" def local_build( options: Options, root: Path, user_home: Path = default_user_home, progress: Progress = new Progress, afp: Boolean = false, afp_partition: Int = 0, isabelle_identifier: String = default_isabelle_identifier, ml_statistics_step: Int = 1, component_repository: String = Components.default_component_repository, components_base: Path = Components.default_components_base, fresh: Boolean = false, hostname: String = "", multicore_base: Boolean = false, multicore_list: List[(Int, Int)] = List(default_multicore), arch_64: Boolean = false, heap: Int = default_heap, max_heap: Option[Int] = None, init_settings: List[String] = Nil, more_settings: List[String] = Nil, more_preferences: List[String] = Nil, verbose: Boolean = false, build_tags: List[String] = Nil, build_args: List[String] = Nil ): 
List[(Process_Result, Path)] = { /* sanity checks */ if (File.eq(Path.ISABELLE_HOME, root)) error("Repository coincides with ISABELLE_HOME=" + Path.ISABELLE_HOME.expand) for ((threads, _) <- multicore_list if threads < 1) error("Bad threads value < 1: " + threads) for ((_, processes) <- multicore_list if processes < 1) error("Bad processes value < 1: " + processes) if (heap < 100) error("Bad heap value < 100: " + heap) if (max_heap.isDefined && max_heap.get < heap) error("Bad max_heap value < heap: " + max_heap.get) System.getenv("ISABELLE_SETTINGS_PRESENT") match { case null | "" => case _ => error("Cannot run Admin/build_other within existing Isabelle settings environment") } /* Isabelle + AFP directory */ def directory(dir: Path): Mercurial.Hg_Sync.Directory = { val directory = Mercurial.Hg_Sync.directory(dir) if (verbose) progress.echo(directory.log) directory } val isabelle_directory = directory(root) val afp_directory = if (afp) Some(directory(root + Path.explode("AFP"))) else None val (afp_build_args, afp_sessions) = if (afp_directory.isEmpty) (Nil, Nil) else { val (opt, sessions) = { if (afp_partition == 0) ("-d", Nil) else { try { val afp_info = AFP.init(options, base_dir = afp_directory.get.root) ("-d", afp_info.partition(afp_partition)) } catch { case ERROR(_) => ("-D", Nil) } } } (List(opt, "~~/AFP/thys"), sessions) } /* main */ val other_isabelle = Other_Isabelle(root, isabelle_identifier = isabelle_identifier, user_home = user_home, progress = progress) val build_host = proper_string(hostname) getOrElse Isabelle_System.hostname() val build_history_date = Date.now() val build_group_id = build_host + ":" + build_history_date.time.ms var first_build = true for ((threads, processes) <- multicore_list) yield { /* init settings */ val component_settings = other_isabelle.init_components( component_repository = component_repository, components_base = components_base, catalogs = List("main", "optional")) other_isabelle.init_settings(component_settings ::: init_settings) other_isabelle.resolve_components(echo = verbose) val ml_platform = augment_settings(other_isabelle, threads, arch_64, heap, max_heap, more_settings) File.write(other_isabelle.etc_preferences, cat_lines(more_preferences)) val isabelle_output = other_isabelle.isabelle_home_user + Path.explode("heaps") + Path.explode(other_isabelle.getenv("ML_IDENTIFIER")) val isabelle_output_log = isabelle_output + Path.explode("log") val isabelle_base_log = isabelle_output + Path.explode("../base_log") if (first_build) { other_isabelle.resolve_components(echo = verbose) if (fresh) Isabelle_System.rm_tree(other_isabelle.isabelle_home + Path.explode("lib/classes")) other_isabelle.bash( "env PATH=\"" + File.bash_path(Path.explode("~~/lib/dummy_stty").expand) + ":$PATH\" " + "bin/isabelle jedit -b", redirect = true, echo = verbose).check for { tool <- List("ghc_setup", "ocaml_setup") if other_isabelle.getenv("ISABELLE_" + Word.uppercase(tool)) == "true" && (other_isabelle.isabelle_home + Path.explode("lib/Tools/" + tool)).is_file } other_isabelle(tool, echo = verbose) Isabelle_System.rm_tree(isabelle_base_log) } Isabelle_System.rm_tree(isabelle_output) Isabelle_System.make_directory(isabelle_output) val log_path = other_isabelle.isabelle_home_user + Build_Log.log_subdir(build_history_date) + Build_Log.log_filename(Build_History.engine, build_history_date, List(build_host, ml_platform, "M" + threads) ::: build_tags) Isabelle_System.make_directory(log_path.dir) val build_out = other_isabelle.isabelle_home_user + Path.explode("log/build.out") 
val build_out_progress = new File_Progress(build_out) build_out.file.delete /* build */ if (multicore_base && !first_build && isabelle_base_log.is_dir) Isabelle_System.copy_dir(isabelle_base_log, isabelle_output_log) val build_start = Date.now() val build_args1 = List("-v", "-j" + processes) ::: afp_build_args ::: build_args val build_isabelle = Other_Isabelle(root, isabelle_identifier = isabelle_identifier, user_home = user_home, progress = build_out_progress) val build_result = build_isabelle("build " + Bash.strings(build_args1 ::: afp_sessions), redirect = true, echo = true, strict = false) val build_end = Date.now() val build_info: Build_Log.Build_Info = Build_Log.Log_File(log_path.file_name, build_result.out_lines). parse_build_info(ml_statistics = true) /* output log */ val store = Sessions.store(options + "build_database_server=false") val meta_info = Properties.lines_nonempty(Build_Log.Prop.build_tags.name, build_tags) ::: Properties.lines_nonempty(Build_Log.Prop.build_args.name, build_args1) ::: List( Build_Log.Prop.build_group_id.name -> build_group_id, Build_Log.Prop.build_id.name -> (build_host + ":" + build_start.time.ms), Build_Log.Prop.build_engine.name -> Build_History.engine, Build_Log.Prop.build_host.name -> build_host, Build_Log.Prop.build_start.name -> Build_Log.print_date(build_start), Build_Log.Prop.build_end.name -> Build_Log.print_date(build_end), Build_Log.Prop.isabelle_version.name -> isabelle_directory.id) ::: afp_directory.map(dir => Build_Log.Prop.afp_version.name -> dir.id).toList build_out_progress.echo("Reading session build info ...") val session_build_info = build_info.finished_sessions.flatMap { session_name => val database = isabelle_output + store.database(session_name) if (database.is_file) { using(SQLite.open_database(database)) { db => val theory_timings = try { store.read_theory_timings(db, session_name).map(ps => Protocol.Theory_Timing_Marker((Build_Log.SESSION_NAME, session_name) :: ps)) } catch { case ERROR(_) => Nil } val session_sources = store.read_build(db, session_name).map(_.sources) match { case Some(sources) if sources.length == SHA1.digest_length => List("Sources " + session_name + " " + sources) case _ => Nil } theory_timings ::: session_sources } } else Nil } build_out_progress.echo("Reading ML statistics ...") val ml_statistics = build_info.finished_sessions.flatMap { session_name => val database = isabelle_output + store.database(session_name) val log_gz = isabelle_output + store.log_gz(session_name) val properties = if (database.is_file) { using(SQLite.open_database(database))(db => store.read_ml_statistics(db, session_name)) } else if (log_gz.is_file) { Build_Log.Log_File(log_gz).parse_session_info(ml_statistics = true).ml_statistics } else Nil val trimmed_properties = if (ml_statistics_step <= 0) Nil else if (ml_statistics_step == 1) properties else { (for { (ps, i) <- properties.iterator.zipWithIndex if i % ml_statistics_step == 0 } yield ps).toList } trimmed_properties.map(ps => (Build_Log.SESSION_NAME -> session_name) :: ps) } build_out_progress.echo("Reading error messages ...") val session_errors = build_info.failed_sessions.flatMap { session_name => val database = isabelle_output + store.database(session_name) val errors = if (database.is_file) { try { using(SQLite.open_database(database))(db => store.read_errors(db, session_name)) } // column "errors" could be missing catch { case _: java.sql.SQLException => Nil } } else Nil errors.map(msg => List(Build_Log.SESSION_NAME -> session_name, Markup.CONTENT -> msg)) } 
build_out_progress.echo("Reading heap sizes ...") val heap_sizes = build_info.finished_sessions.flatMap { session_name => val heap = isabelle_output + Path.explode(session_name) if (heap.is_file) Some("Heap " + session_name + " (" + Value.Long(heap.file.length) + " bytes)") else None } build_out_progress.echo("Writing log file " + log_path.xz + " ...") File.write_xz(log_path.xz, terminate_lines( Protocol.Meta_Info_Marker(meta_info) :: build_result.out_lines ::: session_build_info ::: ml_statistics.map(Protocol.ML_Statistics_Marker.apply) ::: session_errors.map(Protocol.Error_Message_Marker.apply) ::: heap_sizes), XZ.options(6)) /* next build */ if (multicore_base && first_build && isabelle_output_log.is_dir) Isabelle_System.copy_dir(isabelle_output_log, isabelle_base_log) Isabelle_System.rm_tree(isabelle_output) first_build = false (build_result, log_path.xz) } } /* command line entry point */ private object Multicore { private val Pat1 = """^(\d+)$""".r private val Pat2 = """^(\d+)x(\d+)$""".r def parse(s: String): (Int, Int) = s match { case Pat1(Value.Int(x)) => (x, 1) case Pat2(Value.Int(x), Value.Int(y)) => (x, y) case _ => error("Bad multicore configuration: " + quote(s)) } } def main(args: Array[String]): Unit = { Command_Line.tool { var afp = false var multicore_base = false var components_base: Path = Components.default_components_base var heap: Option[Int] = None var max_heap: Option[Int] = None var multicore_list = List(default_multicore) var isabelle_identifier = default_isabelle_identifier var afp_partition = 0 var component_repository = Components.default_component_repository var more_settings: List[String] = Nil var more_preferences: List[String] = Nil var fresh = false var hostname = "" var init_settings: List[String] = Nil var arch_64 = false var output_file = "" var ml_statistics_step = 1 var build_tags = List.empty[String] var user_home = default_user_home var verbose = false var exit_code = false val getopts = Getopts(""" Usage: Admin/build_other [OPTIONS] ISABELLE_HOME [ARGS ...] Options are: -A include $ISABELLE_HOME/AFP directory -B first multicore build serves as base for scheduling information -C DIR base directory for Isabelle components (default: """ + Components.default_components_base + """) -H SIZE minimal ML heap in MB (default: """ + default_heap + """ for x86, """ + default_heap * 2 + """ for x86_64) -M MULTICORE multicore configurations (see below) -N NAME alternative ISABELLE_IDENTIFIER (default: """ + default_isabelle_identifier + """) -P NUMBER AFP partition number (0, 1, 2, default: 0=unrestricted) -R URL remote repository for Isabelle components (default: """ + Components.default_component_repository + """) -U SIZE maximal ML heap in MB (default: unbounded) -e TEXT additional text for generated etc/settings -f fresh build of Isabelle/Scala components (recommended) -h NAME override local hostname -i TEXT initial text for generated etc/settings -m ARCH processor architecture (32=x86, 64=x86_64, default: x86) -n no build: sync only -o FILE output file for log names (default: stdout) -p TEXT additional text for generated etc/preferences -s NUMBER step size for ML statistics (0=none, 1=all, n=step, default: 1) -t TAG free-form build tag (multiple occurrences possible) -u DIR alternative USER_HOME directory -v verbose -x return overall exit code from build processes Build Isabelle sessions from the history of another REPOSITORY clone, passing ARGS directly to its isabelle build tool. 
Each MULTICORE configuration consists of one or two numbers (default 1): THREADS or THREADSxPROCESSES, e.g. -M 1,2,4 or -M 1x4,2x2,4. """, "A" -> (_ => afp = true), "B" -> (_ => multicore_base = true), "C:" -> (arg => components_base = Path.explode(arg)), "H:" -> (arg => heap = Some(Value.Int.parse(arg))), "M:" -> (arg => multicore_list = space_explode(',', arg).map(Multicore.parse)), "N:" -> (arg => isabelle_identifier = arg), "P:" -> (arg => afp_partition = Value.Int.parse(arg)), "R:" -> (arg => component_repository = arg), "U:" -> (arg => max_heap = Some(Value.Int.parse(arg))), "e:" -> (arg => more_settings = more_settings ::: List(arg)), "f" -> (_ => fresh = true), "h:" -> (arg => hostname = arg), "i:" -> (arg => init_settings = init_settings ::: List(arg)), "m:" -> { case "32" | "x86" => arch_64 = false case "64" | "x86_64" => arch_64 = true case bad => error("Bad processor architecture: " + quote(bad)) }, "o:" -> (arg => output_file = arg), "p:" -> (arg => more_preferences = more_preferences ::: List(arg)), "s:" -> (arg => ml_statistics_step = Value.Int.parse(arg)), "t:" -> (arg => build_tags = build_tags ::: List(arg)), "u:" -> (arg => user_home = Path.explode(arg)), "v" -> (_ => verbose = true), "x" -> (_ => exit_code = true)) val more_args = getopts(args) val (root, build_args) = more_args match { case root :: build_args => (Path.explode(root), build_args) case _ => getopts.usage() } val progress = new Console_Progress(stderr = true) val results = local_build(Options.init(), root, user_home = user_home, progress = progress, afp = afp, afp_partition = afp_partition, isabelle_identifier = isabelle_identifier, ml_statistics_step = ml_statistics_step, component_repository = component_repository, components_base = components_base, fresh = fresh, hostname = hostname, multicore_base = multicore_base, multicore_list = multicore_list, arch_64 = arch_64, heap = heap.getOrElse(if (arch_64) default_heap * 2 else default_heap), max_heap = max_heap, init_settings = init_settings, more_settings = more_settings, more_preferences = more_preferences, verbose = verbose, build_tags = build_tags, build_args = build_args) if (output_file == "") { for ((_, log_path) <- results) Output.writeln(log_path.implode, stdout = true) } else { File.write(Path.explode(output_file), cat_lines(for ((_, log_path) <- results) yield log_path.implode)) } val rc = results.foldLeft(Process_Result.RC.ok) { case (rc, (res, _)) => rc max res.rc } if (rc != Process_Result.RC.ok && exit_code) sys.exit(rc) } } /** remote build -- via rsync and ssh **/ def remote_build( ssh: SSH.Session, isabelle_self: Path, isabelle_other: Path, isabelle_identifier: String = "remote_build_history", progress: Progress = new Progress, protect_args: Boolean = false, rev: String = "", afp_repos: Option[Path] = None, afp_rev: String = "", options: String = "", args: String = "", no_build: Boolean = false ): List[(String, Bytes)] = { /* synchronize Isabelle + AFP repositories */ def sync(target: Path, accurate: Boolean = false, rev: String = "", afp_rev: String = "", afp: Boolean = false ): Unit = { - val context = Rsync.Context(progress, port = ssh.nominal_port, protect_args = protect_args) + val context = Rsync.Context(progress, port = ssh.port, protect_args = protect_args) Sync.sync(ssh.options, context, ssh.rsync_path(target), thorough = accurate, preserve_jars = !accurate, rev = rev, afp_rev = afp_rev, afp_root = if (afp) afp_repos else None) } def execute(command: String, args: String, echo: Boolean = false, strict: Boolean = true ): Unit = 
ssh.execute( Isabelle_System.export_isabelle_identifier(isabelle_identifier) + ssh.bash_path(isabelle_self + Path.explode(command)) + " " + args, progress_stdout = progress.echo_if(echo, _), progress_stderr = progress.echo_if(echo, _), strict = strict).check sync(isabelle_self) execute("bin/isabelle", "components -I") execute("bin/isabelle", "components -a", echo = true) execute("bin/isabelle", "jedit -bf") sync(isabelle_other, accurate = true, rev = proper_string(rev) getOrElse "tip", afp_rev = proper_string(afp_rev) getOrElse "tip", afp = true) /* build */ if (no_build) Nil else { ssh.with_tmp_dir { tmp_dir => val output_file = tmp_dir + Path.explode("output") val build_options = (if (afp_repos.isEmpty) "" else " -A") + " " + options try { execute("Admin/build_other", "-o " + ssh.bash_path(output_file) + build_options + " " + ssh.bash_path(isabelle_other) + " " + args, echo = true, strict = false) } catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred for Admin/build_other " + build_options) } for (line <- split_lines(ssh.read(output_file))) yield { val log = Path.explode(line) val bytes = ssh.read_bytes(log) ssh.rm(log) (log.file_name, bytes) } } } } } diff --git a/src/Pure/Admin/isabelle_cronjob.scala b/src/Pure/Admin/isabelle_cronjob.scala --- a/src/Pure/Admin/isabelle_cronjob.scala +++ b/src/Pure/Admin/isabelle_cronjob.scala @@ -1,649 +1,637 @@ /* Title: Pure/Admin/isabelle_cronjob.scala Author: Makarius Main entry point for administrative cronjob at TUM. */ package isabelle import java.nio.file.Files import scala.annotation.tailrec object Isabelle_Cronjob { /* global resources: owned by main cronjob */ val backup = "lxbroy10:cronjob" val main_dir: Path = Path.explode("~/cronjob") val main_state_file: Path = main_dir + Path.explode("run/main.state") val build_release_log: Path = main_dir + Path.explode("run/build_release.log") val current_log: Path = main_dir + Path.explode("run/main.log") // owned by log service val cumulative_log: Path = main_dir + Path.explode("log/main.log") // owned by log service val isabelle_repos: Path = main_dir + Path.explode("isabelle") val afp_repos: Path = main_dir + Path.explode("AFP") val mailman_archives_dir = Path.explode("~/cronjob/Mailman") val build_log_dirs = List(Path.explode("~/log"), Path.explode("~/afp/log"), Path.explode("~/cronjob/log")) /** logger tasks **/ sealed case class Logger_Task(name: String = "", body: Logger => Unit) /* init and exit */ def get_rev(): String = Mercurial.repository(isabelle_repos).id() def get_afp_rev(): String = Mercurial.repository(afp_repos).id() val init: Logger_Task = Logger_Task("init", { logger => Isabelle_Devel.make_index() Mercurial.setup_repository(Isabelle_System.afp_repository.root, afp_repos) File.write(logger.log_dir + Build_Log.log_filename("isabelle_identify", logger.start_date), Build_Log.Identify.content(logger.start_date, Some(get_rev()), Some(get_afp_rev()))) Isabelle_System.bash( """rsync -a --include="*/" --include="plain_identify*" --exclude="*" """ + Bash.string(backup + "/log/.") + " " + File.bash_path(main_dir) + "/log/.").check if (!Isabelle_Devel.cronjob_log.is_file) Files.createSymbolicLink(Isabelle_Devel.cronjob_log.java_path, current_log.java_path) }) val exit: Logger_Task = Logger_Task("exit", { logger => Isabelle_System.bash( "rsync -a " + File.bash_path(main_dir) + "/log/." 
+ " " + Bash.string(backup) + "/log/.") .check }) /* Mailman archives */ val mailman_archives: Logger_Task = Logger_Task("mailman_archives", { logger => Mailman.Isabelle_Users.download(mailman_archives_dir) Mailman.Isabelle_Dev.download(mailman_archives_dir) }) /* build release */ val build_release: Logger_Task = Logger_Task("build_release", { logger => build_release_log.file.delete Isabelle_Devel.release_snapshot(logger.options, get_rev(), get_afp_rev(), progress = new File_Progress(build_release_log)) }) /* remote build_history */ sealed case class Item( known: Boolean, isabelle_version: String, afp_version: Option[String], pull_date: Date ) { def unknown: Boolean = !known def versions: (String, Option[String]) = (isabelle_version, afp_version) def known_versions(rev: String, afp_rev: Option[String]): Boolean = known && rev != "" && isabelle_version == rev && (afp_rev.isEmpty || afp_rev.get != "" && afp_version == afp_rev) } def recent_items( db: SQL.Database, days: Int, rev: String, afp_rev: Option[String], sql: PostgreSQL.Source ): List[Item] = { val afp = afp_rev.isDefined val select = Build_Log.Data.select_recent_versions( days = days, rev = rev, afp_rev = afp_rev, sql = "WHERE " + sql) db.using_statement(select)(stmt => stmt.execute_query().iterator({ res => val known = res.bool(Build_Log.Data.known) val isabelle_version = res.string(Build_Log.Prop.isabelle_version) val afp_version = if (afp) proper_string(res.string(Build_Log.Prop.afp_version)) else None val pull_date = res.date(Build_Log.Data.pull_date(afp)) Item(known, isabelle_version, afp_version, pull_date) }).toList) } def unknown_runs(items: List[Item]): List[List[Item]] = { val (run, rest) = Library.take_prefix[Item](_.unknown, items.dropWhile(_.known)) if (run.nonEmpty) run :: unknown_runs(rest) else Nil } sealed case class Remote_Build( description: String, host: String, - actual_host: String = "", user: String = "", port: Int = 0, - proxy_host: String = "", - proxy_user: String = "", - proxy_port: Int = 0, historic: Boolean = false, history: Int = 0, history_base: String = "build_history_base", java_heap: String = "", options: String = "", args: String = "", afp: Boolean = false, bulky: Boolean = false, more_hosts: List[String] = Nil, detect: PostgreSQL.Source = "", active: Boolean = true ) { def open_session(options: Options): SSH.Session = - SSH.open_session(options, host = host, user = user, port = port, actual_host = actual_host, - proxy_host = proxy_host, proxy_user = proxy_user, proxy_port = proxy_port, - permissive = proxy_host.nonEmpty) + SSH.open_session(options, host = host, user = user, port = port) def sql: PostgreSQL.Source = Build_Log.Prop.build_engine.toString + " = " + SQL.string(Build_History.engine) + " AND " + SQL.member(Build_Log.Prop.build_host.ident, host :: more_hosts) + (if (detect == "") "" else " AND " + SQL.enclose(detect)) def profile: Build_Status.Profile = Build_Status.Profile(description, history = history, afp = afp, bulky = bulky, sql = sql) def pick( options: Options, rev: String = "", filter: Item => Boolean = _ => true ): Option[(String, Option[String])] = { val afp_rev = if (afp) Some(get_afp_rev()) else None val store = Build_Log.store(options) using(store.open_database()) { db => def pick_days(days: Int, gap: Int): Option[(String, Option[String])] = { val items = recent_items(db, days, rev, afp_rev, sql).filter(filter) def runs = unknown_runs(items).filter(run => run.length >= gap) if (historic || items.exists(_.known_versions(rev, afp_rev))) { val longest_run = 
runs.foldLeft(List.empty[Item]) { case (item1, item2) => if (item1.length >= item2.length) item1 else item2 } if (longest_run.isEmpty) None else Some(longest_run(longest_run.length / 2).versions) } else if (rev != "") Some((rev, afp_rev)) else runs.flatten.headOption.map(_.versions) } pick_days(options.int("build_log_history") max history, 2) orElse pick_days(200, 5) orElse pick_days(2000, 1) } } def build_history_options: String = " -h " + Bash.string(host) + " " + (java_heap match { case "" => "" case h => "-e 'ISABELLE_TOOL_JAVA_OPTIONS=\"$ISABELLE_TOOL_JAVA_OPTIONS -Xmx" + h + "\"' " }) + options } val remote_builds_old: List[Remote_Build] = List( Remote_Build("macOS 10.15 Catalina", "laramac01", user = "makarius", - proxy_host = "laraserver", proxy_user = "makarius", options = "-m32 -M4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", args = "-a -d '~~/src/Benchmarks'"), Remote_Build("Linux A", "i21of4", user = "i21isatest", - proxy_host = "lxbroy10", proxy_user = "i21isatest", options = "-m32 -M1x4,2,4" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_SMLNJ=sml" + " -e ISABELLE_SWIPL=swipl", args = "-a -d '~~/src/Benchmarks'"), Remote_Build("Linux A", "lxbroy9", java_heap = "2g", options = "-m32 -B -M1x2,2", args = "-N -g timing"), Remote_Build("Linux Benchmarks", "lxbroy5", historic = true, history = 90, java_heap = "2g", options = "-m32 -B -M1x2,2 -t Benchmarks" + " -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" + " -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind -e ISABELLE_SMLNJ=sml" + " -e ISABELLE_SWIPL=swipl", args = "-N -a -d '~~/src/Benchmarks'", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("Benchmarks")), Remote_Build("macOS 10.14 Mojave (Old)", "lapnipkow3", options = "-m32 -M1,2 -e ISABELLE_GHC_SETUP=true -p pide_session=false", args = "-a -d '~~/src/Benchmarks'"), Remote_Build("AFP old bulky", "lrzcloud1", - proxy_host = "lxbroy10", proxy_user = "i21isatest", options = "-m64 -M6 -U30000 -s10 -t AFP", args = "-g large -g slow", afp = true, bulky = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("AFP old", "lxbroy7", args = "-N -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), Remote_Build("Poly/ML 5.7 Linux", "lxbroy8", history_base = "37074e22e8be", options = "-m32 -B -M1x2,2 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7") + " AND " + Build_Log.Settings.ML_OPTIONS + " <> " + SQL.string("-H 500")), Remote_Build("Poly/ML 5.7.1 Linux", "lxbroy8", history_base = "a9d5b59c3e12", options = "-m32 -B -M1x2,2 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " + Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")), Remote_Build("Poly/ML 5.7 macOS", "macbroy2", history_base = "37074e22e8be", options = "-m32 -B -M1x4,4 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'", args = "-a", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7")), Remote_Build("Poly/ML 5.7.1 macOS", "macbroy2", history_base = "a9d5b59c3e12", options = "-m32 -B -M1x4,4 -t polyml-5.7.1-pre2 -i 'init_component 
/home/isabelle/contrib/polyml-test-905dae2ebfda'", args = "-a", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " + Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")), Remote_Build("macOS", "macbroy2", options = "-m32 -M8" + " -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" + " -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_OPAM_ROOT=\"$ISABELLE_HOME/opam\"" + " -e ISABELLE_SMLNJ=/home/isabelle/smlnj/110.85/bin/sml" + " -p pide_session=false", args = "-a", detect = Build_Log.Prop.build_tags.undefined, history_base = "2c0f24e927dd"), Remote_Build("macOS, quick_and_dirty", "macbroy2", options = "-m32 -M8 -t quick_and_dirty -p pide_session=false", args = "-a -o quick_and_dirty", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty"), history_base = "2c0f24e927dd"), Remote_Build("macOS, skip_proofs", "macbroy2", options = "-m32 -M8 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"), history_base = "2c0f24e927dd"), Remote_Build("Poly/ML test", "lxbroy8", options = "-m32 -B -M1x2,2 -t polyml-test -i 'init_component /home/isabelle/contrib/polyml-5.7-20170217'", args = "-N -g timing", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-test")), Remote_Build("macOS 10.12 Sierra", "macbroy30", options = "-m32 -M2 -p pide_session=false", args = "-a", detect = Build_Log.Prop.build_start.toString + " > date '2017-03-03'"), Remote_Build("macOS 10.10 Yosemite", "macbroy31", options = "-m32 -M2 -p pide_session=false", args = "-a"), Remote_Build("macOS 10.8 Mountain Lion", "macbroy30", options = "-m32 -M2", args = "-a", detect = Build_Log.Prop.build_start.toString + " < date '2017-03-03'")) ::: { for { (n, hosts) <- List(1 -> List("lxbroy6"), 2 -> List("lxbroy8", "lxbroy5")) } yield { Remote_Build("AFP old", host = hosts.head, more_hosts = hosts.tail, options = "-m32 -M1x2 -t AFP -P" + n + " -e ISABELLE_GHC=ghc" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_SMLNJ=sml", args = "-N -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")) } } val remote_builds1: List[List[Remote_Build]] = { List( List(Remote_Build("Linux A", "augsburg1", options = "-m32 -B -M4" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_SMLNJ=sml" + " -e ISABELLE_SWIPL=swipl", args = "-a -d '~~/src/Benchmarks'")), List(Remote_Build("Linux B", "lxbroy10", historic = true, history = 90, options = "-m32 -B -M1x4,2,4,6", args = "-N -g timing")), List(Remote_Build("macOS 10.13 High Sierra", "lapbroy68", options = "-m32 -B -M1,2,4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("macOS 11 Big Sur", "mini1", options = "-m32 -B -M1x2,2,4 -p pide_session=false" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=/usr/local/bin/mlton" + " -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" + " -e ISABELLE_SWIPL=/usr/local/bin/swipl", args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("macOS 10.14 Mojave", "mini2", options = "-m32 -B -M1x2,2,4 -p pide_session=false" + " -e ISABELLE_OCAML=ocaml -e 
ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_MLTON=/usr/local/bin/mlton" + " -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" + " -e ISABELLE_SWIPL=/usr/local/bin/swipl", args = "-a -d '~~/src/Benchmarks'"), Remote_Build("macOS, quick_and_dirty", "mini2", options = "-m32 -M4 -t quick_and_dirty -p pide_session=false", args = "-a -o quick_and_dirty", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty")), Remote_Build("macOS, skip_proofs", "mini2", options = "-m32 -M4 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs", detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"))), List( - Remote_Build("macOS 10.15 Catalina", "monterey", actual_host = "laramac01", - user = "makarius", proxy_host = "laraserver", proxy_user = "makarius", + Remote_Build("macOS 10.15 Catalina", "monterey", user = "makarius", options = "-m32 -M4 -e ISABELLE_GHC_SETUP=true -p pide_session=false", args = "-a -d '~~/src/Benchmarks'")), List( Remote_Build("Windows", "vmnipkow9", historic = true, history = 90, options = "-m32 -M4" + " -C /cygdrive/d/isatest/contrib" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml", args = "-a", detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86-windows") + " OR " + Build_Log.Settings.ML_PLATFORM + " = " + SQL.string("x86_64_32-windows")), Remote_Build("Windows", "vmnipkow9", historic = true, history = 90, options = "-m64 -M4" + " -C /cygdrive/d/isatest/contrib" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" + " -e ISABELLE_GHC_SETUP=true" + " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml", args = "-a", detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86_64-windows")))) } val remote_builds2: List[List[Remote_Build]] = List( List( - Remote_Build("AFP", "lrzcloud2", actual_host = "10.195.4.41", - proxy_host = "lxbroy10", proxy_user = "i21isatest", + Remote_Build("AFP", "lrzcloud2", java_heap = "8g", options = "-m32 -M1x6 -t AFP" + " -e ISABELLE_GHC=ghc" + " -e ISABELLE_MLTON=mlton" + " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" + " -e ISABELLE_SMLNJ=sml", args = "-a -X large -X slow", afp = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")), - Remote_Build("AFP", "lrzcloud2", actual_host = "10.195.4.41", - proxy_host = "lxbroy10", proxy_user = "i21isatest", + Remote_Build("AFP", "lrzcloud2", java_heap = "8g", options = "-m64 -M8 -U30000 -s10 -t AFP", args = "-g large -g slow", afp = true, bulky = true, detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")))) def remote_build_history( rev: String, afp_rev: Option[String], i: Int, r: Remote_Build ) : Logger_Task = { val task_name = "build_history-" + r.host Logger_Task(task_name, { logger => using(r.open_session(logger.options)) { ssh => val results = Build_History.remote_build(ssh, isabelle_repos, isabelle_repos.ext(r.host), isabelle_identifier = "cronjob_build_history", rev = rev, afp_repos = if (afp_rev.isDefined) Some(afp_repos) else None, afp_rev = afp_rev.getOrElse(""), options = " -N " + Bash.string(task_name) + (if (i < 0) "" else "_" + (i + 1).toString) + " -R " + Bash.string(Components.default_component_repository) + " -C '$USER_HOME/.isabelle/contrib' -f " + r.build_history_options, args = "-o timeout=10800 " + r.args) for 
((log_name, bytes) <- results) { logger.log(Date.now(), log_name) Bytes.write(logger.log_dir + Path.explode(log_name), bytes) } } }) } val build_status_profiles: List[Build_Status.Profile] = (remote_builds_old :: remote_builds1 ::: remote_builds2).flatten.map(_.profile) /** task logging **/ object Log_Service { def apply(options: Options, progress: Progress = new Progress): Log_Service = new Log_Service(options, progress) } class Log_Service private(val options: Options, progress: Progress) { current_log.file.delete private val thread: Consumer_Thread[String] = Consumer_Thread.fork("cronjob: logger", daemon = true)( consume = { (text: String) => // critical File.append(current_log, text + "\n") File.append(cumulative_log, text + "\n") progress.echo(text) true }) def shutdown(): Unit = { thread.shutdown() } val hostname: String = Isabelle_System.hostname() def log(date: Date, task_name: String, msg: String): Unit = if (task_name != "") thread.send( "[" + Build_Log.print_date(date) + ", " + hostname + ", " + task_name + "]: " + msg) def start_logger(start_date: Date, task_name: String): Logger = new Logger(this, start_date, task_name) def run_task(start_date: Date, task: Logger_Task): Unit = { val logger = start_logger(start_date, task.name) val res = Exn.capture { task.body(logger) } val end_date = Date.now() val err = res match { case Exn.Res(_) => None case Exn.Exn(exn) => Output.writeln("Exception trace for " + quote(task.name) + ":") exn.printStackTrace() val first_line = split_lines(Exn.message(exn)).headOption getOrElse "exception" Some(first_line) } logger.log_end(end_date, err) } def fork_task(start_date: Date, task: Logger_Task): Task = new Task(task.name, run_task(start_date, task)) } class Logger private[Isabelle_Cronjob]( log_service: Log_Service, val start_date: Date, val task_name: String ) { def options: Options = log_service.options def log(date: Date, msg: String): Unit = log_service.log(date, task_name, msg) def log_end(end_date: Date, err: Option[String]): Unit = { val elapsed_time = end_date.time - start_date.time val msg = (if (err.isEmpty) "finished" else "ERROR " + err.get) + (if (elapsed_time.seconds < 3.0) "" else " (" + elapsed_time.message_hms + " elapsed time)") log(end_date, msg) } val log_dir = Isabelle_System.make_directory(main_dir + Build_Log.log_subdir(start_date)) log(start_date, "started") } class Task private[Isabelle_Cronjob](name: String, body: => Unit) { private val future: Future[Unit] = Future.thread("cronjob: " + name) { body } def is_finished: Boolean = future.is_finished } /** cronjob **/ def cronjob(progress: Progress, exclude_task: Set[String]): Unit = { /* soft lock */ val still_running = try { Some(File.read(main_state_file)) } catch { case ERROR(_) => None } still_running match { case None | Some("") => case Some(running) => error("Isabelle cronjob appears to be still running: " + running) } /* log service */ val log_service = Log_Service(Options.init(), progress = progress) def run(start_date: Date, task: Logger_Task): Unit = log_service.run_task(start_date, task) def run_now(task: Logger_Task): Unit = run(Date.now(), task) /* structured tasks */ def SEQ(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = _ => for (task <- tasks.iterator if !exclude_task(task.name) || task.name == "") run_now(task)) def PAR(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = { _ => @tailrec def join(running: List[Task]): Unit = { running.partition(_.is_finished) match { case (Nil, Nil) => case (Nil, _ :: _) => Time.seconds(0.5).sleep(); 
join(running) case (_ :: _, remaining) => join(remaining) } } val start_date = Date.now() val running = for (task <- tasks if !exclude_task(task.name)) yield log_service.fork_task(start_date, task) join(running) }) /* repository structure */ val hg = Mercurial.repository(isabelle_repos) val hg_graph = hg.graph() def history_base_filter(r: Remote_Build): Item => Boolean = { val base_rev = hg.id(r.history_base) val nodes = hg_graph.all_succs(List(base_rev)).toSet (item: Item) => nodes(item.isabelle_version) } /* main */ val main_start_date = Date.now() File.write(main_state_file, main_start_date.toString + " " + log_service.hostname) run(main_start_date, Logger_Task("isabelle_cronjob", logger => run_now( SEQ(List( init, PAR(List(mailman_archives, build_release)), PAR( List(remote_builds1, remote_builds2).map(remote_builds => SEQ(List( PAR(remote_builds.map(_.filter(_.active)).map(seq => SEQ( for { (r, i) <- (if (seq.length <= 1) seq.map((_, -1)) else seq.zipWithIndex) (rev, afp_rev) <- r.pick(logger.options, hg.id(), history_base_filter(r)) } yield remote_build_history(rev, afp_rev, i, r)))), Logger_Task("jenkins_logs", _ => Jenkins.download_logs(logger.options, Jenkins.build_log_jobs, main_dir)), Logger_Task("build_log_database", logger => Isabelle_Devel.build_log_database(logger.options, build_log_dirs)), Logger_Task("build_status", logger => Isabelle_Devel.build_status(logger.options)))))), exit))))) log_service.shutdown() main_state_file.file.delete } /** command line entry point **/ def main(args: Array[String]): Unit = { Command_Line.tool { var force = false var verbose = false var exclude_task = Set.empty[String] val getopts = Getopts(""" Usage: Admin/cronjob/main [OPTIONS] Options are: -f apply force to do anything -v verbose -x NAME exclude tasks with this name """, "f" -> (_ => force = true), "v" -> (_ => verbose = true), "x:" -> (arg => exclude_task += arg)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = if (verbose) new Console_Progress() else new Progress if (force) cronjob(progress, exclude_task) else error("Need to apply force to do anything") } } } diff --git a/src/Pure/General/sql.scala b/src/Pure/General/sql.scala --- a/src/Pure/General/sql.scala +++ b/src/Pure/General/sql.scala @@ -1,483 +1,483 @@ /* Title: Pure/General/sql.scala Author: Makarius Support for SQL databases: SQLite and PostgreSQL. 
*/ package isabelle import java.time.OffsetDateTime import java.sql.{DriverManager, Connection, PreparedStatement, ResultSet} import scala.collection.mutable object SQL { /** SQL language **/ type Source = String /* concrete syntax */ def escape_char(c: Char): String = c match { case '\u0000' => "\\0" case '\'' => "\\'" case '\"' => "\\\"" case '\b' => "\\b" case '\n' => "\\n" case '\r' => "\\r" case '\t' => "\\t" case '\u001a' => "\\Z" case '\\' => "\\\\" case _ => c.toString } def string(s: String): Source = s.iterator.map(escape_char).mkString("'", "", "'") def ident(s: String): Source = Long_Name.implode(Long_Name.explode(s).map(a => quote(a.replace("\"", "\"\"")))) def enclose(s: Source): Source = "(" + s + ")" def enclosure(ss: Iterable[Source]): Source = ss.mkString("(", ", ", ")") def select(columns: List[Column] = Nil, distinct: Boolean = false): Source = "SELECT " + (if (distinct) "DISTINCT " else "") + (if (columns.isEmpty) "*" else commas(columns.map(_.ident))) + " FROM " val join_outer: Source = " LEFT OUTER JOIN " val join_inner: Source = " INNER JOIN " def join(outer: Boolean): Source = if (outer) join_outer else join_inner def member(x: Source, set: Iterable[String]): Source = set.iterator.map(a => x + " = " + SQL.string(a)).mkString("(", " OR ", ")") /* types */ object Type extends Enumeration { val Boolean = Value("BOOLEAN") val Int = Value("INTEGER") val Long = Value("BIGINT") val Double = Value("DOUBLE PRECISION") val String = Value("TEXT") val Bytes = Value("BLOB") val Date = Value("TIMESTAMP WITH TIME ZONE") } def sql_type_default(T: Type.Value): Source = T.toString def sql_type_sqlite(T: Type.Value): Source = if (T == Type.Boolean) "INTEGER" else if (T == Type.Date) "TEXT" else sql_type_default(T) def sql_type_postgresql(T: Type.Value): Source = if (T == Type.Bytes) "BYTEA" else sql_type_default(T) /* columns */ object Column { def bool(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.Boolean, strict, primary_key) def int(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.Int, strict, primary_key) def long(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.Long, strict, primary_key) def double(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.Double, strict, primary_key) def string(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.String, strict, primary_key) def bytes(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.Bytes, strict, primary_key) def date(name: String, strict: Boolean = false, primary_key: Boolean = false): Column = Column(name, Type.Date, strict, primary_key) } sealed case class Column( name: String, T: Type.Value, strict: Boolean = false, primary_key: Boolean = false, expr: SQL.Source = "" ) { def make_primary_key: Column = copy(primary_key = true) def apply(table: Table): Column = Column(Long_Name.qualify(table.name, name), T, strict = strict, primary_key = primary_key) def ident: Source = if (expr == "") SQL.ident(name) else enclose(expr) + " AS " + SQL.ident(name) def decl(sql_type: Type.Value => Source): Source = ident + " " + sql_type(T) + (if (strict || primary_key) " NOT NULL" else "") def defined: String = ident + " IS NOT NULL" def undefined: String = ident + " IS NULL" def equal(s: String): Source = ident + " = " + string(s) def where_equal(s: String): Source = 
"WHERE " + equal(s) override def toString: Source = ident } /* tables */ sealed case class Table(name: String, columns: List[Column], body: Source = "") { Library.duplicates(columns.map(_.name)) match { case Nil => case bad => error("Duplicate column names " + commas_quote(bad) + " for table " + quote(name)) } def ident: Source = SQL.ident(name) def query: Source = if (body == "") error("Missing SQL body for table " + quote(name)) else SQL.enclose(body) def query_named: Source = query + " AS " + SQL.ident(name) def create(strict: Boolean, sql_type: Type.Value => Source): Source = { val primary_key = columns.filter(_.primary_key).map(_.name) match { case Nil => Nil case keys => List("PRIMARY KEY " + enclosure(keys)) } "CREATE TABLE " + (if (strict) "" else "IF NOT EXISTS ") + ident + " " + enclosure(columns.map(_.decl(sql_type)) ::: primary_key) } def create_index(index_name: String, index_columns: List[Column], strict: Boolean = false, unique: Boolean = false): Source = "CREATE " + (if (unique) "UNIQUE " else "") + "INDEX " + (if (strict) "" else "IF NOT EXISTS ") + SQL.ident(index_name) + " ON " + ident + " " + enclosure(index_columns.map(_.name)) def insert_cmd(cmd: Source, sql: Source = ""): Source = cmd + " INTO " + ident + " VALUES " + enclosure(columns.map(_ => "?")) + (if (sql == "") "" else " " + sql) def insert(sql: Source = ""): Source = insert_cmd("INSERT", sql) def delete(sql: Source = ""): Source = "DELETE FROM " + ident + (if (sql == "") "" else " " + sql) def select( select_columns: List[Column] = Nil, sql: Source = "", distinct: Boolean = false): Source = SQL.select(select_columns, distinct = distinct) + ident + (if (sql == "") "" else " " + sql) override def toString: Source = ident } /** SQL database operations **/ /* statements */ class Statement private[SQL](val db: Database, val rep: PreparedStatement) extends AutoCloseable { stmt => object bool { def update(i: Int, x: Boolean): Unit = rep.setBoolean(i, x) def update(i: Int, x: Option[Boolean]): Unit = { if (x.isDefined) update(i, x.get) else rep.setNull(i, java.sql.Types.BOOLEAN) } } object int { def update(i: Int, x: Int): Unit = rep.setInt(i, x) def update(i: Int, x: Option[Int]): Unit = { if (x.isDefined) update(i, x.get) else rep.setNull(i, java.sql.Types.INTEGER) } } object long { def update(i: Int, x: Long): Unit = rep.setLong(i, x) def update(i: Int, x: Option[Long]): Unit = { if (x.isDefined) update(i, x.get) else rep.setNull(i, java.sql.Types.BIGINT) } } object double { def update(i: Int, x: Double): Unit = rep.setDouble(i, x) def update(i: Int, x: Option[Double]): Unit = { if (x.isDefined) update(i, x.get) else rep.setNull(i, java.sql.Types.DOUBLE) } } object string { def update(i: Int, x: String): Unit = rep.setString(i, x) def update(i: Int, x: Option[String]): Unit = update(i, x.orNull) } object bytes { def update(i: Int, bytes: Bytes): Unit = { if (bytes == null) rep.setBytes(i, null) else rep.setBinaryStream(i, bytes.stream(), bytes.length) } def update(i: Int, bytes: Option[Bytes]): Unit = update(i, bytes.orNull) } object date { def update(i: Int, date: Date): Unit = db.update_date(stmt, i, date) def update(i: Int, date: Option[Date]): Unit = update(i, date.orNull) } def execute(): Boolean = rep.execute() def execute_query(): Result = new Result(this, rep.executeQuery()) def close(): Unit = rep.close() } /* results */ class Result private[SQL](val stmt: Statement, val rep: ResultSet) { res => def next(): Boolean = rep.next() def iterator[A](get: Result => A): Iterator[A] = new Iterator[A] { private var 
_next: Boolean = res.next() def hasNext: Boolean = _next def next(): A = { val x = get(res); _next = res.next(); x } } def bool(column: Column): Boolean = rep.getBoolean(column.name) def int(column: Column): Int = rep.getInt(column.name) def long(column: Column): Long = rep.getLong(column.name) def double(column: Column): Double = rep.getDouble(column.name) def string(column: Column): String = { val s = rep.getString(column.name) if (s == null) "" else s } def bytes(column: Column): Bytes = { val bs = rep.getBytes(column.name) if (bs == null) Bytes.empty else Bytes(bs) } def date(column: Column): Date = stmt.db.date(res, column) def timing(c1: Column, c2: Column, c3: Column): Timing = Timing(Time.ms(long(c1)), Time.ms(long(c2)), Time.ms(long(c3))) def get[A](column: Column, f: Column => A): Option[A] = { val x = f(column) if (rep.wasNull) None else Some(x) } def get_bool(column: Column): Option[Boolean] = get(column, bool) def get_int(column: Column): Option[Int] = get(column, int) def get_long(column: Column): Option[Long] = get(column, long) def get_double(column: Column): Option[Double] = get(column, double) def get_string(column: Column): Option[String] = get(column, string) def get_bytes(column: Column): Option[Bytes] = get(column, bytes) def get_date(column: Column): Option[Date] = get(column, date) } /* database */ trait Database extends AutoCloseable { db => /* types */ def sql_type(T: Type.Value): Source /* connection */ def connection: Connection def close(): Unit = connection.close() def transaction[A](body: => A): A = { val auto_commit = connection.getAutoCommit() try { connection.setAutoCommit(false) val savepoint = connection.setSavepoint() try { val result = body connection.commit() result } catch { case exn: Throwable => connection.rollback(savepoint); throw exn } } finally { connection.setAutoCommit(auto_commit) } } /* statements and results */ def statement(sql: Source): Statement = new Statement(db, connection.prepareStatement(sql)) def using_statement[A](sql: Source)(f: Statement => A): A = using(statement(sql))(f) def update_date(stmt: Statement, i: Int, date: Date): Unit def date(res: Result, column: Column): Date def insert_permissive(table: Table, sql: Source = ""): Source /* tables and views */ def tables: List[String] = { val result = new mutable.ListBuffer[String] val rs = connection.getMetaData.getTables(null, null, "%", null) while (rs.next) { result += rs.getString(3) } result.toList } def create_table(table: Table, strict: Boolean = false, sql: Source = ""): Unit = using_statement( table.create(strict, sql_type) + (if (sql == "") "" else " " + sql))(_.execute()) def create_index(table: Table, name: String, columns: List[Column], strict: Boolean = false, unique: Boolean = false): Unit = using_statement(table.create_index(name, columns, strict, unique))(_.execute()) def create_view(table: Table, strict: Boolean = false): Unit = { if (strict || !tables.contains(table.name)) { val sql = "CREATE VIEW " + table + " AS " + { table.query; table.body } using_statement(sql)(_.execute()) } } } } /** SQLite **/ object SQLite { // see https://www.sqlite.org/lang_datefunc.html val date_format: Date.Format = Date.Format("uuuu-MM-dd HH:mm:ss.SSS x") lazy val init_jdbc: Unit = { val lib_path = Path.explode("$ISABELLE_SQLITE_HOME/" + Platform.jvm_platform) val lib_name = File.find_files(lib_path.file) match { case List(file) => file.getName case _ => error("Exactly one file expected in directory " + lib_path.expand) } System.setProperty("org.sqlite.lib.path", 
File.platform_path(lib_path)) System.setProperty("org.sqlite.lib.name", lib_name) Class.forName("org.sqlite.JDBC") } def open_database(path: Path): Database = { init_jdbc val path0 = path.expand val s0 = File.platform_path(path0) val s1 = if (Platform.is_windows) s0.replace('\\', '/') else s0 val connection = DriverManager.getConnection("jdbc:sqlite:" + s1) new Database(path0.toString, connection) } class Database private[SQLite](name: String, val connection: Connection) extends SQL.Database { override def toString: String = name def sql_type(T: SQL.Type.Value): SQL.Source = SQL.sql_type_sqlite(T) def update_date(stmt: SQL.Statement, i: Int, date: Date): Unit = if (date == null) stmt.string(i) = (null: String) else stmt.string(i) = date_format(date) def date(res: SQL.Result, column: SQL.Column): Date = date_format.parse(res.string(column)) def insert_permissive(table: SQL.Table, sql: SQL.Source = ""): SQL.Source = table.insert_cmd("INSERT OR IGNORE", sql = sql) def rebuild(): Unit = using_statement("VACUUM")(_.execute()) } } /** PostgreSQL **/ object PostgreSQL { type Source = SQL.Source val default_port = 5432 lazy val init_jdbc: Unit = Class.forName("org.postgresql.Driver") def open_database( user: String, password: String, database: String = "", host: String = "", port: Int = 0, ssh: Option[SSH.Session] = None, ssh_close: Boolean = false ): Database = { init_jdbc if (user == "") error("Undefined database user") val db_host = proper_string(host) getOrElse "localhost" val db_port = if (port > 0 && port != default_port) ":" + port else "" val db_name = "/" + (proper_string(database) getOrElse user) val (url, name, port_forwarding) = ssh match { case None => val spec = db_host + db_port + db_name val url = "jdbc:postgresql://" + spec val name = user + "@" + spec (url, name, None) case Some(ssh) => val fw = ssh.port_forwarding(remote_host = db_host, remote_port = if (port > 0) port else default_port, ssh_close = ssh_close) - val url = "jdbc:postgresql://localhost:" + fw.local_port + db_name + val url = "jdbc:postgresql://localhost:" + fw.port + db_name val name = user + "@" + fw + db_name + " via ssh " + ssh (url, name, Some(fw)) } try { val connection = DriverManager.getConnection(url, user, password) new Database(name, connection, port_forwarding) } catch { case exn: Throwable => port_forwarding.foreach(_.close()); throw exn } } class Database private[PostgreSQL]( name: String, val connection: Connection, port_forwarding: Option[SSH.Port_Forwarding] ) extends SQL.Database { override def toString: String = name def sql_type(T: SQL.Type.Value): SQL.Source = SQL.sql_type_postgresql(T) // see https://jdbc.postgresql.org/documentation/head/8-date-time.html def update_date(stmt: SQL.Statement, i: Int, date: Date): Unit = if (date == null) stmt.rep.setObject(i, null) else stmt.rep.setObject(i, OffsetDateTime.from(date.to(Date.timezone_utc).rep)) def date(res: SQL.Result, column: SQL.Column): Date = { val obj = res.rep.getObject(column.name, classOf[OffsetDateTime]) if (obj == null) null else Date.instant(obj.toInstant) } def insert_permissive(table: SQL.Table, sql: SQL.Source = ""): SQL.Source = table.insert_cmd("INSERT", sql = sql + (if (sql == "") "" else " ") + "ON CONFLICT DO NOTHING") override def close(): Unit = { super.close(); port_forwarding.foreach(_.close()) } } } diff --git a/src/Pure/General/ssh.scala b/src/Pure/General/ssh.scala --- a/src/Pure/General/ssh.scala +++ b/src/Pure/General/ssh.scala @@ -1,453 +1,335 @@ /* Title: Pure/General/ssh.scala Author: Makarius -SSH client based on 
JSch (see also http://www.jcraft.com/jsch/examples). +SSH client on OpenSSH command-line tools, preferably with connection +multiplexing, but this does not work on Windows. */ package isabelle import java.util.{Map => JMap} -import java.io.{InputStream, OutputStream, ByteArrayOutputStream} - -import scala.collection.mutable -import scala.util.matching.Regex - -import com.jcraft.jsch.{JSch, Logger => JSch_Logger, Session => JSch_Session, SftpException, - OpenSSHConfig, UserInfo, ChannelExec, ChannelSftp, SftpATTRS, JSchException} +import java.net.ServerSocket object SSH { /* target machine: user@host syntax */ object Target { - val User_Host: Regex = "^([^@]+)@(.+)$".r + private val User_Host = "^([^@]+)@(.+)$".r def parse(s: String): (String, String) = s match { case User_Host(user, host) => (user, host) case _ => ("", s) } def unapplySeq(s: String): Option[List[String]] = parse(s) match { case (_, "") => None case (user, host) => Some(List(user, host)) } } val default_port = 22 def make_port(port: Int): Int = if (port > 0) port else default_port def port_suffix(port: Int): String = if (port == default_port) "" else ":" + port def user_prefix(user: String): String = proper_string(user) match { case None => "" case Some(name) => name + "@" } - def connect_timeout(options: Options): Int = - options.seconds("ssh_connect_timeout").ms.toInt - def alive_interval(options: Options): Int = - options.seconds("ssh_alive_interval").ms.toInt + /* OpenSSH configuration and command-line */ - def alive_count_max(options: Options): Int = - options.int("ssh_alive_count_max") + // see https://linux.die.net/man/5/ssh_config + object Config { + def entry(x: String, y: String): String = x + "=" + y + def entry(x: String, y: Int): String = entry(x, y.toString) + def entry(x: String, y: Boolean): String = entry(x, if (y) "yes" else "no") + + def make(options: Options, + port: Int = default_port, + user: String = "", + control_master: Boolean = false, + control_path: String = "" + ): List[String] = { + List("BatchMode=yes", + entry("ConnectTimeout", options.seconds("ssh_connect_timeout").ms.toInt), + entry("ServerAliveInterval", options.seconds("ssh_alive_interval").ms.toInt), + entry("ServerAliveCountMax", options.int("ssh_alive_count_max")), + entry("Compression", options.bool("ssh_compression"))) ::: + (if (port > 0 && port != default_port) List(entry("Port", port)) else Nil) ::: + (if (user.nonEmpty) List(entry("User", user)) else Nil) ::: + (if (control_master) List("ControlMaster=yes", "ControlPersist=yes") else Nil) ::: + (if (control_path.nonEmpty) List("ControlPath=" + control_path) else Nil) + } + + def make_command(command: String, config: List[String]): String = + Bash.string(command) + " " + config.map(entry => "-o " + Bash.string(entry)).mkString(" ") + } + + def sftp_string(str: String): String = { + val special = Set(' ', '*', '?', '{', '}') + if (str.exists(special)) { + val res = new StringBuilder + for (c <- str) { + if (special(c)) res += '\\' + res += c + } + res.toString() + } + else str + } /* open session */ - private def connect_session( - options: Options, - jsch: JSch, - host: String, - user: String = "", - port: Int = 0, - permissive: Boolean = false, - nominal_host: String = "", - nominal_user: String = "", - nominal_port: Option[Int] = None, - on_close: () => Unit = () => () - ): Session = { - val connect_port = make_port(port) - val session = jsch.getSession(proper_string(user).orNull, host, connect_port) - - session.setUserInfo(No_User_Info) -
session.setServerAliveInterval(alive_interval(options)) - session.setServerAliveCountMax(alive_count_max(options)) - session.setConfig("MaxAuthTries", "3") - if (permissive) session.setConfig("StrictHostKeyChecking", "no") - if (nominal_host != "") session.setHostKeyAlias(nominal_host) - - if (options.bool("ssh_compression")) { - session.setConfig("compression.s2c", "zlib@openssh.com,zlib,none") - session.setConfig("compression.c2s", "zlib@openssh.com,zlib,none") - session.setConfig("compression_level", "9") - } - session.connect(connect_timeout(options)) - new Session(options, session, on_close, - proper_string(nominal_host) getOrElse host, - proper_string(nominal_user) getOrElse user, - nominal_port getOrElse connect_port) - } - def open_session( options: Options, host: String, - user: String = "", port: Int = 0, - actual_host: String = "", - proxy_host: String = "", - proxy_user: String = "", - proxy_port: Int = 0, - permissive: Boolean = false + user: String = "", + multiplex: Boolean = !Platform.is_windows ): Session = { - val config_dir = Path.explode(options.string("ssh_config_dir")) - if (!config_dir.is_dir) error("Bad ssh config directory: " + config_dir) - - val jsch = new JSch - - val config_file = Path.explode(options.string("ssh_config_file")) - if (config_file.is_file) - jsch.setConfigRepository(OpenSSHConfig.parseFile(File.platform_path(config_file))) - - val known_hosts = config_dir + Path.explode("known_hosts") - if (!known_hosts.is_file) known_hosts.file.createNewFile - jsch.setKnownHosts(File.platform_path(known_hosts)) + val (control_master, control_path) = + if (multiplex) { + val file = Isabelle_System.tmp_file("ssh_socket") + file.delete() + (true, file.getPath) + } + else (false, "") - val identity_files = - space_explode(':', options.string("ssh_identity_files")).map(Path.explode) - for (identity_file <- identity_files if identity_file.is_file) { - try { jsch.addIdentity(File.platform_path(identity_file)) } - catch { - case exn: JSchException => - error("Error in ssh identity file " + identity_file + ": " + exn.getMessage) - } - } - - val connect_host = proper_string(actual_host) getOrElse host - val connect_port = make_port(port) - if (proxy_host == "") { - connect_session(options, jsch, host = connect_host, user = user, port = connect_port) - } - else { - val proxy = - connect_session(options, jsch, host = proxy_host, port = proxy_port, user = proxy_user) - - val fw = - try { proxy.port_forwarding(remote_host = connect_host, remote_port = connect_port) } - catch { case exn: Throwable => proxy.close(); throw exn } - - try { - connect_session(options, jsch, - host = fw.local_host, - port = fw.local_port, - permissive = permissive, - nominal_host = host, - nominal_port = Some(connect_port), - nominal_user = user, user = user, - on_close = { () => fw.close(); proxy.close() }) - } - catch { case exn: Throwable => fw.close(); proxy.close(); throw exn } - } + new Session(options, host, make_port(port), user, control_master, control_path) } - - /* user info */ + class Session private[SSH]( + val options: Options, + val host: String, + val port: Int, + val user: String, + control_master: Boolean, + control_path: String + ) extends System { + ssh => - object No_User_Info extends UserInfo { - def getPassphrase: String = null - def getPassword: String = null - def promptPassword(msg: String): Boolean = false - def promptPassphrase(msg: String): Boolean = false - def promptYesNo(msg: String): Boolean = false - def showMessage(msg: String): Unit = Output.writeln(msg) - } - - - 
/* Sftp channel */ - - type Attrs = SftpATTRS - - sealed case class Dir_Entry(name: String, is_dir: Boolean) { - def is_file: Boolean = !is_dir - } + override def toString: String = user_prefix(user) + host + port_suffix(port) + override def hg_url: String = "ssh://" + user_prefix(user) + host + "/" + override def rsync_prefix: String = user_prefix(user) + host + ":" - /* exec channel */ - - private val exec_wait_delay = Time.seconds(0.3) - - class Exec private[SSH](session: Session, channel: ChannelExec) extends AutoCloseable { - override def toString: String = "exec " + session.toString - - def close(): Unit = channel.disconnect() + /* ssh commands */ - val exit_status: Future[Int] = - Future.thread("ssh_wait") { - while (!channel.isClosed) exec_wait_delay.sleep() - channel.getExitStatus - } - - val stdin: OutputStream = channel.getOutputStream - val stdout: InputStream = channel.getInputStream - val stderr: InputStream = channel.getErrStream - - // connect after preparing streams - channel.connect(connect_timeout(session.options)) - - def result( + def run_command(command: String, + master: Boolean = false, + opts: String = "", + args: String = "", progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), strict: Boolean = true ): Process_Result = { - stdin.close() - - def read_lines(stream: InputStream, progress: String => Unit): List[String] = { - val result = new mutable.ListBuffer[String] - val line_buffer = new ByteArrayOutputStream(100) - def line_flush(): Unit = { - val line = Library.trim_line(line_buffer.toString(UTF8.charset_name)) - progress(line) - result += line - line_buffer.reset() - } - - var c = 0 - var finished = false - while (!finished) { - while ({ c = stream.read; c != -1 && c != 10 }) line_buffer.write(c) - if (c == 10) line_flush() - else if (channel.isClosed) { - if (line_buffer.size > 0) line_flush() - finished = true - } - else exec_wait_delay.sleep() - } - - result.toList - } - - val out_lines = Future.thread("ssh_stdout") { read_lines(stdout, progress_stdout) } - val err_lines = Future.thread("ssh_stderr") { read_lines(stderr, progress_stderr) } - - def terminate(): Unit = { - close() - out_lines.join - err_lines.join - exit_status.join - } - - val rc = - try { exit_status.join } - catch { case Exn.Interrupt() => terminate(); Process_Result.RC.interrupt } - - close() - if (strict && rc == Process_Result.RC.interrupt) throw Exn.Interrupt() + val config = + Config.make(options, port = port, user = user, + control_master = master, control_path = control_path) + val cmd = + Config.make_command(command, config) + + (if (opts.nonEmpty) " " + opts else "") + + (if (args.nonEmpty) " -- " + args else "") + Isabelle_System.bash(cmd, progress_stdout = progress_stdout, + progress_stderr = progress_stderr, strict = strict) + } - Process_Result(rc, out_lines.join, err_lines.join) - } - } - - - /* session */ - - class Session private[SSH]( - val options: Options, - session: JSch_Session, - on_close: () => Unit, - val nominal_host: String, - val nominal_user: String, - val nominal_port: Int - ) extends System { - ssh => - - def host: String = if (session.getHost == null) "" else session.getHost - - override def hg_url: String = - "ssh://" + user_prefix(nominal_user) + nominal_host + "/" - - override def rsync_prefix: String = - user_prefix(nominal_user) + nominal_host + ":" - - override def toString: String = - user_prefix(session.getUserName) + host + port_suffix(session.getPort) + - (if (session.isConnected) "" else " 
(disconnected)") - - - /* port forwarding */ - - def port_forwarding( - remote_port: Int, - remote_host: String = "localhost", - local_port: Int = 0, - local_host: String = "localhost", - ssh_close: Boolean = false - ): Port_Forwarding = { - val local_port1 = session.setPortForwardingL(local_host, local_port, remote_host, remote_port) - new Port_Forwarding(local_host, local_port1, remote_host, remote_port) { - override def close(): Unit = { - session.delPortForwardingL(this.local_host, this.local_port) - if (ssh_close) ssh.close() - } + def run_sftp(script: String, opts: String = "", args: String = ""): Process_Result = { + Isabelle_System.with_tmp_file("sftp") { tmp_path => + File.write(tmp_path, script) + val opts1 = "-b " + File.bash_path(tmp_path) + (if (opts.nonEmpty) " " + opts else "") + val args1 = Bash.string(host) + (if (args.nonEmpty) " " + args else "") + run_command("sftp", opts = opts1, args = args1) } } - - /* sftp channel */ - - private val sftp: ChannelSftp = session.openChannel("sftp").asInstanceOf[ChannelSftp] - sftp.connect(connect_timeout(options)) + def run_ssh(master: Boolean = false, opts: String = "", args: String = ""): Process_Result = { + val args1 = Bash.string(host) + (if (args.nonEmpty) " " + args else "") + run_command("ssh", master = master, opts = opts, args = args1) + } - override def close(): Unit = { sftp.disconnect(); session.disconnect(); on_close() } + def run_scp(opts: String = "", args: String = ""): Process_Result = { + val opts1 = "-s -p -q" + (if (opts.nonEmpty) " " + opts else "") + run_command("scp", opts = opts1, args = args) + } - val settings: JMap[String, String] = { - val home = sftp.getHome - JMap.of("HOME", home, "USER_HOME", home) + + /* init and exit */ + + val user_home: String = run_ssh(master = control_master, args = "printenv HOME").check.out + + val settings: JMap[String, String] = JMap.of("HOME", user_home, "USER_HOME", user_home) + + override def close(): Unit = { + if (control_path.nonEmpty) run_ssh(opts = "-O exit").check } + + + /* remote commands */ + + override def execute(cmd_line: String, + progress_stdout: String => Unit = (_: String) => (), + progress_stderr: String => Unit = (_: String) => (), + settings: Boolean = true, + strict: Boolean = true + ): Process_Result = { + val args1 = Bash.string(host) + " env " + Bash.string("USER_HOME=\"$HOME\"") + " " + cmd_line + run_command("ssh", args = args1, progress_stdout = progress_stdout, + progress_stderr = progress_stderr, strict = strict) + } + + override lazy val isabelle_platform: Isabelle_Platform = Isabelle_Platform(ssh = Some(ssh)) + + + /* remote file-system */ + override def expand_path(path: Path): Path = path.expand_env(settings) def remote_path(path: Path): String = expand_path(path).implode - override def bash_path(path: Path): String = Bash.string(remote_path(path)) - - def rm(path: Path): Unit = sftp.rm(remote_path(path)) - private def test_entry(path: Path, as_dir: Boolean): Boolean = - try { - val is_dir = sftp.stat(remote_path(path)).isDir - if (as_dir) is_dir else !is_dir - } - catch { case _: SftpException => false } + override def bash_path(path: Path): String = Bash.string(remote_path(path)) + def sftp_path(path: Path): String = sftp_string(remote_path(path)) - override def is_dir(path: Path): Boolean = test_entry(path, true) - override def is_file(path: Path): Boolean = test_entry(path, false) + def rm(path: Path): Unit = run_sftp("rm " + sftp_path(path)).check + + override def is_dir(path: Path): Boolean = run_ssh(args = "test -d " + 
bash_path(path)).ok + override def is_file(path: Path): Boolean = run_ssh(args = "test -f " + bash_path(path)).ok override def make_directory(path: Path): Path = { - if (!execute("mkdir -p " + remote_path(path)).ok) { + if (!execute("mkdir -p " + bash_path(path)).ok) { error("Failed to create directory: " + quote(remote_path(path))) } path } - def read_dir(path: Path): List[Dir_Entry] = { - if (!is_dir(path)) error("No such directory: " + path.toString) - - val dir_name = remote_path(path) - val dir = sftp.ls(dir_name) - (for { - i <- (0 until dir.size).iterator - a = dir.get(i).asInstanceOf[AnyRef] - name = Untyped.get[String](a, "filename") - attrs = Untyped.get[Attrs](a, "attrs") - if name != "." && name != ".." - } - yield { - Dir_Entry(name, - if (attrs.isLink) { - try { sftp.stat(dir_name + "/" + name).isDir } - catch { case _: SftpException => false } - } - else attrs.isDir) - }).toList.sortBy(_.name) - } - - def open_input(path: Path): InputStream = sftp.get(remote_path(path)) - def open_output(path: Path): OutputStream = sftp.put(remote_path(path)) + def read_dir(path: Path): List[String] = + run_sftp("ls -1 -a " + sftp_path(path)).check.out_lines.flatMap(s => + if (s == "." || s == ".." || s.endsWith("/.") || s.endsWith("/..")) None + else Some(Library.perhaps_unprefix("./", s))) override def read_file(path: Path, local_path: Path): Unit = - sftp.get(remote_path(path), File.platform_path(local_path)) - override def read_bytes(path: Path): Bytes = using(open_input(path))(Bytes.read_stream(_)) - override def read(path: Path): String = using(open_input(path))(File.read_stream) + run_scp(args = Bash.string(host + ":" + remote_path(path)) + " " + + File.bash_platform_path(local_path)).check + + override def read_bytes(path: Path): Bytes = + Isabelle_System.with_tmp_file("scp") { local_path => + read_file(path, local_path) + Bytes.read(local_path) + } + + override def read(path: Path): String = read_bytes(path).text override def write_file(path: Path, local_path: Path): Unit = - sftp.put(File.platform_path(local_path), remote_path(path)) - def write_bytes(path: Path, bytes: Bytes): Unit = - using(open_output(path))(bytes.write_stream) - def write(path: Path, text: String): Unit = - using(open_output(path))(stream => Bytes(text).write_stream(stream)) - - - /* exec channel */ + run_scp(args = File.bash_platform_path(local_path) + " " + + Bash.string(host + ":" + remote_path(path))).check - def exec(command: String): Exec = { - val channel = session.openChannel("exec").asInstanceOf[ChannelExec] - channel.setCommand("export USER_HOME=\"$HOME\"\n" + command) - new Exec(this, channel) - } + def write_bytes(path: Path, bytes: Bytes): Unit = + Isabelle_System.with_tmp_file("scp") { local_path => + Bytes.write(local_path, bytes) + write_file(path, local_path) + } - override def execute(command: String, - progress_stdout: String => Unit = (_: String) => (), - progress_stderr: String => Unit = (_: String) => (), - settings: Boolean = true, - strict: Boolean = true): Process_Result = - exec(command).result(progress_stdout, progress_stderr, strict) - - override def isabelle_platform: Isabelle_Platform = Isabelle_Platform(ssh = Some(this)) + def write(path: Path, text: String): Unit = write_bytes(path, Bytes(text)) /* tmp dirs */ def rm_tree(dir: Path): Unit = rm_tree(remote_path(dir)) def rm_tree(remote_dir: String): Unit = execute("rm -r -f " + Bash.string(remote_dir)).check def tmp_dir(): String = execute("mktemp -d -t tmp.XXXXXXXXXX").check.out override def with_tmp_dir[A](body: Path => A): A = 
{ val remote_dir = tmp_dir() - try { body(Path.explode(remote_dir)) } finally { rm_tree(remote_dir) } + try { body(Path.explode(remote_dir)) } + finally { rm_tree(remote_dir) } + } + + + /* port forwarding */ + + def port_forwarding( + remote_port: Int, + remote_host: String = "localhost", + local_port: Int = 0, + local_host: String = "localhost", + ssh_close: Boolean = false + ): Port_Forwarding = { + val port = + if (local_port > 0) local_port + else { + // FIXME race condition + val dummy = new ServerSocket(0) + val port = dummy.getLocalPort + dummy.close() + port + } + val string = List(local_host, port, remote_host, remote_port).mkString(":") + run_ssh(opts = "-L " + Bash.string(string) + " -O forward").check + + new Port_Forwarding(host, port, remote_host, remote_port) { + override def toString: String = string + override def close(): Unit = { + run_ssh(opts = "-L " + Bash.string(string) + " -O cancel").check + if (ssh_close) ssh.close() + } + } } } abstract class Port_Forwarding private[SSH]( - val local_host: String, - val local_port: Int, + val host: String, + val port: Int, val remote_host: String, val remote_port: Int - ) extends AutoCloseable { - override def toString: String = - local_host + ":" + local_port + ":" + remote_host + ":" + remote_port - } + ) extends AutoCloseable /* system operations */ trait System extends AutoCloseable { def close(): Unit = () def hg_url: String = "" def rsync_prefix: String = "" def rsync_path(path: Path): String = rsync_prefix + expand_path(path).implode def expand_path(path: Path): Path = path.expand def bash_path(path: Path): String = File.bash_path(path) def is_dir(path: Path): Boolean = path.is_dir def is_file(path: Path): Boolean = path.is_file def make_directory(path: Path): Path = Isabelle_System.make_directory(path) def with_tmp_dir[A](body: Path => A): A = Isabelle_System.with_tmp_dir("tmp")(body) def read_file(path1: Path, path2: Path): Unit = Isabelle_System.copy_file(path1, path2) def write_file(path1: Path, path2: Path): Unit = Isabelle_System.copy_file(path2, path1) def read_bytes(path: Path): Bytes = Bytes.read(path) def read(path: Path): String = File.read(path) def execute(command: String, progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), settings: Boolean = true, strict: Boolean = true): Process_Result = Isabelle_System.bash(command, progress_stdout = progress_stdout, progress_stderr = progress_stderr, env = if (settings) Isabelle_System.settings() else null, strict = strict) def isabelle_platform: Isabelle_Platform = Isabelle_Platform() } object Local extends System } diff --git a/src/Pure/System/components.scala b/src/Pure/System/components.scala --- a/src/Pure/System/components.scala +++ b/src/Pure/System/components.scala @@ -1,349 +1,350 @@ /* Title: Pure/System/components.scala Author: Makarius Isabelle system components. 
*/ package isabelle import java.io.{File => JFile} object Components { /* archive name */ object Archive { val suffix: String = ".tar.gz" def apply(name: String): String = if (name == "") error("Bad component name: " + quote(name)) else name + suffix def unapply(archive: String): Option[String] = { for { name0 <- Library.try_unsuffix(suffix, archive) name <- proper_string(name0) } yield name } def get_name(archive: String): String = unapply(archive) getOrElse error("Bad component archive name (expecting .tar.gz): " + quote(archive)) } /* component collections */ def default_component_repository: String = Isabelle_System.getenv("ISABELLE_COMPONENT_REPOSITORY") val default_components_base: Path = Path.explode("$ISABELLE_COMPONENTS_BASE") def admin(dir: Path): Path = dir + Path.explode("Admin/components") def contrib(dir: Path = Path.current, name: String = ""): Path = dir + Path.explode("contrib") + Path.explode(name) def unpack(dir: Path, archive: Path, progress: Progress = new Progress): String = { val name = Archive.get_name(archive.file_name) progress.echo("Unpacking " + name) Isabelle_System.gnutar("-xzf " + File.bash_path(archive), dir = dir).check name } def resolve(base_dir: Path, names: List[String], target_dir: Option[Path] = None, copy_dir: Option[Path] = None, progress: Progress = new Progress ): Unit = { Isabelle_System.make_directory(base_dir) for (name <- names) { val archive_name = Archive(name) val archive = base_dir + Path.explode(archive_name) if (!archive.is_file) { val remote = Components.default_component_repository + "/" + archive_name Isabelle_System.download_file(remote, archive, progress = progress) } for (dir <- copy_dir) { Isabelle_System.make_directory(dir) Isabelle_System.copy_file(archive, dir) } unpack(target_dir getOrElse base_dir, archive, progress = progress) } } private val platforms_family: Map[Platform.Family.Value, Set[String]] = Map( Platform.Family.linux_arm -> Set("arm64-linux", "arm64_32-linux"), Platform.Family.linux -> Set("x86_64-linux", "x86_64_32-linux"), Platform.Family.macos -> Set("arm64-darwin", "arm64_32-darwin", "x86_64-darwin", "x86_64_32-darwin"), Platform.Family.windows -> Set("x86_64-cygwin", "x86_64-windows", "x86_64_32-windows", "x86-windows")) private val platforms_all: Set[String] = Set("x86-linux", "x86-cygwin") ++ platforms_family.iterator.flatMap(_._2) def purge(dir: Path, platform: Platform.Family.Value): Unit = { val purge_set = platforms_all -- platforms_family(platform) File.find_files(dir.file, (file: JFile) => file.isDirectory && purge_set(file.getName), include_dirs = true).foreach(Isabelle_System.rm_tree) } /* component directories */ def directories(): List[Path] = Path.split(Isabelle_System.getenv_strict("ISABELLE_COMPONENTS")) /* component directory content */ def settings(dir: Path = Path.current): Path = dir + Path.explode("etc/settings") def components(dir: Path = Path.current): Path = dir + Path.explode("etc/components") def check_dir(dir: Path): Boolean = settings(dir).is_file || components(dir).is_file def read_components(dir: Path): List[String] = split_lines(File.read(components(dir))).filter(_.nonEmpty) def write_components(dir: Path, lines: List[String]): Unit = File.write(components(dir), terminate_lines(lines)) /* component repository content */ val components_sha1: Path = Path.explode("~~/Admin/components/components.sha1") sealed case class SHA1_Digest(digest: SHA1.Digest, name: String) { override def toString: String = digest.shasum(name) } def read_components_sha1(lines: List[String] = Nil): 
List[SHA1_Digest] = (proper_list(lines) getOrElse split_lines(File.read(components_sha1))).flatMap(line => Word.explode(line) match { case Nil => None case List(sha1, name) => Some(SHA1_Digest(SHA1.fake_digest(sha1), name)) case _ => error("Bad components.sha1 entry: " + quote(line)) }) def write_components_sha1(entries: List[SHA1_Digest]): Unit = File.write(components_sha1, entries.sortBy(_.name).mkString("", "\n", "\n")) /** manage user components **/ val components_path: Path = Path.explode("$ISABELLE_HOME_USER/etc/components") def read_components(): List[String] = if (components_path.is_file) Library.trim_split_lines(File.read(components_path)) else Nil def write_components(lines: List[String]): Unit = { Isabelle_System.make_directory(components_path.dir) File.write(components_path, Library.terminate_lines(lines)) } def update_components(add: Boolean, path0: Path, progress: Progress = new Progress): Unit = { val path = path0.expand.absolute if (!check_dir(path) && !Sessions.is_session_dir(path)) error("Bad component directory: " + path) val lines1 = read_components() val lines2 = lines1.filter(line => line.isEmpty || line.startsWith("#") || !File.eq(Path.explode(line), path)) val lines3 = if (add) lines2 ::: List(path.implode) else lines2 if (lines1 != lines3) write_components(lines3) val prefix = if (lines1 == lines3) "Unchanged" else if (add) "Added" else "Removed" progress.echo(prefix + " component " + path) } /* main entry point */ def main(args: Array[String]): Unit = { Command_Line.tool { for (arg <- args) { val add = if (arg.startsWith("+")) true else if (arg.startsWith("-")) false else error("Bad argument: " + quote(arg)) val path = Path.explode(arg.substring(1)) update_components(add, path, progress = new Console_Progress) } } } /** build and publish components **/ def build_components( options: Options, components: List[Path], progress: Progress = new Progress, publish: Boolean = false, force: Boolean = false, update_components_sha1: Boolean = false ): Unit = { val archives: List[Path] = for (path <- components) yield { path.file_name match { case Archive(_) => path case name => if (!path.is_dir) error("Bad component directory: " + path) else if (!check_dir(path)) { error("Malformed component directory: " + path + "\n (requires " + settings() + " or " + Components.components() + ")") } else { val component_path = path.expand val archive_dir = component_path.dir val archive_name = Archive(name) val archive = archive_dir + Path.explode(archive_name) if (archive.is_file && !force) { error("Component archive already exists: " + archive) } progress.echo("Packaging " + archive_name) Isabelle_System.gnutar("-czf " + File.bash_path(archive) + " " + Bash.string(name), dir = archive_dir).check archive } } } if ((publish && archives.nonEmpty) || update_components_sha1) { options.string("isabelle_components_server") match { case SSH.Target(user, host) => using(SSH.open_session(options, host = host, user = user)) { ssh => val components_dir = Path.explode(options.string("isabelle_components_dir")) val contrib_dir = Path.explode(options.string("isabelle_components_contrib_dir")) for (dir <- List(components_dir, contrib_dir) if !ssh.is_dir(dir)) { error("Bad remote directory: " + dir) } if (publish) { for (archive <- archives) { val archive_name = archive.file_name val name = Archive.get_name(archive_name) val remote_component = components_dir + archive.base val remote_contrib = contrib_dir + Path.explode(name) // component archive if (ssh.is_file(remote_component) && !force) { 
error("Remote component archive already exists: " + remote_component) } progress.echo("Uploading " + archive_name) ssh.write_file(remote_component, archive) // contrib directory val is_standard_component = Isabelle_System.with_tmp_dir("component") { tmp_dir => Isabelle_System.gnutar("-xzf " + File.bash_path(archive), dir = tmp_dir).check check_dir(tmp_dir + Path.explode(name)) } if (is_standard_component) { if (ssh.is_dir(remote_contrib)) { if (force) ssh.rm_tree(remote_contrib) else error("Remote component directory already exists: " + remote_contrib) } progress.echo("Unpacking remote " + archive_name) ssh.execute("tar -C " + ssh.bash_path(contrib_dir) + " -xzf " + ssh.bash_path(remote_component)).check } else { progress.echo_warning("No unpacking of non-standard component: " + archive_name) } } } // remote SHA1 digests if (update_components_sha1) { val lines = for { entry <- ssh.read_dir(components_dir) - if entry.is_file && entry.name.endsWith(Archive.suffix) + if ssh.is_file(components_dir + Path.basic(entry)) && + entry.endsWith(Archive.suffix) } yield { - progress.echo("Digesting remote " + entry.name) + progress.echo("Digesting remote " + entry) ssh.execute("cd " + ssh.bash_path(components_dir) + - "; sha1sum " + Bash.string(entry.name)).check.out + "; sha1sum " + Bash.string(entry)).check.out } write_components_sha1(read_components_sha1(lines)) } } case s => error("Bad isabelle_components_server: " + quote(s)) } } // local SHA1 digests { val new_entries = for (archive <- archives) yield { val name = archive.file_name progress.echo("Digesting local " + name) SHA1_Digest(SHA1.digest(archive), name) } val new_names = new_entries.map(_.name).toSet write_components_sha1( new_entries ::: read_components_sha1().filterNot(entry => new_names.contains(entry.name))) } } /* Isabelle tool wrapper */ private val relevant_options = List("isabelle_components_server", "isabelle_components_dir", "isabelle_components_contrib_dir") val isabelle_tool = Isabelle_Tool("build_components", "build and publish Isabelle components", Scala_Project.here, { args => var publish = false var update_components_sha1 = false var force = false var options = Options.init() def show_options: String = cat_lines(relevant_options.flatMap(options.get).map(_.print)) val getopts = Getopts(""" Usage: isabelle build_components [OPTIONS] ARCHIVES... DIRS... Options are: -P publish on SSH server (see options below) -f force: overwrite existing component archives and directories -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -u update all SHA1 keys in Isabelle repository Admin/components Build and publish Isabelle components as .tar.gz archives on SSH server, depending on system options: """ + Library.indent_lines(2, show_options) + "\n", "P" -> (_ => publish = true), "f" -> (_ => force = true), "o:" -> (arg => options = options + arg), "u" -> (_ => update_components_sha1 = true)) val more_args = getopts(args) if (more_args.isEmpty && !update_components_sha1) getopts.usage() val progress = new Console_Progress build_components(options, more_args.map(Path.explode), progress = progress, publish = publish, force = force, update_components_sha1 = update_components_sha1) }) }