diff --git a/src/HOL/Tools/ATP/system_on_tptp.ML b/src/HOL/Tools/ATP/system_on_tptp.ML --- a/src/HOL/Tools/ATP/system_on_tptp.ML +++ b/src/HOL/Tools/ATP/system_on_tptp.ML @@ -1,39 +1,36 @@ (* Title: HOL/Tools/ATP/system_on_tptp.ML Author: Makarius Support for remote ATPs via SystemOnTPTP. *) signature SYSTEM_ON_TPTP = sig val get_url: unit -> string val list_systems: unit -> {url: string, systems: string list} - val run_system_encoded: string -> {output: string, timing: Time.time} + val run_system_encoded: string list -> {output: string, timing: Time.time} val run_system: {system: string, problem: Path.T, extra: string, timeout: Time.time} -> {output: string, timing: Time.time} end structure SystemOnTPTP: SYSTEM_ON_TPTP = struct fun get_url () = Options.default_string \<^system_option>\SystemOnTPTP\ fun list_systems () = let val url = get_url () val systems = trim_split_lines (\<^scala>\SystemOnTPTP.list_systems\ url) in {url = url, systems = systems} end fun run_system_encoded args = - cat_strings0 [get_url (), args] + (get_url () :: args) |> \<^scala>\SystemOnTPTP.run_system\ - |> split_strings0 |> (fn [output, timing] => - {output = output, timing = Time.fromMilliseconds (Value.parse_int timing)}) + |> (fn [a, b] => {output = a, timing = Time.fromMilliseconds (Value.parse_int b)}) fun run_system {system, problem, extra, timeout} = - cat_strings0 - [system, Isabelle_System.absolute_path problem, - extra, string_of_int (Time.toMilliseconds timeout)] + [system, Isabelle_System.absolute_path problem, extra, string_of_int (Time.toMilliseconds timeout)] |> run_system_encoded end diff --git a/src/HOL/Tools/ATP/system_on_tptp.scala b/src/HOL/Tools/ATP/system_on_tptp.scala --- a/src/HOL/Tools/ATP/system_on_tptp.scala +++ b/src/HOL/Tools/ATP/system_on_tptp.scala @@ -1,91 +1,91 @@ /* Title: HOL/Tools/ATP/system_on_tptp.scala Author: Makarius Support for remote ATPs via SystemOnTPTP. 
*/ package isabelle.atp import isabelle._ import java.net.URL object SystemOnTPTP { /* requests */ def get_url(options: Options): URL = Url(options.string("SystemOnTPTP")) def post_request( url: URL, parameters: List[(String, Any)], timeout: Time = HTTP.Client.default_timeout): HTTP.Content = { try { HTTP.Client.post(url, ("NoHTML" -> 1) :: parameters, timeout = timeout, user_agent = "Sledgehammer") } catch { case ERROR(msg) => cat_error("Failed to access SystemOnTPTP server", msg) } } /* list systems */ def list_systems(url: URL): HTTP.Content = post_request(url, List("SubmitButton" -> "ListSystems", "ListStatus" -> "READY", "QuietFlag" -> "-q0")) - object List_Systems extends Scala.Fun("SystemOnTPTP.list_systems", thread = true) + object List_Systems extends Scala.Fun_String("SystemOnTPTP.list_systems", thread = true) { val here = Scala_Project.here def apply(url: String): String = list_systems(Url(url)).text } /* run system */ def run_system(url: URL, system: String, problem: String, extra: String = "", quiet: String = "01", timeout: Time = Time.seconds(60)): HTTP.Content = { val paramaters = List( "SubmitButton" -> "RunSelectedSystems", "ProblemSource" -> "FORMULAE", "FORMULAEProblem" -> problem, "ForceSystem" -> "-force", "System___" + system -> system, "TimeLimit___" + system -> timeout.seconds.ceil.toLong, "Command___" + system -> extra, "QuietFlag" -> ("-q" + quiet)) post_request(url, paramaters, timeout = timeout + Time.seconds(15)) } - object Run_System extends Scala.Fun("SystemOnTPTP.run_system", thread = true) + object Run_System extends Scala.Fun_Strings("SystemOnTPTP.run_system", thread = true) { val here = Scala_Project.here - def apply(arg: String): String = + def apply(args: List[String]): List[String] = { - val List(url, system, problem_path, extra, Value.Int(timeout)) = Library.split_strings0(arg) + val List(url, system, problem_path, extra, Value.Int(timeout)) = args val problem = File.read(Path.explode(problem_path)) val res = run_system(Url(url), system, problem, extra = extra, timeout = Time.ms(timeout)) val text = res.text val timing = res.elapsed_time.ms val bad_prover = "WARNING: " + system + " does not exist" if (split_lines(text).exists(_.startsWith(bad_prover))) { error("The ATP " + quote(system) + " is not available at SystemOnTPTP") } - else Library.cat_strings0(List(text, timing.toString)) + else List(text, timing.toString) } } } diff --git a/src/HOL/Tools/Nitpick/kodkod.scala b/src/HOL/Tools/Nitpick/kodkod.scala --- a/src/HOL/Tools/Nitpick/kodkod.scala +++ b/src/HOL/Tools/Nitpick/kodkod.scala @@ -1,178 +1,178 @@ /* Title: HOL/Tools/Nitpick/kodkod.scala Author: Makarius Scala interface for Kodkod. 
*/ package isabelle.nitpick import isabelle._ import java.util.concurrent.{TimeUnit, LinkedBlockingQueue, ThreadPoolExecutor} import org.antlr.runtime.{ANTLRInputStream, RecognitionException} import de.tum.in.isabelle.Kodkodi.{Context, KodkodiLexer, KodkodiParser} object Kodkod { /** result **/ sealed case class Result(rc: Int, out: String, err: String) { def ok: Boolean = rc == 0 def check: String = if (ok) out else error(if (err.isEmpty) "Error" else err) def encode: XML.Body = { import XML.Encode._ triple(int, string, string)((rc, out, err)) } } /** execute **/ def execute(source: String, solve_all: Boolean = false, prove: Boolean = false, max_solutions: Int = Integer.MAX_VALUE, cleanup_inst: Boolean = false, timeout: Time = Time.zero, max_threads: Int = 0): Result = { /* executor */ val pool_size = if (max_threads == 0) Isabelle_Thread.max_threads() else max_threads val executor: ThreadPoolExecutor = new ThreadPoolExecutor(pool_size, pool_size, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], new ThreadPoolExecutor.CallerRunsPolicy) val executor_killed = Synchronized(false) def executor_kill(): Unit = executor_killed.change(b => if (b) b else { Isabelle_Thread.fork() { executor.shutdownNow() }; true }) /* system context */ class Exit extends Exception("EXIT") class Exec_Context extends Context { private var rc = 0 private val out = new StringBuilder private val err = new StringBuilder def return_code(i: Int): Unit = synchronized { rc = rc max i} override def output(s: String): Unit = synchronized { Exn.Interrupt.expose() out ++= s out += '\n' } override def error(s: String): Unit = synchronized { Exn.Interrupt.expose() err ++= s err += '\n' } override def exit(i: Int): Unit = synchronized { return_code(i) executor_kill() throw new Exit } def result(): Result = synchronized { Result(rc, out.toString, err.toString) } } val context = new Exec_Context /* main */ try { val lexer = new KodkodiLexer(new ANTLRInputStream(Bytes(source).stream())) val parser = KodkodiParser.create(context, executor, false, solve_all, prove, max_solutions, cleanup_inst, lexer) val timeout_request = if (timeout.is_zero) None else { Some(Event_Timer.request(Time.now() + timeout) { context.error("Ran out of time") context.return_code(2) executor_kill() }) } try { parser.problems() } catch { case exn: RecognitionException => parser.reportError(exn) } timeout_request.foreach(_.cancel()) if (parser.getTokenStream.LA(1) != KodkodiParser.EOF) { context.error("Error: trailing tokens") context.exit(1) } if (lexer.getNumberOfSyntaxErrors + parser.getNumberOfSyntaxErrors > 0) { context.exit(1) } } catch { case _: Exit => case exn: Throwable => val message = exn.getMessage context.error(if (message.isEmpty) exn.toString else "Error: " + message) context.return_code(1) } executor.shutdownNow() context.result() } /** protocol handler **/ def warmup(): String = execute( "solver: \"MiniSat\"\n" + File.read(Path.explode("$KODKODI/examples/weber3.kki"))).check class Handler extends Session.Protocol_Handler { override def init(session: Session): Unit = warmup() } /** scala function **/ - object Fun extends Scala.Fun("kodkod", thread = true) + object Fun extends Scala.Fun_String("kodkod", thread = true) { val here = Scala_Project.here def apply(args: String): String = { val (timeout, (solve_all, (max_solutions, (max_threads, kki)))) = { import XML.Decode._ pair(int, pair(bool, pair(int, pair(int, string))))(YXML.parse_body(args)) } val result = execute(kki, solve_all = solve_all, max_solutions = max_solutions, timeout = 
Time.ms(timeout), max_threads = max_threads) YXML.string_of_body(result.encode) } } } class Scala_Functions extends Scala.Functions(Kodkod.Fun) diff --git a/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML b/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML @@ -1,735 +1,734 @@ (* Title: HOL/Tools/ATP/atp_systems.ML Author: Fabian Immler, TU Muenchen Author: Jasmin Blanchette, TU Muenchen Setup for supported ATPs. *) signature SLEDGEHAMMER_ATP_SYSTEMS = sig type term_order = ATP_Problem.term_order type atp_format = ATP_Problem.atp_format type atp_formula_role = ATP_Problem.atp_formula_role type atp_failure = ATP_Proof.atp_failure type slice_spec = (int * string) * atp_format * string * string * bool type atp_config = {exec : string list * string list, arguments : Proof.context -> bool -> string -> Time.time -> Path.T -> - term_order * (unit -> (string * int) list) * (unit -> (string * real) list) -> string, + term_order * (unit -> (string * int) list) * (unit -> (string * real) list) -> string list, proof_delims : (string * string) list, known_failures : (atp_failure * string) list, prem_role : atp_formula_role, best_slices : Proof.context -> (real * (slice_spec * string)) list, best_max_mono_iters : int, best_max_new_mono_instances : int} val default_max_mono_iters : int val default_max_new_mono_instances : int val force_sos : bool Config.T val term_order : string Config.T val e_smartN : string val e_autoN : string val e_fun_weightN : string val e_sym_offset_weightN : string val e_selection_heuristic : string Config.T val e_default_fun_weight : real Config.T val e_fun_weight_base : real Config.T val e_fun_weight_span : real Config.T val e_default_sym_offs_weight : real Config.T val e_sym_offs_weight_base : real Config.T val e_sym_offs_weight_span : real Config.T val spass_H1SOS : string val spass_H2 : string val spass_H2LR0LT0 : string val spass_H2NuVS0 : string val spass_H2NuVS0Red2 : string val spass_H2SOS : string val is_vampire_noncommercial_license_accepted : unit -> bool option val isabelle_scala_function: string list * string list val remote_atp : string -> string -> string list -> (string * string) list -> (atp_failure * string) list -> atp_formula_role -> (Proof.context -> slice_spec * string) -> string * (unit -> atp_config) val add_atp : string * (unit -> atp_config) -> theory -> theory val get_atp : theory -> string -> (unit -> atp_config) val supported_atps : theory -> string list val is_atp_installed : theory -> string -> bool val refresh_systems_on_tptp : unit -> unit val effective_term_order : Proof.context -> string -> term_order end; structure Sledgehammer_ATP_Systems : SLEDGEHAMMER_ATP_SYSTEMS = struct open ATP_Problem open ATP_Proof open ATP_Problem_Generate (* ATP configuration *) val default_max_mono_iters = 3 (* FUDGE *) val default_max_new_mono_instances = 100 (* FUDGE *) type slice_spec = (int * string) * atp_format * string * string * bool type atp_config = {exec : string list * string list, arguments : Proof.context -> bool -> string -> Time.time -> Path.T -> - term_order * (unit -> (string * int) list) * (unit -> (string * real) list) -> string, + term_order * (unit -> (string * int) list) * (unit -> (string * real) list) -> string list, proof_delims : (string * string) list, known_failures : (atp_failure * string) list, prem_role : atp_formula_role, best_slices : Proof.context -> (real * (slice_spec * string)) list, 
best_max_mono_iters : int, best_max_new_mono_instances : int} (* "best_slices" must be found empirically, taking a holistic approach since the ATPs are run in parallel. Each slice has the format (time_frac, ((max_facts, fact_filter), format, type_enc, lam_trans, uncurried_aliases), extra) where time_frac = faction of the time available given to the slice (which should add up to 1.0) extra = extra information to the prover (e.g., SOS or no SOS). The last slice should be the most "normal" one, because it will get all the time available if the other slices fail early and also because it is used if slicing is disabled (e.g., by the minimizer). *) val mepoN = "mepo" val mashN = "mash" val meshN = "mesh" val tstp_proof_delims = [("% SZS output start CNFRefutation", "% SZS output end CNFRefutation"), ("% SZS output start Refutation", "% SZS output end Refutation"), ("% SZS output start Proof", "% SZS output end Proof")] fun known_szs_failures wrap = [(Unprovable, wrap "CounterSatisfiable"), (Unprovable, wrap "Satisfiable"), (GaveUp, wrap "GaveUp"), (GaveUp, wrap "Unknown"), (GaveUp, wrap "Incomplete"), (ProofMissing, wrap "Theorem"), (ProofMissing, wrap "Unsatisfiable"), (TimedOut, wrap "Timeout"), (Inappropriate, wrap "Inappropriate"), (OutOfResources, wrap "ResourceOut"), (OutOfResources, wrap "MemoryOut"), (Interrupted, wrap "Forced"), (Interrupted, wrap "User")] val known_szs_status_failures = known_szs_failures (prefix "SZS status ") val known_says_failures = known_szs_failures (prefix " says ") structure Data = Theory_Data ( type T = ((unit -> atp_config) * stamp) Symtab.table val empty = Symtab.empty val extend = I fun merge data : T = Symtab.merge (eq_snd (op =)) data handle Symtab.DUP name => error ("Duplicate ATP: " ^ quote name) ) fun to_secs min time = Int.max (min, (Time.toMilliseconds time + 999) div 1000) val sosN = "sos" val no_sosN = "no_sos" val force_sos = Attrib.setup_config_bool \<^binding>\atp_force_sos\ (K false) val smartN = "smart" (* val kboN = "kbo" *) val lpoN = "lpo" val xweightsN = "_weights" val xprecN = "_prec" val xsimpN = "_simp" (* SPASS-specific *) (* Possible values for "atp_term_order": "smart", "(kbo|lpo)(_weights)?(_prec|_simp)?" 
*) val term_order = Attrib.setup_config_string \<^binding>\atp_term_order\ (K smartN) (* agsyHOL *) val agsyhol_config : atp_config = {exec = (["AGSYHOL_HOME"], ["agsyHOL"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn problem => fn _ => - "--proof --time-out " ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem, + ["--proof --time-out " ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem], proof_delims = tstp_proof_delims, known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((60, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val agsyhol = (agsyholN, fn () => agsyhol_config) (* Alt-Ergo *) val alt_ergo_config : atp_config = {exec = (["WHY3_HOME"], ["why3"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn problem => fn _ => - "--format tptp --prover 'Alt-Ergo,0.95.2,' --timelimit " ^ string_of_int (to_secs 1 timeout) ^ - " " ^ File.bash_path problem, + ["--format tptp --prover 'Alt-Ergo,0.95.2,' --timelimit " ^ string_of_int (to_secs 1 timeout) ^ + " " ^ File.bash_path problem], proof_delims = [], known_failures = [(ProofMissing, ": Valid"), (TimedOut, ": Timeout"), (GaveUp, ": Unknown")], prem_role = Hypothesis, best_slices = fn _ => (* FUDGE *) [(1.0, (((100, ""), TFF (Without_FOOL, Polymorphic), "poly_native", liftingN, false), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val alt_ergo = (alt_ergoN, fn () => alt_ergo_config) (* E *) val e_smartN = "smart" val e_autoN = "auto" val e_fun_weightN = "fun_weight" val e_sym_offset_weightN = "sym_offset_weight" val e_selection_heuristic = Attrib.setup_config_string \<^binding>\atp_e_selection_heuristic\ (K e_smartN) (* FUDGE *) val e_default_fun_weight = Attrib.setup_config_real \<^binding>\atp_e_default_fun_weight\ (K 20.0) val e_fun_weight_base = Attrib.setup_config_real \<^binding>\atp_e_fun_weight_base\ (K 0.0) val e_fun_weight_span = Attrib.setup_config_real \<^binding>\atp_e_fun_weight_span\ (K 40.0) val e_default_sym_offs_weight = Attrib.setup_config_real \<^binding>\atp_e_default_sym_offs_weight\ (K 1.0) val e_sym_offs_weight_base = Attrib.setup_config_real \<^binding>\atp_e_sym_offs_weight_base\ (K ~20.0) val e_sym_offs_weight_span = Attrib.setup_config_real \<^binding>\atp_e_sym_offs_weight_span\ (K 60.0) fun e_selection_heuristic_case heuristic fw sow = if heuristic = e_fun_weightN then fw else if heuristic = e_sym_offset_weightN then sow else raise Fail ("unexpected " ^ quote heuristic) fun scaled_e_selection_weight ctxt heuristic w = w * Config.get ctxt (e_selection_heuristic_case heuristic e_fun_weight_span e_sym_offs_weight_span) + Config.get ctxt (e_selection_heuristic_case heuristic e_fun_weight_base e_sym_offs_weight_base) |> Real.ceil |> signed_string_of_int fun e_selection_weight_arguments ctxt heuristic sel_weights = if heuristic = e_fun_weightN orelse heuristic = e_sym_offset_weightN then (* supplied by Stephan Schulz *) "--split-clauses=4 --split-reuse-defs --simul-paramod --forward-context-sr \ \--destructive-er-aggressive --destructive-er --presat-simplify \ \--prefer-initial-clauses -winvfreqrank -c1 -Ginvfreqconjmax -F1 \ \--delete-bad-limit=150000000 -WSelectMaxLComplexAvoidPosPred -H'(4*" ^ e_selection_heuristic_case heuristic "FunWeight" "SymOffsetWeight" ^ 
"(SimulateSOS," ^ (e_selection_heuristic_case heuristic e_default_fun_weight e_default_sym_offs_weight |> Config.get ctxt |> Real.ceil |> signed_string_of_int) ^ ",20,1.5,1.5,1" ^ (sel_weights () |> map (fn (s, w) => "," ^ s ^ ":" ^ scaled_e_selection_weight ctxt heuristic w) |> implode) ^ "),3*ConjectureGeneralSymbolWeight(PreferNonGoals,200,100,200,50,50,1,100,\ \1.5,1.5,1),1*Clauseweight(PreferProcessed,1,1,1),1*\ \FIFOWeight(PreferProcessed))' " else "-xAuto " val e_ord_weights = map (fn (s, w) => s ^ ":" ^ string_of_int w) #> space_implode "," fun e_ord_precedence [_] = "" | e_ord_precedence info = info |> map fst |> space_implode "<" fun e_term_order_info_arguments false false _ = "" | e_term_order_info_arguments gen_weights gen_prec ord_info = let val ord_info = ord_info () in (if gen_weights then "--order-weights='" ^ e_ord_weights ord_info ^ "' " else "") ^ (if gen_prec then "--precedence='" ^ e_ord_precedence ord_info ^ "' " else "") end val e_config : atp_config = {exec = (["E_HOME"], ["eprover"]), arguments = fn ctxt => fn _ => fn heuristic => fn timeout => fn problem => fn ({is_lpo, gen_weights, gen_prec, ...}, ord_info, sel_weights) => - "--auto-schedule --tstp-in --tstp-out --silent " ^ - e_selection_weight_arguments ctxt heuristic sel_weights ^ - e_term_order_info_arguments gen_weights gen_prec ord_info ^ - "--term-ordering=" ^ (if is_lpo then "LPO4" else "KBO6") ^ " " ^ - "--cpu-limit=" ^ string_of_int (to_secs 2 timeout) ^ - " --proof-object=1 " ^ - File.bash_path problem, + ["--auto-schedule --tstp-in --tstp-out --silent " ^ + e_selection_weight_arguments ctxt heuristic sel_weights ^ + e_term_order_info_arguments gen_weights gen_prec ord_info ^ + "--term-ordering=" ^ (if is_lpo then "LPO4" else "KBO6") ^ " " ^ + "--cpu-limit=" ^ string_of_int (to_secs 2 timeout) ^ + " --proof-object=1 " ^ + File.bash_path problem], proof_delims = [("# SZS output start CNFRefutation", "# SZS output end CNFRefutation")] @ tstp_proof_delims, known_failures = [(TimedOut, "Failure: Resource limit exceeded (time)"), (TimedOut, "time limit exceeded")] @ known_szs_status_failures, prem_role = Conjecture, best_slices = fn ctxt => let val heuristic = Config.get ctxt e_selection_heuristic val modern = string_ord (getenv "E_VERSION", "2.3") <> LESS val (format, enc) = if modern then (THF (With_FOOL, Monomorphic, THF_Lambda_Free), "mono_native_higher_fool") else (TFF (Without_FOOL, Monomorphic), "mono_native") in (* FUDGE *) if heuristic = e_smartN then [(0.15, (((128, meshN), format, enc, combsN, false), e_fun_weightN)), (0.15, (((128, mashN), format, enc, combsN, false), e_sym_offset_weightN)), (0.15, (((91, mepoN), format, enc, combsN, false), e_autoN)), (0.15, (((1000, meshN), format, "poly_guards??", combsN, false), e_sym_offset_weightN)), (0.15, (((256, mepoN), format, enc, liftingN, false), e_fun_weightN)), (0.25, (((64, mashN), format, enc, combsN, false), e_fun_weightN))] else [(1.0, (((500, ""), format, enc, combsN, false), heuristic))] end, best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val e = (eN, fn () => e_config) (* iProver *) val iprover_config : atp_config = {exec = (["IPROVER_HOME"], ["iproveropt", "iprover"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn problem => fn _ => - "--clausifier \"$E_HOME\"/eprover " ^ - "--clausifier_options \"--tstp-format --silent --cnf\" " ^ - "--time_out_real " ^ string_of_real (Time.toReal timeout) ^ " " ^ File.bash_path problem, + ["--clausifier \"$E_HOME\"/eprover " ^ + 
"--clausifier_options \"--tstp-format --silent --cnf\" " ^ + "--time_out_real " ^ string_of_real (Time.toReal timeout) ^ " " ^ File.bash_path problem], proof_delims = tstp_proof_delims, known_failures = [(ProofIncomplete, "% SZS output start CNFRefutation")] @ known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((150, ""), FOF, "mono_guards??", liftingN, false), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val iprover = (iproverN, fn () => iprover_config) (* LEO-II *) val leo2_config : atp_config = {exec = (["LEO2_HOME"], ["leo.opt", "leo"]), arguments = fn _ => fn full_proofs => fn _ => fn timeout => fn problem => fn _ => - "--foatp e --atp e=\"$E_HOME\"/eprover \ - \--atp epclextract=\"$E_HOME\"/epclextract \ - \--proofoutput 1 --timeout " ^ string_of_int (to_secs 1 timeout) ^ " " ^ - (if full_proofs then "--notReplLeibnizEQ --notReplAndrewsEQ --notUseExtCnfCmbd " else "") ^ - File.bash_path problem, + ["--foatp e --atp e=\"$E_HOME\"/eprover \ + \--atp epclextract=\"$E_HOME\"/epclextract \ + \--proofoutput 1 --timeout " ^ string_of_int (to_secs 1 timeout) ^ " " ^ + (if full_proofs then "--notReplLeibnizEQ --notReplAndrewsEQ --notUseExtCnfCmbd " else "") ^ + File.bash_path problem], proof_delims = tstp_proof_delims, known_failures = [(TimedOut, "CPU time limit exceeded, terminating"), (GaveUp, "No.of.Axioms")] @ known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((40, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val leo2 = (leo2N, fn () => leo2_config) (* Leo-III *) (* Include choice? Disabled now since it's disabled for Satallax as well. *) val leo3_config : atp_config = {exec = (["LEO3_HOME"], ["leo3"]), arguments = fn _ => fn full_proofs => fn _ => fn timeout => fn problem => fn _ => - File.bash_path problem ^ " " ^ "--atp cvc=\"$CVC4_SOLVER\" --atp e=\"$E_HOME\"/eprover \ - \-p -t " ^ string_of_int (to_secs 1 timeout) ^ " " ^ - (if full_proofs then "--nleq --naeq " else ""), + [File.bash_path problem ^ " " ^ "--atp cvc=\"$CVC4_SOLVER\" --atp e=\"$E_HOME\"/eprover \ + \-p -t " ^ string_of_int (to_secs 1 timeout) ^ " " ^ + (if full_proofs then "--nleq --naeq " else "")], proof_delims = tstp_proof_delims, known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((150, ""), THF (Without_FOOL, Polymorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val leo3 = (leo3N, fn () => leo3_config) (* Satallax *) (* Choice is disabled until there is proper reconstruction for it. 
*) val satallax_config : atp_config = {exec = (["SATALLAX_HOME"], ["satallax.opt", "satallax"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn problem => fn _ => - (case getenv "E_HOME" of - "" => "" - | home => "-E " ^ home ^ "/eprover ") ^ - "-p tstp -t " ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem, + [(case getenv "E_HOME" of + "" => "" + | home => "-E " ^ home ^ "/eprover ") ^ + "-p tstp -t " ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem], proof_delims = [("% SZS output start Proof", "% SZS output end Proof")], known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((150, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val satallax = (satallaxN, fn () => satallax_config) (* SPASS *) val spass_H1SOS = "-Heuristic=1 -SOS" val spass_H2 = "-Heuristic=2" val spass_H2LR0LT0 = "-Heuristic=2 -LR=0 -LT=0" val spass_H2NuVS0 = "-Heuristic=2 -RNuV=1 -Sorts=0" val spass_H2NuVS0Red2 = "-Heuristic=2 -RNuV=1 -Sorts=0 -RFRew=2 -RBRew=2 -RTaut=2" val spass_H2SOS = "-Heuristic=2 -SOS" val spass_config : atp_config = let val format = DFG Monomorphic in {exec = (["SPASS_HOME"], ["SPASS"]), arguments = fn _ => fn full_proofs => fn extra_options => fn timeout => fn problem => fn _ => - "-Isabelle=1 " ^ (if full_proofs then "-CNFRenaming=0 -Splits=0 " else "") ^ - "-TimeLimit=" ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem - |> extra_options <> "" ? prefix (extra_options ^ " "), + ["-Isabelle=1 " ^ (if full_proofs then "-CNFRenaming=0 -Splits=0 " else "") ^ + "-TimeLimit=" ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem + |> extra_options <> "" ? 
prefix (extra_options ^ " ")], proof_delims = [("Here is a proof", "Formulae used in the proof")], known_failures = [(GaveUp, "SPASS beiseite: Completion found"), (TimedOut, "SPASS beiseite: Ran out of time"), (OutOfResources, "SPASS beiseite: Maximal number of loops exceeded"), (MalformedInput, "Undefined symbol"), (MalformedInput, "Free Variable"), (Unprovable, "No formulae and clauses found in input file"), (InternalError, "Please report this error")], prem_role = Conjecture, best_slices = fn _ => (* FUDGE *) [(0.1667, (((150, meshN), format, "mono_native", combsN, true), "")), (0.1667, (((500, meshN), format, "mono_native", liftingN, true), spass_H2SOS)), (0.1666, (((50, meshN), format, "mono_native", liftingN, true), spass_H2LR0LT0)), (0.1000, (((250, meshN), format, "mono_native", combsN, true), spass_H2NuVS0)), (0.1000, (((1000, mepoN), format, "mono_native", liftingN, true), spass_H1SOS)), (0.1000, (((150, meshN), format, "poly_guards??", liftingN, false), spass_H2NuVS0Red2)), (0.1000, (((300, meshN), format, "mono_native", combsN, true), spass_H2SOS)), (0.1000, (((100, meshN), format, "mono_native", combs_and_liftingN, true), spass_H2))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} end val spass = (spassN, fn () => spass_config) (* Vampire *) fun is_vampire_noncommercial_license_accepted () = let val flag = Options.default_string \<^system_option>\vampire_noncommercial\ |> String.map Char.toLower in if flag = "yes" then SOME true else if flag = "no" then SOME false else NONE end fun check_vampire_noncommercial () = (case is_vampire_noncommercial_license_accepted () of SOME true => () | SOME false => error (Pretty.string_of (Pretty.para "The Vampire prover may be used only for noncommercial applications")) | NONE => error (Pretty.string_of (Pretty.para "The Vampire prover is not activated; to activate it, set the Isabelle system option \ \\"vampire_noncommercial\" to \"yes\" (e.g. via the Isabelle/jEdit menu Plugin Options \ \/ Isabelle / General)"))) val vampire_basic_options = "--proof tptp --output_axiom_names on $VAMPIRE_EXTRA_OPTIONS" val vampire_full_proof_options = " --proof_extra free --forced_options avatar=off:equality_proxy=off:general_splitting=off:inequality_splitting=0:naming=0" val remote_vampire_command = "vampire " ^ vampire_basic_options ^ " " ^ vampire_full_proof_options ^ " -t %d %s" val vampire_config : atp_config = let val format = TFF (Without_FOOL, Monomorphic) in {exec = (["VAMPIRE_HOME"], ["vampire"]), arguments = fn _ => fn full_proofs => fn sos => fn timeout => fn problem => fn _ => (check_vampire_noncommercial (); - vampire_basic_options ^ (if full_proofs then " " ^ vampire_full_proof_options else "") ^ - " -t " ^ string_of_int (to_secs 1 timeout) ^ " --input_file " ^ File.bash_path problem - |> sos = sosN ? prefix "--sos on "), + [vampire_basic_options ^ (if full_proofs then " " ^ vampire_full_proof_options else "") ^ + " -t " ^ string_of_int (to_secs 1 timeout) ^ " --input_file " ^ File.bash_path problem + |> sos = sosN ? 
prefix "--sos on "]), proof_delims = [("=========== Refutation ==========", "======= End of refutation =======")] @ tstp_proof_delims, known_failures = [(GaveUp, "UNPROVABLE"), (GaveUp, "CANNOT PROVE"), (Unprovable, "Satisfiability detected"), (Unprovable, "Termination reason: Satisfiable"), (Interrupted, "Aborted by signal SIGINT")] @ known_szs_status_failures, prem_role = Hypothesis, best_slices = fn ctxt => (* FUDGE *) [(0.333, (((500, meshN), format, "mono_native", combs_or_liftingN, false), sosN)), (0.333, (((150, meshN), format, "poly_tags??", combs_or_liftingN, false), sosN)), (0.334, (((50, meshN), format, "mono_native", combs_or_liftingN, false), no_sosN))] |> Config.get ctxt force_sos ? (hd #> apfst (K 1.0) #> single), best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = 2 * default_max_new_mono_instances (* FUDGE *)} end val vampire = (vampireN, fn () => vampire_config) (* Z3 with TPTP syntax (half experimental, half legacy) *) val z3_tptp_config : atp_config = let val format = TFF (Without_FOOL, Monomorphic) in {exec = (["Z3_TPTP_HOME"], ["z3_tptp"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn problem => fn _ => - "-proof -t:" ^ string_of_int (to_secs 1 timeout) ^ " -file:" ^ File.bash_path problem, + ["-proof -t:" ^ string_of_int (to_secs 1 timeout) ^ " -file:" ^ File.bash_path problem], proof_delims = [("SZS status Theorem", "")], known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(0.5, (((250, meshN), format, "mono_native", combsN, false), "")), (0.25, (((125, mepoN), format, "mono_native", combsN, false), "")), (0.125, (((62, mashN), format, "mono_native", combsN, false), "")), (0.125, (((31, meshN), format, "mono_native", combsN, false), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = 2 * default_max_new_mono_instances (* FUDGE *)} end val z3_tptp = (z3_tptpN, fn () => z3_tptp_config) (* Zipperposition *) val zipperposition_blsimp = "--mode ho-pragmatic --max-inferences 3 --ho-max-app-projections 0 --ho-max-elims 0 --ho-max-rigid-imitations 2 --ho-max-identifications 0 --ho-unif-max-depth 2 --boolean-reasoning no-cases --ext-rules ext-family --ext-rules-max-depth 1 --kbo-weight-fun invdocc --ho-prim-enum tf --ho-prim-enum-early-bird true --tptp-def-as-rewrite --ho-unif-level pragmatic-framework -q '1|const|conjecture-relative-var(1,s,f)' -q '1|prefer-processed|pnrefined(1,1,1,2,2,2,0.5)' -q '1|prefer-sos|staggered(1)' -q '2|prefer-fo|default' -q '1|prefer-neg-unit|orient-lmax(2,1,2,1,1)' -q '2|prefer-easy-ho|conjecture-relative-struct(1.5,3.5,2,3)' --ho-elim-leibniz 2 --ho-fixpoint-decider true --ho-pattern-decider false --ho-solid-decider true --ho-max-solidification 12 --select e-selection11 --solve-formulas true --sup-at-vars false --sup-at-var-headed false --lazy-cnf true --lazy-cnf-kind simp --lazy-cnf-renaming-threshold 4 --sine 50 --sine-tolerance 1.7 --sine-depth-max 3 --sine-depth-min 1 --sine-trim-implications true --ho-selection-restriction none --sup-from-var-headed false --sine-trim-implications true" val zipperposition_s6 = "--tptp-def-as-rewrite --rewrite-before-cnf true --mode ho-competitive --boolean-reasoning no-cases --ext-rules off --ho-prim-enum none --recognize-injectivity true --ho-elim-leibniz off --ho-unif-level full-framework --no-max-vars -q '3|const|conjecture-relative-var(1.02,l,f)' -q '1|prefer-ho-steps|conjecture-relative-var(1,s,f)' -q '1|prefer-processed|fifo' -q '3|by-app-var-num|pnrefined(2,1,1,1,2,2,2)' --select ho-selection5 
--prec-gen-fun unary_first --solid-subsumption false --ignore-orphans false --ho-solid-decider true --ho-fixpoint-decider true --ho-pattern-decider true --sup-at-vars false --sup-at-var-headed false --sup-from-var-headed false --ho-neg-ext-simpl true" val zipperposition_cdots = "--mode ho-competitive --boolean-reasoning cases-simpl --ext-rules ext-family --ext-rules-max-depth 1 --ho-prim-enum pragmatic --ho-prim-max 1 --bool-subterm-selection A --avatar off --recognize-injectivity true --ho-elim-leibniz 1 --ho-unif-level full-framework --no-max-vars -q '6|prefer-sos|pnrefined(1,1,1,2,2,2,0.5)' -q '6|const|conjecture-relative-var(1.02,l,f)' -q '1|prefer-processed|fifo' -q '1|prefer-non-goals|conjecture-relative-var(1,l,f)' -q '4|prefer-easy-ho|conjecture-relative-var(1.01,s,f)' --select e-selection7 --ho-choice-inst true --sine 50 --sine-tolerance 2 --sine-depth-max 4 --sine-depth-min 1 --scan-clause-ac true --lambdasup 0 --kbo-weight-fun invfreqrank" val zipperposition_config : atp_config = let val format = THF (Without_FOOL, Polymorphic, THF_Without_Choice) in {exec = (["ZIPPERPOSITION_HOME"], ["zipperposition"]), arguments = fn _ => fn _ => fn extra_options => fn timeout => fn problem => fn _ => - "--input tptp --output tptp --timeout " ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem - |> extra_options <> "" ? prefix (extra_options ^ " "), + ["--input tptp --output tptp --timeout " ^ string_of_int (to_secs 1 timeout) ^ " " ^ File.bash_path problem + |> extra_options <> "" ? prefix (extra_options ^ " ")], proof_delims = tstp_proof_delims, known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = fn _ => (* FUDGE *) [(0.333, (((128, "meshN"), format, "mono_native_higher", keep_lamsN, false), zipperposition_blsimp)), (0.333, (((32, "meshN"), format, "poly_native_higher", keep_lamsN, false), zipperposition_s6)), (0.334, (((512, "meshN"), format, "mono_native_higher", keep_lamsN, false), zipperposition_cdots))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} end val zipperposition = (zipperpositionN, fn () => zipperposition_config) (* Remote ATP invocation via SystemOnTPTP *) val no_remote_systems = {url = "", systems = [] : string list} val remote_systems = Synchronized.var "atp_remote_systems" no_remote_systems fun get_remote_systems () = Timeout.apply (seconds 10.0) SystemOnTPTP.list_systems () handle ERROR msg => (warning msg; no_remote_systems) | Timeout.TIMEOUT _ => no_remote_systems fun find_remote_system name [] systems = find_first (String.isPrefix (name ^ "---")) systems | find_remote_system name (version :: versions) systems = case find_first (String.isPrefix (name ^ "---" ^ version)) systems of NONE => find_remote_system name versions systems | res => res fun get_remote_system name versions = Synchronized.change_result remote_systems (fn remote => (if #url remote <> SystemOnTPTP.get_url () orelse null (#systems remote) then get_remote_systems () else remote) |> ` #systems) |> `(find_remote_system name versions) fun the_remote_system name versions = (case get_remote_system name versions of (SOME sys, _) => sys | (NONE, []) => error "SystemOnTPTP is currently not available" | (NONE, syss) => (case syss |> filter_out (String.isPrefix "%") |> filter_out (curry (op =) "") of [] => error "SystemOnTPTP is currently not available" | [msg] => error ("SystemOnTPTP is currently not available: " ^ msg) | syss => error ("System " ^ quote name ^ " is not available at SystemOnTPTP.\n(Available 
systems: " ^ commas_quote syss ^ ".)"))) val max_remote_secs = 1000 (* give Geoff Sutcliffe's servers a break *) val isabelle_scala_function = (["SCALA_HOME"], ["bin/scala"]) fun remote_config system_name system_versions proof_delims known_failures prem_role best_slice = {exec = isabelle_scala_function, arguments = fn _ => fn _ => fn command => fn timeout => fn problem => fn _ => - (cat_strings0 [the_remote_system system_name system_versions, Isabelle_System.absolute_path problem, - command, string_of_int (Int.min (max_remote_secs, to_secs 1 timeout) * 1000)]), + [the_remote_system system_name system_versions, Isabelle_System.absolute_path problem, + command, string_of_int (Int.min (max_remote_secs, to_secs 1 timeout) * 1000)], proof_delims = union (op =) tstp_proof_delims proof_delims, known_failures = known_failures @ known_says_failures, prem_role = prem_role, best_slices = fn ctxt => [(1.0, best_slice ctxt)], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} : atp_config fun remotify_config system_name system_versions best_slice ({proof_delims, known_failures, prem_role, ...} : atp_config) = remote_config system_name system_versions proof_delims known_failures prem_role best_slice fun remote_atp name system_name system_versions proof_delims known_failures prem_role best_slice = (remote_prefix ^ name, fn () => remote_config system_name system_versions proof_delims known_failures prem_role best_slice) fun remotify_atp (name, config) system_name system_versions best_slice = (remote_prefix ^ name, remotify_config system_name system_versions best_slice o config) fun gen_remote_waldmeister name type_enc = remote_atp name "Waldmeister" ["710"] tstp_proof_delims ([(OutOfResources, "Too many function symbols"), (Inappropriate, "**** Unexpected end of file."), (Crashed, "Unrecoverable Segmentation Fault")] @ known_szs_status_failures) Hypothesis (K (((50, ""), CNF_UEQ, type_enc, combsN, false), "") (* FUDGE *)) val remote_agsyhol = remotify_atp agsyhol "agsyHOL" ["1.0", "1"] (K (((60, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), "") (* FUDGE *)) val remote_alt_ergo = remotify_atp alt_ergo "Alt-Ergo" ["0.95.2"] (K (((250, ""), TFF (Without_FOOL, Polymorphic), "poly_native", keep_lamsN, false), "") (* FUDGE *)) val remote_e = remotify_atp e "E" ["2.0", "1.9.1", "1.8"] (K (((750, ""), TFF (Without_FOOL, Monomorphic), "mono_native", combsN, false), "") (* FUDGE *)) val remote_iprover = remotify_atp iprover "iProver" ["0.99"] (K (((150, ""), FOF, "mono_guards??", liftingN, false), "") (* FUDGE *)) val remote_leo2 = remotify_atp leo2 "LEO-II" ["1.5.0", "1.4", "1.3", "1.2", "1"] (K (((40, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", liftingN, false), "") (* FUDGE *)) val remote_leo3 = remotify_atp leo3 "Leo-III" ["1.1"] (K (((150, ""), THF (Without_FOOL, Polymorphic, THF_Without_Choice), "poly_native_higher", keep_lamsN, false), "") (* FUDGE *)) val remote_vampire = remotify_atp vampire "Vampire" ["THF-4.4"] (K (((400, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), remote_vampire_command) (* FUDGE *)) val remote_waldmeister = gen_remote_waldmeister waldmeisterN "raw_mono_tags??"
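The remote ATP entries above now hand SystemOnTPTP a plain argument list (system name, absolute problem path, command, timeout in milliseconds) instead of a null-separated string, and on the Scala side such a list arrives through a Scala.Fun_Strings handler, as in SystemOnTPTP.Run_System earlier in this changeset. The following sketch only illustrates that calling convention with a hypothetical handler: the name "Demo.run_remote" and its body are illustrative assumptions, not part of this changeset, and the usual import isabelle._ is assumed.

object Demo_Run_Remote extends Scala.Fun_Strings("Demo.run_remote", thread = true)
{
  val here = Scala_Project.here
  def apply(args: List[String]): List[String] =
    args match {
      // same argument order as remote_config above:
      // system, problem path, command, timeout in milliseconds
      case List(system, problem_path, command, Value.Int(timeout_ms)) =>
        val output =
          "would run " + quote(system) + " on " + problem_path +
            " with " + quote(command) + " for " + timeout_ms + "ms"
        List(output, timeout_ms.toString)
      case _ => error("Bad arguments")
    }
}

Compared with the former cat_strings0/split_strings0 encoding, the list form needs no manual splitting or joining on either side of the ML/Scala boundary.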
val remote_zipperposition = remotify_atp zipperposition "Zipperpin" ["2.0"] (K (((512, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), "") (* FUDGE *)) (* Dummy prover *) fun dummy_config prem_role format type_enc uncurried_aliases : atp_config = {exec = (["ISABELLE_ATP"], ["scripts/dummy_atp"]), - arguments = K (K (K (K (K (K ""))))), + arguments = K (K (K (K (K (K []))))), proof_delims = [], known_failures = known_szs_status_failures, prem_role = prem_role, best_slices = K [(1.0, (((200, ""), format, type_enc, if is_format_higher_order format then keep_lamsN else combsN, uncurried_aliases), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val dummy_tfx_format = TFF (With_FOOL, Polymorphic) val dummy_tfx_config = dummy_config Hypothesis dummy_tfx_format "mono_native_fool" false val dummy_tfx = (dummy_tfxN, fn () => dummy_tfx_config) (* Setup *) fun add_atp (name, config) thy = Data.map (Symtab.update_new (name, (config, stamp ()))) thy handle Symtab.DUP name => error ("Duplicate ATP: " ^ quote name) fun get_atp thy name = fst (the (Symtab.lookup (Data.get thy) name)) handle Option.Option => error ("Unknown ATP: " ^ name) val supported_atps = Symtab.keys o Data.get fun is_atp_installed thy name = let val {exec, ...} = get_atp thy name () in exists (fn var => getenv var <> "") (fst exec) end fun refresh_systems_on_tptp () = Synchronized.change remote_systems (fn _ => get_remote_systems ()) fun effective_term_order ctxt atp = let val ord = Config.get ctxt term_order in if ord = smartN then {is_lpo = false, gen_weights = (atp = spassN), gen_prec = (atp = spassN), gen_simp = false} else let val is_lpo = String.isSubstring lpoN ord in {is_lpo = is_lpo, gen_weights = not is_lpo andalso String.isSubstring xweightsN ord, gen_prec = String.isSubstring xprecN ord, gen_simp = String.isSubstring xsimpN ord} end end val atps = [agsyhol, alt_ergo, e, iprover, leo2, leo3, satallax, spass, vampire, z3_tptp, zipperposition, remote_agsyhol, remote_alt_ergo, remote_e, remote_iprover, remote_leo2, remote_leo3, remote_vampire, remote_waldmeister, remote_zipperposition, dummy_tfx] val _ = Theory.setup (fold add_atp atps) end; diff --git a/src/HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML b/src/HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML @@ -1,400 +1,400 @@ (* Title: HOL/Tools/Sledgehammer/sledgehammer_prover_atp.ML Author: Fabian Immler, TU Muenchen Author: Makarius Author: Jasmin Blanchette, TU Muenchen ATPs as Sledgehammer provers. *) signature SLEDGEHAMMER_PROVER_ATP = sig type mode = Sledgehammer_Prover.mode type prover = Sledgehammer_Prover.prover val atp_dest_dir : string Config.T val atp_problem_prefix : string Config.T val atp_completish : int Config.T val atp_full_names : bool Config.T val is_ho_atp : Proof.context -> string -> bool val run_atp : mode -> string -> prover end; structure Sledgehammer_Prover_ATP : SLEDGEHAMMER_PROVER_ATP = struct open ATP_Util open ATP_Problem open ATP_Problem_Generate open ATP_Proof open ATP_Proof_Reconstruct open Sledgehammer_Util open Sledgehammer_Proof_Methods open Sledgehammer_Isar open Sledgehammer_ATP_Systems open Sledgehammer_Prover (* Empty string means create files in Isabelle's temporary files directory. 
*) val atp_dest_dir = Attrib.setup_config_string \<^binding>\sledgehammer_atp_dest_dir\ (K "") val atp_problem_prefix = Attrib.setup_config_string \<^binding>\sledgehammer_atp_problem_prefix\ (K "prob") val atp_completish = Attrib.setup_config_int \<^binding>\sledgehammer_atp_completish\ (K 0) (* In addition to being easier to read, readable names are often much shorter, especially if types are mangled in names. This makes a difference for some provers (e.g., E). For these reason, short names are enabled by default. *) val atp_full_names = Attrib.setup_config_bool \<^binding>\sledgehammer_atp_full_names\ (K false) fun is_atp_of_format is_format ctxt name = let val thy = Proof_Context.theory_of ctxt in (case try (get_atp thy) name of SOME config => exists (fn (_, ((_, format, _, _, _), _)) => is_format format) (#best_slices (config ()) ctxt) | NONE => false) end val is_ho_atp = is_atp_of_format is_format_higher_order fun choose_type_enc strictness best_type_enc format = the_default best_type_enc #> type_enc_of_string strictness #> adjust_type_enc format fun has_bound_or_var_of_type pred = exists_subterm (fn Var (_, T as Type _) => pred T | Abs (_, T as Type _, _) => pred T | _ => false) (* Unwanted equalities are those between a (bound or schematic) variable that does not properly occur in the second operand. *) val is_exhaustive_finite = let fun is_bad_equal (Var z) t = not (exists_subterm (fn Var z' => z = z' | _ => false) t) | is_bad_equal (Bound j) t = not (loose_bvar1 (t, j)) | is_bad_equal _ _ = false fun do_equals t1 t2 = is_bad_equal t1 t2 orelse is_bad_equal t2 t1 fun do_formula pos t = (case (pos, t) of (_, \<^const>\Trueprop\ $ t1) => do_formula pos t1 | (true, Const (\<^const_name>\Pure.all\, _) $ Abs (_, _, t')) => do_formula pos t' | (true, Const (\<^const_name>\All\, _) $ Abs (_, _, t')) => do_formula pos t' | (false, Const (\<^const_name>\Ex\, _) $ Abs (_, _, t')) => do_formula pos t' | (_, \<^const>\Pure.imp\ $ t1 $ t2) => do_formula (not pos) t1 andalso (t2 = \<^prop>\False\ orelse do_formula pos t2) | (_, \<^const>\HOL.implies\ $ t1 $ t2) => do_formula (not pos) t1 andalso (t2 = \<^const>\False\ orelse do_formula pos t2) | (_, \<^const>\Not\ $ t1) => do_formula (not pos) t1 | (true, \<^const>\HOL.disj\ $ t1 $ t2) => forall (do_formula pos) [t1, t2] | (false, \<^const>\HOL.conj\ $ t1 $ t2) => forall (do_formula pos) [t1, t2] | (true, Const (\<^const_name>\HOL.eq\, _) $ t1 $ t2) => do_equals t1 t2 | (true, Const (\<^const_name>\Pure.eq\, _) $ t1 $ t2) => do_equals t1 t2 | _ => false) in do_formula true end (* Facts containing variables of finite types such as "unit" or "bool" or of the form "ALL x. x = A | x = B | x = C" are likely to lead to untypable proofs for unsound type encodings. *) fun is_dangerous_prop ctxt = transform_elim_prop #> (has_bound_or_var_of_type (is_type_surely_finite ctxt) orf is_exhaustive_finite) fun get_slices slice slices = (0 upto length slices - 1) ~~ slices |> not slice ? (List.last #> single) fun get_facts_of_filter _ [(_, facts)] = facts | get_facts_of_filter fact_filter factss = (case AList.lookup (op =) factss fact_filter of SOME facts => facts | NONE => snd (hd factss)) (* For low values of "max_facts", this fudge value ensures that most slices are invoked with a nontrivial amount of facts. 
*) val max_fact_factor_fudge = 5 val mono_max_privileged_facts = 10 fun suffix_of_mode Auto_Try = "_try" | suffix_of_mode Try = "_try" | suffix_of_mode Normal = "" | suffix_of_mode MaSh = "" | suffix_of_mode Minimize = "_min" (* Give the ATPs some slack before interrupting them the hard way. "z3_tptp" on Linux appears to be the only ATP that does not honor its time limit. *) val atp_timeout_slack = seconds 1.0 (* Important messages are important but not so important that users want to see them each time. *) val atp_important_message_keep_quotient = 25 fun run_atp mode name ({debug, verbose, overlord, type_enc, strict, lam_trans, uncurried_aliases, fact_filter, max_facts, max_mono_iters, max_new_mono_instances, isar_proofs, compress, try0, smt_proofs, slice, minimize, timeout, preplay_timeout, ...} : params) ({comment, state, goal, subgoal, subgoal_count, factss, found_proof, ...} : prover_problem) = let val thy = Proof.theory_of state val ctxt = Proof.context_of state val {exec, arguments, proof_delims, known_failures, prem_role, best_slices, best_max_mono_iters, best_max_new_mono_instances, ...} = get_atp thy name () val full_proofs = isar_proofs |> the_default (mode = Minimize) val local_name = perhaps (try (unprefix remote_prefix)) name val completish = Config.get ctxt atp_completish val atp_mode = if completish > 0 then Sledgehammer_Completish completish else Sledgehammer val (_, hyp_ts, concl_t) = strip_subgoal goal subgoal ctxt val (dest_dir, problem_prefix) = if overlord then overlord_file_location_of_prover name else (Config.get ctxt atp_dest_dir, Config.get ctxt atp_problem_prefix) val problem_file_name = Path.basic (problem_prefix ^ (if overlord then "" else serial_string ()) ^ suffix_of_mode mode ^ "_" ^ string_of_int subgoal) val prob_path = if dest_dir = "" then File.tmp_path problem_file_name else if File.exists (Path.explode dest_dir) then Path.explode dest_dir + problem_file_name else error ("No such directory: " ^ quote dest_dir) val executable = (case find_first (fn var => getenv var <> "") (fst exec) of SOME var => let val pref = getenv var ^ "/" val paths = map (Path.explode o prefix pref) (if ML_System.platform_is_windows then map (suffix ".exe") (snd exec) @ snd exec else snd exec); in (case find_first File.exists paths of SOME path => path | NONE => error ("Bad executable: " ^ Path.print (hd paths))) end | NONE => error ("The environment variable " ^ quote (List.last (fst exec)) ^ " is not set")) fun run () = let (* If slicing is disabled, we expand the last slice to fill the entire time available. 
*) val all_slices = best_slices ctxt val actual_slices = get_slices slice all_slices fun max_facts_of_slices (slices : (real * (slice_spec * string)) list) = fold (Integer.max o fst o #1 o fst o snd) slices 0 val num_actual_slices = length actual_slices val max_fact_factor = Real.fromInt (case max_facts of NONE => max_facts_of_slices all_slices | SOME max => max) / Real.fromInt (max_facts_of_slices (map snd actual_slices)) fun monomorphize_facts facts = let val ctxt = ctxt |> repair_monomorph_context max_mono_iters best_max_mono_iters max_new_mono_instances best_max_new_mono_instances (* pseudo-theorem involving the same constants as the subgoal *) val subgoal_th = Logic.list_implies (hyp_ts, concl_t) |> Skip_Proof.make_thm thy val rths = facts |> chop mono_max_privileged_facts |>> map (pair 1 o snd) ||> map (pair 2 o snd) |> op @ |> cons (0, subgoal_th) in Monomorph.monomorph atp_schematic_consts_of ctxt rths |> tl |> curry ListPair.zip (map fst facts) |> maps (fn (name, rths) => map (pair name o zero_var_indexes o snd) rths) end fun run_slice time_left (cache_key, cache_value) (slice, (time_frac, (key as ((best_max_facts, best_fact_filter), format, best_type_enc, best_lam_trans, best_uncurried_aliases), extra))) = let val effective_fact_filter = fact_filter |> the_default best_fact_filter val facts = get_facts_of_filter effective_fact_filter factss val num_facts = Real.ceil (max_fact_factor * Real.fromInt best_max_facts) + max_fact_factor_fudge |> Integer.min (length facts) val generate_info = (case format of DFG _ => true | _ => false) val strictness = if strict then Strict else Non_Strict val type_enc = type_enc |> choose_type_enc strictness best_type_enc format val sound = is_type_enc_sound type_enc val real_ms = Real.fromInt o Time.toMilliseconds val slice_timeout = (real_ms time_left |> (if slice < num_actual_slices - 1 then curry Real.min (time_frac * real_ms timeout) else I)) * 0.001 |> seconds val generous_slice_timeout = if mode = MaSh then one_day else slice_timeout + atp_timeout_slack val _ = if debug then quote name ^ " slice #" ^ string_of_int (slice + 1) ^ " with " ^ string_of_int num_facts ^ " fact" ^ plural_s num_facts ^ " for " ^ string_of_time slice_timeout ^ "..." |> writeln else () val readable_names = not (Config.get ctxt atp_full_names) val lam_trans = lam_trans |> the_default best_lam_trans val uncurried_aliases = uncurried_aliases |> the_default best_uncurried_aliases val value as (atp_problem, _, _, _) = if cache_key = SOME key then cache_value else facts |> not sound ? filter_out (is_dangerous_prop ctxt o Thm.prop_of o snd) |> take num_facts |> not (is_type_enc_polymorphic type_enc) ? 
monomorphize_facts |> map (apsnd Thm.prop_of) |> generate_atp_problem ctxt generate_info format prem_role type_enc atp_mode lam_trans uncurried_aliases readable_names true hyp_ts concl_t fun sel_weights () = atp_problem_selection_weights atp_problem fun ord_info () = atp_problem_term_order_info atp_problem val ord = effective_term_order ctxt name val args = arguments ctxt full_proofs extra slice_timeout prob_path (ord, ord_info, sel_weights) - val command = File.bash_path executable ^ " " ^ args + val command = space_implode " " (File.bash_path executable :: args) fun run_command () = if exec = isabelle_scala_function then let val {output, timing} = SystemOnTPTP.run_system_encoded args in (output, timing) end else let val res = Isabelle_System.bash_process ("exec 2>&1; " ^ command) in (Process_Result.out res, Process_Result.timing_elapsed res) end val _ = atp_problem |> lines_of_atp_problem format ord ord_info |> (exec <> isabelle_scala_function) ? cons ("% " ^ command ^ "\n" ^ (if comment = "" then "" else "% " ^ comment ^ "\n")) |> File.write_list prob_path val ((output, run_time), (atp_proof, outcome)) = Timeout.apply generous_slice_timeout run_command () |>> overlord ? (fn output => prefix ("% " ^ command ^ "\n% " ^ timestamp () ^ "\n") output) |> (fn accum as (output, _) => (accum, extract_tstplike_proof_and_outcome verbose proof_delims known_failures output |>> atp_proof_of_tstplike_proof (perhaps (try (unprefix remote_prefix)) name) atp_problem handle UNRECOGNIZED_ATP_PROOF () => ([], SOME ProofUnparsable))) handle Timeout.TIMEOUT _ => (("", slice_timeout), ([], SOME TimedOut)) | ERROR msg => (("", Time.zeroTime), ([], SOME (UnknownError msg))) val outcome = (case outcome of NONE => (case used_facts_in_unsound_atp_proof ctxt (map fst facts) atp_proof of SOME facts => let val failure = UnsoundProof (is_type_enc_sound type_enc, sort string_ord facts) in if debug then (warning (string_of_atp_failure failure); NONE) else SOME failure end | NONE => (found_proof (); NONE)) | _ => outcome) in ((SOME key, value), (output, run_time, facts, atp_proof, outcome), SOME (format, type_enc)) end val timer = Timer.startRealTimer () fun maybe_run_slice slice (result as (cache, (_, run_time0, _, _, SOME _), _)) = let val time_left = timeout - Timer.checkRealTimer timer in if time_left <= Time.zeroTime then result else run_slice time_left cache slice |> (fn (cache, (output, run_time, used_from, atp_proof, outcome), format_type_enc) => (cache, (output, run_time0 + run_time, used_from, atp_proof, outcome), format_type_enc)) end | maybe_run_slice _ result = result in ((NONE, ([], Symtab.empty, [], Symtab.empty)), ("", Time.zeroTime, [], [], SOME InternalError), NONE) |> fold maybe_run_slice actual_slices end (* If the problem file has not been exported, remove it; otherwise, export the proof file too. 
*) fun clean_up () = if dest_dir = "" then (try File.rm prob_path; ()) else () fun export (_, (output, _, _, _, _), _) = if dest_dir = "" then () else File.write (Path.explode (Path.implode prob_path ^ "_proof")) output val ((_, (_, pool, lifted, sym_tab)), (output, run_time, used_from, atp_proof, outcome), SOME (format, type_enc)) = with_cleanup clean_up run () |> tap export val important_message = if mode = Normal andalso Random.random_range 0 (atp_important_message_keep_quotient - 1) = 0 then extract_important_message output else "" val (used_facts, preferred_methss, message) = (case outcome of NONE => let val used_facts = sort_by fst (used_facts_in_atp_proof ctxt (map fst used_from) atp_proof) val needs_full_types = is_typed_helper_used_in_atp_proof atp_proof val preferred_methss = (Metis_Method (NONE, NONE), bunches_of_proof_methods ctxt try0 smt_proofs needs_full_types (if atp_proof_prefers_lifting atp_proof then liftingN else hide_lamsN)) in (used_facts, preferred_methss, fn preplay => let val _ = if verbose then writeln "Generating proof text..." else () fun isar_params () = let val metis_type_enc = if is_typed_helper_used_in_atp_proof atp_proof then SOME full_typesN else NONE val metis_lam_trans = if atp_proof_prefers_lifting atp_proof then SOME liftingN else NONE val atp_proof = atp_proof |> termify_atp_proof ctxt name format type_enc pool lifted sym_tab |> local_name = spassN ? introduce_spass_skolems |> factify_atp_proof (map fst used_from) hyp_ts concl_t in (verbose, (metis_type_enc, metis_lam_trans), preplay_timeout, compress, try0, minimize, atp_proof, goal) end val one_line_params = (preplay (), proof_banner mode name, subgoal, subgoal_count) val num_chained = length (#facts (Proof.goal state)) in proof_text ctxt debug isar_proofs smt_proofs isar_params num_chained one_line_params ^ (if important_message <> "" then "\n\nImportant message from Dr. Geoff Sutcliffe:\n" ^ important_message else "") end) end | SOME failure => ([], (Auto_Method (* dummy *), []), fn _ => string_of_atp_failure failure)) in {outcome = outcome, used_facts = used_facts, used_from = used_from, preferred_methss = preferred_methss, run_time = run_time, message = message} end end; diff --git a/src/Pure/Admin/build_csdp.scala b/src/Pure/Admin/build_csdp.scala --- a/src/Pure/Admin/build_csdp.scala +++ b/src/Pure/Admin/build_csdp.scala @@ -1,207 +1,207 @@ /* Title: Pure/Admin/build_csdp.scala Author: Makarius Build Isabelle CSDP component from official download. 
*/ package isabelle object Build_CSDP { // Note: version 6.2.0 does not quite work for the "sos" proof method val default_download_url = "https://github.com/coin-or/Csdp/archive/releases/6.1.1.tar.gz" /* flags */ sealed case class Flags(platform: String, CFLAGS: String = "", LIBS: String = "") { val changed: List[(String, String)] = List("CFLAGS" -> CFLAGS, "LIBS" -> LIBS).filter(p => p._2.nonEmpty) def print: Option[String] = if (changed.isEmpty) None else Some(" * " + platform + ":\n" + changed.map(p => " " + p._1 + "=" + p._2) .mkString("\n")) def change(path: Path): Unit = { def change_line(line: String, entry: (String, String)): String = line.replaceAll(entry._1 + "=.*", entry._1 + "=" + entry._2) File.change(path, s => split_lines(s).map(line => changed.foldLeft(line)(change_line)).mkString("\n")) } } val build_flags: List[Flags] = List( Flags("arm64-linux", CFLAGS = "-O3 -ansi -Wall -DNOSHORTS -DBIT64 -DUSESIGTERM -DUSEGETTIME -I../include", LIBS = "-static -L../lib -lsdp -llapack -lblas -lgfortran -lm"), Flags("x86_64-linux", CFLAGS = "-O3 -ansi -Wall -DNOSHORTS -DBIT64 -DUSESIGTERM -DUSEGETTIME -I../include", LIBS = "-static -L../lib -lsdp -llapack -lblas -lgfortran -lquadmath -lm"), Flags("x86_64-darwin", CFLAGS = "-O3 -Wall -DNOSHORTS -DBIT64 -DUSESIGTERM -DUSEGETTIME -I../include", LIBS = "-L../lib -lsdp -llapack -lblas -lm"), Flags("x86_64-windows")) /* build CSDP */ def build_csdp( download_url: String = default_download_url, verbose: Boolean = false, progress: Progress = new Progress, target_dir: Path = Path.current, mingw: MinGW = MinGW.none): Unit = { mingw.check Isabelle_System.with_tmp_dir("build")(tmp_dir => { /* component */ val Archive_Name = """^.*?([^/]+)$""".r val Version = """^[^0-9]*([0-9].*)\.tar.gz$""".r val archive_name = download_url match { case Archive_Name(name) => name case _ => error("Failed to determine source archive name from " + quote(download_url)) } val version = archive_name match { case Version(version) => version case _ => error("Failed to determine component version from " + quote(archive_name)) } val component_name = "csdp-" + version val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(component_name)) progress.echo("Component " + component_dir) /* platform */ val platform_name = proper_string(Isabelle_System.getenv("ISABELLE_WINDOWS_PLATFORM64")) orElse proper_string(Isabelle_System.getenv("ISABELLE_PLATFORM64")) getOrElse error("No 64bit platform") val platform_dir = Isabelle_System.make_directory(component_dir + Path.basic(platform_name)) /* download source */ val archive_path = tmp_dir + Path.basic(archive_name) - Isabelle_System.download(download_url, archive_path, progress = progress) + Isabelle_System.download_file(download_url, archive_path, progress = progress) Isabelle_System.bash("tar xzf " + File.bash_path(archive_path), cwd = tmp_dir.file).check val source_name = File.get_dir(tmp_dir) Isabelle_System.bash( "tar xzf " + archive_path + " && mv " + Bash.string(source_name) + " src", cwd = component_dir.file).check /* build */ progress.echo("Building CSDP for " + platform_name + " ...") val build_dir = tmp_dir + Path.basic(source_name) build_flags.find(flags => flags.platform == platform_name) match { case None => error("No build flags for platform " + quote(platform_name)) case Some(flags) => File.find_files(build_dir.file, pred = file => file.getName == "Makefile"). 
foreach(file => flags.change(File.path(file))) } progress.bash(mingw.bash_script("make"), cwd = build_dir.file, echo = verbose).check /* install */ Isabelle_System.copy_file(build_dir + Path.explode("LICENSE"), component_dir) Isabelle_System.copy_file(build_dir + Path.explode("solver/csdp").platform_exe, platform_dir) if (Platform.is_windows) { Executable.libraries_closure(platform_dir + Path.explode("csdp.exe"), mingw = mingw, filter = Set("libblas", "liblapack", "libgfortran", "libgcc_s_seh", "libquadmath", "libwinpthread")) } /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: ISABELLE_CSDP="$COMPONENT/${ISABELLE_WINDOWS_PLATFORM64:-$ISABELLE_PLATFORM64}/csdp" """) /* README */ File.write(component_dir + Path.basic("README"), """This is CSDP """ + version + """ from """ + download_url + """ Makefile flags have been changed for various platforms as follows: """ + build_flags.flatMap(_.print).mkString("\n\n") + """ The distribution has been built like this: cd src && make Only the bare "solver/csdp" program is used for Isabelle. Makarius """ + Date.Format.date(Date.now()) + "\n") }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_csdp", "build prover component from official download", Scala_Project.here, args => { var target_dir = Path.current var mingw = MinGW.none var download_url = default_download_url var verbose = false val getopts = Getopts(""" Usage: isabelle build_csdp [OPTIONS] Options are: -D DIR target directory (default ".") -M DIR msys/mingw root specification for Windows -U URL download URL (default: """" + default_download_url + """") -v verbose Build prover component from official download. """, "D:" -> (arg => target_dir = Path.explode(arg)), "M:" -> (arg => mingw = MinGW(Path.explode(arg))), "U:" -> (arg => download_url = arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress() build_csdp(download_url = download_url, verbose = verbose, progress = progress, target_dir = target_dir, mingw = mingw) }) } diff --git a/src/Pure/Admin/build_e.scala b/src/Pure/Admin/build_e.scala --- a/src/Pure/Admin/build_e.scala +++ b/src/Pure/Admin/build_e.scala @@ -1,145 +1,145 @@ /* Title: Pure/Admin/build_e.scala Author: Makarius Build Isabelle E prover component from official downloads. 
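The E build below does not hard-code configure options: it first captures the output of ./configure --help and appends --enable-ho only if higher-order support is actually advertised, so older E releases still build. A rough stand-alone sketch of such a capability probe, assuming a Unix shell, an unpacked E source tree and scala.sys.process (the helper name Configure_Probe is illustrative):

import scala.sys.process._
import java.io.File

object Configure_Probe {
  // return the extra option only if "./configure --help" mentions it;
  // !! captures stdout and fails if configure itself fails
  def probe(src_dir: File, option: String): String = {
    val help = Process(Seq("./configure", "--help"), src_dir).!!
    if (help.contains(option)) " " + option else ""
  }

  def main(args: Array[String]): Unit = {
    val build_options = probe(new File("E"), "--enable-ho")
    println("./configure" + build_options + " && make")
  }
}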
*/ package isabelle object Build_E { /* build E prover */ val default_version = "2.5" val default_download_url = "https://wwwlehre.dhbw-stuttgart.de/~sschulz/WORK/E_DOWNLOAD" def build_e( version: String = default_version, download_url: String = default_download_url, verbose: Boolean = false, progress: Progress = new Progress, target_dir: Path = Path.current): Unit = { Isabelle_System.with_tmp_dir("build")(tmp_dir => { /* component */ val component_name = "e-" + version val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(component_name)) progress.echo("Component " + component_dir) val platform_name = proper_string(Isabelle_System.getenv("ISABELLE_PLATFORM64")) .getOrElse(error("No 64bit platform")) val platform_dir = Isabelle_System.make_directory(component_dir + Path.basic(platform_name)) /* download source */ val archive_url = download_url + "/V_" + version + "/E.tgz" val archive_path = tmp_dir + Path.explode("E.tgz") - Isabelle_System.download(archive_url, archive_path, progress = progress) + Isabelle_System.download_file(archive_url, archive_path, progress = progress) Isabelle_System.bash("tar xzf " + archive_path, cwd = tmp_dir.file).check Isabelle_System.bash("tar xzf " + archive_path + " && mv E src", cwd = component_dir.file).check /* build */ progress.echo("Building E prover for " + platform_name + " ...") val build_dir = tmp_dir + Path.basic("E") val build_options = { val result = Isabelle_System.bash("./configure --help", cwd = build_dir.file) if (result.check.out.containsSlice("--enable-ho")) " --enable-ho" else "" } val build_script = "./configure" + build_options + " && make" Isabelle_System.bash(build_script, cwd = build_dir.file, progress_stdout = progress.echo_if(verbose, _), progress_stderr = progress.echo_if(verbose, _)).check /* install */ Isabelle_System.copy_file(build_dir + Path.basic("COPYING"), component_dir + Path.basic("LICENSE")) val install_files = List("epclextract", "eprover", "eprover-ho") for (name <- install_files ::: install_files.map(_ + ".exe")) { val path = build_dir + Path.basic("PROVER") + Path.basic(name) if (path.is_file) Isabelle_System.copy_file(path, platform_dir) } Isabelle_System.bash("if [ -f eprover-ho ]; then mv eprover-ho eprover; fi", cwd = platform_dir.file).check /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: E_HOME="$COMPONENT/$ISABELLE_PLATFORM64" E_VERSION=""" + quote(version) + """ """) /* README */ File.write(component_dir + Path.basic("README"), "This is E prover " + version + " from\n" + archive_url + """ The distribution has been built like this: cd src && """ + build_script + """ Only a few executables from PROVERS/ have been moved to the platform-specific Isabelle component directory: x86_64-linux etc. Makarius """ + Date.Format.date(Date.now()) + "\n") }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_e", "build prover component from source distribution", Scala_Project.here, args => { var target_dir = Path.current var version = default_version var download_url = default_download_url var verbose = false val getopts = Getopts(""" Usage: isabelle build_e [OPTIONS] Options are: -D DIR target directory (default ".") -U URL download URL (default: """" + default_download_url + """") -V VERSION version (default: """ + default_version + """) -v verbose Build prover component from the specified source distribution. 
""", "D:" -> (arg => target_dir = Path.explode(arg)), "U:" -> (arg => download_url = arg), "V:" -> (arg => version = arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress() build_e(version = version, download_url = download_url, verbose = verbose, progress = progress, target_dir = target_dir) }) } diff --git a/src/Pure/Admin/build_jcef.scala b/src/Pure/Admin/build_jcef.scala --- a/src/Pure/Admin/build_jcef.scala +++ b/src/Pure/Admin/build_jcef.scala @@ -1,174 +1,174 @@ /* Title: Pure/Admin/build_jcef.scala Author: Makarius Build Isabelle component for Java Chromium Embedded Framework (JCEF). See also: - https://github.com/jcefbuild/jcefbuild - https://github.com/chromiumembedded/java-cef */ package isabelle object Build_JCEF { /* platform information */ sealed case class JCEF_Platform(platform_name: String, archive: String) { def archive_path: Path = Path.explode(archive) def dir(component_dir: Path): Path = component_dir + Path.basic(platform_name) } val platforms: List[JCEF_Platform] = List( JCEF_Platform("x86_64-linux", "linux64.zip"), JCEF_Platform("x86_64-windows", "win64.zip"), JCEF_Platform("x86_64-darwin", "macosx64.zip")) /* build JCEF */ val default_url = "https://github.com/jcefbuild/jcefbuild/releases/download" val default_version = "v1.0.10-83.4.0+gfd6631b+chromium-83.0.4103.106" def build_jcef( base_url: String = default_url, version: String = default_version, target_dir: Path = Path.current, progress: Progress = new Progress): Unit = { /* component name */ val Version = """^([^+]+).*$""".r val component = version match { case Version(name) => "jcef-" + name case _ => error("Bad component version " + quote(version)) } val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(component)) progress.echo("Component " + component_dir) /* download and assemble platforms */ for (platform <- platforms) { Isabelle_System.with_tmp_file("archive", ext = "zip")(archive_file => { val url = base_url + "/" + Url.encode(version) + "/" + platform.archive - Isabelle_System.download(url, archive_file, progress = progress) + Isabelle_System.download_file(url, archive_file, progress = progress) Isabelle_System.bash("unzip -x " + File.bash_path(archive_file), cwd = component_dir.file).check val unzip_dir = component_dir + Path.explode("java-cef-build-bin") for { file <- File.find_files(unzip_dir.file).iterator name = file.getName if name.containsSlice("LICENSE") target_file = component_dir + Path.explode(name) if !target_file.is_file } Isabelle_System.move_file(File.path(file), target_file) val platform_dir = component_dir + Path.explode(platform.platform_name) Isabelle_System.move_file(unzip_dir + Path.explode("bin"), platform_dir) for { file <- File.find_files(platform_dir.file).iterator name = file.getName if name.endsWith(".dll") || name.endsWith(".exe") } File.set_executable(File.path(file), true) Isabelle_System.rm_tree(unzip_dir) }) } /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: if [ -d "$COMPONENT/${ISABELLE_WINDOWS_PLATFORM64:-$ISABELLE_PLATFORM64}" ] then ISABELLE_JCEF_HOME="$COMPONENT/${ISABELLE_WINDOWS_PLATFORM64:-$ISABELLE_PLATFORM64}" ISABELLE_JCEF_LIBRARY="" case "$ISABELLE_PLATFORM_FAMILY" in linux) classpath "$ISABELLE_JCEF_HOME/"*.jar ISABELLE_JCEF_LIB="$ISABELLE_JCEF_HOME/lib/linux64" ISABELLE_JCEF_LIBRARY="$ISABELLE_JCEF_LIB/libcef.so" 
export LD_LIBRARY_PATH="$ISABELLE_JCEF_LIB:$LD_LIBRARY_PATH" ;; windows) classpath "$ISABELLE_JCEF_HOME/"*.jar ISABELLE_JCEF_LIB="$ISABELLE_JCEF_HOME/lib/win64" export PATH="$ISABELLE_JCEF_LIB:$PATH" ;; macos) classpath "$ISABELLE_JCEF_HOME/jcef_app.app/Contents/Java/"*.jar ISABELLE_JCEF_LIB="$ISABELLE_JCEF_HOME/jcef_app.app/Contents/Frameworks/Chromium Embedded Framework.framework/Libraries" export JAVA_LIBRARY_PATH="$ISABELLE_JCEF_HOME/jcef_app.app/Contents/Java:$ISABELLE_JCEF_LIB:$JAVA_LIBRARY_PATH" ;; esac fi """) /* README */ File.write(component_dir + Path.basic("README"), """This distribution of Java Chromium Embedded Framework (JCEF) has been assembled from the binary builds from """ + base_url + """ Example invocations: * command-line isabelle env bash -c 'isabelle java -Djava.library.path="$(platform_path "$ISABELLE_JCEF_LIB")" tests.detailed.MainFrame' * Scala REPL (e.g. Isabelle/jEdit Console) import isabelle._ System.setProperty("java.library.path", File.platform_path(Path.explode("$ISABELLE_JCEF_LIB"))) org.cef.CefApp.startup(Array()) GUI_Thread.later { val frame = new tests.detailed.MainFrame(false, false, false, Array()); frame.setSize(1200,900); frame.setVisible(true) } Makarius """ + Date.Format.date(Date.now()) + "\n") } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_jcef", "build component for Java Chromium Embedded Framework", Scala_Project.here, args => { var target_dir = Path.current var base_url = default_url var version = default_version val getopts = Getopts(""" Usage: isabelle build_jcef [OPTIONS] Options are: -D DIR target directory (default ".") -U URL download URL (default: """" + default_url + """") -V VERSION version (default: """" + default_version + """") Build component for Java Chromium Embedded Framework. """, "D:" -> (arg => target_dir = Path.explode(arg)), "U:" -> (arg => base_url = arg), "V:" -> (arg => version = arg)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress() build_jcef(base_url = base_url, version = version, target_dir = target_dir, progress = progress) }) } diff --git a/src/Pure/Admin/build_spass.scala b/src/Pure/Admin/build_spass.scala --- a/src/Pure/Admin/build_spass.scala +++ b/src/Pure/Admin/build_spass.scala @@ -1,180 +1,180 @@ /* Title: Pure/Admin/build_spass.scala Author: Makarius Build Isabelle SPASS component from unofficial download.
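For the Windows/Cygwin build, the SPASS sources below are patched in place before make runs: the execinfo.h include is removed and the body of misc_DumpCore is replaced by a bare abort(). A small self-contained sketch of that textual patch, assuming only the Scala standard library (the sample snippet is abbreviated, not the real misc.c):

object Spass_Source_Patch {
  // roughly the two rewrites that File.change applies to misc.c below
  def patch(source: String): String =
    source
      .replace("#include \"execinfo.h\"", "")
      .replaceAll("""void misc_DumpCore\(void\)[^}]+}""",
        "void misc_DumpCore(void) { abort(); }")

  def main(args: Array[String]): Unit = {
    val sample =
      "#include \"execinfo.h\"\n" +
      "void misc_DumpCore(void)\n{\n  backtrace_symbols_fd(0, 0, 2);\n  abort();\n}\n"
    println(patch(sample))
  }
}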
*/ package isabelle object Build_SPASS { /* build SPASS */ val default_download_url = "https://www.cs.vu.nl/~jbe248/spass-3.8ds-src.tar.gz" val standard_version = "3.8ds" def build_spass( download_url: String = default_download_url, verbose: Boolean = false, progress: Progress = new Progress, target_dir: Path = Path.current): Unit = { Isabelle_System.with_tmp_dir("build")(tmp_dir => { Isabelle_System.require_command("bison", "flex") /* component */ val Archive_Name = """^.*?([^/]+)$""".r val Component_Name = """^(.+)-src\.tar.gz$""".r val Version = """^[^-]+-([^-]+)$""".r val (archive_name, archive_base_name) = download_url match { case Archive_Name(name) => (name, Library.perhaps_unsuffix(".tar.gz", name)) case _ => error("Failed to determine source archive name from " + quote(download_url)) } val component_name = archive_name match { case Component_Name(name) => name case _ => error("Failed to determine component name from " + quote(archive_name)) } val version = component_name match { case Version(version) => version case _ => error("Failed to determine component version from " + quote(component_name)) } if (version != standard_version) { progress.echo_warning("Odd SPASS version " + version + " (expected " + standard_version + ")") } val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(component_name)) progress.echo("Component " + component_dir) val platform_name = proper_string(Isabelle_System.getenv("ISABELLE_PLATFORM64")) .getOrElse(error("No 64bit platform")) val platform_dir = Isabelle_System.make_directory(component_dir + Path.basic(platform_name)) /* download source */ val archive_path = tmp_dir + Path.basic(archive_name) - Isabelle_System.download(download_url, archive_path, progress = progress) + Isabelle_System.download_file(download_url, archive_path, progress = progress) Isabelle_System.bash("tar xzf " + archive_path, cwd = tmp_dir.file).check Isabelle_System.bash( "tar xzf " + archive_path + " && mv " + Bash.string(archive_base_name) + " src", cwd = component_dir.file).check /* build */ progress.echo("Building SPASS for " + platform_name + " ...") val build_dir = tmp_dir + Path.basic(archive_base_name) if (Platform.is_windows) { File.change(build_dir + Path.basic("misc.c"), _.replace("""#include "execinfo.h" """, "") .replaceAll("""void misc_DumpCore\(void\)[^}]+}""", "void misc_DumpCore(void) { abort(); }")) } Isabelle_System.bash("make", cwd = build_dir.file, progress_stdout = progress.echo_if(verbose, _), progress_stderr = progress.echo_if(verbose, _)).check /* install */ Isabelle_System.copy_file(build_dir + Path.basic("LICENCE"), component_dir + Path.basic("LICENSE")) val install_files = List("SPASS") for (name <- install_files ::: install_files.map(_ + ".exe")) { val path = build_dir + Path.basic(name) if (path.is_file) Isabelle_System.copy_file(path, platform_dir) } /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: SPASS_HOME="$COMPONENT/$ISABELLE_PLATFORM64" SPASS_VERSION=""" + quote(version) + """ """) /* README */ File.write(component_dir + Path.basic("README"), """This distribution of SPASS 3.8ds, described in Blanchette, Popescu, Wand, and Weidenbach's ITP 2012 paper "More SPASS with Isabelle", has been compiled from sources available at """ + download_url + """ via "make". 
The Windows/Cygwin compilation required commenting out the line #include "execinfo.h" in "misc.c" as well as most of the body of the "misc_DumpCore" function. The latest official SPASS sources can be downloaded from http://www.spass-prover.org/. Be aware, however, that the official SPASS releases are not compatible with Isabelle. Viel SPASS! Jasmin Blanchette 16-May-2018 Makarius """ + Date.Format.date(Date.now()) + "\n") }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_spass", "build prover component from source distribution", Scala_Project.here, args => { var target_dir = Path.current var download_url = default_download_url var verbose = false val getopts = Getopts(""" Usage: isabelle build_spass [OPTIONS] Options are: -D DIR target directory (default ".") -U URL download URL (default: """" + default_download_url + """") -v verbose Build prover component from the specified source distribution. """, "D:" -> (arg => target_dir = Path.explode(arg)), "U:" -> (arg => download_url = arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress() build_spass(download_url = download_url, verbose = verbose, progress = progress, target_dir = target_dir) }) } diff --git a/src/Pure/Admin/build_sqlite.scala b/src/Pure/Admin/build_sqlite.scala --- a/src/Pure/Admin/build_sqlite.scala +++ b/src/Pure/Admin/build_sqlite.scala @@ -1,115 +1,115 @@ /* Title: Pure/Admin/build_sqlite.scala Author: Makarius Build Isabelle sqlite-jdbc component from official download. */ package isabelle object Build_SQLite { /* build sqlite */ def build_sqlite( download_url: String, progress: Progress = new Progress, target_dir: Path = Path.current): Unit = { val Download_Name = """^.*/([^/]+)\.jar""".r val download_name = download_url match { case Download_Name(download_name) => download_name case _ => error("Malformed jar download URL: " + quote(download_url)) } /* component */ val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(download_name)) progress.echo("Component " + component_dir) /* README */ File.write(component_dir + Path.basic("README"), "This is " + download_name + " from\n" + download_url + "\n\n Makarius\n " + Date.Format.date(Date.now()) + "\n") /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: ISABELLE_SQLITE_HOME="$COMPONENT" classpath "$ISABELLE_SQLITE_HOME/""" + download_name + """.jar" """) /* jar */ val jar = component_dir + Path.basic(download_name).ext("jar") - Isabelle_System.download(download_url, jar, progress = progress) + Isabelle_System.download_file(download_url, jar, progress = progress) Isabelle_System.with_tmp_dir("sqlite")(jar_dir => { progress.echo("Unpacking " + jar) Isabelle_System.bash("isabelle_jdk jar xf " + File.bash_path(jar.absolute), cwd = jar_dir.file).check val jar_files = List( "META-INF/maven/org.xerial/sqlite-jdbc/LICENSE" -> ".", "META-INF/maven/org.xerial/sqlite-jdbc/LICENSE.zentus" -> ".", "org/sqlite/native/Linux/aarch64/libsqlitejdbc.so" -> "arm64-linux", "org/sqlite/native/Linux/x86_64/libsqlitejdbc.so" -> "x86_64-linux", "org/sqlite/native/Mac/aarch64/libsqlitejdbc.jnilib" -> "arm64-darwin", "org/sqlite/native/Mac/x86_64/libsqlitejdbc.jnilib" -> "x86_64-darwin", "org/sqlite/native/Windows/x86_64/sqlitejdbc.dll" -> "x86_64-windows") for ((file, dir) <- jar_files) { val target = 
Isabelle_System.make_directory(component_dir + Path.explode(dir)) Isabelle_System.copy_file(jar_dir + Path.explode(file), target) } File.set_executable(component_dir + Path.explode("x86_64-windows/sqlitejdbc.dll"), true) }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_sqlite", "build Isabelle sqlite-jdbc component from official download", Scala_Project.here, args => { var target_dir = Path.current val getopts = Getopts(""" Usage: isabelle build_sqlite [OPTIONS] DOWNLOAD Options are: -D DIR target directory (default ".") Build sqlite-jdbc component from the specified download URL (JAR), see also https://github.com/xerial/sqlite-jdbc and https://oss.sonatype.org/content/repositories/releases/org/xerial/sqlite-jdbc """, "D:" -> (arg => target_dir = Path.explode(arg))) val more_args = getopts(args) val download_url = more_args match { case List(download_url) => download_url case _ => getopts.usage() } val progress = new Console_Progress() build_sqlite(download_url, progress = progress, target_dir = target_dir) }) } diff --git a/src/Pure/Admin/build_verit.scala b/src/Pure/Admin/build_verit.scala --- a/src/Pure/Admin/build_verit.scala +++ b/src/Pure/Admin/build_verit.scala @@ -1,159 +1,159 @@ /* Title: Pure/Admin/build_verit.scala Author: Makarius Build Isabelle veriT component from official download. */ package isabelle object Build_VeriT { val default_download_url = "https://verit.loria.fr/rmx/2020.10/verit-2020.10-rmx.tar.gz" /* build veriT */ def build_verit( download_url: String = default_download_url, verbose: Boolean = false, progress: Progress = new Progress, target_dir: Path = Path.current, mingw: MinGW = MinGW.none): Unit = { mingw.check Isabelle_System.with_tmp_dir("build")(tmp_dir => { /* component */ val Archive_Name = """^.*?([^/]+)$""".r val Version = """^[^-]+-(.+)\.tar.gz$""".r val archive_name = download_url match { case Archive_Name(name) => name case _ => error("Failed to determine source archive name from " + quote(download_url)) } val version = archive_name match { case Version(version) => version case _ => error("Failed to determine component version from " + quote(archive_name)) } val component_name = "verit-" + version val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(component_name)) progress.echo("Component " + component_dir) /* platform */ val platform_name = proper_string(Isabelle_System.getenv("ISABELLE_WINDOWS_PLATFORM64")) orElse proper_string(Isabelle_System.getenv("ISABELLE_PLATFORM64")) getOrElse error("No 64bit platform") val platform_dir = Isabelle_System.make_directory(component_dir + Path.basic(platform_name)) /* download source */ val archive_path = tmp_dir + Path.basic(archive_name) - Isabelle_System.download(download_url, archive_path, progress = progress) + Isabelle_System.download_file(download_url, archive_path, progress = progress) Isabelle_System.bash("tar xzf " + File.bash_path(archive_path), cwd = tmp_dir.file).check val source_name = File.get_dir(tmp_dir) Isabelle_System.bash( "tar xzf " + archive_path + " && mv " + Bash.string(source_name) + " src", cwd = component_dir.file).check /* build */ progress.echo("Building veriT for " + platform_name + " ...") val configure_options = if (Platform.is_linux) "LDFLAGS=-Wl,-rpath,_DUMMY_" else "" val build_dir = tmp_dir + Path.basic(source_name) progress.bash(mingw.bash_script("set -e\n./configure " + configure_options + "\nmake"), cwd = build_dir.file, echo = verbose).check /* install */ Isabelle_System.copy_file(build_dir + Path.explode("LICENSE"),
component_dir) val exe_path = Path.basic("veriT").platform_exe Isabelle_System.copy_file(build_dir + exe_path, platform_dir) Executable.libraries_closure(platform_dir + exe_path, filter = Set("libgmp"), mingw = mingw) /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: ISABELLE_VERIT="$COMPONENT/${ISABELLE_WINDOWS_PLATFORM64:-$ISABELLE_PLATFORM64}/veriT" """) /* README */ File.write(component_dir + Path.basic("README"), """This is veriT """ + version + """ from """ + download_url + """ It has been built from sources like this: cd src ./configure make Makarius """ + Date.Format.date(Date.now()) + "\n") }) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_verit", "build prover component from official download", Scala_Project.here, args => { var target_dir = Path.current var mingw = MinGW.none var download_url = default_download_url var verbose = false val getopts = Getopts(""" Usage: isabelle build_verit [OPTIONS] Options are: -D DIR target directory (default ".") -M DIR msys/mingw root specification for Windows -U URL download URL (default: """" + default_download_url + """") -v verbose Build prover component from official download. """, "D:" -> (arg => target_dir = Path.explode(arg)), "M:" -> (arg => mingw = MinGW(Path.explode(arg))), "U:" -> (arg => download_url = arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress() build_verit(download_url = download_url, verbose = verbose, progress = progress, target_dir = target_dir, mingw = mingw) }) } diff --git a/src/Pure/Admin/components.scala b/src/Pure/Admin/components.scala --- a/src/Pure/Admin/components.scala +++ b/src/Pure/Admin/components.scala @@ -1,353 +1,353 @@ /* Title: Pure/Admin/components.scala Author: Makarius Isabelle system components. 
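All component archives handled below share one naming convention, name + ".tar.gz", which Components.Archive encodes as a small apply/unapply pair. A brief usage sketch, assuming the Isabelle/Scala classes (Pure.jar) on the classpath; the component names are made up for illustration:

import isabelle._

object Archive_Names {
  def main(args: Array[String]): Unit = {
    println(Components.Archive("polyml-5.8.2"))            // polyml-5.8.2.tar.gz
    println(Components.Archive.get_name("jdk-15.tar.gz"))  // jdk-15

    // unapply rejects anything without the expected suffix
    "something.zip" match {
      case Components.Archive(name) => println("component " + name)
      case _ => println("not a component archive")
    }
  }
}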
*/ package isabelle import java.io.{File => JFile} object Components { /* archive name */ object Archive { val suffix: String = ".tar.gz" def apply(name: String): String = if (name == "") error("Bad component name: " + quote(name)) else name + suffix def unapply(archive: String): Option[String] = { for { name0 <- Library.try_unsuffix(suffix, archive) name <- proper_string(name0) } yield name } def get_name(archive: String): String = unapply(archive) getOrElse error("Bad component archive name (expecting .tar.gz): " + quote(archive)) } /* component collections */ def default_component_repository: String = Isabelle_System.getenv("ISABELLE_COMPONENT_REPOSITORY") val default_components_base: Path = Path.explode("$ISABELLE_COMPONENTS_BASE") def admin(dir: Path): Path = dir + Path.explode("Admin/components") def contrib(dir: Path = Path.current, name: String = ""): Path = dir + Path.explode("contrib") + Path.explode(name) def unpack(dir: Path, archive: Path, progress: Progress = new Progress): String = { val name = Archive.get_name(archive.file_name) progress.echo("Unpacking " + name) Isabelle_System.gnutar("-xzf " + File.bash_path(archive), dir = dir).check name } def resolve(base_dir: Path, names: List[String], target_dir: Option[Path] = None, copy_dir: Option[Path] = None, progress: Progress = new Progress): Unit = { Isabelle_System.make_directory(base_dir) for (name <- names) { val archive_name = Archive(name) val archive = base_dir + Path.explode(archive_name) if (!archive.is_file) { val remote = Components.default_component_repository + "/" + archive_name - Isabelle_System.download(remote, archive, progress = progress) + Isabelle_System.download_file(remote, archive, progress = progress) } for (dir <- copy_dir) { Isabelle_System.make_directory(dir) Isabelle_System.copy_file(archive, dir) } unpack(target_dir getOrElse base_dir, archive, progress = progress) } } def purge(dir: Path, platform: Platform.Family.Value): Unit = { def purge_platforms(platforms: String*): Set[String] = platforms.flatMap(name => List("arm64-" + name, "x86-" + name, "x86_64_32-" + name, "x86_64-" + name)).toSet + "ppc-darwin" + "arm64-linux" val purge_set = platform match { case Platform.Family.linux => purge_platforms("darwin", "cygwin", "windows") case Platform.Family.macos => purge_platforms("linux", "cygwin", "windows") case Platform.Family.windows => purge_platforms("linux", "darwin") } File.find_files(dir.file, (file: JFile) => file.isDirectory && purge_set(file.getName), include_dirs = true).foreach(Isabelle_System.rm_tree) } /* component directory content */ def settings(dir: Path = Path.current): Path = dir + Path.explode("etc/settings") def components(dir: Path = Path.current): Path = dir + Path.explode("etc/components") def check_dir(dir: Path): Boolean = settings(dir).is_file || components(dir).is_file def read_components(dir: Path): List[String] = split_lines(File.read(components(dir))).filter(_.nonEmpty) def write_components(dir: Path, lines: List[String]): Unit = File.write(components(dir), terminate_lines(lines)) /* component repository content */ val components_sha1: Path = Path.explode("~~/Admin/components/components.sha1") sealed case class SHA1_Digest(sha1: String, file_name: String) { override def toString: String = sha1 + " " + file_name } def read_components_sha1(lines: List[String] = Nil): List[SHA1_Digest] = (proper_list(lines) getOrElse split_lines(File.read(components_sha1))).flatMap(line => Word.explode(line) match { case Nil => None case List(sha1, name) => Some(SHA1_Digest(sha1, name)) 
case _ => error("Bad components.sha1 entry: " + quote(line)) }) def write_components_sha1(entries: List[SHA1_Digest]): Unit = File.write(components_sha1, entries.sortBy(_.file_name).mkString("", "\n", "\n")) /** manage user components **/ val components_path = Path.explode("$ISABELLE_HOME_USER/etc/components") def read_components(): List[String] = if (components_path.is_file) Library.trim_split_lines(File.read(components_path)) else Nil def write_components(lines: List[String]): Unit = { Isabelle_System.make_directory(components_path.dir) File.write(components_path, Library.terminate_lines(lines)) } def update_components(add: Boolean, path0: Path, progress: Progress = new Progress): Unit = { val path = path0.expand.absolute if (!(path + Path.explode("etc/settings")).is_file && !(path + Path.explode("etc/components")).is_file) error("Bad component directory: " + path) val lines1 = read_components() val lines2 = lines1.filter(line => line.isEmpty || line.startsWith("#") || !File.eq(Path.explode(line), path)) val lines3 = if (add) lines2 ::: List(path.implode) else lines2 if (lines1 != lines3) write_components(lines3) val prefix = if (lines1 == lines3) "Unchanged" else if (add) "Added" else "Removed" progress.echo(prefix + " component " + path) } /* main entry point */ def main(args: Array[String]): Unit = { Command_Line.tool { for (arg <- args) { val add = if (arg.startsWith("+")) true else if (arg.startsWith("-")) false else error("Bad argument: " + quote(arg)) val path = Path.explode(arg.substring(1)) update_components(add, path, progress = new Console_Progress) } } } /** build and publish components **/ def build_components( options: Options, components: List[Path], progress: Progress = new Progress, publish: Boolean = false, force: Boolean = false, update_components_sha1: Boolean = false): Unit = { val archives: List[Path] = for (path <- components) yield { path.file_name match { case Archive(_) => path case name => if (!path.is_dir) error("Bad component directory: " + path) else if (!check_dir(path)) { error("Malformed component directory: " + path + "\n (requires " + settings() + " or " + Components.components() + ")") } else { val component_path = path.expand val archive_dir = component_path.dir val archive_name = Archive(name) val archive = archive_dir + Path.explode(archive_name) if (archive.is_file && !force) { error("Component archive already exists: " + archive) } progress.echo("Packaging " + archive_name) Isabelle_System.gnutar("-czf " + File.bash_path(archive) + " " + Bash.string(name), dir = archive_dir).check archive } } } if ((publish && archives.nonEmpty) || update_components_sha1) { options.string("isabelle_components_server") match { case SSH.Target(user, host) => using(SSH.open_session(options, host = host, user = user))(ssh => { val components_dir = Path.explode(options.string("isabelle_components_dir")) val contrib_dir = Path.explode(options.string("isabelle_components_contrib_dir")) for (dir <- List(components_dir, contrib_dir) if !ssh.is_dir(dir)) { error("Bad remote directory: " + dir) } if (publish) { for (archive <- archives) { val archive_name = archive.file_name val name = Archive.get_name(archive_name) val remote_component = components_dir + archive.base val remote_contrib = contrib_dir + Path.explode(name) // component archive if (ssh.is_file(remote_component) && !force) { error("Remote component archive already exists: " + remote_component) } progress.echo("Uploading " + archive_name) ssh.write_file(remote_component, archive) // contrib directory val 
is_standard_component = Isabelle_System.with_tmp_dir("component")(tmp_dir => { Isabelle_System.gnutar("-xzf " + File.bash_path(archive), dir = tmp_dir).check check_dir(tmp_dir + Path.explode(name)) }) if (is_standard_component) { if (ssh.is_dir(remote_contrib)) { if (force) ssh.rm_tree(remote_contrib) else error("Remote component directory already exists: " + remote_contrib) } progress.echo("Unpacking remote " + archive_name) ssh.execute("tar -C " + ssh.bash_path(contrib_dir) + " -xzf " + ssh.bash_path(remote_component)).check } else { progress.echo_warning("No unpacking of non-standard component: " + archive_name) } } } // remote SHA1 digests if (update_components_sha1) { val lines = for { entry <- ssh.read_dir(components_dir) if entry.is_file && entry.name.endsWith(Archive.suffix) } yield { progress.echo("Digesting remote " + entry.name) ssh.execute("cd " + ssh.bash_path(components_dir) + "; sha1sum " + Bash.string(entry.name)).check.out } write_components_sha1(read_components_sha1(lines)) } }) case s => error("Bad isabelle_components_server: " + quote(s)) } } // local SHA1 digests { val new_entries = for (archive <- archives) yield { val file_name = archive.file_name progress.echo("Digesting local " + file_name) val sha1 = SHA1.digest(archive).rep SHA1_Digest(sha1, file_name) } val new_names = new_entries.map(_.file_name).toSet write_components_sha1( new_entries ::: read_components_sha1().filterNot(entry => new_names.contains(entry.file_name))) } } /* Isabelle tool wrapper */ private val relevant_options = List("isabelle_components_server", "isabelle_components_dir", "isabelle_components_contrib_dir") val isabelle_tool = Isabelle_Tool("build_components", "build and publish Isabelle components", Scala_Project.here, args => { var publish = false var update_components_sha1 = false var force = false var options = Options.init() def show_options: String = cat_lines(relevant_options.map(name => options.options(name).print)) val getopts = Getopts(""" Usage: isabelle build_components [OPTIONS] ARCHIVES... DIRS... Options are: -P publish on SSH server (see options below) -f force: overwrite existing component archives and directories -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -u update all SHA1 keys in Isabelle repository Admin/components Build and publish Isabelle components as .tar.gz archives on SSH server, depending on system options: """ + Library.prefix_lines(" ", show_options) + "\n", "P" -> (_ => publish = true), "f" -> (_ => force = true), "o:" -> (arg => options = options + arg), "u" -> (_ => update_components_sha1 = true)) val more_args = getopts(args) if (more_args.isEmpty && !update_components_sha1) getopts.usage() val progress = new Console_Progress build_components(options, more_args.map(Path.explode), progress = progress, publish = publish, force = force, update_components_sha1 = update_components_sha1) }) } diff --git a/src/Pure/General/bytes.scala b/src/Pure/General/bytes.scala --- a/src/Pure/General/bytes.scala +++ b/src/Pure/General/bytes.scala @@ -1,215 +1,214 @@ /* Title: Pure/General/bytes.scala Author: Makarius Immutable byte vectors versus UTF8 strings. 
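Bytes below is an immutable byte vector that also acts as a CharSequence of raw byte values: text decodes it permissively as UTF-8, and maybe_base64 switches to base64 exactly when the content does not survive that text round trip. A small usage sketch, assuming the Isabelle/Scala classes on the classpath:

import isabelle._

object Bytes_Demo {
  def main(args: Array[String]): Unit = {
    val a = Bytes("theory Scratch")
    val b = Bytes(" imports Main\r\n")
    println((a + b).trim_line.text)   // theory Scratch imports Main

    // plain text round-trips, so it is kept as text
    println(a.maybe_base64._1)        // false

    // arbitrary binary data does not round-trip and is base64-encoded instead
    println(Bytes(Array[Byte](0x00, 0x10, -1, -2)).maybe_base64._1)  // true
  }
}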
*/ package isabelle import java.io.{File => JFile, ByteArrayOutputStream, ByteArrayInputStream, OutputStream, InputStream, FileInputStream, FileOutputStream} import java.net.URL import java.util.Base64 import org.tukaani.xz.{XZInputStream, XZOutputStream} object Bytes { val empty: Bytes = new Bytes(Array[Byte](), 0, 0) def apply(s: CharSequence): Bytes = { val str = s.toString if (str.isEmpty) empty else { val b = UTF8.bytes(str) new Bytes(b, 0, b.length) } } def apply(a: Array[Byte]): Bytes = apply(a, 0, a.length) def apply(a: Array[Byte], offset: Int, length: Int): Bytes = if (length == 0) empty else { val b = new Array[Byte](length) System.arraycopy(a, offset, b, 0, length) new Bytes(b, 0, b.length) } val newline: Bytes = apply("\n") def base64(s: String): Bytes = { val a = Base64.getDecoder.decode(s) new Bytes(a, 0, a.length) } /* read */ def read_stream(stream: InputStream, limit: Int = Integer.MAX_VALUE, hint: Int = 1024): Bytes = if (limit == 0) empty else { val out_size = (if (limit == Integer.MAX_VALUE) hint else limit) max 1024 val out = new ByteArrayOutputStream(out_size) val buf = new Array[Byte](8192) var m = 0 do { m = stream.read(buf, 0, buf.length min (limit - out.size)) if (m != -1) out.write(buf, 0, m) } while (m != -1 && limit > out.size) new Bytes(out.toByteArray, 0, out.size) } def read(file: JFile): Bytes = { val length = file.length val limit = if (length < 0 || length > Integer.MAX_VALUE) Integer.MAX_VALUE else length.toInt using(new FileInputStream(file))(read_stream(_, limit = limit)) } def read(path: Path): Bytes = read(path.file) def read(url: URL): Bytes = using(url.openStream)(read_stream(_)) /* write */ def write(file: JFile, bytes: Bytes): Unit = using(new FileOutputStream(file))(bytes.write_stream(_)) def write(path: Path, bytes: Bytes): Unit = write(path.file, bytes) } final class Bytes private( protected val bytes: Array[Byte], protected val offset: Int, val length: Int) extends CharSequence { /* equality */ override def equals(that: Any): Boolean = { that match { case other: Bytes => if (this eq other) true else if (length != other.length) false else (0 until length).forall(i => bytes(offset + i) == other.bytes(other.offset + i)) case _ => false } } private lazy val hash: Int = { var h = 0 for (i <- offset until offset + length) { val b = bytes(i).asInstanceOf[Int] & 0xFF h = 31 * h + b } h } override def hashCode(): Int = hash /* content */ lazy val sha1_digest: SHA1.Digest = SHA1.digest(bytes) def is_empty: Boolean = length == 0 def iterator: Iterator[Byte] = for (i <- (offset until (offset + length)).iterator) yield bytes(i) def array: Array[Byte] = { val a = new Array[Byte](length) System.arraycopy(bytes, offset, a, 0, length) a } - def text: String = - UTF8.decode_chars(s => s, bytes, offset, offset + length).toString + def text: String = UTF8.decode_permissive(this) def base64: String = { val b = if (offset == 0 && length == bytes.length) bytes else Bytes(bytes, offset, length).bytes Base64.getEncoder.encodeToString(b) } def maybe_base64: (Boolean, String) = { val s = text if (this == Bytes(s)) (false, s) else (true, base64) } override def toString: String = "Bytes(" + length + ")" def proper: Option[Bytes] = if (is_empty) None else Some(this) def proper_text: Option[String] = if (is_empty) None else Some(text) def +(other: Bytes): Bytes = if (other.is_empty) this else if (is_empty) other else { val new_bytes = new Array[Byte](length + other.length) System.arraycopy(bytes, offset, new_bytes, 0, length) System.arraycopy(other.bytes, other.offset, 
new_bytes, length, other.length) new Bytes(new_bytes, 0, new_bytes.length) } /* CharSequence operations */ def charAt(i: Int): Char = if (0 <= i && i < length) (bytes(offset + i).asInstanceOf[Int] & 0xFF).asInstanceOf[Char] else throw new IndexOutOfBoundsException def subSequence(i: Int, j: Int): Bytes = { if (0 <= i && i <= j && j <= length) new Bytes(bytes, offset + i, j - i) else throw new IndexOutOfBoundsException } def trim_line: Bytes = if (length >= 2 && charAt(length - 2) == 13 && charAt(length - 1) == 10) subSequence(0, length - 2) else if (length >= 1 && (charAt(length - 1) == 13 || charAt(length - 1) == 10)) subSequence(0, length - 1) else this /* streams */ def stream(): ByteArrayInputStream = new ByteArrayInputStream(bytes, offset, length) def write_stream(stream: OutputStream): Unit = stream.write(bytes, offset, length) /* XZ data compression */ def uncompress(cache: XZ.Cache = XZ.Cache()): Bytes = using(new XZInputStream(stream(), cache))(Bytes.read_stream(_, hint = length)) def compress(options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.Cache()): Bytes = { val result = new ByteArrayOutputStream(length) using(new XZOutputStream(result, options, cache))(write_stream(_)) new Bytes(result.toByteArray, 0, result.size) } def maybe_compress(options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.Cache()) : (Boolean, Bytes) = { val compressed = compress(options = options, cache = cache) if (compressed.length < length) (true, compressed) else (false, this) } } diff --git a/src/Pure/General/output_primitives.ML b/src/Pure/General/output_primitives.ML --- a/src/Pure/General/output_primitives.ML +++ b/src/Pure/General/output_primitives.ML @@ -1,81 +1,81 @@ (* Title: Pure/General/output_primitives.ML Author: Makarius Primitives for Isabelle output channels. 
*) signature OUTPUT_PRIMITIVES = sig structure XML: sig type attributes = (string * string) list datatype tree = Elem of (string * attributes) * tree list | Text of string type body = tree list end type output = string type serial = int type properties = (string * string) list val ignore_outputs: output list -> unit val writeln_fn: output list -> unit val state_fn: output list -> unit val information_fn: output list -> unit val tracing_fn: output list -> unit val warning_fn: output list -> unit val legacy_fn: output list -> unit val error_message_fn: serial * output list -> unit val system_message_fn: output list -> unit val status_fn: output list -> unit val report_fn: output list -> unit val result_fn: properties -> output list -> unit - type protocol_message_fn = properties -> XML.body -> unit + type protocol_message_fn = properties -> XML.body list -> unit val protocol_message_fn: protocol_message_fn val markup_fn: string * properties -> output * output end; structure Output_Primitives: OUTPUT_PRIMITIVES = struct (** XML trees **) structure XML = struct type attributes = (string * string) list; datatype tree = Elem of (string * attributes) * tree list | Text of string; type body = tree list; end; (* output *) type output = string; type serial = int; type properties = (string * string) list; fun ignore_outputs (_: output list) = (); val writeln_fn = ignore_outputs; val state_fn = ignore_outputs; val information_fn = ignore_outputs; val tracing_fn = ignore_outputs; val warning_fn = ignore_outputs; val legacy_fn = ignore_outputs; fun error_message_fn (_: serial, _: output list) = (); val system_message_fn = ignore_outputs; val status_fn = ignore_outputs; val report_fn = ignore_outputs; fun result_fn (_: properties) = ignore_outputs; -type protocol_message_fn = properties -> XML.body -> unit; +type protocol_message_fn = properties -> XML.body list -> unit; val protocol_message_fn: protocol_message_fn = fn _ => fn _ => (); fun markup_fn (_: string * properties) = ("", ""); end; diff --git a/src/Pure/General/utf8.scala b/src/Pure/General/utf8.scala --- a/src/Pure/General/utf8.scala +++ b/src/Pure/General/utf8.scala @@ -1,90 +1,72 @@ /* Title: Pure/General/utf8.scala Author: Makarius Variations on UTF-8. 
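decode_permissive below consumes a CharSequence whose chars carry raw byte values (this is how Bytes.text uses it) and never throws: well-formed UTF-8 decodes as usual, overlong encodings smuggle low-ASCII characters through, and malformed input degrades to U+FFFD or is dropped. A small demo of that behaviour, assuming the Isabelle/Scala classes on the classpath:

import isabelle._

object Permissive_Decode_Demo {
  def main(args: Array[String]): Unit = {
    // well-formed UTF-8, fed in as raw bytes via Bytes
    println(UTF8.decode_permissive(Bytes("Gödel")))           // Gödel

    // a stray continuation byte becomes U+FFFD instead of raising an error
    val broken = Bytes(Array[Byte](0x41, 0x80.toByte, 0x42))
    println(UTF8.decode_permissive(broken))                    // "A\uFFFDB"

    // the overlong two-byte form C0 8A decodes to the low-ASCII newline
    val stuffed = Bytes(Array[Byte](0xC0.toByte, 0x8A.toByte))
    println(UTF8.decode_permissive(stuffed) == "\n")           // true
  }
}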
*/ package isabelle import java.nio.charset.Charset import scala.io.Codec object UTF8 { /* charset */ val charset_name: String = "UTF-8" val charset: Charset = Charset.forName(charset_name) def codec(): Codec = Codec(charset) def bytes(s: String): Array[Byte] = s.getBytes(charset) /* permissive UTF-8 decoding */ // see also https://en.wikipedia.org/wiki/UTF-8#Description // overlong encodings enable byte-stuffing of low-ASCII def decode_permissive(text: CharSequence): String = { - val buf = new java.lang.StringBuilder(text.length) + val len = text.length + val buf = new java.lang.StringBuilder(len) var code = -1 var rest = 0 def flush(): Unit = { if (code != -1) { if (rest == 0 && Character.isValidCodePoint(code)) buf.appendCodePoint(code) else buf.append('\uFFFD') code = -1 rest = 0 } } def init(x: Int, n: Int): Unit = { flush() code = x rest = n } def push(x: Int): Unit = { if (rest <= 0) init(x, -1) else { code <<= 6 code += x rest -= 1 } } - for (i <- 0 until text.length) { + for (i <- 0 until len) { val c = text.charAt(i) if (c < 128) { flush(); buf.append(c) } else if ((c & 0xC0) == 0x80) push(c & 0x3F) else if ((c & 0xE0) == 0xC0) init(c & 0x1F, 1) else if ((c & 0xF0) == 0xE0) init(c & 0x0F, 2) else if ((c & 0xF8) == 0xF0) init(c & 0x07, 3) } flush() buf.toString } - - private class Decode_Chars(decode: String => String, - buffer: Array[Byte], start: Int, end: Int) extends CharSequence - { - def length: Int = end - start - def charAt(i: Int): Char = (buffer(start + i).asInstanceOf[Int] & 0xFF).asInstanceOf[Char] - def subSequence(i: Int, j: Int): CharSequence = - new Decode_Chars(decode, buffer, start + i, start + j) - - // toString with adhoc decoding: abuse of CharSequence interface - override def toString: String = decode(decode_permissive(this)) - } - - def decode_chars(decode: String => String, - buffer: Array[Byte], start: Int, end: Int): CharSequence = - { - require(0 <= start && start <= end && end <= buffer.length, "bad decode range") - new Decode_Chars(decode, buffer, start, end) - } } diff --git a/src/Pure/PIDE/byte_message.ML b/src/Pure/PIDE/byte_message.ML --- a/src/Pure/PIDE/byte_message.ML +++ b/src/Pure/PIDE/byte_message.ML @@ -1,107 +1,116 @@ (* Title: Pure/General/byte_message.ML Author: Makarius Byte-oriented messages. 
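Byte_Message below frames multi-chunk messages very simply: one header line holding the comma-separated byte lengths of the chunks, followed by the chunk contents back to back (write_message_yxml merely streams the chunks as YXML instead of materialising them first). A self-contained Scala sketch of that wire format; the object name and the sample chunks are illustrative, the authoritative encoding is the ML code below:

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets.UTF_8

object Message_Framing {
  // header = comma-separated chunk sizes in bytes, terminated by a newline
  def make_header(lengths: List[Int]): Array[Byte] =
    (lengths.mkString(",") + "\n").getBytes(UTF_8)

  def write_message(chunks: List[String]): Array[Byte] = {
    val out = new ByteArrayOutputStream
    val bodies = chunks.map(_.getBytes(UTF_8))
    out.write(make_header(bodies.map(_.length)))
    bodies.foreach(b => out.write(b))
    out.toByteArray
  }

  def main(args: Array[String]): Unit = {
    // wire content: "15,6\n" followed by the two chunks back to back
    val wire = write_message(List("Document.update", "<xml/>"))
    print(new String(wire, UTF_8))
  }
}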
*) signature BYTE_MESSAGE = sig val write: BinIO.outstream -> string list -> unit + val write_yxml: BinIO.outstream -> XML.tree -> unit val flush: BinIO.outstream -> unit val write_line: BinIO.outstream -> string -> unit val read: BinIO.instream -> int -> string val read_block: BinIO.instream -> int -> string option * int val read_line: BinIO.instream -> string option val write_message: BinIO.outstream -> string list -> unit + val write_message_yxml: BinIO.outstream -> XML.body list -> unit val read_message: BinIO.instream -> string list option val write_line_message: BinIO.outstream -> string -> unit val read_line_message: BinIO.instream -> string option end; structure Byte_Message: BYTE_MESSAGE = struct (* output operations *) fun write stream = List.app (File.output stream); +fun write_yxml stream tree = YXML.traverse (fn s => fn () => File.output stream s) tree (); + fun flush stream = ignore (try BinIO.flushOut stream); fun write_line stream s = (write stream [s, "\n"]; flush stream); (* input operations *) fun read stream n = Byte.bytesToString (BinIO.inputN (stream, n)); fun read_block stream n = let val msg = read stream n; val len = size msg; in (if len = n then SOME msg else NONE, len) end; fun read_line stream = let val result = trim_line o String.implode o rev; fun read_body cs = (case BinIO.input1 stream of NONE => if null cs then NONE else SOME (result cs) | SOME b => (case Byte.byteToChar b of #"\n" => SOME (result cs) | c => read_body (c :: cs))); in read_body [] end; (* messages with multiple chunks (arbitrary content) *) fun make_header ns = [space_implode "," (map Value.print_int ns), "\n"]; fun write_message stream chunks = (write stream (make_header (map size chunks) @ chunks); flush stream); +fun write_message_yxml stream chunks = + (write stream (make_header (map YXML.body_size chunks)); + (List.app o List.app) (write_yxml stream) chunks; + flush stream); + fun parse_header line = map Value.parse_nat (space_explode "," line) handle Fail _ => error ("Malformed message header: " ^ quote line); fun read_chunk stream n = (case read_block stream n of (SOME chunk, _) => chunk | (NONE, len) => error ("Malformed message chunk: unexpected EOF after " ^ string_of_int len ^ " of " ^ string_of_int n ^ " bytes")); fun read_message stream = read_line stream |> Option.map (parse_header #> map (read_chunk stream)); (* hybrid messages: line or length+block (with content restriction) *) fun is_length msg = msg <> "" andalso forall_string Symbol.is_ascii_digit msg; fun is_terminated msg = let val len = size msg in len > 0 andalso Symbol.is_ascii_line_terminator (str (String.sub (msg, len - 1))) end; fun write_line_message stream msg = if is_length msg orelse is_terminated msg then error ("Bad content for line message:\n" ^ implode (take 100 (Symbol.explode msg))) else let val n = size msg in write stream ((if n > 100 orelse exists_string (fn s => s = "\n") msg then make_header [n + 1] else []) @ [msg, "\n"]); flush stream end; fun read_line_message stream = (case read_line stream of NONE => NONE | SOME line => (case try Value.parse_nat line of NONE => SOME line | SOME n => Option.map trim_line (#1 (read_block stream n)))); end; diff --git a/src/Pure/PIDE/command.ML b/src/Pure/PIDE/command.ML --- a/src/Pure/PIDE/command.ML +++ b/src/Pure/PIDE/command.ML @@ -1,509 +1,507 @@ (* Title: Pure/PIDE/command.ML Author: Makarius Prover command execution: read -- eval -- print. 
*) signature COMMAND = sig type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option} val read_file: Path.T -> Position.T -> bool -> Path.T -> Token.file val read_thy: Toplevel.state -> theory val read: Keyword.keywords -> theory -> Path.T-> (unit -> theory) -> blob Exn.result list * int -> Token.T list -> Toplevel.transition val read_span: Keyword.keywords -> Toplevel.state -> Path.T -> (unit -> theory) -> Command_Span.span -> Toplevel.transition type eval val eval_command_id: eval -> Document_ID.command val eval_exec_id: eval -> Document_ID.exec val eval_eq: eval * eval -> bool val eval_running: eval -> bool val eval_finished: eval -> bool val eval_result_command: eval -> Toplevel.transition val eval_result_state: eval -> Toplevel.state val eval: Keyword.keywords -> Path.T -> (unit -> theory) -> blob Exn.result list * int -> Document_ID.command -> Token.T list -> eval -> eval type print type print_fn = Toplevel.transition -> Toplevel.state -> unit val print0: {pri: int, print_fn: print_fn} -> eval -> print val print: bool -> (string * string list) list -> Keyword.keywords -> string -> eval -> print list -> print list option val parallel_print: print -> bool type print_function = {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option val print_function: string -> print_function -> unit val no_print_function: string -> unit type exec = eval * print list val init_exec: theory option -> exec val no_exec: exec val exec_ids: exec option -> Document_ID.exec list val exec: Document_ID.execution -> exec -> unit val exec_parallel_prints: Document_ID.execution -> Future.task list -> exec -> exec option end; structure Command: COMMAND = struct (** main phases of execution **) fun task_context group f = f |> Future.interruptible_task |> Future.task_context "Command.run_process" group; (* read *) type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option}; fun read_file_node file_node master_dir pos delimited src_path = let val _ = if Context_Position.pide_reports () then Position.report pos (Markup.language_path delimited) else (); fun read_file () = let val path = File.check_file (File.full_path master_dir src_path); val text = File.read path; val file_pos = Path.position path; in (text, file_pos) end; fun read_url () = let - val text = - Isabelle_System.with_tmp_file "file" "" (fn file => - (Isabelle_System.download file_node file; File.read file)); + val text = Isabelle_System.download file_node; val file_pos = Position.file file_node; in (text, file_pos) end; val (text, file_pos) = (case try Url.explode file_node of NONE => read_file () | SOME (Url.File _) => read_file () | _ => read_url ()); val lines = split_lines text; val digest = SHA1.digest text; in {src_path = src_path, lines = lines, digest = digest, pos = Position.copy_id pos file_pos} end handle ERROR msg => error (msg ^ Position.here pos); val read_file = read_file_node ""; local fun blob_file src_path lines digest file_node = let val file_pos = Position.file file_node |> (case Position.get_id (Position.thread_data ()) of NONE => I | SOME exec_id => Position.put_id exec_id); in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end fun resolve_files master_dir (blobs, blobs_index) toks = (case Outer_Syntax.parse_spans toks of [Command_Span.Span (Command_Span.Command_Span _, _)] => (case try (nth toks) 
blobs_index of SOME tok => let val source = Token.input_of tok; val pos = Input.pos_of source; val delimited = Input.is_delimited source; fun make_file (Exn.Res {file_node, src_path, content = NONE}) = Exn.interruptible_capture (fn () => read_file_node file_node master_dir pos delimited src_path) () | make_file (Exn.Res {file_node, src_path, content = SOME (digest, lines)}) = (Position.report pos (Markup.language_path delimited); Exn.Res (blob_file src_path lines digest file_node)) | make_file (Exn.Exn e) = Exn.Exn e; val files = map make_file blobs; in toks |> map_index (fn (i, tok) => if i = blobs_index then Token.put_files files tok else tok) end | NONE => toks) | _ => toks); fun reports_of_token keywords tok = let val malformed_symbols = Input.source_explode (Token.input_of tok) |> map_filter (fn (sym, pos) => if Symbol.is_malformed sym then SOME ((pos, Markup.bad ()), "Malformed symbolic character") else NONE); val is_malformed = Token.is_error tok orelse not (null malformed_symbols); val reports = Token.reports keywords tok @ Token.completion_report tok @ malformed_symbols; in (is_malformed, reports) end; in fun read_thy st = Toplevel.theory_of st handle Toplevel.UNDEF => Pure_Syn.bootstrap_thy; fun read keywords thy master_dir init blobs_info span = let val command_reports = Outer_Syntax.command_reports thy; val token_reports = map (reports_of_token keywords) span; val _ = Position.reports_text (maps #2 token_reports @ maps command_reports span); val verbatim = span |> map_filter (fn tok => if Token.kind_of tok = Token.Verbatim then SOME (Token.pos_of tok) else NONE); val _ = if null verbatim then () else legacy_feature ("Old-style {* verbatim *} token -- use \cartouche\ instead" ^ Position.here_list verbatim); in if exists #1 token_reports then Toplevel.malformed (#1 (Token.core_range_of span)) "Malformed command syntax" else Outer_Syntax.parse_span thy init (resolve_files master_dir blobs_info span) end; end; fun read_span keywords st master_dir init = Command_Span.content #> read keywords (read_thy st) master_dir init ([], ~1); (* eval *) type eval_state = {failed: bool, command: Toplevel.transition, state: Toplevel.state}; fun init_eval_state opt_thy = {failed = false, command = Toplevel.empty, state = (case opt_thy of NONE => Toplevel.init_toplevel () | SOME thy => Toplevel.theory_toplevel thy)}; datatype eval = Eval of {command_id: Document_ID.command, exec_id: Document_ID.exec, eval_process: eval_state lazy}; fun eval_command_id (Eval {command_id, ...}) = command_id; fun eval_exec_id (Eval {exec_id, ...}) = exec_id; val eval_eq = op = o apply2 eval_exec_id; val eval_running = Execution.is_running_exec o eval_exec_id; fun eval_finished (Eval {eval_process, ...}) = Lazy.is_finished eval_process; fun eval_result (Eval {eval_process, ...}) = Exn.release (Lazy.finished_result eval_process); val eval_result_command = #command o eval_result; val eval_result_state = #state o eval_result; local fun reset_state keywords tr st0 = Toplevel.setmp_thread_position tr (fn () => let val name = Toplevel.name_of tr; val res = if Keyword.is_theory_body keywords name then Toplevel.reset_theory st0 else if Keyword.is_proof keywords name then Toplevel.reset_proof st0 else if Keyword.is_theory_end keywords name then (case Toplevel.reset_notepad st0 of NONE => Toplevel.reset_theory st0 | some => some) else NONE; in (case res of NONE => st0 | SOME st => (Output.error_message (Toplevel.type_error tr ^ " -- using reset state"); st)) end) (); fun run keywords int tr st = if Future.proofs_enabled 1 
andalso Keyword.is_diag keywords (Toplevel.name_of tr) then let val (tr1, tr2) = Toplevel.fork_presentation tr; val _ = Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1} (fn () => Toplevel.command_exception int tr1 st); in Toplevel.command_errors int tr2 st end else Toplevel.command_errors int tr st; fun check_token_comments ctxt tok = (Thy_Output.check_comments ctxt (Input.source_explode (Token.input_of tok)); []) handle exn => if Exn.is_interrupt exn then Exn.reraise exn else Runtime.exn_messages exn; fun check_span_comments ctxt span tr = Toplevel.setmp_thread_position tr (fn () => maps (check_token_comments ctxt) span) (); fun report_indent tr st = (case try Toplevel.proof_of st of SOME prf => let val keywords = Thy_Header.get_keywords (Proof.theory_of prf) in if Keyword.command_kind keywords (Toplevel.name_of tr) = SOME Keyword.prf_script then (case try (Thm.nprems_of o #goal o Proof.goal) prf of NONE => () | SOME 0 => () | SOME n => let val report = Markup.markup_only (Markup.command_indent (n - 1)); in Toplevel.setmp_thread_position tr (fn () => Output.report [report]) () end) else () end | NONE => ()); fun status tr m = Toplevel.setmp_thread_position tr (fn () => Output.status [Markup.markup_only m]) (); fun eval_state keywords span tr ({state, ...}: eval_state) = let val _ = Thread_Attributes.expose_interrupt (); val st = reset_state keywords tr state; val _ = report_indent tr st; val _ = status tr Markup.running; val (errs1, result) = run keywords true tr st; val errs2 = (case result of NONE => [] | SOME st' => check_span_comments (Toplevel.presentation_context st') span tr); val errs = errs1 @ errs2; val _ = List.app (Future.error_message (Toplevel.pos_of tr)) errs; in (case result of NONE => let val _ = status tr Markup.failed; val _ = status tr Markup.finished; val _ = if null errs then (status tr Markup.canceled; Exn.interrupt ()) else (); in {failed = true, command = tr, state = st} end | SOME st' => let val _ = if Keyword.is_theory_end keywords (Toplevel.name_of tr) andalso can (Toplevel.end_theory Position.none) st' then status tr Markup.finalized else (); val _ = status tr Markup.finished; in {failed = false, command = tr, state = st'} end) end; in fun eval keywords master_dir init blobs_info command_id span eval0 = let val exec_id = Document_ID.make (); fun process () = let val eval_state0 = eval_result eval0; val thy = read_thy (#state eval_state0); val tr = Position.setmp_thread_data (Position.id_only (Document_ID.print exec_id)) (fn () => read keywords thy master_dir init blobs_info span |> Toplevel.exec_id exec_id) (); in eval_state keywords span tr eval_state0 end; in Eval {command_id = command_id, exec_id = exec_id, eval_process = Lazy.lazy_name "Command.eval" process} end; end; (* print *) datatype print = Print of {name: string, args: string list, delay: Time.time option, pri: int, persistent: bool, exec_id: Document_ID.exec, print_process: unit lazy}; fun print_exec_id (Print {exec_id, ...}) = exec_id; val print_eq = op = o apply2 print_exec_id; type print_fn = Toplevel.transition -> Toplevel.state -> unit; type print_function = {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option; local val print_functions = Synchronized.var "Command.print_functions" ([]: (string * print_function) list); fun print_error tr opt_context e = (Toplevel.setmp_thread_position tr o Runtime.controlled_execution 
opt_context) e () handle exn => if Exn.is_interrupt exn then Exn.reraise exn else List.app (Future.error_message (Toplevel.pos_of tr)) (Runtime.exn_messages exn); fun print_finished (Print {print_process, ...}) = Lazy.is_finished print_process; fun print_persistent (Print {persistent, ...}) = persistent; val overlay_ord = prod_ord string_ord (list_ord string_ord); fun make_print (name, args) {delay, pri, persistent, strict, print_fn} eval = let val exec_id = Document_ID.make (); fun process () = let val {failed, command, state = st', ...} = eval_result eval; val tr = Toplevel.exec_id exec_id command; val opt_context = try Toplevel.generic_theory_of st'; in if failed andalso strict then () else print_error tr opt_context (fn () => print_fn tr st') end; in Print { name = name, args = args, delay = delay, pri = pri, persistent = persistent, exec_id = exec_id, print_process = Lazy.lazy_name "Command.print" process} end; fun bad_print name_args exn = make_print name_args {delay = NONE, pri = 0, persistent = false, strict = false, print_fn = fn _ => fn _ => Exn.reraise exn}; in fun print0 {pri, print_fn} = make_print ("", [serial_string ()]) {delay = NONE, pri = pri, persistent = true, strict = true, print_fn = print_fn}; fun print command_visible command_overlays keywords command_name eval old_prints = let val print_functions = Synchronized.value print_functions; fun new_print (name, args) get_pr = let val params = {keywords = keywords, command_name = command_name, args = args, exec_id = eval_exec_id eval}; in (case Exn.capture (Runtime.controlled_execution NONE get_pr) params of Exn.Res NONE => NONE | Exn.Res (SOME pr) => SOME (make_print (name, args) pr eval) | Exn.Exn exn => SOME (bad_print (name, args) exn eval)) end; fun get_print (name, args) = (case find_first (fn Print print => (#name print, #args print) = (name, args)) old_prints of NONE => (case AList.lookup (op =) print_functions name of NONE => SOME (bad_print (name, args) (ERROR ("Missing print function " ^ quote name)) eval) | SOME get_pr => new_print (name, args) get_pr) | some => some); val retained_prints = filter (fn print => print_finished print andalso print_persistent print) old_prints; val visible_prints = if command_visible then fold (fn (name, _) => cons (name, [])) print_functions command_overlays |> sort_distinct overlay_ord |> map_filter get_print else []; val new_prints = visible_prints @ retained_prints; in if eq_list print_eq (old_prints, new_prints) then NONE else SOME new_prints end; fun parallel_print (Print {pri, ...}) = pri <= 0 orelse (Future.enabled () andalso Options.default_bool "parallel_print"); fun print_function name f = Synchronized.change print_functions (fn funs => (if name = "" then error "Unnamed print function" else (); if not (AList.defined (op =) funs name) then () else warning ("Redefining command print function: " ^ quote name); AList.update (op =) (name, f) funs)); fun no_print_function name = Synchronized.change print_functions (filter_out (equal name o #1)); end; val _ = print_function "Execution.print" (fn {args, exec_id, ...} => if null args then SOME {delay = NONE, pri = Task_Queue.urgent_pri + 2, persistent = false, strict = false, print_fn = fn _ => fn _ => Execution.fork_prints exec_id} else NONE); val _ = print_function "print_state" (fn {keywords, command_name, ...} => if Options.default_bool "editor_output_state" andalso Keyword.is_printed keywords command_name then SOME {delay = NONE, pri = Task_Queue.urgent_pri + 1, persistent = false, strict = false, print_fn = fn _ => fn st => 
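(*only proof states produce output here; the "editor_output_state" option and
  Keyword.is_printed have already been checked above when offering this print*)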
if Toplevel.is_proof st then Output.state (Toplevel.string_of_state st) else ()} else NONE); (* combined execution *) type exec = eval * print list; fun init_exec opt_thy : exec = (Eval {command_id = Document_ID.none, exec_id = Document_ID.none, eval_process = Lazy.value (init_eval_state opt_thy)}, []); val no_exec = init_exec NONE; fun exec_ids NONE = [] | exec_ids (SOME (eval, prints)) = eval_exec_id eval :: map print_exec_id prints; local fun run_process execution_id exec_id process = let val group = Future.worker_subgroup () in if Execution.running execution_id exec_id [group] then ignore (task_context group (fn () => Lazy.force_result {strict = true} process) ()) else () end; fun ignore_process process = Lazy.is_running process orelse Lazy.is_finished process; fun run_eval execution_id (Eval {exec_id, eval_process, ...}) = if Lazy.is_finished eval_process then () else run_process execution_id exec_id eval_process; fun fork_print execution_id deps (Print {name, delay, pri, exec_id, print_process, ...}) = let val group = Future.worker_subgroup (); fun fork () = ignore ((singleton o Future.forks) {name = name, group = SOME group, deps = deps, pri = pri, interrupts = true} (fn () => if ignore_process print_process then () else run_process execution_id exec_id print_process)); in (case delay of NONE => fork () | SOME d => ignore (Event_Timer.request {physical = true} (Time.now () + d) fork)) end; fun run_print execution_id (print as Print {exec_id, print_process, ...}) = if ignore_process print_process then () else if parallel_print print then fork_print execution_id [] print else run_process execution_id exec_id print_process; in fun exec execution_id (eval, prints) = (run_eval execution_id eval; List.app (run_print execution_id) prints); fun exec_parallel_prints execution_id deps (exec as (Eval {eval_process, ...}, prints)) = if Lazy.is_finished eval_process then (List.app (fork_print execution_id deps) prints; NONE) else SOME exec; end; end; diff --git a/src/Pure/PIDE/markup.scala b/src/Pure/PIDE/markup.scala --- a/src/Pure/PIDE/markup.scala +++ b/src/Pure/PIDE/markup.scala @@ -1,739 +1,741 @@ /* Title: Pure/PIDE/markup.scala Author: Makarius Quasi-abstract markup elements. 
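Note on this hunk: the only substantive change to this file is the new is_empty
test on the case class at the very end.  A rough usage sketch, using only names
defined below:

    Markup.Empty.is_empty                   // true: Markup("", Nil)
    Markup(Markup.COMMENT, Nil).is_empty    // false: name "comment" is non-empty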
*/ package isabelle object Markup { /* elements */ object Elements { def apply(elems: Set[String]): Elements = new Elements(elems) def apply(elems: String*): Elements = apply(Set(elems: _*)) val empty: Elements = apply() val full: Elements = new Elements(Set.empty) { override def apply(elem: String): Boolean = true override def toString: String = "Elements.full" } } sealed class Elements private[Markup](private val rep: Set[String]) { def apply(elem: String): Boolean = rep.contains(elem) def + (elem: String): Elements = new Elements(rep + elem) def ++ (elems: Elements): Elements = new Elements(rep ++ elems.rep) def - (elem: String): Elements = new Elements(rep - elem) def -- (elems: Elements): Elements = new Elements(rep -- elems.rep) override def toString: String = rep.mkString("Elements(", ",", ")") } /* properties */ val NAME = "name" val Name = new Properties.String(NAME) val XNAME = "xname" val XName = new Properties.String(XNAME) val KIND = "kind" val Kind = new Properties.String(KIND) val CONTENT = "content" val Content = new Properties.String(CONTENT) val SERIAL = "serial" val Serial = new Properties.Long(SERIAL) val INSTANCE = "instance" val Instance = new Properties.String(INSTANCE) /* basic markup */ val Empty: Markup = Markup("", Nil) val Broken: Markup = Markup("broken", Nil) class Markup_String(val name: String, prop: String) { private val Prop = new Properties.String(prop) def apply(s: String): Markup = Markup(name, Prop(s)) def unapply(markup: Markup): Option[String] = if (markup.name == name) Prop.unapply(markup.properties) else None } class Markup_Int(val name: String, prop: String) { private val Prop = new Properties.Int(prop) def apply(i: Int): Markup = Markup(name, Prop(i)) def unapply(markup: Markup): Option[Int] = if (markup.name == name) Prop.unapply(markup.properties) else None } class Markup_Long(val name: String, prop: String) { private val Prop = new Properties.Long(prop) def apply(i: Long): Markup = Markup(name, Prop(i)) def unapply(markup: Markup): Option[Long] = if (markup.name == name) Prop.unapply(markup.properties) else None } /* meta data */ val META_TITLE = "meta_title" val META_CREATOR = "meta_creator" val META_CONTRIBUTOR = "meta_contributor" val META_DATE = "meta_date" val META_LICENSE = "meta_license" val META_DESCRIPTION = "meta_description" /* formal entities */ val BINDING = "binding" val ENTITY = "entity" val Def = new Properties.Long("def") val Ref = new Properties.Long("ref") object Entity { object Def { def unapply(markup: Markup): Option[Long] = if (markup.name == ENTITY) Markup.Def.unapply(markup.properties) else None } object Ref { def unapply(markup: Markup): Option[Long] = if (markup.name == ENTITY) Markup.Ref.unapply(markup.properties) else None } object Occ { def unapply(markup: Markup): Option[Long] = Def.unapply(markup) orElse Ref.unapply(markup) } def unapply(markup: Markup): Option[(String, String)] = markup match { case Markup(ENTITY, props) => val kind = Kind.unapply(props).getOrElse("") val name = Name.unapply(props).getOrElse("") Some((kind, name)) case _ => None } } /* completion */ val COMPLETION = "completion" val NO_COMPLETION = "no_completion" val UPDATE = "update" /* position */ val LINE = "line" val END_LINE = "line" val OFFSET = "offset" val END_OFFSET = "end_offset" val FILE = "file" val ID = "id" val DEF_LINE = "def_line" val DEF_OFFSET = "def_offset" val DEF_END_OFFSET = "def_end_offset" val DEF_FILE = "def_file" val DEF_ID = "def_id" val POSITION_PROPERTIES = Set(LINE, OFFSET, END_OFFSET, FILE, ID) def 
position_property(entry: Properties.Entry): Boolean = POSITION_PROPERTIES(entry._1) val POSITION = "position" /* expression */ val EXPRESSION = "expression" object Expression { def unapply(markup: Markup): Option[String] = markup match { case Markup(EXPRESSION, Kind(kind)) => Some(kind) case Markup(EXPRESSION, _) => Some("") case _ => None } } /* citation */ val CITATION = "citation" val Citation = new Markup_String(CITATION, NAME) /* embedded languages */ val Symbols = new Properties.Boolean("symbols") val Antiquotes = new Properties.Boolean("antiquotes") val Delimited = new Properties.Boolean("delimited") val LANGUAGE = "language" object Language { val DOCUMENT = "document" val ML = "ML" val SML = "SML" val PATH = "path" val UNKNOWN = "unknown" def unapply(markup: Markup): Option[(String, Boolean, Boolean, Boolean)] = markup match { case Markup(LANGUAGE, props) => (props, props, props, props) match { case (Name(name), Symbols(symbols), Antiquotes(antiquotes), Delimited(delimited)) => Some((name, symbols, antiquotes, delimited)) case _ => None } case _ => None } object Path { def unapply(markup: Markup): Option[Boolean] = markup match { case Language(PATH, _, _, delimited) => Some(delimited) case _ => None } } } /* external resources */ val PATH = "path" val Path = new Markup_String(PATH, NAME) val EXPORT_PATH = "export_path" val Export_Path = new Markup_String(EXPORT_PATH, NAME) val URL = "url" val Url = new Markup_String(URL, NAME) val DOC = "doc" val Doc = new Markup_String(DOC, NAME) /* pretty printing */ val Consistent = new Properties.Boolean("consistent") val Indent = new Properties.Int("indent") val Width = new Properties.Int("width") object Block { val name = "block" def apply(c: Boolean, i: Int): Markup = Markup(name, (if (c) Consistent(c) else Nil) ::: (if (i != 0) Indent(i) else Nil)) def unapply(markup: Markup): Option[(Boolean, Int)] = if (markup.name == name) { val c = Consistent.unapply(markup.properties).getOrElse(false) val i = Indent.unapply(markup.properties).getOrElse(0) Some((c, i)) } else None } object Break { val name = "break" def apply(w: Int, i: Int): Markup = Markup(name, (if (w != 0) Width(w) else Nil) ::: (if (i != 0) Indent(i) else Nil)) def unapply(markup: Markup): Option[(Int, Int)] = if (markup.name == name) { val w = Width.unapply(markup.properties).getOrElse(0) val i = Indent.unapply(markup.properties).getOrElse(0) Some((w, i)) } else None } val ITEM = "item" val BULLET = "bullet" val SEPARATOR = "separator" /* text properties */ val WORDS = "words" val HIDDEN = "hidden" val DELETE = "delete" /* misc entities */ val THEORY = "theory" val CLASS = "class" val TYPE_NAME = "type_name" val FIXED = "fixed" val CASE = "case" val CONSTANT = "constant" val DYNAMIC_FACT = "dynamic_fact" /* inner syntax */ val TFREE = "tfree" val TVAR = "tvar" val FREE = "free" val SKOLEM = "skolem" val BOUND = "bound" val VAR = "var" val NUMERAL = "numeral" val LITERAL = "literal" val DELIMITER = "delimiter" val INNER_STRING = "inner_string" val INNER_CARTOUCHE = "inner_cartouche" val TOKEN_RANGE = "token_range" val SORTING = "sorting" val TYPING = "typing" val CLASS_PARAMETER = "class_parameter" val ATTRIBUTE = "attribute" val METHOD = "method" /* antiquotations */ val ANTIQUOTED = "antiquoted" val ANTIQUOTE = "antiquote" val ML_ANTIQUOTATION = "ML_antiquotation" val DOCUMENT_ANTIQUOTATION = "document_antiquotation" val DOCUMENT_ANTIQUOTATION_OPTION = "document_antiquotation_option" /* document text */ val RAW_TEXT = "raw_text" val PLAIN_TEXT = "plain_text" val PARAGRAPH = 
"paragraph" val TEXT_FOLD = "text_fold" object Document_Tag { val ELEMENT = "document_tag" val IMPORTANT = "important" val UNIMPORTANT = "unimportant" def unapply(markup: Markup): Option[String] = markup match { case Markup(ELEMENT, Name(name)) => Some(name) case _ => None } } /* Markdown document structure */ val MARKDOWN_PARAGRAPH = "markdown_paragraph" val MARKDOWN_ITEM = "markdown_item" val Markdown_Bullet = new Markup_Int("markdown_bullet", "depth") val Markdown_List = new Markup_String("markdown_list", "kind") val ITEMIZE = "itemize" val ENUMERATE = "enumerate" val DESCRIPTION = "description" /* ML */ val ML_KEYWORD1 = "ML_keyword1" val ML_KEYWORD2 = "ML_keyword2" val ML_KEYWORD3 = "ML_keyword3" val ML_DELIMITER = "ML_delimiter" val ML_TVAR = "ML_tvar" val ML_NUMERAL = "ML_numeral" val ML_CHAR = "ML_char" val ML_STRING = "ML_string" val ML_COMMENT = "ML_comment" val ML_DEF = "ML_def" val ML_OPEN = "ML_open" val ML_STRUCTURE = "ML_structure" val ML_TYPING = "ML_typing" val ML_BREAKPOINT = "ML_breakpoint" /* outer syntax */ val COMMAND = "command" val KEYWORD = "keyword" val KEYWORD1 = "keyword1" val KEYWORD2 = "keyword2" val KEYWORD3 = "keyword3" val QUASI_KEYWORD = "quasi_keyword" val IMPROPER = "improper" val OPERATOR = "operator" val STRING = "string" val ALT_STRING = "alt_string" val VERBATIM = "verbatim" val CARTOUCHE = "cartouche" val COMMENT = "comment" val LOAD_COMMAND = "load_command" /* comments */ val COMMENT1 = "comment1" val COMMENT2 = "comment2" val COMMENT3 = "comment3" /* timing */ val Elapsed = new Properties.Double("elapsed") val CPU = new Properties.Double("cpu") val GC = new Properties.Double("gc") object Timing_Properties { def apply(timing: isabelle.Timing): Properties.T = Elapsed(timing.elapsed.seconds) ::: CPU(timing.cpu.seconds) ::: GC(timing.gc.seconds) def unapply(props: Properties.T): Option[isabelle.Timing] = (props, props, props) match { case (Elapsed(elapsed), CPU(cpu), GC(gc)) => Some(new isabelle.Timing(Time.seconds(elapsed), Time.seconds(cpu), Time.seconds(gc))) case _ => None } def parse(props: Properties.T): isabelle.Timing = unapply(props) getOrElse isabelle.Timing.zero } val TIMING = "timing" object Timing { def apply(timing: isabelle.Timing): Markup = Markup(TIMING, Timing_Properties(timing)) def unapply(markup: Markup): Option[isabelle.Timing] = markup match { case Markup(TIMING, Timing_Properties(timing)) => Some(timing) case _ => None } } /* process result */ val Return_Code = new Properties.Int("return_code") object Process_Result { def apply(result: Process_Result): Properties.T = Return_Code(result.rc) ::: (if (result.timing.is_zero) Nil else Timing_Properties(result.timing)) def unapply(props: Properties.T): Option[Process_Result] = props match { case Return_Code(rc) => val timing = Timing_Properties.unapply(props).getOrElse(isabelle.Timing.zero) Some(isabelle.Process_Result(rc, timing = timing)) case _ => None } } /* command indentation */ object Command_Indent { val name = "command_indent" def unapply(markup: Markup): Option[Int] = if (markup.name == name) Indent.unapply(markup.properties) else None } /* goals */ val GOAL = "goal" val SUBGOAL = "subgoal" /* command status */ val TASK = "task" val ACCEPTED = "accepted" val FORKED = "forked" val JOINED = "joined" val RUNNING = "running" val FINISHED = "finished" val FAILED = "failed" val CANCELED = "canceled" val INITIALIZED = "initialized" val FINALIZED = "finalized" val CONSOLIDATING = "consolidating" val CONSOLIDATED = "consolidated" /* interactive documents */ val VERSION = "version" 
val ASSIGN = "assign" /* prover process */ val PROVER_COMMAND = "prover_command" val PROVER_ARG = "prover_arg" /* messages */ val INIT = "init" val STATUS = "status" val REPORT = "report" val RESULT = "result" val WRITELN = "writeln" val STATE = "state" val INFORMATION = "information" val TRACING = "tracing" val WARNING = "warning" val LEGACY = "legacy" val ERROR = "error" val NODES_STATUS = "nodes_status" val PROTOCOL = "protocol" val SYSTEM = "system" val STDOUT = "stdout" val STDERR = "stderr" val EXIT = "exit" val WRITELN_MESSAGE = "writeln_message" val STATE_MESSAGE = "state_message" val INFORMATION_MESSAGE = "information_message" val TRACING_MESSAGE = "tracing_message" val WARNING_MESSAGE = "warning_message" val LEGACY_MESSAGE = "legacy_message" val ERROR_MESSAGE = "error_message" val messages = Map( WRITELN -> WRITELN_MESSAGE, STATE -> STATE_MESSAGE, INFORMATION -> INFORMATION_MESSAGE, TRACING -> TRACING_MESSAGE, WARNING -> WARNING_MESSAGE, LEGACY -> LEGACY_MESSAGE, ERROR -> ERROR_MESSAGE) val message: String => String = messages.withDefault((s: String) => s) val NO_REPORT = "no_report" val BAD = "bad" val INTENSIFY = "intensify" /* active areas */ val BROWSER = "browser" val GRAPHVIEW = "graphview" val THEORY_EXPORTS = "theory_exports" val SENDBACK = "sendback" val PADDING = "padding" val PADDING_LINE = (PADDING, "line") val PADDING_COMMAND = (PADDING, "command") val DIALOG = "dialog" val Result = new Properties.String(RESULT) val JEDIT_ACTION = "jedit_action" /* protocol message functions */ val FUNCTION = "function" class Function(val name: String) { val PROPERTY: Properties.Entry = (FUNCTION, name) } class Properties_Function(name: String) extends Function(name) { def unapply(props: Properties.T): Option[Properties.T] = props match { case PROPERTY :: args => Some(args) case _ => None } } class Name_Function(name: String) extends Function(name) { def unapply(props: Properties.T): Option[String] = props match { case List(PROPERTY, (NAME, a)) => Some(a) case _ => None } } object ML_Statistics extends Function("ML_statistics") { def unapply(props: Properties.T): Option[(Long, String)] = props match { case List(PROPERTY, ("pid", Value.Long(pid)), ("stats_dir", stats_dir)) => Some((pid, stats_dir)) case _ => None } } val command_timing_properties: Set[String] = Set(FILE, OFFSET, NAME, Elapsed.name) def command_timing_property(entry: Properties.Entry): Boolean = command_timing_properties(entry._1) object Command_Timing extends Properties_Function("command_timing") object Theory_Timing extends Properties_Function("theory_timing") object Session_Timing extends Properties_Function("session_timing") { val Threads = new Properties.Int("threads") } object Task_Statistics extends Properties_Function("task_statistics") object Loading_Theory extends Properties_Function("loading_theory") object Build_Session_Finished extends Function("build_session_finished") object Commands_Accepted extends Function("commands_accepted") object Assign_Update extends Function("assign_update") object Removed_Versions extends Function("removed_versions") object Invoke_Scala extends Function("invoke_scala") { def unapply(props: Properties.T): Option[(String, String)] = props match { case List(PROPERTY, (NAME, name), (ID, id)) => Some((name, id)) case _ => None } } object Cancel_Scala extends Function("cancel_scala") { def unapply(props: Properties.T): Option[String] = props match { case List(PROPERTY, (ID, id)) => Some(id) case _ => None } } val PRINT_OPERATIONS = "print_operations" /* export */ val EXPORT = 
"export" val THEORY_NAME = "theory_name" val EXECUTABLE = "executable" val COMPRESS = "compress" val STRICT = "strict" /* debugger output */ val DEBUGGER_STATE = "debugger_state" object Debugger_State { def unapply(props: Properties.T): Option[String] = props match { case List((FUNCTION, DEBUGGER_STATE), (NAME, name)) => Some(name) case _ => None } } val DEBUGGER_OUTPUT = "debugger_output" object Debugger_Output { def unapply(props: Properties.T): Option[String] = props match { case List((FUNCTION, DEBUGGER_OUTPUT), (NAME, name)) => Some(name) case _ => None } } /* simplifier trace */ val SIMP_TRACE_PANEL = "simp_trace_panel" val SIMP_TRACE_LOG = "simp_trace_log" val SIMP_TRACE_STEP = "simp_trace_step" val SIMP_TRACE_RECURSE = "simp_trace_recurse" val SIMP_TRACE_HINT = "simp_trace_hint" val SIMP_TRACE_IGNORE = "simp_trace_ignore" val SIMP_TRACE_CANCEL = "simp_trace_cancel" object Simp_Trace_Cancel { def unapply(props: Properties.T): Option[Long] = props match { case (FUNCTION, SIMP_TRACE_CANCEL) :: Serial(i) => Some(i) case _ => None } } /* XML data representation */ def encode: XML.Encode.T[Markup] = (markup: Markup) => { import XML.Encode._ pair(string, properties)((markup.name, markup.properties)) } def decode: XML.Decode.T[Markup] = (body: XML.Body) => { import XML.Decode._ val (name, props) = pair(string, properties)(body) Markup(name, props) } } sealed case class Markup(name: String, properties: Properties.T) { + def is_empty: Boolean = name.isEmpty + def markup(s: String): String = YXML.string_of_tree(XML.Elem(this, List(XML.Text(s)))) def update_properties(more_props: Properties.T): Markup = if (more_props.isEmpty) this else Markup(name, more_props.foldRight(properties) { case (p, ps) => Properties.put(ps, p) }) def + (entry: Properties.Entry): Markup = Markup(name, Properties.put(properties, entry)) } diff --git a/src/Pure/PIDE/protocol.ML b/src/Pure/PIDE/protocol.ML --- a/src/Pure/PIDE/protocol.ML +++ b/src/Pure/PIDE/protocol.ML @@ -1,177 +1,177 @@ (* Title: Pure/PIDE/protocol.ML Author: Makarius Protocol message formats for interactive proof documents. 
*) structure Protocol: sig end = struct val _ = Protocol_Command.define "Prover.echo" (fn args => List.app writeln args); val _ = Protocol_Command.define "Prover.stop" (fn rc :: msgs => (List.app Output.system_message msgs; raise Protocol_Command.STOP (Value.parse_int rc))); val _ = Protocol_Command.define "Prover.options" (fn [options_yxml] => (Options.set_default (Options.decode (YXML.parse_body options_yxml)); Isabelle_Process.init_options_interactive ())); val _ = Protocol_Command.define "Prover.init_session" (fn [yxml] => Resources.init_session_yxml yxml); val _ = Protocol_Command.define "Document.define_blob" (fn [digest, content] => Document.change_state (Document.define_blob digest content)); fun decode_command id name parents_xml blobs_xml toks_xml sources : Document.command = let open XML.Decode; val parents = list string parents_xml; val (blobs_digests, blobs_index) = blobs_xml |> let val message = YXML.string_of_body o Protocol_Message.command_positions id; val blob = triple string string (option string) #> (fn (a, b, c) => {file_node = a, src_path = Path.explode b, digest = c}); in pair (list (variant [fn ([], a) => Exn.Res (blob a), fn ([], a) => Exn.Exn (ERROR (message a))])) int end; val toks = list (pair int int) toks_xml; in {command_id = Document_ID.parse id, name = name, parents = parents, blobs_digests = blobs_digests, blobs_index = blobs_index, tokens = toks ~~ sources} end; fun commands_accepted ids = - Output.protocol_message Markup.commands_accepted [XML.Text (space_implode "," ids)]; + Output.protocol_message Markup.commands_accepted [[XML.Text (space_implode "," ids)]]; val _ = Protocol_Command.define "Document.define_command" (fn id :: name :: parents :: blobs :: toks :: sources => let val command = decode_command id name (YXML.parse_body parents) (YXML.parse_body blobs) (YXML.parse_body toks) sources; val _ = Document.change_state (Document.define_command command); in commands_accepted [id] end); val _ = Protocol_Command.define "Document.define_commands" (fn args => let fun decode arg = let open XML.Decode; val (id, (name, (parents_xml, (blobs_xml, (toks_xml, sources))))) = pair string (pair string (pair I (pair I (pair I (list string))))) (YXML.parse_body arg); in decode_command id name parents_xml blobs_xml toks_xml sources end; val commands = map decode args; val _ = Document.change_state (fold Document.define_command commands); in commands_accepted (map (Value.print_int o #command_id) commands) end); val _ = Protocol_Command.define "Document.discontinue_execution" (fn [] => Execution.discontinue ()); val _ = Protocol_Command.define "Document.cancel_exec" (fn [exec_id] => Execution.cancel (Document_ID.parse exec_id)); val _ = Protocol_Command.define "Document.update" (Future.task_context "Document.update" (Future.new_group NONE) (fn old_id_string :: new_id_string :: consolidate_yxml :: edits_yxml => Document.change_state (fn state => let val old_id = Document_ID.parse old_id_string; val new_id = Document_ID.parse new_id_string; val consolidate = YXML.parse_body consolidate_yxml |> let open XML.Decode in list string end; val edits = edits_yxml |> map (YXML.parse_body #> let open XML.Decode in pair string (variant [fn ([], a) => Document.Edits (list (pair (option int) (option int)) a), fn ([], a) => let val (master, (name, (imports, (keywords, errors)))) = pair string (pair string (pair (list string) (pair (list (pair string (pair string (list string)))) (list YXML.string_of_body)))) a; val imports' = map (rpair Position.none) imports; val keywords' = map (fn 
(x, y) => ((x, Position.none), y)) keywords; val header = Thy_Header.make (name, Position.none) imports' keywords'; in Document.Deps {master = master, header = header, errors = errors} end, fn (a :: b, c) => Document.Perspective (bool_atom a, map int_atom b, list (pair int (pair string (list string))) c)]) end); val _ = Execution.discontinue (); val (edited, removed, assign_update, state') = Document.update old_id new_id edits consolidate state; val _ = (singleton o Future.forks) {name = "Document.update/remove", group = NONE, deps = Execution.snapshot removed, pri = Task_Queue.urgent_pri + 2, interrupts = false} (fn () => (Execution.purge removed; List.app Isabelle_Process.reset_tracing removed)); val _ = Output.protocol_message Markup.assign_update - ((new_id, edited, assign_update) |> + [(new_id, edited, assign_update) |> let open XML.Encode; fun encode_upd (a, bs) = string (space_implode "," (map Value.print_int (a :: bs))); - in triple int (list string) (list encode_upd) end); + in triple int (list string) (list encode_upd) end]; in Document.start_execution state' end))); val _ = Protocol_Command.define "Document.remove_versions" (fn [versions_yxml] => Document.change_state (fn state => let val versions = YXML.parse_body versions_yxml |> let open XML.Decode in list int end; val state1 = Document.remove_versions versions state; - val _ = Output.protocol_message Markup.removed_versions [XML.Text (versions_yxml)]; + val _ = Output.protocol_message Markup.removed_versions [[XML.Text (versions_yxml)]]; in state1 end)); val _ = Protocol_Command.define "Document.dialog_result" (fn [serial, result] => Active.dialog_result (Value.parse_int serial) result handle exn => if Exn.is_interrupt exn then () (*sic!*) else Exn.reraise exn); val _ = Protocol_Command.define "ML_Heap.full_gc" (fn [] => ML_Heap.full_gc ()); val _ = Protocol_Command.define "ML_Heap.share_common_data" (fn [] => ML_Heap.share_common_data ()); end; diff --git a/src/Pure/PIDE/prover.scala b/src/Pure/PIDE/prover.scala --- a/src/Pure/PIDE/prover.scala +++ b/src/Pure/PIDE/prover.scala @@ -1,368 +1,323 @@ /* Title: Pure/PIDE/prover.scala Author: Makarius Options: :folding=explicit: Prover process wrapping. 
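Note on this patch: message_output no longer decodes the length-prefixed chunk
format by hand.  It reads whole messages via Byte_Message.read_message, which
yields Some(header :: chunks); the header is decoded to a single XML.Elem,
PROTOCOL messages keep their raw chunks (Prover.Protocol_Output(props, chunks)),
and every other message kind expects exactly one further chunk, decoded as the
message body.  Malformed input is reported via the new Prover.Malformed
exception instead of the previous local Protocol_Error.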
*/ package isabelle import java.io.{InputStream, OutputStream, BufferedOutputStream, IOException} object Prover { /* messages */ sealed abstract class Message type Receiver = Message => Unit class Input(val name: String, val args: List[String]) extends Message { override def toString: String = XML.Elem(Markup(Markup.PROVER_COMMAND, List((Markup.NAME, name))), args.flatMap(s => List(XML.newline, XML.elem(Markup.PROVER_ARG, YXML.parse_body(s))))).toString } class Output(val message: XML.Elem) extends Message { def kind: String = message.markup.name def properties: Properties.T = message.markup.properties def body: XML.Body = message.body def is_init: Boolean = kind == Markup.INIT def is_exit: Boolean = kind == Markup.EXIT def is_stdout: Boolean = kind == Markup.STDOUT def is_stderr: Boolean = kind == Markup.STDERR def is_system: Boolean = kind == Markup.SYSTEM def is_status: Boolean = kind == Markup.STATUS def is_report: Boolean = kind == Markup.REPORT def is_syslog: Boolean = is_init || is_exit || is_system || is_stderr override def toString: String = { val res = if (is_status || is_report) message.body.map(_.toString).mkString else Pretty.string_of(message.body, metric = Symbol.Metric) if (properties.isEmpty) - kind.toString + " [[" + res + "]]" + kind + " [[" + res + "]]" else - kind.toString + " " + + kind + " " + (for ((x, y) <- properties) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]" } } - class Protocol_Output(props: Properties.T, val bytes: Bytes) + class Malformed(msg: String) extends Exn.User_Error("Malformed prover message: " + msg) + def bad_header(print: String): Nothing = throw new Malformed("bad message header\n" + print) + def bad_chunks(): Nothing = throw new Malformed("bad message chunks") + + def the_chunk(chunks: List[Bytes], print: => String): Bytes = + chunks match { + case List(chunk) => chunk + case _ => throw new Malformed("single chunk expected: " + print) + } + + class Protocol_Output(props: Properties.T, val chunks: List[Bytes]) extends Output(XML.Elem(Markup(Markup.PROTOCOL, props), Nil)) { - lazy val text: String = bytes.text + def chunk: Bytes = the_chunk(chunks, toString) + lazy val text: String = chunk.text } } class Prover( receiver: Prover.Receiver, cache: XML.Cache, channel: System_Channel, process: Bash.Process) extends Protocol { /** receiver output **/ private def system_output(text: String): Unit = { receiver(new Prover.Output(XML.Elem(Markup(Markup.SYSTEM, Nil), List(XML.Text(text))))) } - private def protocol_output(props: Properties.T, bytes: Bytes): Unit = + private def protocol_output(props: Properties.T, chunks: List[Bytes]): Unit = { - receiver(new Prover.Protocol_Output(cache.props(props), bytes)) + receiver(new Prover.Protocol_Output(props, chunks)) } private def output(kind: String, props: Properties.T, body: XML.Body): Unit = { val main = XML.Elem(Markup(kind, props), Protocol_Message.clean_reports(body)) val reports = Protocol_Message.reports(props, body) for (msg <- main :: reports) receiver(new Prover.Output(cache.elem(msg))) } private def exit_message(result: Process_Result): Unit = { output(Markup.EXIT, Markup.Process_Result(result), List(XML.Text(result.print_return_code))) } /** process manager **/ private val process_result: Future[Process_Result] = Future.thread("process_result") { val rc = process.join val timing = process.get_timing Process_Result(rc, timing = timing) } private def terminate_process(): Unit = { try { process.terminate() } catch { case exn @ ERROR(_) => system_output("Failed to terminate prover 
process: " + exn.getMessage) } } private val process_manager = Isabelle_Thread.fork(name = "process_manager") { val stdout = physical_output(false) val (startup_failed, startup_errors) = { var finished: Option[Boolean] = None val result = new StringBuilder(100) while (finished.isEmpty && (process.stderr.ready || !process_result.is_finished)) { while (finished.isEmpty && process.stderr.ready) { try { val c = process.stderr.read if (c == 2) finished = Some(true) else result += c.toChar } catch { case _: IOException => finished = Some(false) } } Time.seconds(0.05).sleep } (finished.isEmpty || !finished.get, result.toString.trim) } if (startup_errors != "") system_output(startup_errors) if (startup_failed) { terminate_process() process_result.join stdout.join exit_message(Process_Result(127)) } else { val (command_stream, message_stream) = channel.rendezvous() command_input_init(command_stream) val stderr = physical_output(true) val message = message_output(message_stream) val result = process_result.join system_output("process terminated") command_input_close() for (thread <- List(stdout, stderr, message)) thread.join system_output("process_manager terminated") exit_message(result) } channel.shutdown() } /* management methods */ def join(): Unit = process_manager.join() def terminate(): Unit = { system_output("Terminating prover process") command_input_close() var count = 10 while (!process_result.is_finished && count > 0) { Time.seconds(0.1).sleep count -= 1 } if (!process_result.is_finished) terminate_process() } /** process streams **/ /* command input */ private var command_input: Option[Consumer_Thread[List[Bytes]]] = None private def command_input_close(): Unit = command_input.foreach(_.shutdown()) private def command_input_init(raw_stream: OutputStream): Unit = { val name = "command_input" val stream = new BufferedOutputStream(raw_stream) command_input = Some( Consumer_Thread.fork(name)( consume = { case chunks => try { Bytes(chunks.map(_.length).mkString("", ",", "\n")).write_stream(stream) chunks.foreach(_.write_stream(stream)) stream.flush true } catch { case e: IOException => system_output(name + ": " + e.getMessage); false } }, finish = { case () => stream.close(); system_output(name + " terminated") } ) ) } /* physical output */ private def physical_output(err: Boolean): Thread = { val (name, reader, markup) = if (err) ("standard_error", process.stderr, Markup.STDERR) else ("standard_output", process.stdout, Markup.STDOUT) Isabelle_Thread.fork(name = name) { try { var result = new StringBuilder(100) var finished = false while (!finished) { //{{{ var c = -1 var done = false while (!done && (result.isEmpty || reader.ready)) { c = reader.read if (c >= 0) result.append(c.asInstanceOf[Char]) else done = true } if (result.nonEmpty) { output(markup, Nil, List(XML.Text(Symbol.decode(result.toString)))) result.clear() } else { reader.close() finished = true } //}}} } } catch { case e: IOException => system_output(name + ": " + e.getMessage) } system_output(name + " terminated") } } /* message output */ private def message_output(stream: InputStream): Thread = { - class EOF extends Exception - class Protocol_Error(msg: String) extends Exception(msg) - - val name = "message_output" - Isabelle_Thread.fork(name = name) { - val default_buffer = new Array[Byte](65536) - var c = -1 + def decode_chunk(chunk: Bytes): XML.Body = + Symbol.decode_yxml_failsafe(chunk.text, cache = cache) - def read_int(): Int = - //{{{ - { - var n = 0 - c = stream.read - if (c == -1) throw new EOF - while (48 <= c && 
c <= 57) { - n = 10 * n + (c - 48) - c = stream.read - } - if (c != 10) - throw new Protocol_Error("malformed header: expected integer followed by newline") - else n - } - //}}} - - def read_chunk_bytes(): (Array[Byte], Int) = - //{{{ - { - val n = read_int() - val buf = - if (n <= default_buffer.length) default_buffer - else new Array[Byte](n) - - var i = 0 - var m = 0 - do { - m = stream.read(buf, i, n - i) - if (m != -1) i += m + val thread_name = "message_output" + Isabelle_Thread.fork(name = thread_name) { + try { + var finished = false + while (!finished) { + Byte_Message.read_message(stream) match { + case None => finished = true + case Some(header :: chunks) => + decode_chunk(header) match { + case List(XML.Elem(Markup(kind, props), Nil)) => + if (kind == Markup.PROTOCOL) protocol_output(props, chunks) + else output(kind, props, decode_chunk(Prover.the_chunk(chunks, kind))) + case _ => Prover.bad_header(header.toString) + } + case Some(_) => Prover.bad_chunks() + } } - while (m != -1 && n > i) - - if (i != n) - throw new Protocol_Error("bad chunk (unexpected EOF after " + i + " of " + n + " bytes)") - - (buf, n) - } - //}}} - - def read_chunk(): XML.Body = - { - val (buf, n) = read_chunk_bytes() - YXML.parse_body_failsafe(UTF8.decode_chars(Symbol.decode, buf, 0, n)) - } - - try { - do { - try { - val header = read_chunk() - header match { - case List(XML.Elem(Markup(name, props), Nil)) => - val kind = name.intern - if (kind == Markup.PROTOCOL) { - val (buf, n) = read_chunk_bytes() - protocol_output(props, Bytes(buf, 0, n)) - } - else { - val body = read_chunk() - output(kind, props, body) - } - case _ => - read_chunk() - throw new Protocol_Error("bad header: " + header.toString) - } - } - catch { case _: EOF => } - } - while (c != -1) } catch { case e: IOException => system_output("Cannot read message:\n" + e.getMessage) - case e: Protocol_Error => system_output("Malformed message:\n" + e.getMessage) + case e: Prover.Malformed => system_output(e.getMessage) } stream.close() - system_output(name + " terminated") + system_output(thread_name + " terminated") } } /** protocol commands **/ var trace: Boolean = false def protocol_command_raw(name: String, args: List[Bytes]): Unit = command_input match { case Some(thread) if thread.is_active => if (trace) { val payload = args.foldLeft(0) { case (n, b) => n + b.length } Output.writeln( "protocol_command " + name + ", args = " + args.length + ", payload = " + payload) } thread.send(Bytes(name) :: args) case _ => error("Inactive prover input thread for command " + quote(name)) } def protocol_command_args(name: String, args: List[String]): Unit = { receiver(new Prover.Input(name, args)) protocol_command_raw(name, args.map(Bytes(_))) } def protocol_command(name: String, args: String*): Unit = protocol_command_args(name, args.toList) } diff --git a/src/Pure/PIDE/resources.ML b/src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML +++ b/src/Pure/PIDE/resources.ML @@ -1,435 +1,443 @@ (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. 
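Note on this patch: the table of registered Scala functions now stores a bool
flag next to each position, and check_scala_function returns that flag together
with the checked name, so the antiquotations below can pick the matching
wrapper, roughly:

    val (name, multi) = check_scala_function ctxt arg;
    val func = if multi then "Scala.function" else "Scala.function1";

The flag is called "multi" on the Scala side and presumably distinguishes
functions taking a list of arguments from single-argument ones.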
*) signature RESOURCES = sig val default_qualifier: string val init_session: {session_positions: (string * Properties.T) list, session_directories: (string * string) list, session_chapters: (string * string) list, bibtex_entries: (string * string list) list, command_timings: Properties.T list, - scala_functions: (string * Position.T) list, + scala_functions: (string * (bool * Position.T)) list, global_theories: (string * string) list, loaded_theories: string list} -> unit val init_session_yxml: string -> unit val init_session_file: Path.T -> unit val finish_session_base: unit -> unit val global_theory: string -> string option val loaded_theory: string -> bool val check_session: Proof.context -> string * Position.T -> string val session_chapter: string -> string val last_timing: Toplevel.transition -> Time.time val scala_functions: unit -> string list - val check_scala_function: Proof.context -> string * Position.T -> string + val check_scala_function: Proof.context -> string * Position.T -> string * bool val master_directory: theory -> Path.T val imports_of: theory -> (string * Position.T) list val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory val thy_path: Path.T -> Path.T val theory_qualifier: string -> string val theory_bibtex_entries: string -> string list val find_theory_file: string -> Path.T option val import_name: string -> Path.T -> string -> {node_name: Path.T, master_dir: Path.T, theory_name: string} val check_thy: Path.T -> string -> {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, imports: (string * Position.T) list, keywords: Thy_Header.keywords} val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser val parse_file: (theory -> Token.file) parser val provide: Path.T * SHA1.digest -> theory -> theory val provide_file: Token.file -> theory -> theory val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser val provide_parse_file: (theory -> Token.file * theory) parser val loaded_files_current: theory -> bool val check_path: Proof.context -> Path.T option -> Input.source -> Path.T val check_file: Proof.context -> Path.T option -> Input.source -> Path.T val check_dir: Proof.context -> Path.T option -> Input.source -> Path.T val check_session_dir: Proof.context -> Path.T option -> Input.source -> Path.T end; structure Resources: RESOURCES = struct (* command timings *) type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun make_timings command_timings = fold update_timings command_timings empty_timings; fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session base *) val default_qualifier = "Draft"; type entry = {pos: Position.T, serial: 
serial}; fun make_entry props : entry = {pos = Position.of_properties props, serial = serial ()}; val empty_session_base = ({session_positions = []: (string * entry) list, session_directories = Symtab.empty: Path.T list Symtab.table, session_chapters = Symtab.empty: string Symtab.table, bibtex_entries = Symtab.empty: string list Symtab.table, timings = empty_timings, - scala_functions = Symtab.empty: Position.T Symtab.table}, + scala_functions = Symtab.empty: (bool * Position.T) Symtab.table}, {global_theories = Symtab.empty: string Symtab.table, loaded_theories = Symtab.empty: unit Symtab.table}); val global_session_base = Synchronized.var "Sessions.base" empty_session_base; fun init_session {session_positions, session_directories, session_chapters, bibtex_entries, command_timings, scala_functions, global_theories, loaded_theories} = Synchronized.change global_session_base (fn _ => ({session_positions = sort_by #1 (map (apsnd make_entry) session_positions), session_directories = fold_rev (fn (dir, name) => Symtab.cons_list (name, Path.explode dir)) session_directories Symtab.empty, session_chapters = Symtab.make session_chapters, bibtex_entries = Symtab.make bibtex_entries, timings = make_timings command_timings, scala_functions = Symtab.make scala_functions}, {global_theories = Symtab.make global_theories, loaded_theories = Symtab.make_set loaded_theories})); fun init_session_yxml yxml = let val (session_positions, (session_directories, (session_chapters, (bibtex_entries, (command_timings, (scala_functions, (global_theories, loaded_theories))))))) = YXML.parse_body yxml |> let open XML.Decode in (pair (list (pair string properties)) (pair (list (pair string string)) (pair (list (pair string string)) (pair (list (pair string (list string))) (pair (list properties) - (pair (list (pair string properties)) + (pair (list (pair string (pair bool properties))) (pair (list (pair string string)) (list string)))))))) end; in init_session {session_positions = session_positions, session_directories = session_directories, session_chapters = session_chapters, bibtex_entries = bibtex_entries, command_timings = command_timings, - scala_functions = map (apsnd Position.of_properties) scala_functions, + scala_functions = (map o apsnd o apsnd) Position.of_properties scala_functions, global_theories = global_theories, loaded_theories = loaded_theories} end; fun init_session_file path = init_session_yxml (File.read path) before File.rm path; fun finish_session_base () = Synchronized.change global_session_base (apfst (K (#1 empty_session_base))); fun get_session_base f = f (Synchronized.value global_session_base); fun get_session_base1 f = get_session_base (f o #1); fun get_session_base2 f = get_session_base (f o #2); fun global_theory a = Symtab.lookup (get_session_base2 #global_theories) a; fun loaded_theory a = Symtab.defined (get_session_base2 #loaded_theories) a; fun check_session ctxt arg = Completion.check_item "session" (fn (name, {pos, serial}) => Markup.entity Markup.sessionN name |> Markup.properties (Position.entity_properties_of false serial pos)) (get_session_base1 #session_positions) ctxt arg; fun session_chapter name = the_default "Unsorted" (Symtab.lookup (get_session_base1 #session_chapters) name); fun last_timing tr = get_timings (get_session_base1 #timings) tr; (* Scala functions *) (*raw bootstrap environment*) fun scala_functions () = space_explode "," (getenv "ISABELLE_SCALA_FUNCTIONS"); (*regular resources*) -fun scala_function_pos name = - (name, the_default Position.none 
(Symtab.lookup (get_session_base1 #scala_functions) name)); +fun scala_function a = + (a, the_default (false, Position.none) (Symtab.lookup (get_session_base1 #scala_functions) a)); fun check_scala_function ctxt arg = - Completion.check_entity Markup.scala_functionN - (scala_functions () |> sort_strings |> map scala_function_pos) ctxt arg; + let + val funs = scala_functions () |> sort_strings |> map scala_function; + val name = Completion.check_entity Markup.scala_functionN (map (apsnd #2) funs) ctxt arg; + val multi = + (case AList.lookup (op =) funs name of + SOME (multi, _) => multi + | NONE => false); + in (name, multi) end; val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\scala_function\ - (Scan.lift Parse.embedded_position) check_scala_function #> + (Scan.lift Parse.embedded_position) (#1 oo check_scala_function) #> ML_Antiquotation.inline_embedded \<^binding>\scala_function\ (Args.context -- Scan.lift Parse.embedded_position - >> (uncurry check_scala_function #> ML_Syntax.print_string)) #> + >> (uncurry check_scala_function #> #1 #> ML_Syntax.print_string)) #> ML_Antiquotation.value_embedded \<^binding>\scala\ (Args.context -- Scan.lift Args.embedded_position >> (fn (ctxt, arg) => - let val name = check_scala_function ctxt arg - in ML_Syntax.atomic ("Scala.function " ^ ML_Syntax.print_string name) end))); + let + val (name, multi) = check_scala_function ctxt arg; + val func = if multi then "Scala.function" else "Scala.function1"; + in ML_Syntax.atomic (func ^ " " ^ ML_Syntax.print_string name) end))); (* manage source files *) type files = {master_dir: Path.T, (*master directory of theory source*) imports: (string * Position.T) list, (*source specification of imports*) provided: (Path.T * SHA1.digest) list}; (*source path, digest*) fun make_files (master_dir, imports, provided): files = {master_dir = master_dir, imports = imports, provided = provided}; structure Files = Theory_Data ( type T = files; val empty = make_files (Path.current, [], []); val extend = I; fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) = let val provided' = Library.merge (op =) (provided1, provided2) in make_files (master_dir, imports, provided') end ); fun map_files f = Files.map (fn {master_dir, imports, provided} => make_files (f (master_dir, imports, provided))); val master_directory = #master_dir o Files.get; val imports_of = #imports o Files.get; fun begin_theory master_dir {name, imports, keywords} parents = Theory.begin_theory name parents |> map_files (fn _ => (Path.explode (Path.implode_symbolic master_dir), imports, [])) |> Thy_Header.add_keywords keywords; (* theory files *) val thy_path = Path.ext "thy"; fun theory_qualifier theory = (case global_theory theory of SOME qualifier => qualifier | NONE => Long_Name.qualifier theory); fun theory_name qualifier theory = if Long_Name.is_qualified theory orelse is_some (global_theory theory) then theory else Long_Name.qualify qualifier theory; fun theory_bibtex_entries theory = Symtab.lookup_list (get_session_base1 #bibtex_entries) (theory_qualifier theory); fun find_theory_file thy_name = let val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name)); val session = theory_qualifier thy_name; val dirs = Symtab.lookup_list (get_session_base1 #session_directories) session; in dirs |> get_first (fn dir => let val path = dir + thy_file in if File.is_file path then SOME path else NONE end) end; fun make_theory_node node_name theory = {node_name = node_name, master_dir = Path.dir 
node_name, theory_name = theory}; fun loaded_theory_node theory = {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory}; fun import_name qualifier dir s = let val theory = theory_name qualifier (Thy_Header.import_name s); fun theory_node () = make_theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))) theory; in if not (Thy_Header.is_base_name s) then theory_node () else if loaded_theory theory then loaded_theory_node theory else (case find_theory_file theory of SOME node_name => make_theory_node node_name theory | NONE => if Long_Name.is_qualified s then loaded_theory_node theory else theory_node ()) end; fun check_file dir file = File.check_file (File.full_path dir file); fun check_thy dir thy_name = let val thy_base_name = Long_Name.base_name thy_name; val master_file = (case find_theory_file thy_name of SOME path => check_file Path.current path | NONE => check_file dir (thy_path (Path.basic thy_base_name))); val text = File.read master_file; val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_base_name <> name andalso error ("Bad theory name " ^ quote name ^ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos); in {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} end; (* load files *) fun parse_files make_paths = Scan.ahead Parse.not_eof -- Parse.path_input >> (fn (tok, source) => fn thy => (case Token.get_files tok of [] => let val master_dir = master_directory thy; val name = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val src_paths = make_paths (Path.explode name); in map (Command.read_file master_dir pos delimited) src_paths end | files => map Exn.release files)); val parse_file = parse_files single >> (fn f => f #> the_single); fun provide (src_path, id) = map_files (fn (master_dir, imports, provided) => if AList.defined (op =) provided src_path then error ("Duplicate use of source file: " ^ Path.print src_path) else (master_dir, imports, (src_path, id) :: provided)); fun provide_file (file: Token.file) = provide (#src_path file, #digest file); fun provide_parse_files make_paths = parse_files make_paths >> (fn files => fn thy => let val fs = files thy; val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; in (fs, thy') end); val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single); fun load_file thy src_path = let val full_path = check_file (master_directory thy) src_path; val text = File.read full_path; val id = SHA1.digest text; in ((full_path, id), text) end; fun loaded_files_current thy = #provided (Files.get thy) |> forall (fn (src_path, id) => (case try (load_file thy) src_path of NONE => false | SOME ((_, id'), _) => id = id')); (* formal check *) fun formal_check check_file ctxt opt_dir source = let val name = Input.string_of source; val pos = Input.pos_of source; val delimited = Input.is_delimited source; val _ = Context_Position.report ctxt pos (Markup.language_path delimited); fun err msg = error (msg ^ Position.here pos); val dir = (case opt_dir of SOME dir => dir | NONE => master_directory (Proof_Context.theory_of ctxt)); val path = dir + Path.explode name handle ERROR msg => err msg; val _ = Path.expand path handle ERROR msg => err msg; val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); val _ : Path.T = check_file path handle ERROR msg => err 
msg; in path end; val check_path = formal_check I; val check_file = formal_check File.check_file; val check_dir = formal_check File.check_dir; fun check_session_dir ctxt opt_dir s = let val dir = Path.expand (check_dir ctxt opt_dir s); val ok = File.is_file (dir + Path.explode("ROOT")) orelse File.is_file (dir + Path.explode("ROOTS")); in if ok then dir else error ("Bad session root directory (missing ROOT or ROOTS): " ^ Path.print dir ^ Position.here (Input.pos_of s)) end; (* antiquotations *) local fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) = Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => let val _ = check ctxt NONE source; val latex = Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source)); in Latex.enclose_block "\\isatt{" "}" [latex] end); fun ML_antiq check = Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) => check ctxt (SOME Path.current) source |> ML_Syntax.print_path); in val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\session\ (Scan.lift Parse.embedded_position) check_session #> Thy_Output.antiquotation_raw_embedded \<^binding>\path\ (document_antiq check_path) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\file\ (document_antiq check_file) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\dir\ (document_antiq check_dir) (K I) #> ML_Antiquotation.value_embedded \<^binding>\path\ (ML_antiq check_path) #> ML_Antiquotation.value_embedded \<^binding>\file\ (ML_antiq check_file) #> ML_Antiquotation.value_embedded \<^binding>\dir\ (ML_antiq check_dir) #> ML_Antiquotation.value_embedded \<^binding>\path_binding\ (Scan.lift (Parse.position Parse.path) >> (ML_Syntax.print_path_binding o Path.explode_binding)) #> ML_Antiquotation.value \<^binding>\master_dir\ (Args.theory >> (ML_Syntax.print_path o master_directory))); end; end; diff --git a/src/Pure/PIDE/resources.scala b/src/Pure/PIDE/resources.scala --- a/src/Pure/PIDE/resources.scala +++ b/src/Pure/PIDE/resources.scala @@ -1,451 +1,451 @@ /* Title: Pure/PIDE/resources.scala Author: Makarius Resources for theories and auxiliary files. 
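Note on this patch: init_session_yxml now sends (fun.name, (fun.multi,
fun.position)) for every registered Scala function, i.e. pair(string,
pair(bool, properties)) on the wire, matching the (bool * Position.T) table and
the updated decoder in resources.ML above.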
*/ package isabelle import scala.util.parsing.input.Reader import java.io.{File => JFile} object Resources { def empty: Resources = new Resources(Sessions.Structure.empty, Sessions.Structure.empty.bootstrap) } class Resources( val sessions_structure: Sessions.Structure, val session_base: Sessions.Base, val log: Logger = No_Logger, command_timings: List[Properties.T] = Nil) { resources => /* init session */ def init_session_yxml: String = { import XML.Encode._ YXML.string_of_body( pair(list(pair(string, properties)), pair(list(pair(string, string)), pair(list(pair(string, string)), pair(list(pair(string, list(string))), pair(list(properties), - pair(list(pair(string, properties)), + pair(list(pair(string, pair(bool, properties))), pair(list(pair(string, string)), list(string))))))))( (sessions_structure.session_positions, (sessions_structure.dest_session_directories, (sessions_structure.session_chapters, (sessions_structure.bibtex_entries, (command_timings, - (Scala.functions.map(fun => (fun.name, fun.position)), + (Scala.functions.map(fun => (fun.name, (fun.multi, fun.position))), (session_base.global_theories.toList, session_base.loaded_theories.keys))))))))) } /* file formats */ def make_theory_name(name: Document.Node.Name): Option[Document.Node.Name] = File_Format.registry.get(name).flatMap(_.make_theory_name(resources, name)) def make_theory_content(thy_name: Document.Node.Name): Option[String] = File_Format.registry.get_theory(thy_name).flatMap(_.make_theory_content(resources, thy_name)) def is_hidden(name: Document.Node.Name): Boolean = !name.is_theory || name.theory == Sessions.root_name || File_Format.registry.is_theory(name) def html_document(snapshot: Document.Snapshot): Option[Presentation.HTML_Document] = File_Format.registry.get(snapshot.node_name).flatMap(_.html_document(snapshot)) /* file-system operations */ def append(dir: String, source_path: Path): String = (Path.explode(dir) + source_path).expand.implode def append(node_name: Document.Node.Name, source_path: Path): String = append(node_name.master_dir, source_path) def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name = { val node = append(dir, file) val master_dir = append(dir, file.dir) Document.Node.Name(node, master_dir, theory) } def loaded_theory_node(theory: String): Document.Node.Name = Document.Node.Name(theory, "", theory) /* source files of Isabelle/ML bootstrap */ def source_file(raw_name: String): Option[String] = { if (Path.is_wellformed(raw_name)) { if (Path.is_valid(raw_name)) { def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None val path = Path.explode(raw_name) val path1 = if (path.is_absolute || path.is_current) check(path) else { check(Path.explode("~~/src/Pure") + path) orElse (if (Isabelle_System.getenv("ML_SOURCES") == "") None else check(Path.explode("$ML_SOURCES") + path)) } Some(File.platform_path(path1 getOrElse path)) } else None } else Some(raw_name) } /* theory files */ def load_commands(syntax: Outer_Syntax, name: Document.Node.Name) : () => List[Command_Span.Span] = { val (is_utf8, raw_text) = with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString)) () => { if (syntax.has_load_commands(raw_text)) { val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text)) syntax.parse_spans(text).filter(_.is_load_command(syntax)) } else Nil } } def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name, spans: List[Command_Span.Span]) : List[Path] = { val dir = name.master_dir_path for { span <- spans; file <- 
span.loaded_files(syntax).files } yield (dir + Path.explode(file)).expand } def pure_files(syntax: Outer_Syntax): List[Path] = { val pure_dir = Path.explode("~~/src/Pure") for { (name, theory) <- Thy_Header.ml_roots path = (pure_dir + Path.explode(name)).expand node_name = Document.Node.Name(path.implode, path.dir.implode, theory) file <- loaded_files(syntax, node_name, load_commands(syntax, node_name)()) } yield file } def theory_name(qualifier: String, theory: String): String = if (Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory)) theory else Long_Name.qualify(qualifier, theory) def find_theory_node(theory: String): Option[Document.Node.Name] = { val thy_file = Path.basic(Long_Name.base_name(theory)).thy val session = session_base.theory_qualifier(theory) val dirs = sessions_structure.get(session) match { case Some(info) => info.dirs case None => Nil } dirs.collectFirst({ case dir if (dir + thy_file).is_file => file_node(dir + thy_file, theory = theory) }) } def import_name(qualifier: String, dir: String, s: String): Document.Node.Name = { val theory = theory_name(qualifier, Thy_Header.import_name(s)) def theory_node = file_node(Path.explode(s).thy, dir = dir, theory = theory) if (!Thy_Header.is_base_name(s)) theory_node else if (session_base.loaded_theory(theory)) loaded_theory_node(theory) else { find_theory_node(theory) match { case Some(node_name) => node_name case None => if (Long_Name.is_qualified(s)) loaded_theory_node(theory) else theory_node } } } def import_name(name: Document.Node.Name, s: String): Document.Node.Name = import_name(session_base.theory_qualifier(name), name.master_dir, s) def import_name(info: Sessions.Info, s: String): Document.Node.Name = import_name(info.name, info.dir.implode, s) def find_theory(file: JFile): Option[Document.Node.Name] = { for { qualifier <- session_base.session_directories.get(File.canonical(file).getParentFile) theory_base <- proper_string(Thy_Header.theory_name(file.getName)) theory = theory_name(qualifier, theory_base) theory_node <- find_theory_node(theory) if File.eq(theory_node.path.file, file) } yield theory_node } def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = { val context_session = session_base.theory_qualifier(context_name) val context_dir = try { Some(context_name.master_dir_path) } catch { case ERROR(_) => None } (for { (session, (info, _)) <- sessions_structure.imports_graph.iterator dir <- (if (session == context_session) context_dir.toList else info.dirs).iterator theory <- Thy_Header.try_read_dir(dir).iterator if Completion.completed(s)(theory) } yield { if (session == context_session || session_base.global_theories.isDefinedAt(theory)) theory else Long_Name.qualify(session, theory) }).toList.sorted } def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A = { val path = name.path if (path.is_file) using(Scan.byte_reader(path.file))(f) else if (name.node == name.theory) error("Cannot load theory " + quote(name.theory)) else error ("Cannot load theory file " + path) } def check_thy(node_name: Document.Node.Name, reader: Reader[Char], command: Boolean = true, strict: Boolean = true): Document.Node.Header = { if (node_name.is_theory && reader.source.length > 0) { try { val header = Thy_Header.read(node_name, reader, command = command, strict = strict) val imports = header.imports.map({ case (s, pos) => val name = import_name(node_name, s) if (Sessions.exclude_theory(name.theory_base_name)) error("Bad theory name " + 
quote(name.theory_base_name) + Position.here(pos)) (name, pos) }) Document.Node.Header(imports, header.keywords, header.abbrevs) } catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) } } else Document.Node.no_header } /* special header */ def special_header(name: Document.Node.Name): Option[Document.Node.Header] = { val imports = if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name)) else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP)) else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE)) else Nil if (imports.isEmpty) None else Some(Document.Node.Header(imports.map((_, Position.none)))) } /* blobs */ def undefined_blobs(nodes: Document.Nodes): List[Document.Node.Name] = (for { (node_name, node) <- nodes.iterator if !session_base.loaded_theory(node_name) cmd <- node.load_commands.iterator name <- cmd.blobs_undefined.iterator } yield name).toList /* document changes */ def parse_change( reparse_limit: Int, previous: Document.Version, doc_blobs: Document.Blobs, edits: List[Document.Edit_Text], consolidate: List[Document.Node.Name]): Session.Change = Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate) def commit(change: Session.Change): Unit = {} /* theory and file dependencies */ def dependencies( thys: List[(Document.Node.Name, Position.T)], progress: Progress = new Progress): Dependencies[Unit] = Dependencies.empty[Unit].require_thys((), thys, progress = progress) def session_dependencies(info: Sessions.Info, progress: Progress = new Progress) : Dependencies[Options] = { info.theories.foldLeft(Dependencies.empty[Options]) { case (dependencies, (options, thys)) => dependencies.require_thys(options, for { (thy, pos) <- thys } yield (import_name(info, thy), pos), progress = progress) } } object Dependencies { def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty) private def show_path(names: List[Document.Node.Name]): String = names.map(name => quote(name.theory)).mkString(" via ") private def cycle_msg(names: List[Document.Node.Name]): String = "Cyclic dependency of " + show_path(names) private def required_by(initiators: List[Document.Node.Name]): String = if (initiators.isEmpty) "" else "\n(required by " + show_path(initiators.reverse) + ")" } final class Dependencies[A] private( rev_entries: List[Document.Node.Entry], seen: Map[Document.Node.Name, A]) { private def cons(entry: Document.Node.Entry): Dependencies[A] = new Dependencies[A](entry :: rev_entries, seen) def require_thy(adjunct: A, thy: (Document.Node.Name, Position.T), initiators: List[Document.Node.Name] = Nil, progress: Progress = new Progress): Dependencies[A] = { val (name, pos) = thy def message: String = "The error(s) above occurred for theory " + quote(name.theory) + Dependencies.required_by(initiators) + Position.here(pos) if (seen.isDefinedAt(name)) this else { val dependencies1 = new Dependencies[A](rev_entries, seen + (name -> adjunct)) if (session_base.loaded_theory(name)) dependencies1 else { try { if (initiators.contains(name)) error(Dependencies.cycle_msg(initiators)) progress.expose_interrupt() val header = try { with_thy_reader(name, check_thy(name, _, command = false)).cat_errors(message) } catch { case ERROR(msg) => cat_error(msg, message) } val entry = Document.Node.Entry(name, header) dependencies1.require_thys(adjunct, header.imports_pos, initiators = name :: initiators, progress = progress).cons(entry) } catch { case e: Throwable 
=> dependencies1.cons(Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e)))) } } } } def require_thys(adjunct: A, thys: List[(Document.Node.Name, Position.T)], progress: Progress = new Progress, initiators: List[Document.Node.Name] = Nil): Dependencies[A] = thys.foldLeft(this)(_.require_thy(adjunct, _, progress = progress, initiators = initiators)) def entries: List[Document.Node.Entry] = rev_entries.reverse def theories: List[Document.Node.Name] = entries.map(_.name) def theories_adjunct: List[(Document.Node.Name, A)] = theories.map(name => (name, seen(name))) def errors: List[String] = entries.flatMap(_.header.errors) def check_errors: Dependencies[A] = errors match { case Nil => this case errs => error(cat_lines(errs)) } lazy val theory_graph: Document.Node.Name.Graph[Unit] = { val regular = theories.toSet val irregular = (for { entry <- entries.iterator imp <- entry.header.imports if !regular(imp) } yield imp).toSet Document.Node.Name.make_graph( irregular.toList.map(name => ((name, ()), Nil)) ::: entries.map(entry => ((entry.name, ()), entry.header.imports))) } lazy val loaded_theories: Graph[String, Outer_Syntax] = entries.foldLeft(session_base.loaded_theories) { case (graph, entry) => val name = entry.name.theory val imports = entry.header.imports.map(_.theory) val graph1 = (name :: imports).foldLeft(graph)(_.default_node(_, Outer_Syntax.empty)) val graph2 = imports.foldLeft(graph1)(_.add_edge(_, name)) val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node) val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header graph2.map_node(name, _ => syntax) } def get_syntax(name: Document.Node.Name): Outer_Syntax = loaded_theories.get_node(name.theory) def load_commands: List[(Document.Node.Name, List[Command_Span.Span])] = theories.zip( Par_List.map((e: () => List[Command_Span.Span]) => e(), theories.map(name => resources.load_commands(get_syntax(name), name)))) .filter(p => p._2.nonEmpty) def loaded_files(name: Document.Node.Name, spans: List[Command_Span.Span]) : (String, List[Path]) = { val theory = name.theory val syntax = get_syntax(name) val files1 = resources.loaded_files(syntax, name, spans) val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil (theory, files1 ::: files2) } def loaded_files: List[Path] = for { (name, spans) <- load_commands file <- loaded_files(name, spans)._2 } yield file def imported_files: List[Path] = { val base_theories = loaded_theories.all_preds(theories.map(_.theory)). filter(session_base.loaded_theories.defined) base_theories.map(theory => session_base.known_theories(theory).name.path) ::: base_theories.flatMap(session_base.known_loaded_files.withDefaultValue(Nil)) } lazy val overall_syntax: Outer_Syntax = Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node)) override def toString: String = entries.toString } } diff --git a/src/Pure/PIDE/session.scala b/src/Pure/PIDE/session.scala --- a/src/Pure/PIDE/session.scala +++ b/src/Pure/PIDE/session.scala @@ -1,774 +1,784 @@ /* Title: Pure/PIDE/session.scala Author: Makarius Options: :folding=explicit: PIDE editor session, potentially with running prover process. 
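Aside (illustration, not part of the patch): this change splits generic protocol commands into a string-list variant and a raw-bytes variant on the Session API defined below. A hypothetical client call could look as follows; the command names are made up and Bytes(...) is assumed to be the usual Isabelle/Scala byte-string constructor:

  // assuming `session: Session` is connected and ready
  session.protocol_command("Example.ping")                          // varargs convenience
  session.protocol_command_args("Example.echo", List("a", "b"))     // explicit string list
  session.protocol_command_raw("Example.blob", List(Bytes("...")))  // uninterpreted bytes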
*/ package isabelle import scala.collection.immutable.Queue import scala.collection.mutable import scala.annotation.tailrec object Session { /* outlets */ object Consumer { def apply[A](name: String)(consume: A => Unit): Consumer[A] = new Consumer[A](name, consume) } final class Consumer[-A] private(val name: String, val consume: A => Unit) class Outlet[A](dispatcher: Consumer_Thread[() => Unit]) { private val consumers = Synchronized[List[Consumer[A]]](Nil) def += (c: Consumer[A]): Unit = consumers.change(Library.update(c)) def -= (c: Consumer[A]): Unit = consumers.change(Library.remove(c)) def post(a: A): Unit = { for (c <- consumers.value.iterator) { dispatcher.send(() => try { c.consume(a) } catch { case exn: Throwable => Output.error_message("Consumer failed: " + quote(c.name) + "\n" + Exn.message(exn)) }) } } } /* change */ sealed case class Change( previous: Document.Version, syntax_changed: List[Document.Node.Name], deps_changed: Boolean, doc_edits: List[Document.Edit_Command], consolidate: List[Document.Node.Name], version: Document.Version) case object Change_Flush /* events */ //{{{ case class Command_Timing(props: Properties.T) case class Theory_Timing(props: Properties.T) case class Runtime_Statistics(props: Properties.T) case class Task_Statistics(props: Properties.T) case class Global_Options(options: Options) case object Caret_Focus case class Raw_Edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]) case class Dialog_Result(id: Document_ID.Generic, serial: Long, result: String) case class Build_Theories(id: String, master_dir: Path, theories: List[(Options, List[Path])]) case class Commands_Changed( assignment: Boolean, nodes: Set[Document.Node.Name], commands: Set[Command]) sealed abstract class Phase { def print: String = this match { case Terminated(result) => if (result.ok) "finished" else "failed" case _ => Word.lowercase(this.toString) } } case object Inactive extends Phase // stable case object Startup extends Phase // transient case object Ready extends Phase // metastable case object Shutdown extends Phase // transient case class Terminated(result: Process_Result) extends Phase // stable //}}} /* syslog */ private[Session] class Syslog(limit: Int) { private var queue = Queue.empty[XML.Elem] private var length = 0 def += (msg: XML.Elem): Unit = synchronized { queue = queue.enqueue(msg) length += 1 if (length > limit) queue = queue.dequeue._2 } def content: String = synchronized { cat_lines(queue.iterator.map(XML.content)) + (if (length > limit) "\n(A total of " + length + " messages...)" else "") } } /* protocol handlers */ type Protocol_Function = Prover.Protocol_Output => Boolean abstract class Protocol_Handler extends Isabelle_System.Service { def init(session: Session): Unit = {} def exit(): Unit = {} def functions: List[(String, Protocol_Function)] = Nil def prover_options(options: Options): Options = options } } class Session(_session_options: => Options, val resources: Resources) extends Document.Session { session => val cache: XML.Cache = XML.Cache.make() def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = Command.Blobs_Info.none /* global flags */ @volatile var timing: Boolean = false @volatile var verbose: Boolean = false /* dynamic session options */ def session_options: Options = _session_options def output_delay: Time = session_options.seconds("editor_output_delay") def consolidate_delay: Time = session_options.seconds("editor_consolidate_delay") def prune_delay: Time = session_options.seconds("editor_prune_delay") def 
prune_size: Int = session_options.int("editor_prune_size") def syslog_limit: Int = session_options.int("editor_syslog_limit") def reparse_limit: Int = session_options.int("editor_reparse_limit") /* dispatcher */ private val dispatcher = Consumer_Thread.fork[() => Unit]("Session.dispatcher", daemon = true) { case e => e(); true } def assert_dispatcher[A](body: => A): A = { assert(dispatcher.check_thread) body } def require_dispatcher[A](body: => A): A = { require(dispatcher.check_thread, "not on dispatcher thread") body } def send_dispatcher(body: => Unit): Unit = { if (dispatcher.check_thread) body else dispatcher.send(() => body) } def send_wait_dispatcher(body: => Unit): Unit = { if (dispatcher.check_thread) body else dispatcher.send_wait(() => body) } /* outlets */ val finished_theories = new Session.Outlet[Document.Snapshot](dispatcher) val command_timings = new Session.Outlet[Session.Command_Timing](dispatcher) val theory_timings = new Session.Outlet[Session.Theory_Timing](dispatcher) val runtime_statistics = new Session.Outlet[Session.Runtime_Statistics](dispatcher) val task_statistics = new Session.Outlet[Session.Task_Statistics](dispatcher) val global_options = new Session.Outlet[Session.Global_Options](dispatcher) val caret_focus = new Session.Outlet[Session.Caret_Focus.type](dispatcher) val raw_edits = new Session.Outlet[Session.Raw_Edits](dispatcher) val commands_changed = new Session.Outlet[Session.Commands_Changed](dispatcher) val phase_changed = new Session.Outlet[Session.Phase](dispatcher) val syslog_messages = new Session.Outlet[Prover.Output](dispatcher) val raw_output_messages = new Session.Outlet[Prover.Output](dispatcher) val trace_events = new Session.Outlet[Simplifier_Trace.Event.type](dispatcher) val debugger_updates = new Session.Outlet[Debugger.Update.type](dispatcher) val all_messages = new Session.Outlet[Prover.Message](dispatcher) // potential bottle-neck! 
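A minimal usage sketch (not part of the patch) of how the outlets declared above are consumed: the consumer name "commands_example" is arbitrary, `session` stands for a Session instance, and consumers run on the session dispatcher thread.

  val example_consumer =
    Session.Consumer[Session.Commands_Changed]("commands_example") { changed =>
      if (changed.assignment || changed.commands.nonEmpty)
        println("changed nodes: " + changed.nodes.size)
    }

  session.commands_changed += example_consumer    // register with the outlet
  session.commands_changed -= example_consumer    // unregister when no longer needed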
/** main protocol manager **/ /* internal messages */ private case class Start(start_prover: Prover.Receiver => Prover) private case object Stop private case class Get_State(promise: Promise[Document.State]) private case class Cancel_Exec(exec_id: Document_ID.Exec) - private case class Protocol_Command(name: String, args: List[String]) + private case class Protocol_Command_Raw(name: String, args: List[Bytes]) + private case class Protocol_Command_Args(name: String, args: List[String]) private case class Update_Options(options: Options) private case object Consolidate_Execution private case object Prune_History /* phase */ private def post_phase(new_phase: Session.Phase): Session.Phase = { phase_changed.post(new_phase) new_phase } private val _phase = Synchronized[Session.Phase](Session.Inactive) private def phase_=(new_phase: Session.Phase): Unit = _phase.change(_ => post_phase(new_phase)) def phase: Session.Phase = _phase.value def is_ready: Boolean = phase == Session.Ready /* syslog */ private val syslog = new Session.Syslog(syslog_limit) def syslog_content(): String = syslog.content /* pipelined change parsing */ private case class Text_Edits( previous: Future[Document.Version], doc_blobs: Document.Blobs, text_edits: List[Document.Edit_Text], consolidate: List[Document.Node.Name], version_result: Promise[Document.Version]) private val change_parser = Consumer_Thread.fork[Text_Edits]("change_parser", daemon = true) { case Text_Edits(previous, doc_blobs, text_edits, consolidate, version_result) => val prev = previous.get_finished val change = Timing.timeit("parse_change", timing) { resources.parse_change(reparse_limit, prev, doc_blobs, text_edits, consolidate) } version_result.fulfill(change.version) manager.send(change) true } /* buffered changes */ private object change_buffer { private var assignment: Boolean = false private var nodes: Set[Document.Node.Name] = Set.empty private var commands: Set[Command] = Set.empty def flush(): Unit = synchronized { if (assignment || nodes.nonEmpty || commands.nonEmpty) commands_changed.post(Session.Commands_Changed(assignment, nodes, commands)) if (nodes.nonEmpty) consolidation.update(nodes) assignment = false nodes = Set.empty commands = Set.empty } private val delay_flush = Delay.first(output_delay) { flush() } def invoke(assign: Boolean, edited_nodes: List[Document.Node.Name], cmds: List[Command]): Unit = synchronized { assignment |= assign for (node <- edited_nodes) { nodes += node } for (command <- cmds) { nodes += command.node_name command.blobs_names.foreach(nodes += _) commands += command } delay_flush.invoke() } def shutdown(): Unit = { delay_flush.revoke() flush() } } /* postponed changes */ private object postponed_changes { private var postponed: List[Session.Change] = Nil def store(change: Session.Change): Unit = synchronized { postponed ::= change } def flush(state: Document.State): List[Session.Change] = synchronized { val (assigned, unassigned) = postponed.partition(change => state.is_assigned(change.previous)) postponed = unassigned assigned.reverse } } /* node consolidation */ private object consolidation { private val delay = Delay.first(consolidate_delay) { manager.send(Consolidate_Execution) } private val init_state: Option[Set[Document.Node.Name]] = Some(Set.empty) private val state = Synchronized(init_state) def exit(): Unit = { delay.revoke() state.change(_ => None) } def update(new_nodes: Set[Document.Node.Name] = Set.empty): Unit = { val active = state.change_result(st => (st.isDefined, st.map(nodes => if (nodes.isEmpty) 
new_nodes else nodes ++ new_nodes))) if (active) delay.invoke() } def flush(): Set[Document.Node.Name] = state.change_result(st => if (st.isDefined) (st.get, init_state) else (Set.empty, None)) } /* prover process */ private object prover { private val variable = Synchronized[Option[Prover]](None) def defined: Boolean = variable.value.isDefined def get: Prover = variable.value.get def set(p: Prover): Unit = variable.change(_ => Some(p)) def reset: Unit = variable.change(_ => None) def await_reset(): Unit = variable.guarded_access({ case None => Some((), None) case _ => None }) } /* file formats and protocol handlers */ private lazy val file_formats: File_Format.Session = File_Format.registry.start_session(session) private val protocol_handlers = Protocol_Handlers.init(session) def get_protocol_handler[C <: Session.Protocol_Handler](c: Class[C]): Option[C] = protocol_handlers.get(c.getName) match { case Some(h) if Library.is_subclass(h.getClass, c) => Some(h.asInstanceOf[C]) case _ => None } def init_protocol_handler(handler: Session.Protocol_Handler): Unit = protocol_handlers.init(handler) def prover_options(options: Options): Options = protocol_handlers.prover_options(file_formats.prover_options(options)) /* debugger */ private val debugger_handler = new Debugger.Handler(this) init_protocol_handler(debugger_handler) def debugger: Debugger = debugger_handler.debugger /* manager thread */ private val delay_prune = Delay.first(prune_delay) { manager.send(Prune_History) } private val manager: Consumer_Thread[Any] = { /* global state */ val global_state = Synchronized(Document.State.init) /* raw edits */ def handle_raw_edits( doc_blobs: Document.Blobs = Document.Blobs.empty, edits: List[Document.Edit_Text] = Nil, consolidate: List[Document.Node.Name] = Nil): Unit = //{{{ { require(prover.defined, "prover process not defined (handle_raw_edits)") if (edits.nonEmpty) prover.get.discontinue_execution() val previous = global_state.value.history.tip.version val version = Future.promise[Document.Version] global_state.change(_.continue_history(previous, edits, version)) raw_edits.post(Session.Raw_Edits(doc_blobs, edits)) change_parser.send(Text_Edits(previous, doc_blobs, edits, consolidate, version)) } //}}} /* resulting changes */ def handle_change(change: Session.Change): Unit = //{{{ { require(prover.defined, "prover process not defined (handle_change)") // define commands { val id_commands = new mutable.ListBuffer[Command] def id_command(command: Command): Unit = { for { (name, digest) <- command.blobs_defined if !global_state.value.defined_blob(digest) } { change.version.nodes(name).get_blob match { case Some(blob) => global_state.change(_.define_blob(digest)) prover.get.define_blob(digest, blob.bytes) case None => Output.error_message("Missing blob " + quote(name.toString)) } } if (!global_state.value.defined_command(command.id)) { global_state.change(_.define_command(command)) id_commands += command } } for { (_, edit) <- change.doc_edits } { edit.foreach({ case (c1, c2) => c1.foreach(id_command); c2.foreach(id_command) }) } if (id_commands.nonEmpty) { prover.get.define_commands_bulk(resources, id_commands.toList) } } val assignment = global_state.value.the_assignment(change.previous).check_finished global_state.change(_.define_version(change.version, assignment)) prover.get.update(change.previous.id, change.version.id, change.doc_edits, change.consolidate) resources.commit(change) } //}}} /* prover output */ def handle_output(output: Prover.Output): Unit = //{{{ { def bad_output(): Unit = { if 
(verbose) Output.warning("Ignoring bad prover output: " + output.message.toString) } def change_command(f: Document.State => (Command.State, Document.State)): Unit = { try { val st = global_state.change_result(f) if (!st.command.span.is_theory) { change_buffer.invoke(false, Nil, List(st.command)) } } catch { case _: Document.State.Fail => bad_output() } } output match { case msg: Prover.Protocol_Output => val handled = protocol_handlers.invoke(msg) if (!handled) { msg.properties match { case Protocol.Command_Timing(props, state_id, timing) if prover.defined => command_timings.post(Session.Command_Timing(props)) val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil))) change_command(_.accumulate(state_id, cache.elem(message), cache)) case Markup.Theory_Timing(props) => theory_timings.post(Session.Theory_Timing(props)) case Markup.Task_Statistics(props) => task_statistics.post(Session.Task_Statistics(props)) case Protocol.Export(args) if args.id.isDefined && Value.Long.unapply(args.id.get).isDefined => val id = Value.Long.unapply(args.id.get).get - val export = Export.make_entry("", args, msg.bytes, cache) + val export = Export.make_entry("", args, msg.chunk, cache) change_command(_.add_export(id, (args.serial, export))) case Protocol.Loading_Theory(node_name, id) => val blobs_info = build_blobs_info(node_name) try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) } catch { case _: Document.State.Fail => bad_output() } case List(Markup.Commands_Accepted.PROPERTY) => msg.text match { case Protocol.Commands_Accepted(ids) => ids.foreach(id => change_command(_.accumulate(id, Protocol.Commands_Accepted.message, cache))) case _ => bad_output() } case List(Markup.Assign_Update.PROPERTY) => msg.text match { case Protocol.Assign_Update(id, edited, update) => try { val (edited_nodes, cmds) = global_state.change_result(_.assign(id, edited, update)) change_buffer.invoke(true, edited_nodes, cmds) manager.send(Session.Change_Flush) } catch { case _: Document.State.Fail => bad_output() } case _ => bad_output() } delay_prune.invoke() case List(Markup.Removed_Versions.PROPERTY) => msg.text match { case Protocol.Removed(removed) => try { global_state.change(_.removed_versions(removed)) manager.send(Session.Change_Flush) } catch { case _: Document.State.Fail => bad_output() } case _ => bad_output() } case _ => bad_output() } } case _ => output.properties match { case Position.Id(state_id) => change_command(_.accumulate(state_id, output.message, cache)) case _ if output.is_init => val init_ok = try { Isabelle_System.make_services(classOf[Session.Protocol_Handler]) .foreach(init_protocol_handler) true } catch { case exn: Throwable => prover.get.protocol_command("Prover.stop", "1", Exn.message(exn)) false } if (init_ok) { prover.get.options(prover_options(session_options)) prover.get.init_session(resources) phase = Session.Ready debugger.ready() } case Markup.Process_Result(result) if output.is_exit => if (prover.defined) protocol_handlers.exit() for (id <- global_state.value.theories.keys) { val snapshot = global_state.change_result(_.end_theory(id)) finished_theories.post(snapshot) } file_formats.stop_session phase = Session.Terminated(result) prover.reset case _ => raw_output_messages.post(output) } } } //}}} /* main thread */ Consumer_Thread.fork[Any]("Session.manager", daemon = true) { case arg: Any => //{{{ arg match { case output: Prover.Output => if (output.is_syslog) { syslog += output.message syslog_messages.post(output) } if (output.is_stdout || 
output.is_stderr) raw_output_messages.post(output) else handle_output(output) all_messages.post(output) case input: Prover.Input => all_messages.post(input) case Start(start_prover) if !prover.defined => prover.set(start_prover(manager.send(_))) case Stop => consolidation.exit() delay_prune.revoke() if (prover.defined) { global_state.change(_ => Document.State.init) prover.get.terminate() } case Get_State(promise) => promise.fulfill(global_state.value) case Consolidate_Execution => if (prover.defined) { val state = global_state.value state.stable_tip_version match { case None => consolidation.update() case Some(version) => val consolidate = consolidation.flush().iterator.filter(name => !resources.session_base.loaded_theory(name) && !state.node_consolidated(version, name) && state.node_maybe_consolidated(version, name)).toList if (consolidate.nonEmpty) handle_raw_edits(consolidate = consolidate) } } case Prune_History => if (prover.defined) { val old_versions = global_state.change_result(_.remove_versions(prune_size)) if (old_versions.nonEmpty) prover.get.remove_versions(old_versions) } case Update_Options(options) => if (prover.defined && is_ready) { prover.get.options(prover_options(options)) handle_raw_edits() } global_options.post(Session.Global_Options(options)) case Cancel_Exec(exec_id) if prover.defined => prover.get.cancel_exec(exec_id) case Session.Raw_Edits(doc_blobs, edits) if prover.defined => handle_raw_edits(doc_blobs = doc_blobs, edits = edits) case Session.Dialog_Result(id, serial, result) if prover.defined => prover.get.dialog_result(serial, result) handle_output(new Prover.Output(Protocol.Dialog_Result(id, serial, result))) - case Protocol_Command(name, args) if prover.defined => + case Protocol_Command_Raw(name, args) if prover.defined => + prover.get.protocol_command_raw(name, args) + + case Protocol_Command_Args(name, args) if prover.defined => prover.get.protocol_command_args(name, args) case change: Session.Change if prover.defined => val state = global_state.value if (!state.removing_versions && state.is_assigned(change.previous)) handle_change(change) else postponed_changes.store(change) case Session.Change_Flush if prover.defined => val state = global_state.value if (!state.removing_versions) postponed_changes.flush(state).foreach(handle_change) case bad => if (verbose) Output.warning("Ignoring bad message: " + bad.toString) } true //}}} } } /* main operations */ def get_state(): Document.State = { if (manager.is_active) { val promise = Future.promise[Document.State] manager.send_wait(Get_State(promise)) promise.join } else Document.State.init } def snapshot(name: Document.Node.Name = Document.Node.Name.empty, pending_edits: List[Text.Edit] = Nil): Document.Snapshot = get_state().snapshot(name, pending_edits) def recent_syntax(name: Document.Node.Name): Outer_Syntax = get_state().recent_finished.version.get_finished.nodes(name).syntax getOrElse resources.session_base.overall_syntax @tailrec final def await_stable_snapshot(): Document.Snapshot = { val snapshot = this.snapshot() if (snapshot.is_outdated) { output_delay.sleep await_stable_snapshot() } else snapshot } def start(start_prover: Prover.Receiver => Prover): Unit = { file_formats _phase.change( { case Session.Inactive => manager.send(Start(start_prover)) post_phase(Session.Startup) case phase => error("Cannot start prover in phase " + quote(phase.print)) }) } def stop(): Process_Result = { val was_ready = _phase.guarded_access( { case Session.Startup | Session.Shutdown => None case Session.Terminated(_) => 
Some((false, phase)) case Session.Inactive => Some((false, post_phase(Session.Terminated(Process_Result(0))))) case Session.Ready => Some((true, post_phase(Session.Shutdown))) }) if (was_ready) manager.send(Stop) prover.await_reset() change_parser.shutdown() change_buffer.shutdown() manager.shutdown() dispatcher.shutdown() phase match { case Session.Terminated(result) => result case phase => error("Bad session phase after shutdown: " + quote(phase.print)) } } + def protocol_command_raw(name: String, args: List[Bytes]): Unit = + manager.send(Protocol_Command_Raw(name, args)) + + def protocol_command_args(name: String, args: List[String]): Unit = + manager.send(Protocol_Command_Args(name, args)) + def protocol_command(name: String, args: String*): Unit = - manager.send(Protocol_Command(name, args.toList)) + protocol_command_args(name, args.toList) def cancel_exec(exec_id: Document_ID.Exec): Unit = manager.send(Cancel_Exec(exec_id)) def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]): Unit = if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits)) def update_options(options: Options): Unit = manager.send_wait(Update_Options(options)) def dialog_result(id: Document_ID.Generic, serial: Long, result: String): Unit = manager.send(Session.Dialog_Result(id, serial, result)) } diff --git a/src/Pure/PIDE/yxml.ML b/src/Pure/PIDE/yxml.ML --- a/src/Pure/PIDE/yxml.ML +++ b/src/Pure/PIDE/yxml.ML @@ -1,165 +1,175 @@ (* Title: Pure/PIDE/yxml.ML Author: Makarius Efficient text representation of XML trees using extra characters X and Y -- no escaping, may nest marked text verbatim. Suitable for direct inlining into plain text. Markup ...body... is encoded as: X Y name Y att=val ... X ... body ... X Y X *) signature YXML = sig val X: Symbol.symbol val Y: Symbol.symbol val embed_controls: string -> string val detect: string -> bool val traverse: (string -> 'a -> 'a) -> XML.tree -> 'a -> 'a + val tree_size: XML.tree -> int + val body_size: XML.body -> int val string_of_body: XML.body -> string val string_of: XML.tree -> string val write_body: Path.T -> XML.body -> unit val output_markup: Markup.T -> string * string val output_markup_elem: Markup.T -> (string * string) * string val parse_body: string -> XML.body val parse: string -> XML.tree val content_of: string -> string val is_wellformed: string -> bool end; structure YXML: YXML = struct (** string representation **) (* idempotent recoding of certain low ASCII control characters *) fun pseudo_utf8 c = if Symbol.is_ascii_control c then chr 192 ^ chr (128 + ord c) else c; fun embed_controls str = if exists_string Symbol.is_ascii_control str then translate_string pseudo_utf8 str else str; (* markers *) val X_char = Char.chr 5; val Y_char = Char.chr 6; val X = String.str X_char; val Y = String.str Y_char; val XY = X ^ Y; val XYX = XY ^ X; fun detect s = Char.contains s X_char orelse Char.contains s Y_char; -(* output *) +(* traverse *) fun traverse string = let fun attrib (a, x) = string Y #> string a #> string "=" #> string x; - fun tree (XML.Elem ((name, atts), ts)) = - string XY #> string name #> fold attrib atts #> string X #> - fold tree ts #> - string XYX + fun tree (XML.Elem (markup as (name, atts), ts)) = + if Markup.is_empty markup then fold tree ts + else + string XY #> string name #> fold attrib atts #> string X #> + fold tree ts #> + string XYX | tree (XML.Text s) = string s; in tree end; +fun tree_size tree = traverse (Integer.add o size) tree 0; +fun body_size body = fold (Integer.add o tree_size) body 0; + + +(* 
output *) + fun string_of_body body = Buffer.empty |> fold (traverse Buffer.add) body |> Buffer.content; val string_of = string_of_body o single; fun write_body path body = path |> File.open_output (fn file => fold (traverse (fn s => fn () => File.output file s)) body ()); (* markup elements *) val Z = chr 0; val Z_text = [XML.Text Z]; fun output_markup (markup as (name, atts)) = if Markup.is_empty markup then Markup.no_output else (XY ^ name ^ implode (map (fn (a, x) => Y ^ a ^ "=" ^ x) atts) ^ X, XYX); fun output_markup_elem markup = let val [bg1, bg2, en] = space_explode Z (string_of (XML.wrap_elem ((markup, Z_text), Z_text))) in ((bg1, bg2), en) end; (** efficient YXML parsing **) local (* splitting *) val split_string = Substring.full #> Substring.tokens (fn c => c = X_char) #> map (Substring.fields (fn c => c = Y_char) #> map Substring.string); (* structural errors *) fun err msg = raise Fail ("Malformed YXML: " ^ msg); fun err_attribute () = err "bad attribute"; fun err_element () = err "bad element"; fun err_unbalanced "" = err "unbalanced element" | err_unbalanced name = err ("unbalanced element " ^ quote name); (* stack operations *) fun add x ((elem, body) :: pending) = (elem, x :: body) :: pending; fun push "" _ _ = err_element () | push name atts pending = ((name, atts), []) :: pending; fun pop ((("", _), _) :: _) = err_unbalanced "" | pop ((markup, body) :: pending) = add (XML.Elem (markup, rev body)) pending; (* parsing *) fun parse_attrib s = (case first_field "=" s of NONE => err_attribute () | SOME ("", _) => err_attribute () | SOME att => att); fun parse_chunk ["", ""] = pop | parse_chunk ("" :: name :: atts) = push name (map parse_attrib atts) | parse_chunk txts = fold (add o XML.Text) txts; in fun parse_body source = (case fold parse_chunk (split_string source) [(("", []), [])] of [(("", _), result)] => rev result | ((name, _), _) :: _ => err_unbalanced name); fun parse source = (case parse_body source of [result] => result | [] => XML.Text "" | _ => err "multiple results"); end; val content_of = parse_body #> XML.content_of; fun is_wellformed s = string_of_body (parse_body s) = s handle Fail _ => false; end; diff --git a/src/Pure/PIDE/yxml.scala b/src/Pure/PIDE/yxml.scala --- a/src/Pure/PIDE/yxml.scala +++ b/src/Pure/PIDE/yxml.scala @@ -1,150 +1,153 @@ /* Title: Pure/PIDE/yxml.scala Author: Makarius Efficient text representation of XML trees. Suitable for direct inlining into plain text. 
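For illustration only (not part of the patch): the X/Y encoding described above, exercised as a self-contained sketch in plain Scala; the object name and the sample values are made up.

  object YXML_Example {
    // the marker characters, as in YXML.X and YXML.Y below
    val X = '\u0005'; val Y = '\u0006'

    // one element without nesting: X Y name Y att=val ... X body X Y X
    def encode(name: String, atts: List[(String, String)], body: String): String =
      s"$X$Y$name" + atts.map({ case (a, v) => s"$Y$a=$v" }).mkString + s"$X$body$X$Y$X"

    def main(args: Array[String]): Unit =
      assert(encode("a", List("b" -> "c"), "body") ==
        "\u0005\u0006a\u0006b=c\u0005body\u0005\u0006\u0005")
  }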
*/ package isabelle import scala.collection.mutable object YXML { /* chunk markers */ val X = '\u0005' val Y = '\u0006' val is_X: Char => Boolean = _ == X val is_Y: Char => Boolean = _ == Y val X_string: String = X.toString val Y_string: String = Y.toString val XY_string: String = X_string + Y_string val XYX_string: String = XY_string + X_string def detect(s: String): Boolean = s.exists(c => c == X || c == Y) def detect_elem(s: String): Boolean = s.startsWith(XY_string) /* string representation */ def traversal(string: String => Unit, body: XML.Body): Unit = { def tree(t: XML.Tree): Unit = t match { - case XML.Elem(Markup(name, atts), ts) => - string(XY_string) - string(name) - for ((a, x) <- atts) { string(Y_string); string(a); string("="); string(x) } - string(X_string) - ts.foreach(tree) - string(XYX_string) + case XML.Elem(markup @ Markup(name, atts), ts) => + if (markup.is_empty) ts.foreach(tree) + else { + string(XY_string) + string(name) + for ((a, x) <- atts) { string(Y_string); string(a); string("="); string(x) } + string(X_string) + ts.foreach(tree) + string(XYX_string) + } case XML.Text(text) => string(text) } body.foreach(tree) } def string_of_body(body: XML.Body): String = { val s = new StringBuilder traversal(str => s ++= str, body) s.toString } def string_of_tree(tree: XML.Tree): String = string_of_body(List(tree)) /* parsing */ private def err(msg: String) = error("Malformed YXML: " + msg) private def err_attribute() = err("bad attribute") private def err_element() = err("bad element") private def err_unbalanced(name: String) = if (name == "") err("unbalanced element") else err("unbalanced element " + quote(name)) private def parse_attrib(source: CharSequence): (String, String) = { val s = source.toString val i = s.indexOf('=') if (i <= 0) err_attribute() (s.substring(0, i), s.substring(i + 1)) } def parse_body(source: CharSequence, cache: XML.Cache = XML.Cache.none): XML.Body = { /* stack operations */ def buffer(): mutable.ListBuffer[XML.Tree] = new mutable.ListBuffer[XML.Tree] var stack: List[(Markup, mutable.ListBuffer[XML.Tree])] = List((Markup.Empty, buffer())) def add(x: XML.Tree): Unit = (stack: @unchecked) match { case (_, body) :: _ => body += x } def push(name: String, atts: XML.Attributes): Unit = if (name == "") err_element() else stack = (cache.markup(Markup(name, atts)), buffer()) :: stack def pop(): Unit = (stack: @unchecked) match { case (Markup.Empty, _) :: _ => err_unbalanced("") case (markup, body) :: pending => stack = pending add(cache.tree0(XML.Elem(markup, body.toList))) } /* parse chunks */ for (chunk <- Library.separated_chunks(is_X, source) if chunk.length != 0) { if (chunk.length == 1 && chunk.charAt(0) == Y) pop() else { Library.separated_chunks(is_Y, chunk).toList match { case ch :: name :: atts if ch.length == 0 => push(name.toString, atts.map(parse_attrib)) case txts => for (txt <- txts) add(cache.tree0(XML.Text(cache.string(txt.toString)))) } } } (stack: @unchecked) match { case List((Markup.Empty, body)) => body.toList case (Markup(name, _), _) :: _ => err_unbalanced(name) } } def parse(source: CharSequence, cache: XML.Cache = XML.Cache.none): XML.Tree = parse_body(source, cache = cache) match { case List(result) => result case Nil => XML.no_text case _ => err("multiple XML trees") } def parse_elem(source: CharSequence, cache: XML.Cache = XML.Cache.none): XML.Tree = parse_body(source, cache = cache) match { case List(elem: XML.Elem) => elem case _ => err("single XML element expected") } /* failsafe parsing */ private def 
markup_broken(source: CharSequence) = XML.Elem(Markup.Broken, List(XML.Text(source.toString))) def parse_body_failsafe(source: CharSequence, cache: XML.Cache = XML.Cache.none): XML.Body = { try { parse_body(source, cache = cache) } catch { case ERROR(_) => List(markup_broken(source)) } } def parse_failsafe(source: CharSequence, cache: XML.Cache = XML.Cache.none): XML.Tree = { try { parse(source, cache = cache) } catch { case ERROR(_) => markup_broken(source) } } } diff --git a/src/Pure/ROOT.ML b/src/Pure/ROOT.ML --- a/src/Pure/ROOT.ML +++ b/src/Pure/ROOT.ML @@ -1,357 +1,357 @@ (* Title: Pure/ROOT.ML Author: Makarius Main entry point for the Isabelle/Pure bootstrap process. Note: When this file is open in the Prover IDE, the ML files of Isabelle/Pure can be explored interactively. This is a separate copy of Pure within Pure: it does not affect the running logic session. *) chapter "Isabelle/Pure bootstrap"; ML_file "ML/ml_name_space.ML"; section "Bootstrap phase 0: Poly/ML setup"; ML_file "ML/ml_init.ML"; ML_file "ML/ml_system.ML"; ML_file "General/basics.ML"; ML_file "General/symbol_explode.ML"; ML_file "Concurrent/multithreading.ML"; ML_file "Concurrent/unsynchronized.ML"; ML_file "Concurrent/synchronized.ML"; ML_file "Concurrent/counter.ML"; ML_file "ML/ml_heap.ML"; ML_file "ML/ml_profiling.ML"; ML_file "ML/ml_print_depth0.ML"; ML_file "ML/ml_pretty.ML"; ML_file "ML/ml_compiler0.ML"; section "Bootstrap phase 1: towards ML within position context"; subsection "Library of general tools"; ML_file "library.ML"; ML_file "General/print_mode.ML"; ML_file "General/alist.ML"; ML_file "General/table.ML"; ML_file "General/random.ML"; ML_file "General/value.ML"; ML_file "General/properties.ML"; ML_file "General/output.ML"; ML_file "PIDE/markup.ML"; ML_file "General/utf8.ML"; ML_file "General/scan.ML"; ML_file "General/source.ML"; ML_file "General/symbol.ML"; ML_file "General/position.ML"; ML_file "General/symbol_pos.ML"; ML_file "General/input.ML"; ML_file "General/comment.ML"; ML_file "General/antiquote.ML"; ML_file "ML/ml_lex.ML"; ML_file "ML/ml_compiler1.ML"; section "Bootstrap phase 2: towards ML within Isar context"; subsection "Library of general tools"; ML_file "General/integer.ML"; ML_file "General/stack.ML"; ML_file "General/queue.ML"; ML_file "General/heap.ML"; ML_file "General/same.ML"; ML_file "General/ord_list.ML"; ML_file "General/balanced_tree.ML"; ML_file "General/linear_set.ML"; ML_file "General/buffer.ML"; ML_file "General/pretty.ML"; ML_file "General/rat.ML"; ML_file "PIDE/xml.ML"; ML_file "General/path.ML"; ML_file "General/url.ML"; ML_file "System/bash.ML"; ML_file "General/file.ML"; ML_file "General/long_name.ML"; ML_file "General/binding.ML"; ML_file "General/socket_io.ML"; ML_file "General/seq.ML"; ML_file "General/time.ML"; ML_file "General/timing.ML"; ML_file "General/sha1.ML"; +ML_file "PIDE/yxml.ML"; ML_file "PIDE/byte_message.ML"; -ML_file "PIDE/yxml.ML"; ML_file "PIDE/protocol_message.ML"; ML_file "PIDE/document_id.ML"; ML_file "General/change_table.ML"; ML_file "General/graph.ML"; ML_file "System/options.ML"; subsection "Fundamental structures"; ML_file "name.ML"; ML_file "term.ML"; ML_file "context.ML"; ML_file "config.ML"; ML_file "context_position.ML"; ML_file "soft_type_system.ML"; subsection "Concurrency within the ML runtime"; ML_file "ML/exn_properties.ML"; ML_file_no_debug "ML/exn_debugger.ML"; ML_file "Concurrent/thread_data_virtual.ML"; ML_file "Concurrent/isabelle_thread.ML"; ML_file "Concurrent/single_assignment.ML"; ML_file "Concurrent/par_exn.ML"; ML_file 
"Concurrent/task_queue.ML"; ML_file "Concurrent/future.ML"; ML_file "Concurrent/event_timer.ML"; ML_file "Concurrent/timeout.ML"; ML_file "Concurrent/lazy.ML"; ML_file "Concurrent/par_list.ML"; ML_file "Concurrent/mailbox.ML"; ML_file "Concurrent/cache.ML"; ML_file "PIDE/active.ML"; ML_file "Thy/export.ML"; subsection "Inner syntax"; ML_file "Syntax/type_annotation.ML"; ML_file "Syntax/term_position.ML"; ML_file "Syntax/lexicon.ML"; ML_file "Syntax/ast.ML"; ML_file "Syntax/syntax_ext.ML"; ML_file "Syntax/parser.ML"; ML_file "Syntax/syntax_trans.ML"; ML_file "Syntax/mixfix.ML"; ML_file "Syntax/printer.ML"; ML_file "Syntax/syntax.ML"; subsection "Core of tactical proof system"; ML_file "term_ord.ML"; ML_file "term_subst.ML"; ML_file "General/completion.ML"; ML_file "General/name_space.ML"; ML_file "sorts.ML"; ML_file "type.ML"; ML_file "logic.ML"; ML_file "Syntax/simple_syntax.ML"; ML_file "net.ML"; ML_file "item_net.ML"; ML_file "envir.ML"; ML_file "consts.ML"; ML_file "term_xml.ML"; ML_file "primitive_defs.ML"; ML_file "sign.ML"; ML_file "defs.ML"; ML_file "term_sharing.ML"; ML_file "pattern.ML"; ML_file "unify.ML"; ML_file "theory.ML"; ML_file "proofterm.ML"; ML_file "thm.ML"; ML_file "more_pattern.ML"; ML_file "more_unify.ML"; ML_file "more_thm.ML"; ML_file "facts.ML"; ML_file "thm_name.ML"; ML_file "global_theory.ML"; ML_file "pure_thy.ML"; ML_file "drule.ML"; ML_file "morphism.ML"; ML_file "variable.ML"; ML_file "conv.ML"; ML_file "goal_display.ML"; ML_file "tactical.ML"; ML_file "search.ML"; ML_file "tactic.ML"; ML_file "raw_simplifier.ML"; ML_file "conjunction.ML"; ML_file "assumption.ML"; subsection "Isar -- Intelligible Semi-Automated Reasoning"; (*ML support and global execution*) ML_file "ML/ml_syntax.ML"; ML_file "ML/ml_env.ML"; ML_file "ML/ml_options.ML"; ML_file "ML/ml_print_depth.ML"; ML_file_no_debug "Isar/runtime.ML"; ML_file "PIDE/execution.ML"; ML_file "ML/ml_compiler.ML"; ML_file "skip_proof.ML"; ML_file "goal.ML"; (*outer syntax*) ML_file "Isar/keyword.ML"; ML_file "Isar/token.ML"; ML_file "Isar/parse.ML"; ML_file "Thy/document_source.ML"; ML_file "Thy/thy_header.ML"; ML_file "Thy/document_marker.ML"; (*proof context*) ML_file "Isar/object_logic.ML"; ML_file "Isar/rule_cases.ML"; ML_file "Isar/auto_bind.ML"; ML_file "type_infer.ML"; ML_file "Syntax/local_syntax.ML"; ML_file "Isar/proof_context.ML"; ML_file "type_infer_context.ML"; ML_file "Syntax/syntax_phases.ML"; ML_file "Isar/args.ML"; (*theory specifications*) ML_file "Isar/local_defs.ML"; ML_file "Isar/local_theory.ML"; ML_file "Isar/entity.ML"; ML_file "PIDE/command_span.ML"; ML_file "Thy/thy_element.ML"; ML_file "Thy/markdown.ML"; ML_file "Thy/latex.ML"; (*ML with context and antiquotations*) ML_file "ML/ml_context.ML"; ML_file "ML/ml_antiquotation.ML"; ML_file "ML/ml_compiler2.ML"; ML_file "ML/ml_pid.ML"; section "Bootstrap phase 3: towards theory Pure and final ML toplevel setup"; (*basic proof engine*) ML_file "par_tactical.ML"; ML_file "context_tactic.ML"; ML_file "Isar/proof_display.ML"; ML_file "Isar/attrib.ML"; ML_file "Isar/context_rules.ML"; ML_file "Isar/method.ML"; ML_file "Isar/proof.ML"; ML_file "Isar/element.ML"; ML_file "Isar/obtain.ML"; ML_file "Isar/subgoal.ML"; ML_file "Isar/calculation.ML"; (*local theories and targets*) ML_file "Isar/locale.ML"; ML_file "Isar/generic_target.ML"; ML_file "Isar/bundle.ML"; ML_file "Isar/overloading.ML"; ML_file "axclass.ML"; ML_file "Isar/class.ML"; ML_file "Isar/named_target.ML"; ML_file "Isar/expression.ML"; ML_file "Isar/interpretation.ML"; ML_file 
"Isar/class_declaration.ML"; ML_file "Isar/target_context.ML"; ML_file "Isar/experiment.ML"; ML_file "simplifier.ML"; ML_file "Tools/plugin.ML"; (*executable theory content*) ML_file "Isar/code.ML"; (*specifications*) ML_file "Isar/spec_rules.ML"; ML_file "Isar/specification.ML"; ML_file "Isar/parse_spec.ML"; ML_file "Isar/typedecl.ML"; (*toplevel transactions*) ML_file "Isar/proof_node.ML"; ML_file "Isar/toplevel.ML"; (*proof term operations*) ML_file "Proof/proof_rewrite_rules.ML"; ML_file "Proof/proof_syntax.ML"; ML_file "Proof/proof_checker.ML"; ML_file "Proof/extraction.ML"; (*Isabelle system*) ML_file "PIDE/protocol_command.ML"; ML_file "System/scala.ML"; ML_file "System/process_result.ML"; ML_file "System/isabelle_system.ML"; (*theory documents*) ML_file "Thy/term_style.ML"; ML_file "Isar/outer_syntax.ML"; ML_file "ML/ml_antiquotations.ML"; ML_file "Thy/document_antiquotation.ML"; ML_file "Thy/thy_output.ML"; ML_file "Thy/document_antiquotations.ML"; ML_file "General/graph_display.ML"; ML_file "pure_syn.ML"; ML_file "PIDE/command.ML"; ML_file "PIDE/query_operation.ML"; ML_file "PIDE/resources.ML"; ML_file "Thy/thy_info.ML"; ML_file "thm_deps.ML"; ML_file "Thy/export_theory.ML"; ML_file "Thy/sessions.ML"; ML_file "PIDE/session.ML"; ML_file "PIDE/document.ML"; (*theory and proof operations*) ML_file "Isar/isar_cmd.ML"; subsection "Isabelle/Isar system"; ML_file "System/command_line.ML"; ML_file "System/message_channel.ML"; ML_file "System/isabelle_process.ML"; ML_file "System/scala_compiler.ML"; ML_file "System/isabelle_tool.ML"; ML_file "Thy/bibtex.ML"; ML_file "PIDE/protocol.ML"; ML_file "General/output_primitives_virtual.ML"; subsection "Miscellaneous tools and packages for Pure Isabelle"; ML_file "ML/ml_pp.ML"; ML_file "ML/ml_thms.ML"; ML_file "ML/ml_file.ML"; ML_file "Tools/build.ML"; ML_file "Tools/named_thms.ML"; ML_file "Tools/print_operation.ML"; ML_file "Tools/rail.ML"; ML_file "Tools/rule_insts.ML"; ML_file "Tools/thy_deps.ML"; ML_file "Tools/class_deps.ML"; ML_file "Tools/find_theorems.ML"; ML_file "Tools/find_consts.ML"; ML_file "Tools/simplifier_trace.ML"; ML_file_no_debug "Tools/debugger.ML"; ML_file "Tools/named_theorems.ML"; ML_file "Tools/doc.ML"; ML_file "Tools/jedit.ML"; ML_file "Tools/ghc.ML"; ML_file "Tools/generated_files.ML" diff --git a/src/Pure/System/bash.scala b/src/Pure/System/bash.scala --- a/src/Pure/System/bash.scala +++ b/src/Pure/System/bash.scala @@ -1,235 +1,234 @@ /* Title: Pure/System/bash.scala Author: Makarius GNU bash processes, with propagation of interrupts. 
*/ package isabelle import java.io.{File => JFile, BufferedReader, InputStreamReader, BufferedWriter, OutputStreamWriter} import scala.annotation.tailrec object Bash { /* concrete syntax */ private def bash_chr(c: Byte): String = { val ch = c.toChar ch match { case '\t' => "$'\\t'" case '\n' => "$'\\n'" case '\f' => "$'\\f'" case '\r' => "$'\\r'" case _ => if (Symbol.is_ascii_letter(ch) || Symbol.is_ascii_digit(ch) || "+,-./:_".contains(ch)) Symbol.ascii(ch) else if (c < 0) "$'\\x" + Integer.toHexString(256 + c) + "'" else if (c < 16) "$'\\x0" + Integer.toHexString(c) + "'" else if (c < 32 || c >= 127) "$'\\x" + Integer.toHexString(c) + "'" else "\\" + ch } } def string(s: String): String = if (s == "") "\"\"" else UTF8.bytes(s).iterator.map(bash_chr).mkString def strings(ss: List[String]): String = ss.iterator.map(Bash.string).mkString(" ") /* process and result */ type Watchdog = (Time, Process => Boolean) def process(script: String, cwd: JFile = null, env: Map[String, String] = Isabelle_System.settings(), redirect: Boolean = false, cleanup: () => Unit = () => ()): Process = new Process(script, cwd, env, redirect, cleanup) class Process private[Bash]( script: String, cwd: JFile, env: Map[String, String], redirect: Boolean, cleanup: () => Unit) { private val timing_file = Isabelle_System.tmp_file("bash_timing") private val timing = Synchronized[Option[Timing]](None) def get_timing: Timing = timing.value getOrElse Timing.zero private val script_file = Isabelle_System.tmp_file("bash_script") File.write(script_file, script) private val proc = Isabelle_System.process( List(File.platform_path(Path.variable("ISABELLE_BASH_PROCESS")), "-", File.standard_path(timing_file), "bash", File.standard_path(script_file)), cwd = cwd, env = env, redirect = redirect) // channels val stdin: BufferedWriter = new BufferedWriter(new OutputStreamWriter(proc.getOutputStream, UTF8.charset)) val stdout: BufferedReader = new BufferedReader(new InputStreamReader(proc.getInputStream, UTF8.charset)) val stderr: BufferedReader = new BufferedReader(new InputStreamReader(proc.getErrorStream, UTF8.charset)) // signals private val pid = stdout.readLine @tailrec private def kill(signal: String, count: Int = 1): Boolean = { count <= 0 || { Isabelle_System.kill(signal, pid) val running = Isabelle_System.kill("0", pid)._2 == 0 if (running) { Time.seconds(0.1).sleep kill(signal, count - 1) } else false } } def terminate(): Unit = Isabelle_Thread.try_uninterruptible { kill("INT", count = 7) && kill("TERM", count = 3) && kill("KILL") proc.destroy() do_cleanup() } def interrupt(): Unit = Isabelle_Thread.try_uninterruptible { Isabelle_System.kill("INT", pid) } // JVM shutdown hook private val shutdown_hook = Isabelle_Thread.create(() => terminate()) try { Runtime.getRuntime.addShutdownHook(shutdown_hook) } catch { case _: IllegalStateException => } // cleanup private def do_cleanup(): Unit = { try { Runtime.getRuntime.removeShutdownHook(shutdown_hook) } catch { case _: IllegalStateException => } script_file.delete timing.change { case None => if (timing_file.isFile) { val t = Word.explode(File.read(timing_file)) match { case List(Value.Long(elapsed), Value.Long(cpu)) => Timing(Time.ms(elapsed), Time.ms(cpu), Time.zero) case _ => Timing.zero } timing_file.delete Some(t) } else Some(Timing.zero) case some => some } cleanup() } // join def join: Int = { val rc = proc.waitFor do_cleanup() rc } // result def result( progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), watchdog: 
Option[Watchdog] = None, strict: Boolean = true): Process_Result = { stdin.close() val out_lines = Future.thread("bash_stdout") { File.read_lines(stdout, progress_stdout) } val err_lines = Future.thread("bash_stderr") { File.read_lines(stderr, progress_stderr) } val watchdog_thread = for ((time, check) <- watchdog) yield { Future.thread("bash_watchdog") { while (proc.isAlive) { time.sleep if (check(this)) interrupt() } } } val rc = try { join } catch { case Exn.Interrupt() => terminate(); Exn.Interrupt.return_code } watchdog_thread.foreach(_.cancel()) if (strict && rc == Exn.Interrupt.return_code) throw Exn.Interrupt() Process_Result(rc, out_lines.join, err_lines.join, get_timing) } } /* Scala function */ - object Process extends Scala.Fun("bash_process", thread = true) + object Process extends Scala.Fun_Strings("bash_process", thread = true) { val here = Scala_Project.here - def apply(script: String): String = + def apply(args: List[String]): List[String] = { - val result = Exn.capture { Isabelle_System.bash(script) } + val result = Exn.capture { Isabelle_System.bash(cat_lines(args)) } val is_interrupt = result match { case Exn.Res(res) => res.rc == Exn.Interrupt.return_code case Exn.Exn(exn) => Exn.is_interrupt(exn) } result match { - case _ if is_interrupt => "" - case Exn.Exn(exn) => Exn.message(exn) + case _ if is_interrupt => Nil + case Exn.Exn(exn) => List(Exn.message(exn)) case Exn.Res(res) => - Library.cat_strings0( - res.rc.toString :: - res.timing.elapsed.ms.toString :: - res.timing.cpu.ms.toString :: - res.out_lines.length.toString :: - res.out_lines ::: res.err_lines) + res.rc.toString :: + res.timing.elapsed.ms.toString :: + res.timing.cpu.ms.toString :: + res.out_lines.length.toString :: + res.out_lines ::: res.err_lines } } } } diff --git a/src/Pure/System/isabelle_process.ML b/src/Pure/System/isabelle_process.ML --- a/src/Pure/System/isabelle_process.ML +++ b/src/Pure/System/isabelle_process.ML @@ -1,228 +1,228 @@ (* Title: Pure/System/isabelle_process.ML Author: Makarius Isabelle process wrapper. *) signature ISABELLE_PROCESS = sig val is_active: unit -> bool val reset_tracing: Document_ID.exec -> unit val crashes: exn list Synchronized.var val init_options: unit -> unit val init_options_interactive: unit -> unit val init: unit -> unit val init_build: unit -> unit end; structure Isabelle_Process: ISABELLE_PROCESS = struct (* print mode *) val isabelle_processN = "isabelle_process"; fun is_active () = Print_Mode.print_mode_active isabelle_processN; val _ = Output.add_mode isabelle_processN Output.default_output Output.default_escape; val _ = Markup.add_mode isabelle_processN YXML.output_markup; val protocol_modes1 = [Syntax_Trans.no_bracketsN, Syntax_Trans.no_type_bracketsN]; val protocol_modes2 = [isabelle_processN, Pretty.symbolicN]; (* restricted tracing messages *) val tracing_messages = Synchronized.var "tracing_messages" (Inttab.empty: int Inttab.table); fun reset_tracing exec_id = Synchronized.change tracing_messages (Inttab.delete_safe exec_id); fun update_tracing () = (case Position.parse_id (Position.thread_data ()) of NONE => () | SOME exec_id => let val ok = Synchronized.change_result tracing_messages (fn tab => let val n = the_default 0 (Inttab.lookup tab exec_id) + 1; val limit = Options.default_int "editor_tracing_messages"; val ok = limit <= 0 orelse n <= limit; in (ok, Inttab.update (exec_id, n) tab) end); in if ok then () else let val (text, promise) = Active.dialog_text (); val _ = writeln ("Tracing paused. 
" ^ text "Stop" ^ ", or continue with next " ^ text "100" ^ ", " ^ text "1000" ^ ", " ^ text "10000" ^ " messages?") val m = Value.parse_int (Future.join promise) handle Fail _ => error "Stopped"; in Synchronized.change tracing_messages (Inttab.map_default (exec_id, 0) (fn k => k - m)) end end); (* init protocol -- uninterruptible *) val crashes = Synchronized.var "Isabelle_Process.crashes" ([]: exn list); local fun recover crash = (Synchronized.change crashes (cons crash); Output.physical_stderr "Recovered from Isabelle process crash -- see also Isabelle_Process.crashes\n"); fun ml_statistics () = Output.protocol_message (Markup.ML_statistics {pid = ML_Pid.get (), stats_dir = getenv "POLYSTATSDIR"}) []; in fun init_protocol modes = Thread_Attributes.uninterruptible (fn _ => fn (address, password) => let val _ = SHA1.test_samples () handle exn as Fail msg => (Output.physical_stderr (msg ^ "\n"); Exn.reraise exn); val _ = Output.physical_stderr Symbol.STX; (* streams *) val (in_stream, out_stream) = Socket_IO.open_streams address; val _ = Byte_Message.write_line out_stream password; val _ = TextIO.StreamIO.setBufferMode (TextIO.getOutstream TextIO.stdOut, IO.LINE_BUF); val _ = TextIO.StreamIO.setBufferMode (TextIO.getOutstream TextIO.stdErr, IO.LINE_BUF); val _ = BinIO.StreamIO.setBufferMode (BinIO.getOutstream out_stream, IO.BLOCK_BUF); (* messages *) val msg_channel = Message_Channel.make out_stream; - fun message name props body = - Message_Channel.send msg_channel (Message_Channel.message name props body); + fun message name props chunks = + Message_Channel.send msg_channel (Message_Channel.message name props chunks); fun standard_message props name ss = if forall (fn s => s = "") ss then () else let val pos_props = if exists Markup.position_property props then props else props @ Position.properties_of (Position.thread_data ()); - in message name pos_props (XML.blob ss) end; + in message name pos_props [XML.blob ss] end; fun report_message ss = if Context_Position.pide_reports () then standard_message [] Markup.reportN ss else (); val serial_props = Markup.serial_properties o serial; val message_context = Unsynchronized.setmp Private_Output.status_fn (standard_message [] Markup.statusN) #> Unsynchronized.setmp Private_Output.report_fn report_message #> Unsynchronized.setmp Private_Output.result_fn (fn props => fn s => standard_message (props @ serial_props ()) Markup.resultN s) #> Unsynchronized.setmp Private_Output.writeln_fn (fn s => standard_message (serial_props ()) Markup.writelnN s) #> Unsynchronized.setmp Private_Output.state_fn (fn s => standard_message (serial_props ()) Markup.stateN s) #> Unsynchronized.setmp Private_Output.information_fn (fn s => standard_message (serial_props ()) Markup.informationN s) #> Unsynchronized.setmp Private_Output.tracing_fn (fn s => (update_tracing (); standard_message (serial_props ()) Markup.tracingN s)) #> Unsynchronized.setmp Private_Output.warning_fn (fn s => standard_message (serial_props ()) Markup.warningN s) #> Unsynchronized.setmp Private_Output.legacy_fn (fn s => standard_message (serial_props ()) Markup.legacyN s) #> Unsynchronized.setmp Private_Output.error_message_fn (fn (i, s) => standard_message (Markup.serial_properties i) Markup.errorN s) #> Unsynchronized.setmp Private_Output.system_message_fn - (fn ss => message Markup.systemN [] (XML.blob ss)) #> + (fn ss => message Markup.systemN [] [XML.blob ss]) #> Unsynchronized.setmp Private_Output.protocol_message_fn - (fn props => fn body => message Markup.protocolN props body) #> + (fn 
props => fn chunks => message Markup.protocolN props chunks) #> Unsynchronized.setmp print_mode ((! print_mode @ #1 modes) |> fold (update op =) (#2 modes)); (* protocol *) fun protocol_loop () = let val _ = (case Byte_Message.read_message in_stream of NONE => raise Protocol_Command.STOP 0 | SOME [] => Output.system_message "Isabelle process: no input" | SOME (name :: args) => Protocol_Command.run name args) handle exn => if Protocol_Command.is_protocol_exn exn then Exn.reraise exn else (Runtime.exn_system_message exn handle crash => recover crash); in protocol_loop () end; fun protocol () = - (message Markup.initN [] [XML.Text (Session.welcome ())]; + (message Markup.initN [] [[XML.Text (Session.welcome ())]]; ml_statistics (); protocol_loop ()); val result = Exn.capture (message_context protocol) (); (* shutdown *) val _ = Future.shutdown (); val _ = Execution.reset (); val _ = Message_Channel.shutdown msg_channel; val _ = BinIO.closeIn in_stream; val _ = BinIO.closeOut out_stream; val _ = Options.reset_default (); in (case result of Exn.Exn (Protocol_Command.STOP rc) => if rc = 0 then () else exit rc | _ => Exn.release result) end); end; (* init options *) fun init_options () = (ML_Print_Depth.set_print_depth (Options.default_int "ML_print_depth"); Multithreading.trace := Options.default_int "threads_trace"; Multithreading.max_threads_update (Options.default_int "threads"); Multithreading.parallel_proofs := Options.default_int "parallel_proofs"; if Options.default_bool "export_standard_proofs" then Proofterm.proofs := 2 else (); let val proofs = Options.default_int "record_proofs" in if proofs < 0 then () else Proofterm.proofs := proofs end; Printer.show_markup_default := false); fun init_options_interactive () = (init_options (); Multithreading.parallel_proofs := (if Options.default_int "parallel_proofs" > 0 then 3 else 0); Printer.show_markup_default := true); (* generic init *) fun init_modes modes = let val address = Options.default_string \<^system_option>\system_channel_address\; val password = Options.default_string \<^system_option>\system_channel_password\; in if address <> "" andalso password <> "" then init_protocol modes (address, password) else init_options () end; fun init () = init_modes (protocol_modes1, protocol_modes2); fun init_build () = init_modes ([], protocol_modes2); end; diff --git a/src/Pure/System/isabelle_system.ML b/src/Pure/System/isabelle_system.ML --- a/src/Pure/System/isabelle_system.ML +++ b/src/Pure/System/isabelle_system.ML @@ -1,139 +1,139 @@ (* Title: Pure/System/isabelle_system.ML Author: Makarius Isabelle system support. 
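The download interface is split into a raw variant that returns the content and a file-writing variant built on top of it; the raw variant delegates to the Scala side via Scala.function1. A minimal Scala-side usage sketch of the file-writing counterpart defined further below in isabelle_system.scala (the URL and target path here are purely illustrative):

  Isabelle_System.download_file("https://example.org/data.tar.gz",
    Path.explode("$ISABELLE_TMP_PREFIX/data.tar.gz"))
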
*) signature ISABELLE_SYSTEM = sig val bash_process: string -> Process_Result.T val bash_output: string -> string * int val bash: string -> int val bash_functions: unit -> string list val check_bash_function: Proof.context -> string * Position.T -> string val absolute_path: Path.T -> string val make_directory: Path.T -> Path.T val copy_dir: Path.T -> Path.T -> unit val copy_file: Path.T -> Path.T -> unit val copy_file_base: Path.T * Path.T -> Path.T -> unit val create_tmp_path: string -> string -> Path.T val with_tmp_file: string -> string -> (Path.T -> 'a) -> 'a val rm_tree: Path.T -> unit val with_tmp_dir: string -> (Path.T -> 'a) -> 'a - val download: string -> Path.T -> unit + val download: string -> string + val download_file: string -> Path.T -> unit val isabelle_id: unit -> string val isabelle_identifier: unit -> string option val isabelle_heading: unit -> string val isabelle_name: unit -> string val identification: unit -> string end; structure Isabelle_System: ISABELLE_SYSTEM = struct (* bash *) fun bash_process script = Scala.function "bash_process" - ("export ISABELLE_TMP=" ^ Bash.string (getenv "ISABELLE_TMP") ^ "\n" ^ script) - |> split_strings0 + ["export ISABELLE_TMP=" ^ Bash.string (getenv "ISABELLE_TMP"), script] |> (fn [] => raise Exn.Interrupt | [err] => error err | a :: b :: c :: d :: lines => let val rc = Value.parse_int a; val (elapsed, cpu) = apply2 (Time.fromMilliseconds o Value.parse_int) (b, c); val (out_lines, err_lines) = chop (Value.parse_int d) lines; in Process_Result.make {rc = rc, out_lines = out_lines, err_lines = err_lines, timing = {elapsed = elapsed, cpu = cpu, gc = Time.zeroTime}} end | _ => raise Fail "Malformed Isabelle/Scala result"); val bash = bash_process #> Process_Result.print #> Process_Result.rc; fun bash_output s = let val res = bash_process s; val _ = warning (Process_Result.err res); in (Process_Result.out res, Process_Result.rc res) end; (* bash functions *) fun bash_functions () = bash_process "declare -Fx" |> Process_Result.check |> Process_Result.out_lines |> map_filter (space_explode " " #> try List.last); fun check_bash_function ctxt arg = Completion.check_entity Markup.bash_functionN (bash_functions () |> map (rpair Position.none)) ctxt arg; (* directory and file operations *) val absolute_path = Path.implode o File.absolute_path; -fun scala_function0 name = ignore o Scala.function name o cat_strings0; -fun scala_function name = scala_function0 name o map absolute_path; +fun scala_function name = ignore o Scala.function name o map absolute_path; fun make_directory path = (scala_function "make_directory" [path]; path); fun copy_dir src dst = scala_function "copy_dir" [src, dst]; fun copy_file src dst = scala_function "copy_file" [src, dst]; fun copy_file_base (base_dir, src) target_dir = - scala_function0 "copy_file_base" - [absolute_path base_dir, Path.implode src, absolute_path target_dir]; + ignore (Scala.function "copy_file_base" + [absolute_path base_dir, Path.implode src, absolute_path target_dir]); (* tmp files *) fun create_tmp_path name ext = let val path = File.tmp_path (Path.basic (name ^ serial_string ()) |> Path.ext ext); val _ = File.exists path andalso raise Fail ("Temporary file already exists: " ^ Path.print path); in path end; fun with_tmp_file name ext f = let val path = create_tmp_path name ext in Exn.release (Exn.capture f path before ignore (try File.rm path)) end; (* tmp dirs *) fun rm_tree path = scala_function "rm_tree" [path]; fun with_tmp_dir name f = let val path = create_tmp_path name "" in Exn.release 
(Exn.capture f (make_directory path) before ignore (try rm_tree path)) end; (* download file *) -fun download url file = - ignore (Scala.function "download" (cat_strings0 [url, absolute_path file])); +val download = Scala.function1 "download"; + +fun download_file url path = File.write path (download url); (* Isabelle distribution identification *) -fun isabelle_id () = Scala.function "isabelle_id" ""; +fun isabelle_id () = Scala.function1 "isabelle_id" ""; fun isabelle_identifier () = try getenv_strict "ISABELLE_IDENTIFIER"; fun isabelle_heading () = (case isabelle_identifier () of NONE => "" | SOME version => " (" ^ version ^ ")"); fun isabelle_name () = getenv_strict "ISABELLE_NAME"; fun identification () = "Isabelle/" ^ isabelle_id () ^ isabelle_heading (); end; diff --git a/src/Pure/System/isabelle_system.scala b/src/Pure/System/isabelle_system.scala --- a/src/Pure/System/isabelle_system.scala +++ b/src/Pure/System/isabelle_system.scala @@ -1,615 +1,614 @@ /* Title: Pure/System/isabelle_system.scala Author: Makarius Fundamental Isabelle system environment: quasi-static module with optional init operation. */ package isabelle import java.io.{File => JFile, IOException} import java.nio.file.{Path => JPath, Files, SimpleFileVisitor, FileVisitResult, StandardCopyOption, FileSystemException} import java.nio.file.attribute.BasicFileAttributes object Isabelle_System { /** bootstrap information **/ def jdk_home(): String = { val java_home = System.getProperty("java.home", "") val home = new JFile(java_home) val parent = home.getParent if (home.getName == "jre" && parent != null && (new JFile(new JFile(parent, "bin"), "javac")).exists) parent else java_home } def bootstrap_directory( preference: String, envar: String, property: String, description: String): String = { val value = proper_string(preference) orElse // explicit argument proper_string(System.getenv(envar)) orElse // e.g. inherited from running isabelle tool proper_string(System.getProperty(property)) getOrElse // e.g. 
via JVM application boot process error("Unknown " + description + " directory") if ((new JFile(value)).isDirectory) value else error("Bad " + description + " directory " + quote(value)) } /** implicit settings environment **/ abstract class Service @volatile private var _settings: Option[Map[String, String]] = None @volatile private var _services: Option[List[Class[Service]]] = None def settings(): Map[String, String] = { if (_settings.isEmpty) init() // unsynchronized check _settings.get } def services(): List[Class[Service]] = { if (_services.isEmpty) init() // unsynchronized check _services.get } def make_services[C](c: Class[C]): List[C] = for { c1 <- services() if Library.is_subclass(c1, c) } yield c1.getDeclaredConstructor().newInstance().asInstanceOf[C] def init(isabelle_root: String = "", cygwin_root: String = ""): Unit = synchronized { if (_settings.isEmpty || _services.isEmpty) { val isabelle_root1 = bootstrap_directory(isabelle_root, "ISABELLE_ROOT", "isabelle.root", "Isabelle root") val cygwin_root1 = if (Platform.is_windows) bootstrap_directory(cygwin_root, "CYGWIN_ROOT", "cygwin.root", "Cygwin root") else "" if (Platform.is_windows) Cygwin.init(isabelle_root1, cygwin_root1) def set_cygwin_root(): Unit = { if (Platform.is_windows) _settings = Some(_settings.getOrElse(Map.empty) + ("CYGWIN_ROOT" -> cygwin_root1)) } set_cygwin_root() def default(env: Map[String, String], entry: (String, String)): Map[String, String] = if (env.isDefinedAt(entry._1) || entry._2 == "") env else env + entry val env = { val temp_windows = { val temp = if (Platform.is_windows) System.getenv("TEMP") else null if (temp != null && temp.contains('\\')) temp else "" } val user_home = System.getProperty("user.home", "") val isabelle_app = System.getProperty("isabelle.app", "") default( default( default(sys.env + ("ISABELLE_JDK_HOME" -> File.standard_path(jdk_home())), "TEMP_WINDOWS" -> temp_windows), "HOME" -> user_home), "ISABELLE_APP" -> "true") } val settings = { val dump = JFile.createTempFile("settings", null) dump.deleteOnExit try { val cmd1 = if (Platform.is_windows) List(cygwin_root1 + "\\bin\\bash", "-l", File.standard_path(isabelle_root1 + "\\bin\\isabelle")) else List(isabelle_root1 + "/bin/isabelle") val cmd = cmd1 ::: List("getenv", "-d", dump.toString) val (output, rc) = process_output(process(cmd, env = env, redirect = true)) if (rc != 0) error(output) val entries = (for (entry <- Library.split_strings0(File.read(dump)) if entry != "") yield { val i = entry.indexOf('=') if (i <= 0) entry -> "" else entry.substring(0, i) -> entry.substring(i + 1) }).toMap entries + ("PATH" -> entries("PATH_JVM")) - "PATH_JVM" } finally { dump.delete } } _settings = Some(settings) set_cygwin_root() val variable = "ISABELLE_SCALA_SERVICES" val services = for (name <- space_explode(':', settings.getOrElse(variable, getenv_error(variable)))) yield { def err(msg: String): Nothing = error("Bad entry " + quote(name) + " in " + variable + "\n" + msg) try { Class.forName(name).asInstanceOf[Class[Service]] } catch { case _: ClassNotFoundException => err("Class not found") case exn: Throwable => err(Exn.message(exn)) } } _services = Some(services) } } /* getenv -- dynamic process environment */ private def getenv_error(name: String): Nothing = error("Undefined Isabelle environment variable: " + quote(name)) def getenv(name: String, env: Map[String, String] = settings()): String = env.getOrElse(name, "") def getenv_strict(name: String, env: Map[String, String] = settings()): String = proper_string(getenv(name, env)) 
getOrElse error("Undefined Isabelle environment variable: " + quote(name)) def cygwin_root(): String = getenv_strict("CYGWIN_ROOT") /* getetc -- static distribution parameters */ def getetc(name: String, root: Path = Path.ISABELLE_HOME): Option[String] = { val path = root + Path.basic("etc") + Path.basic(name) if (path.is_file) { Library.trim_split_lines(File.read(path)) match { case Nil => None case List(s) => Some(s) case _ => error("Single line expected in " + path.absolute) } } else None } /* Isabelle distribution identification */ def isabelle_id(root: Path = Path.ISABELLE_HOME): String = getetc("ISABELLE_ID", root = root) orElse Mercurial.archive_id(root) getOrElse { if (Mercurial.is_repository(root)) Mercurial.repository(root).parent() else error("Failed to identify Isabelle distribution " + root) } - object Isabelle_Id extends Scala.Fun("isabelle_id") + object Isabelle_Id extends Scala.Fun_String("isabelle_id") { val here = Scala_Project.here def apply(arg: String): String = isabelle_id() } def isabelle_tags(root: Path = Path.ISABELLE_HOME): String = getetc("ISABELLE_TAGS", root = root) orElse Mercurial.archive_tags(root) getOrElse { if (Mercurial.is_repository(root)) { val hg = Mercurial.repository(root) hg.tags(rev = hg.parent()) } else "" } def isabelle_identifier(): Option[String] = proper_string(getenv("ISABELLE_IDENTIFIER")) def isabelle_heading(): String = isabelle_identifier() match { case None => "" case Some(version) => " (" + version + ")" } def isabelle_name(): String = getenv_strict("ISABELLE_NAME") def identification(): String = "Isabelle/" + isabelle_id() + isabelle_heading() /** file-system operations **/ /* scala functions */ - private def apply_paths(arg: String, fun: List[Path] => Unit): String = - { fun(Library.split_strings0(arg).map(Path.explode)); "" } + private def apply_paths(args: List[String], fun: List[Path] => Unit): List[String] = + { fun(args.map(Path.explode)); Nil } - private def apply_paths1(arg: String, fun: Path => Unit): String = - apply_paths(arg, { case List(path) => fun(path) }) + private def apply_paths1(args: List[String], fun: Path => Unit): List[String] = + apply_paths(args, { case List(path) => fun(path) }) - private def apply_paths2(arg: String, fun: (Path, Path) => Unit): String = - apply_paths(arg, { case List(path1, path2) => fun(path1, path2) }) + private def apply_paths2(args: List[String], fun: (Path, Path) => Unit): List[String] = + apply_paths(args, { case List(path1, path2) => fun(path1, path2) }) - private def apply_paths3(arg: String, fun: (Path, Path, Path) => Unit): String = - apply_paths(arg, { case List(path1, path2, path3) => fun(path1, path2, path3) }) + private def apply_paths3(args: List[String], fun: (Path, Path, Path) => Unit): List[String] = + apply_paths(args, { case List(path1, path2, path3) => fun(path1, path2, path3) }) /* permissions */ def chmod(arg: String, path: Path): Unit = bash("chmod " + arg + " " + File.bash_path(path)).check def chown(arg: String, path: Path): Unit = bash("chown " + arg + " " + File.bash_path(path)).check /* directories */ def make_directory(path: Path): Path = { if (!path.is_dir) { try { Files.createDirectories(path.file.toPath) } catch { case ERROR(_) => error("Failed to create directory: " + path.absolute) } } path } def new_directory(path: Path): Path = if (path.is_dir) error("Directory already exists: " + path.absolute) else make_directory(path) def copy_dir(dir1: Path, dir2: Path): Unit = { val res = bash("cp -a " + File.bash_path(dir1) + " " + File.bash_path(dir2)) if (!res.ok) 
{ cat_error("Failed to copy directory " + dir1.absolute + " to " + dir2.absolute, res.err) } } - object Make_Directory extends Scala.Fun("make_directory") + object Make_Directory extends Scala.Fun_Strings("make_directory") { val here = Scala_Project.here - def apply(arg: String): String = apply_paths1(arg, make_directory) + def apply(args: List[String]): List[String] = apply_paths1(args, make_directory) } - object Copy_Dir extends Scala.Fun("copy_dir") + object Copy_Dir extends Scala.Fun_Strings("copy_dir") { val here = Scala_Project.here - def apply(arg: String): String = apply_paths2(arg, copy_dir) + def apply(args: List[String]): List[String] = apply_paths2(args, copy_dir) } /* copy files */ def copy_file(src: JFile, dst: JFile): Unit = { val target = if (dst.isDirectory) new JFile(dst, src.getName) else dst if (!File.eq(src, target)) { try { Files.copy(src.toPath, target.toPath, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING) } catch { case ERROR(msg) => cat_error("Failed top copy file " + File.path(src).absolute + " to " + File.path(dst).absolute, msg) } } } def copy_file(src: Path, dst: Path): Unit = copy_file(src.file, dst.file) def copy_file_base(base_dir: Path, src: Path, target_dir: Path): Unit = { val src1 = src.expand val src1_dir = src1.dir if (!src1.starts_basic) error("Illegal path specification " + src1 + " beyond base directory") copy_file(base_dir + src1, Isabelle_System.make_directory(target_dir + src1_dir)) } - object Copy_File extends Scala.Fun("copy_file") + object Copy_File extends Scala.Fun_Strings("copy_file") { val here = Scala_Project.here - def apply(arg: String): String = apply_paths2(arg, copy_file) + def apply(args: List[String]): List[String] = apply_paths2(args, copy_file) } - object Copy_File_Base extends Scala.Fun("copy_file_base") + object Copy_File_Base extends Scala.Fun_Strings("copy_file_base") { val here = Scala_Project.here - def apply(arg: String): String = apply_paths3(arg, copy_file_base) + def apply(args: List[String]): List[String] = apply_paths3(args, copy_file_base) } /* move files */ def move_file(src: JFile, dst: JFile): Unit = { val target = if (dst.isDirectory) new JFile(dst, src.getName) else dst if (!File.eq(src, target)) Files.move(src.toPath, target.toPath, StandardCopyOption.REPLACE_EXISTING) } def move_file(src: Path, dst: Path): Unit = move_file(src.file, dst.file) /* symbolic link */ def symlink(src: Path, dst: Path, force: Boolean = false): Unit = { val src_file = src.file val dst_file = dst.file val target = if (dst_file.isDirectory) new JFile(dst_file, src_file.getName) else dst_file if (force) target.delete try { Files.createSymbolicLink(target.toPath, src_file.toPath) } catch { case _: UnsupportedOperationException if Platform.is_windows => Cygwin.link(File.standard_path(src), target) case _: FileSystemException if Platform.is_windows => Cygwin.link(File.standard_path(src), target) } } /* tmp files */ def isabelle_tmp_prefix(): JFile = { val path = Path.explode("$ISABELLE_TMP_PREFIX") path.file.mkdirs // low-level mkdirs to avoid recursion via Isabelle environment File.platform_file(path) } def tmp_file(name: String, ext: String = "", base_dir: JFile = isabelle_tmp_prefix()): JFile = { val suffix = if (ext == "") "" else "." 
+ ext val file = Files.createTempFile(base_dir.toPath, name, suffix).toFile file.deleteOnExit file } def with_tmp_file[A](name: String, ext: String = "")(body: Path => A): A = { val file = tmp_file(name, ext) try { body(File.path(file)) } finally { file.delete } } /* tmp dirs */ def rm_tree(root: JFile): Unit = { root.delete if (root.isDirectory) { Files.walkFileTree(root.toPath, new SimpleFileVisitor[JPath] { override def visitFile(file: JPath, attrs: BasicFileAttributes): FileVisitResult = { try { Files.deleteIfExists(file) } catch { case _: IOException => } FileVisitResult.CONTINUE } override def postVisitDirectory(dir: JPath, e: IOException): FileVisitResult = { if (e == null) { try { Files.deleteIfExists(dir) } catch { case _: IOException => } FileVisitResult.CONTINUE } else throw e } } ) } } def rm_tree(root: Path): Unit = rm_tree(root.file) - object Rm_Tree extends Scala.Fun("rm_tree") + object Rm_Tree extends Scala.Fun_Strings("rm_tree") { val here = Scala_Project.here - def apply(arg: String): String = apply_paths1(arg, rm_tree) + def apply(args: List[String]): List[String] = apply_paths1(args, rm_tree) } def tmp_dir(name: String, base_dir: JFile = isabelle_tmp_prefix()): JFile = { val dir = Files.createTempDirectory(base_dir.toPath, name).toFile dir.deleteOnExit dir } def with_tmp_dir[A](name: String)(body: Path => A): A = { val dir = tmp_dir(name) try { body(File.path(dir)) } finally { rm_tree(dir) } } /* quasi-atomic update of directory */ def update_directory(dir: Path, f: Path => Unit): Unit = { val new_dir = dir.ext("new") val old_dir = dir.ext("old") rm_tree(new_dir) rm_tree(old_dir) f(new_dir) if (dir.is_dir) move_file(dir, old_dir) move_file(new_dir, dir) rm_tree(old_dir) } /** external processes **/ /* raw process */ def process(command_line: List[String], cwd: JFile = null, env: Map[String, String] = settings(), redirect: Boolean = false): Process = { val proc = new ProcessBuilder proc.command(command_line:_*) // fragile on Windows if (cwd != null) proc.directory(cwd) if (env != null) { proc.environment.clear() for ((x, y) <- env) proc.environment.put(x, y) } proc.redirectErrorStream(redirect) proc.start } def process_output(proc: Process): (String, Int) = { proc.getOutputStream.close() val output = File.read_stream(proc.getInputStream) val rc = try { proc.waitFor } finally { proc.getInputStream.close() proc.getErrorStream.close() proc.destroy() Exn.Interrupt.dispose() } (output, rc) } def kill(signal: String, group_pid: String): (String, Int) = { val bash = if (Platform.is_windows) List(cygwin_root() + "\\bin\\bash.exe") else List("/usr/bin/env", "bash") process_output(process(bash ::: List("-c", "kill -" + signal + " -" + group_pid))) } /* GNU bash */ def bash(script: String, cwd: JFile = null, env: Map[String, String] = settings(), redirect: Boolean = false, progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), watchdog: Option[Bash.Watchdog] = None, strict: Boolean = true, cleanup: () => Unit = () => ()): Process_Result = { Bash.process(script, cwd = cwd, env = env, redirect = redirect, cleanup = cleanup). 
result(progress_stdout = progress_stdout, progress_stderr = progress_stderr, watchdog = watchdog, strict = strict) } private lazy val gnutar_check: Boolean = try { bash("tar --version").check.out.containsSlice("GNU tar") || error("") } catch { case ERROR(_) => false } def gnutar( args: String, dir: Path = Path.current, original_owner: Boolean = false, redirect: Boolean = false): Process_Result = { val options = (if (dir.is_current) "" else "-C " + File.bash_path(dir) + " ") + (if (original_owner) "" else "--owner=root --group=staff ") if (gnutar_check) bash("tar " + options + args, redirect = redirect) else error("Expected to find GNU tar executable") } def require_command(cmds: String*): Unit = { for (cmd <- cmds) { if (!bash(Bash.string(cmd) + " --version").ok) error("Missing system command: " + quote(cmd)) } } def hostname(): String = bash("hostname -s").check.out def open(arg: String): Unit = bash("exec \"$ISABELLE_OPEN\" " + Bash.string(arg) + " >/dev/null 2>/dev/null &") def pdf_viewer(arg: Path): Unit = bash("exec \"$PDF_VIEWER\" " + File.bash_path(arg) + " >/dev/null 2>/dev/null &") def open_external_file(name: String): Boolean = { val ext = Library.take_suffix((c: Char) => c != '.', name.toList)._2.mkString val external = ext.nonEmpty && Library.space_explode(':', getenv("ISABELLE_EXTERNAL_FILES")).contains(ext) if (external) { if (ext == "pdf" && Path.is_wellformed(name)) pdf_viewer(Path.explode(name)) else open(name) } external } def export_isabelle_identifier(isabelle_identifier: String): String = if (isabelle_identifier == "") "" else "export ISABELLE_IDENTIFIER=" + Bash.string(isabelle_identifier) + "\n" /** Isabelle resources **/ /* repository clone with Admin */ def admin(): Boolean = Path.explode("~~/Admin").is_dir /* components */ def components(): List[Path] = Path.split(getenv_strict("ISABELLE_COMPONENTS")) /* default logic */ def default_logic(args: String*): String = { args.find(_ != "") match { case Some(logic) => logic case None => getenv_strict("ISABELLE_LOGIC") } } /* download file */ - def download(url_name: String, file: Path, progress: Progress = new Progress): Unit = + def download(url_name: String, progress: Progress = new Progress): HTTP.Content = { val url = Url(url_name) progress.echo("Getting " + quote(url_name)) - val content = - try { HTTP.Client.get(url) } - catch { case ERROR(msg) => cat_error("Failed to download " + quote(url_name), msg) } - Bytes.write(file, content.bytes) + try { HTTP.Client.get(url) } + catch { case ERROR(msg) => cat_error("Failed to download " + quote(url_name), msg) } } + def download_file(url_name: String, file: Path, progress: Progress = new Progress): Unit = + Bytes.write(file, download(url_name, progress = progress).bytes) + object Download extends Scala.Fun("download", thread = true) { val here = Scala_Project.here - def apply(arg: String): String = - Library.split_strings0(arg) match { - case List(url, file) => download(url, Path.explode(file)); "" - } + override def invoke(args: List[Bytes]): List[Bytes] = + args match { case List(url) => List(download(url.text).bytes) } } } diff --git a/src/Pure/System/isabelle_tool.scala b/src/Pure/System/isabelle_tool.scala --- a/src/Pure/System/isabelle_tool.scala +++ b/src/Pure/System/isabelle_tool.scala @@ -1,233 +1,233 @@ /* Title: Pure/System/isabelle_tool.scala Author: Makarius Author: Lars Hupel Isabelle system tools: external executables or internal Scala functions. 
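For orientation: an internal tool is a plain Isabelle_Tool value registered through an Isabelle_Scala_Tools service, which in turn is picked up via the ISABELLE_SCALA_SERVICES mechanism of isabelle_system.scala above. A minimal sketch under these assumptions -- the tool name, description and behaviour are illustrative only and not part of this change:

  package isabelle

  object Hello_Tool
  {
    val isabelle_tool =
      Isabelle_Tool("hello_world", "print the given arguments (illustrative)",
        Scala_Project.here, args => args.foreach(println))
  }

  class Extra_Tools extends Isabelle_Scala_Tools(Hello_Tool.isabelle_tool)

Such a class only becomes visible after its name is listed in ISABELLE_SCALA_SERVICES, as handled by Isabelle_System.services().
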
*/ package isabelle import java.net.URLClassLoader import scala.reflect.runtime.universe import scala.tools.reflect.{ToolBox, ToolBoxError} object Isabelle_Tool { /* Scala source tools */ abstract class Body extends Function[List[String], Unit] private def compile(path: Path): Body = { def err(msg: String): Nothing = cat_error(msg, "The error(s) above occurred in Isabelle/Scala tool " + path) val source = File.read(path) val class_loader = new URLClassLoader(Array(), getClass.getClassLoader) val tool_box = universe.runtimeMirror(class_loader).mkToolBox() try { val tree = tool_box.parse(source) val module = try { tree.asInstanceOf[universe.ModuleDef] } catch { case _: java.lang.ClassCastException => err("Source does not describe a module (Scala object)") } tool_box.compile(universe.Ident(tool_box.define(module)))() match { case body: Body => body case _ => err("Ill-typed source: Isabelle_Tool.Body expected") } } catch { case e: ToolBoxError => if (tool_box.frontEnd.hasErrors) { val infos = tool_box.frontEnd.infos.toList val msgs = infos.map(info => "Error in line " + info.pos.line + ":\n" + info.msg) err(msgs.mkString("\n")) } else err(e.toString) } } /* external tools */ private def dirs(): List[Path] = Path.split(Isabelle_System.getenv_strict("ISABELLE_TOOLS")) private def is_external(dir: Path, file_name: String): Boolean = { val file = (dir + Path.explode(file_name)).file try { file.isFile && file.canRead && (file_name.endsWith(".scala") || file.canExecute) && !file_name.endsWith("~") && !file_name.endsWith(".orig") } catch { case _: SecurityException => false } } private def find_external(name: String): Option[List[String] => Unit] = dirs().collectFirst({ case dir if is_external(dir, name + ".scala") => compile(dir + Path.explode(name + ".scala")) case dir if is_external(dir, name) => (args: List[String]) => { val tool = dir + Path.explode(name) val result = Isabelle_System.bash(File.bash_path(tool) + " " + Bash.strings(args)) sys.exit(result.print_stdout.rc) } }) /* internal tools */ private lazy val internal_tools: List[Isabelle_Tool] = Isabelle_System.make_services(classOf[Isabelle_Scala_Tools]).flatMap(_.tools) private def find_internal(name: String): Option[List[String] => Unit] = internal_tools.collectFirst({ case tool if tool.name == name => args => Command_Line.tool { tool.body(args) } }) /* list tools */ abstract class Entry { def name: String def position: Properties.T def description: String def print: String = description match { case "" => name case descr => name + " - " + descr } } sealed case class External(name: String, path: Path) extends Entry { def position: Properties.T = Position.File(path.absolute.implode) def description: String = { val Pattern = """.*\bDESCRIPTION: *(.*)""".r split_lines(File.read(path)).collectFirst({ case Pattern(s) => s }) getOrElse "" } } def external_tools(): List[External] = { for { dir <- dirs() if dir.is_dir file_name <- File.read_dir(dir) if is_external(dir, file_name) } yield { val path = dir + Path.explode(file_name) val name = Library.perhaps_unsuffix(".scala", file_name) External(name, path) } } def isabelle_tools(): List[Entry] = (external_tools() ::: internal_tools).sortBy(_.name) - object Isabelle_Tools extends Scala.Fun("isabelle_tools") + object Isabelle_Tools extends Scala.Fun_String("isabelle_tools") { val here = Scala_Project.here def apply(arg: String): String = if (arg.nonEmpty) error("Bad argument: " + quote(arg)) else { val result = isabelle_tools().map(entry => (entry.name, entry.position)) val body = { import 
XML.Encode._; list(pair(string, properties))(result) } YXML.string_of_body(body) } } /* command line entry point */ def main(args: Array[String]): Unit = { Command_Line.tool { args.toList match { case Nil | List("-?") => val tool_descriptions = isabelle_tools().map(_.print) Getopts(""" Usage: isabelle TOOL [ARGS ...] Start Isabelle TOOL with ARGS; pass "-?" for tool-specific help. Available tools:""" + tool_descriptions.mkString("\n ", "\n ", "\n")).usage() case tool_name :: tool_args => find_external(tool_name) orElse find_internal(tool_name) match { case Some(tool) => tool(tool_args) case None => error("Unknown Isabelle tool: " + quote(tool_name)) } } } } } sealed case class Isabelle_Tool( name: String, description: String, here: Scala_Project.Here, body: List[String] => Unit) extends Isabelle_Tool.Entry { def position: Position.T = here.position } class Isabelle_Scala_Tools(val tools: Isabelle_Tool*) extends Isabelle_System.Service class Tools extends Isabelle_Scala_Tools( Build.isabelle_tool, Build_Docker.isabelle_tool, Build_Job.isabelle_tool, Doc.isabelle_tool, Dump.isabelle_tool, Export.isabelle_tool, ML_Process.isabelle_tool, Mercurial.isabelle_tool, Mkroot.isabelle_tool, Logo.isabelle_tool, Options.isabelle_tool, Phabricator.isabelle_tool1, Phabricator.isabelle_tool2, Phabricator.isabelle_tool3, Phabricator.isabelle_tool4, Presentation.isabelle_tool, Profiling_Report.isabelle_tool, Server.isabelle_tool, Sessions.isabelle_tool, Scala_Project.isabelle_tool, Update.isabelle_tool, Update_Cartouches.isabelle_tool, Update_Comments.isabelle_tool, Update_Header.isabelle_tool, Update_Then.isabelle_tool, Update_Theorems.isabelle_tool, isabelle.vscode.TextMate_Grammar.isabelle_tool, isabelle.vscode.Language_Server.isabelle_tool) class Admin_Tools extends Isabelle_Scala_Tools( Build_CSDP.isabelle_tool, Build_Cygwin.isabelle_tool, Build_Doc.isabelle_tool, Build_E.isabelle_tool, Build_Fonts.isabelle_tool, Build_JCEF.isabelle_tool, Build_JDK.isabelle_tool, Build_PolyML.isabelle_tool1, Build_PolyML.isabelle_tool2, Build_SPASS.isabelle_tool, Build_SQLite.isabelle_tool, Build_Status.isabelle_tool, Build_Vampire.isabelle_tool, Build_VeriT.isabelle_tool, Build_Zipperposition.isabelle_tool, Check_Sources.isabelle_tool, Components.isabelle_tool, isabelle.vscode.Build_VSCode.isabelle_tool) diff --git a/src/Pure/System/message_channel.ML b/src/Pure/System/message_channel.ML --- a/src/Pure/System/message_channel.ML +++ b/src/Pure/System/message_channel.ML @@ -1,70 +1,58 @@ (* Title: Pure/System/message_channel.ML Author: Makarius Preferably asynchronous channel for Isabelle messages. 
*) signature MESSAGE_CHANNEL = sig type message - val message: string -> Properties.T -> XML.body -> message + val message: string -> Properties.T -> XML.body list -> message type T val send: T -> message -> unit val shutdown: T -> unit val make: BinIO.outstream -> T end; structure Message_Channel: MESSAGE_CHANNEL = struct (* message *) -datatype message = Message of {body: XML.body, flush: bool}; +datatype message = Message of XML.body list; -fun body_size body = fold (YXML.traverse (Integer.add o size)) body 0; - -fun chunk body = XML.Text (string_of_int (body_size body) ^ "\n") :: body; - -fun message name raw_props body = +fun message name raw_props chunks = let val robust_props = map (apply2 YXML.embed_controls) raw_props; - val header = XML.Elem ((name, robust_props), []); - in Message {body = chunk [header] @ chunk body, flush = name = Markup.protocolN} end; + val header = [XML.Elem ((name, robust_props), [])]; + in Message (header :: chunks) end; (* channel *) datatype T = Message_Channel of {send: message -> unit, shutdown: unit -> unit}; fun send (Message_Channel {send, ...}) = send; fun shutdown (Message_Channel {shutdown, ...}) = shutdown (); -val flush_timeout = SOME (seconds 0.02); - fun message_output mbox stream = let - fun continue timeout = - (case Mailbox.receive timeout mbox of - [] => (Byte_Message.flush stream; continue NONE) - | msgs => received timeout msgs) - and received _ (NONE :: _) = Byte_Message.flush stream - | received _ (SOME (Message {body, flush}) :: rest) = - let - val _ = fold (YXML.traverse (fn s => fn () => File.output stream s)) body (); - val timeout = if flush then (Byte_Message.flush stream; NONE) else flush_timeout; - in received timeout rest end - | received timeout [] = continue timeout; - in fn () => continue NONE end; + fun continue () = Mailbox.receive NONE mbox |> received + and received [] = continue () + | received (NONE :: _) = () + | received (SOME (Message chunks) :: rest) = + (Byte_Message.write_message_yxml stream chunks; received rest); + in continue end; fun make stream = let val mbox = Mailbox.create (); val thread = Isabelle_Thread.fork {name = "channel", stack_limit = NONE, interrupts = false} (message_output mbox stream); fun send msg = Mailbox.send mbox (SOME msg); fun shutdown () = (Mailbox.send mbox NONE; Mailbox.await_empty mbox; Isabelle_Thread.join thread); in Message_Channel {send = send, shutdown = shutdown} end; end; diff --git a/src/Pure/System/scala.ML b/src/Pure/System/scala.ML --- a/src/Pure/System/scala.ML +++ b/src/Pure/System/scala.ML @@ -1,66 +1,69 @@ (* Title: Pure/System/scala.ML Author: Makarius Invoke Scala functions from the ML runtime. 
*) signature SCALA = sig exception Null - val function: string -> string -> string + val function: string -> string list -> string list + val function1: string -> string -> string end; structure Scala: SCALA = struct exception Null; local val new_id = string_of_int o Counter.make (); val results = - Synchronized.var "Scala.results" (Symtab.empty: string Exn.result Symtab.table); + Synchronized.var "Scala.results" (Symtab.empty: string list Exn.result Symtab.table); val _ = Protocol_Command.define "Scala.result" - (fn [id, tag, res] => + (fn id :: args => let val result = - (case tag of - "0" => Exn.Exn Null - | "1" => Exn.Res res - | "2" => Exn.Exn (ERROR res) - | "3" => Exn.Exn (Fail res) - | "4" => Exn.Exn Exn.Interrupt - | _ => raise Fail ("Bad tag: " ^ tag)); + (case args of + ["0"] => Exn.Exn Null + | "1" :: rest => Exn.Res rest + | ["2", msg] => Exn.Exn (ERROR msg) + | ["3", msg] => Exn.Exn (Fail msg) + | ["4"] => Exn.Exn Exn.Interrupt + | _ => raise Fail "Malformed Scala.result"); in Synchronized.change results (Symtab.map_entry id (K result)) end); in -fun function name arg = +fun function name args = Thread_Attributes.uninterruptible (fn restore_attributes => fn () => let val id = new_id (); fun invoke () = (Synchronized.change results (Symtab.update (id, Exn.Exn Match)); - Output.protocol_message (Markup.invoke_scala name id) [XML.Text arg]); + Output.protocol_message (Markup.invoke_scala name id) (map (single o XML.Text) args)); fun cancel () = (Synchronized.change results (Symtab.delete_safe id); Output.protocol_message (Markup.cancel_scala id) []); fun await_result () = Synchronized.guarded_access results (fn tab => (case Symtab.lookup tab id of SOME (Exn.Exn Match) => NONE | SOME result => SOME (result, Symtab.delete id tab) | NONE => SOME (Exn.Exn Exn.Interrupt, tab))); in invoke (); Exn.release (restore_attributes await_result ()) handle exn => (if Exn.is_interrupt exn then cancel () else (); Exn.reraise exn) end) (); +val function1 = singleton o function; + end; end; diff --git a/src/Pure/System/scala.scala b/src/Pure/System/scala.scala --- a/src/Pure/System/scala.scala +++ b/src/Pure/System/scala.scala @@ -1,265 +1,282 @@ /* Title: Pure/System/scala.scala Author: Makarius Support for Scala at runtime. 
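Registered functions now exchange lists of Bytes; the Fun_Strings and Fun_String subclasses below recover the list-of-strings and single-string calling conventions. A minimal sketch of a single-string function under the new interface, with an illustrative name and body, assuming the surrounding isabelle package:

  object Reverse_Lines extends Scala.Fun_String("reverse_lines")
  {
    val here = Scala_Project.here
    def apply(arg: String): String =
      split_lines(arg).reverse.mkString("\n")
  }

  class Extra_Functions extends Scala.Functions(Reverse_Lines)

From ML such a function would be reached via Scala.function1, which passes exactly one argument and expects exactly one result.
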
*/ package isabelle import java.io.{File => JFile, StringWriter, PrintWriter} import scala.tools.nsc.{GenericRunnerSettings, ConsoleWriter, NewLinePrintWriter} import scala.tools.nsc.interpreter.{IMain, Results} import scala.tools.nsc.interpreter.shell.ReplReporterImpl object Scala { /** registered functions **/ abstract class Fun(val name: String, val thread: Boolean = false) - extends Function[String, String] { override def toString: String = name + def multi: Boolean = true def position: Properties.T = here.position def here: Scala_Project.Here + def invoke(args: List[Bytes]): List[Bytes] + } + + abstract class Fun_Strings(name: String, thread: Boolean = false) + extends Fun(name, thread = thread) + { + override def invoke(args: List[Bytes]): List[Bytes] = + apply(args.map(_.text)).map(Bytes.apply) + def apply(args: List[String]): List[String] + } + + abstract class Fun_String(name: String, thread: Boolean = false) + extends Fun_Strings(name, thread = thread) + { + override def multi: Boolean = false + override def apply(args: List[String]): List[String] = + List(apply(Library.the_single(args))) def apply(arg: String): String } class Functions(val functions: Fun*) extends Isabelle_System.Service lazy val functions: List[Fun] = Isabelle_System.make_services(classOf[Functions]).flatMap(_.functions) /** demo functions **/ - object Echo extends Fun("echo") + object Echo extends Fun_String("echo") { val here = Scala_Project.here def apply(arg: String): String = arg } - object Sleep extends Fun("sleep") + object Sleep extends Fun_String("sleep") { val here = Scala_Project.here def apply(seconds: String): String = { val t = seconds match { case Value.Double(s) => Time.seconds(s) case _ => error("Malformed argument: " + quote(seconds)) } val t0 = Time.now() t.sleep val t1 = Time.now() (t1 - t0).toString } } /** compiler **/ object Compiler { def context( error: String => Unit = Exn.error, jar_dirs: List[JFile] = Nil): Context = { def find_jars(dir: JFile): List[String] = File.find_files(dir, file => file.getName.endsWith(".jar")). map(File.absolute_name) val class_path = for { prop <- List("isabelle.scala.classpath", "java.class.path") path = System.getProperty(prop, "") if path != "\"\"" elem <- space_explode(JFile.pathSeparatorChar, path) } yield elem val settings = new GenericRunnerSettings(error) settings.classpath.value = (class_path ::: jar_dirs.flatMap(find_jars)).mkString(JFile.pathSeparator) new Context(settings) } def default_print_writer: PrintWriter = new NewLinePrintWriter(new ConsoleWriter, true) class Context private [Compiler](val settings: GenericRunnerSettings) { override def toString: String = settings.toString def interpreter( print_writer: PrintWriter = default_print_writer, class_loader: ClassLoader = null): IMain = { new IMain(settings, new ReplReporterImpl(settings, print_writer)) { override def parentClassLoader: ClassLoader = if (class_loader == null) super.parentClassLoader else class_loader } } def toplevel(interpret: Boolean, source: String): List[String] = { val out = new StringWriter val interp = interpreter(new PrintWriter(out)) val marker = '\u000b' val ok = interp.withLabel(marker.toString) { if (interpret) interp.interpret(source) == Results.Success else (new interp.ReadEvalPrint).compile(source) } out.close() val Error = """(?s)^\S* error: (.*)$""".r val errors = space_explode(marker, Library.strip_ansi_color(out.toString)). 
collect({ case Error(msg) => "Scala error: " + Library.trim_line(msg) }) if (!ok && errors.isEmpty) List("Error") else errors } } } - object Toplevel extends Fun("scala_toplevel") + object Toplevel extends Fun_String("scala_toplevel") { val here = Scala_Project.here def apply(arg: String): String = { val (interpret, source) = YXML.parse_body(arg) match { case Nil => (false, "") case List(XML.Text(source)) => (false, source) case body => import XML.Decode._; pair(bool, string)(body) } val errors = try { Compiler.context().toplevel(interpret, source) } catch { case ERROR(msg) => List(msg) } locally { import XML.Encode._; YXML.string_of_body(list(string)(errors)) } } } /** invoke Scala functions from ML **/ /* invoke function */ object Tag extends Enumeration { val NULL, OK, ERROR, FAIL, INTERRUPT = Value } def function_thread(name: String): Boolean = functions.find(fun => fun.name == name) match { case Some(fun) => fun.thread case None => false } - def function_body(name: String, arg: String): (Tag.Value, String) = + def function_body(name: String, args: List[Bytes]): (Tag.Value, List[Bytes]) = functions.find(fun => fun.name == name) match { case Some(fun) => - Exn.capture { fun(arg) } match { - case Exn.Res(null) => (Tag.NULL, "") + Exn.capture { fun.invoke(args) } match { + case Exn.Res(null) => (Tag.NULL, Nil) case Exn.Res(res) => (Tag.OK, res) - case Exn.Exn(Exn.Interrupt()) => (Tag.INTERRUPT, "") - case Exn.Exn(e) => (Tag.ERROR, Exn.message(e)) + case Exn.Exn(Exn.Interrupt()) => (Tag.INTERRUPT, Nil) + case Exn.Exn(e) => (Tag.ERROR, List(Bytes(Exn.message(e)))) } - case None => (Tag.FAIL, "Unknown Isabelle/Scala function: " + quote(name)) + case None => (Tag.FAIL, List(Bytes("Unknown Isabelle/Scala function: " + quote(name)))) } /* protocol handler */ class Handler extends Session.Protocol_Handler { private var session: Session = null private var futures = Map.empty[String, Future[Unit]] override def init(session: Session): Unit = synchronized { this.session = session } override def exit(): Unit = synchronized { for ((id, future) <- futures) cancel(id, future) futures = Map.empty } - private def result(id: String, tag: Scala.Tag.Value, res: String): Unit = + private def result(id: String, tag: Scala.Tag.Value, res: List[Bytes]): Unit = synchronized { if (futures.isDefinedAt(id)) { - session.protocol_command("Scala.result", id, tag.id.toString, res) + session.protocol_command_raw("Scala.result", Bytes(id) :: Bytes(tag.id.toString) :: res) futures -= id } } private def cancel(id: String, future: Future[Unit]): Unit = { future.cancel() - result(id, Scala.Tag.INTERRUPT, "") + result(id, Scala.Tag.INTERRUPT, Nil) } private def invoke_scala(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case Markup.Invoke_Scala(name, id) => def body: Unit = { - val (tag, res) = Scala.function_body(name, msg.text) + val (tag, res) = Scala.function_body(name, msg.chunks) result(id, tag, res) } val future = if (Scala.function_thread(name)) { Future.thread(name = Isabelle_Thread.make_name(base = "invoke_scala"))(body) } else Future.fork(body) futures += (id -> future) true case _ => false } } private def cancel_scala(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case Markup.Cancel_Scala(id) => futures.get(id) match { case Some(future) => cancel(id, future) case None => } true case _ => false } } override val functions = List( Markup.Invoke_Scala.name -> invoke_scala, Markup.Cancel_Scala.name -> cancel_scala) } } class Scala_Functions extends 
Scala.Functions( Scala.Echo, Scala.Sleep, Scala.Toplevel, Doc.Doc_Names, Bash.Process, Bibtex.Check_Database, Isabelle_System.Make_Directory, Isabelle_System.Copy_Dir, Isabelle_System.Copy_File, Isabelle_System.Copy_File_Base, Isabelle_System.Rm_Tree, Isabelle_System.Download, Isabelle_System.Isabelle_Id, Isabelle_Tool.Isabelle_Tools, isabelle.atp.SystemOnTPTP.List_Systems, isabelle.atp.SystemOnTPTP.Run_System) diff --git a/src/Pure/Thy/bibtex.scala b/src/Pure/Thy/bibtex.scala --- a/src/Pure/Thy/bibtex.scala +++ b/src/Pure/Thy/bibtex.scala @@ -1,704 +1,704 @@ /* Title: Pure/Thy/bibtex.scala Author: Makarius BibTeX support. */ package isabelle import java.io.{File => JFile} import scala.collection.mutable import scala.util.parsing.combinator.RegexParsers import scala.util.parsing.input.Reader object Bibtex { /** file format **/ def is_bibtex(name: String): Boolean = name.endsWith(".bib") class File_Format extends isabelle.File_Format { val format_name: String = "bibtex" val file_ext: String = "bib" override def theory_suffix: String = "bibtex_file" override def theory_content(name: String): String = """theory "bib" imports Pure begin bibtex_file """ + Outer_Syntax.quote_string(name) + """ end""" override def html_document(snapshot: Document.Snapshot): Option[Presentation.HTML_Document] = { val name = snapshot.node_name if (detect(name.node)) { val title = "Bibliography " + quote(snapshot.node_name.path.file_name) val content = Isabelle_System.with_tmp_file("bib", "bib") { bib => File.write(bib, snapshot.node.source) Bibtex.html_output(List(bib), style = "unsort", title = title) } Some(Presentation.HTML_Document(title, content)) } else None } } /** bibtex errors **/ def bibtex_errors(dir: Path, root_name: String): List[String] = { val log_path = dir + Path.explode(root_name).ext("blg") if (log_path.is_file) { val Error1 = """^(I couldn't open database file .+)$""".r val Error2 = """^(.+)---line (\d+) of file (.+)""".r Line.logical_lines(File.read(log_path)).flatMap(line => line match { case Error1(msg) => Some("Bibtex error: " + msg) case Error2(msg, Value.Int(l), file) => val path = File.standard_path(file) if (Path.is_wellformed(path)) { val pos = Position.Line_File(l, (dir + Path.explode(path)).canonical.implode) Some("Bibtex error" + Position.here(pos) + ":\n " + msg) } else None case _ => None }) } else Nil } /** check database **/ def check_database(database: String): (List[(String, Position.T)], List[(String, Position.T)]) = { val chunks = parse(Line.normalize(database)) var chunk_pos = Map.empty[String, Position.T] val tokens = new mutable.ListBuffer[(Token, Position.T)] var line = 1 var offset = 1 def make_pos(length: Int): Position.T = Position.Offset(offset) ::: Position.End_Offset(offset + length) ::: Position.Line(line) def advance_pos(tok: Token): Unit = { for (s <- Symbol.iterator(tok.source)) { if (Symbol.is_newline(s)) line += 1 offset += 1 } } def get_line_pos(l: Int): Position.T = if (0 < l && l <= tokens.length) tokens(l - 1)._2 else Position.none for (chunk <- chunks) { val name = chunk.name if (name != "" && !chunk_pos.isDefinedAt(name)) { chunk_pos += (name -> make_pos(chunk.heading_length)) } for (tok <- chunk.tokens) { tokens += (tok.copy(source = tok.source.replace("\n", " ")) -> make_pos(tok.source.length)) advance_pos(tok) } } Isabelle_System.with_tmp_dir("bibtex")(tmp_dir => { File.write(tmp_dir + Path.explode("root.bib"), tokens.iterator.map(p => p._1.source).mkString("", "\n", "\n")) File.write(tmp_dir + Path.explode("root.aux"), 
"\\bibstyle{plain}\n\\bibdata{root}\n\\citation{*}") Isabelle_System.bash("\"$ISABELLE_BIBTEX\" root", cwd = tmp_dir.file) val Error = """^(.*)---line (\d+) of file root.bib$""".r val Warning = """^Warning--(.+)$""".r val Warning_Line = """--line (\d+) of file root.bib$""".r val Warning_in_Chunk = """^Warning--(.+) in (.+)$""".r val log_file = tmp_dir + Path.explode("root.blg") val lines = if (log_file.is_file) Line.logical_lines(File.read(log_file)) else Nil val (errors, warnings) = if (lines.isEmpty) (Nil, Nil) else { lines.zip(lines.tail ::: List("")).flatMap( { case (Error(msg, Value.Int(l)), _) => Some((true, (msg, get_line_pos(l)))) case (Warning_in_Chunk(msg, name), _) if chunk_pos.isDefinedAt(name) => Some((false, (Word.capitalize(msg + " in entry " + quote(name)), chunk_pos(name)))) case (Warning(msg), Warning_Line(Value.Int(l))) => Some((false, (Word.capitalize(msg), get_line_pos(l)))) case (Warning(msg), _) => Some((false, (Word.capitalize(msg), Position.none))) case _ => None } ).partition(_._1) } (errors.map(_._2), warnings.map(_._2)) }) } - object Check_Database extends Scala.Fun("bibtex_check_database") + object Check_Database extends Scala.Fun_String("bibtex_check_database") { val here = Scala_Project.here def apply(database: String): String = { import XML.Encode._ YXML.string_of_body(pair(list(pair(string, properties)), list(pair(string, properties)))( check_database(database))) } } /** document model **/ /* entries */ def entries(text: String): List[Text.Info[String]] = { val result = new mutable.ListBuffer[Text.Info[String]] var offset = 0 for (chunk <- Bibtex.parse(text)) { val end_offset = offset + chunk.source.length if (chunk.name != "" && !chunk.is_command) result += Text.Info(Text.Range(offset, end_offset), chunk.name) offset = end_offset } result.toList } def entries_iterator[A, B <: Document.Model](models: Map[A, B]) : Iterator[Text.Info[(String, B)]] = { for { (_, model) <- models.iterator info <- model.bibtex_entries.iterator } yield info.map((_, model)) } /* completion */ def completion[A, B <: Document.Model]( history: Completion.History, rendering: Rendering, caret: Text.Offset, models: Map[A, B]): Option[Completion.Result] = { for { Text.Info(r, name) <- rendering.citations(rendering.before_caret_range(caret)).headOption name1 <- Completion.clean_name(name) original <- rendering.get_text(r) original1 <- Completion.clean_name(Library.perhaps_unquote(original)) entries = (for { Text.Info(_, (entry, _)) <- entries_iterator(models) if entry.toLowerCase.containsSlice(name1.toLowerCase) && entry != original1 } yield entry).toList if entries.nonEmpty items = entries.sorted.map({ case entry => val full_name = Long_Name.qualify(Markup.CITATION, entry) val description = List(entry, "(BibTeX entry)") val replacement = quote(entry) Completion.Item(r, original, full_name, description, replacement, 0, false) }).sorted(history.ordering).take(rendering.options.int("completion_limit")) } yield Completion.Result(r, original, false, items) } /** content **/ private val months = List( "jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct", "nov", "dec") def is_month(s: String): Boolean = months.contains(s.toLowerCase) private val commands = List("preamble", "string") def is_command(s: String): Boolean = commands.contains(s.toLowerCase) sealed case class Entry( kind: String, required: List[String], optional_crossref: List[String], optional_other: List[String]) { val optional_standard: List[String] = List("url", "doi", "ee") def is_required(s: String): Boolean = 
required.contains(s.toLowerCase) def is_optional(s: String): Boolean = optional_crossref.contains(s.toLowerCase) || optional_other.contains(s.toLowerCase) || optional_standard.contains(s.toLowerCase) def fields: List[String] = required ::: optional_crossref ::: optional_other ::: optional_standard def template: String = "@" + kind + "{,\n" + fields.map(x => " " + x + " = {},\n").mkString + "}\n" } val known_entries: List[Entry] = List( Entry("Article", List("author", "title"), List("journal", "year"), List("volume", "number", "pages", "month", "note")), Entry("InProceedings", List("author", "title"), List("booktitle", "year"), List("editor", "volume", "number", "series", "pages", "month", "address", "organization", "publisher", "note")), Entry("InCollection", List("author", "title", "booktitle"), List("publisher", "year"), List("editor", "volume", "number", "series", "type", "chapter", "pages", "edition", "month", "address", "note")), Entry("InBook", List("author", "editor", "title", "chapter"), List("publisher", "year"), List("volume", "number", "series", "type", "address", "edition", "month", "pages", "note")), Entry("Proceedings", List("title", "year"), List(), List("booktitle", "editor", "volume", "number", "series", "address", "month", "organization", "publisher", "note")), Entry("Book", List("author", "editor", "title"), List("publisher", "year"), List("volume", "number", "series", "address", "edition", "month", "note")), Entry("Booklet", List("title"), List(), List("author", "howpublished", "address", "month", "year", "note")), Entry("PhdThesis", List("author", "title", "school", "year"), List(), List("type", "address", "month", "note")), Entry("MastersThesis", List("author", "title", "school", "year"), List(), List("type", "address", "month", "note")), Entry("TechReport", List("author", "title", "institution", "year"), List(), List("type", "number", "address", "month", "note")), Entry("Manual", List("title"), List(), List("author", "organization", "address", "edition", "month", "year", "note")), Entry("Unpublished", List("author", "title", "note"), List(), List("month", "year")), Entry("Misc", List(), List(), List("author", "title", "howpublished", "month", "year", "note"))) def get_entry(kind: String): Option[Entry] = known_entries.find(entry => entry.kind.toLowerCase == kind.toLowerCase) def is_entry(kind: String): Boolean = get_entry(kind).isDefined /** tokens and chunks **/ object Token { object Kind extends Enumeration { val COMMAND = Value("command") val ENTRY = Value("entry") val KEYWORD = Value("keyword") val NAT = Value("natural number") val STRING = Value("string") val NAME = Value("name") val IDENT = Value("identifier") val SPACE = Value("white space") val COMMENT = Value("ignored text") val ERROR = Value("bad input") } } sealed case class Token(kind: Token.Kind.Value, source: String) { def is_kind: Boolean = kind == Token.Kind.COMMAND || kind == Token.Kind.ENTRY || kind == Token.Kind.IDENT def is_name: Boolean = kind == Token.Kind.NAME || kind == Token.Kind.IDENT def is_ignored: Boolean = kind == Token.Kind.SPACE || kind == Token.Kind.COMMENT def is_malformed: Boolean = kind == Token.Kind.ERROR def is_open: Boolean = kind == Token.Kind.KEYWORD && (source == "{" || source == "(") } case class Chunk(kind: String, tokens: List[Token]) { val source = tokens.map(_.source).mkString private val content: Option[List[Token]] = tokens match { case Token(Token.Kind.KEYWORD, "@") :: body if body.nonEmpty => (body.init.filterNot(_.is_ignored), body.last) match { case (tok :: 
Token(Token.Kind.KEYWORD, "{") :: toks, Token(Token.Kind.KEYWORD, "}")) if tok.is_kind => Some(toks) case (tok :: Token(Token.Kind.KEYWORD, "(") :: toks, Token(Token.Kind.KEYWORD, ")")) if tok.is_kind => Some(toks) case _ => None } case _ => None } def name: String = content match { case Some(tok :: _) if tok.is_name => tok.source case _ => "" } def heading_length: Int = if (name == "") 1 else { tokens.takeWhile(tok => !tok.is_open).foldLeft(0) { case (n, tok) => n + tok.source.length } } def is_ignored: Boolean = kind == "" && tokens.forall(_.is_ignored) def is_malformed: Boolean = kind == "" || tokens.exists(_.is_malformed) def is_command: Boolean = Bibtex.is_command(kind) && name != "" && content.isDefined def is_entry: Boolean = Bibtex.is_entry(kind) && name != "" && content.isDefined } /** parsing **/ // context of partial line-oriented scans abstract class Line_Context case object Ignored extends Line_Context case object At extends Line_Context case class Item_Start(kind: String) extends Line_Context case class Item_Open(kind: String, end: String) extends Line_Context case class Item(kind: String, end: String, delim: Delimited) extends Line_Context case class Delimited(quoted: Boolean, depth: Int) val Closed = Delimited(false, 0) private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source) private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source) // See also https://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web // module @. object Parsers extends RegexParsers { /* white space and comments */ override val whiteSpace = "".r private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE) private val spaces = rep(space) /* ignored text */ private val ignored: Parser[Chunk] = rep1("""(?i)([^@]+|@[ \t]*comment)""".r) ^^ { case ss => Chunk("", List(Token(Token.Kind.COMMENT, ss.mkString))) } private def ignored_line: Parser[(Chunk, Line_Context)] = ignored ^^ { case a => (a, Ignored) } /* delimited string: outermost "..." or {...} and body with balanced {...} */ // see also bibtex.web: scan_a_field_token_and_eat_white, scan_balanced_braces private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] = new Parser[(String, Delimited)] { require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0, "bad delimiter depth") def apply(in: Input) = { val start = in.offset val end = in.source.length var i = start var q = delim.quoted var d = delim.depth var finished = false while (!finished && i < end) { val c = in.source.charAt(i) if (c == '"' && d == 0) { i += 1; d = 1; q = true } else if (c == '"' && d == 1 && q) { i += 1; d = 0; q = false; finished = true } else if (c == '{') { i += 1; d += 1 } else if (c == '}') { if (d == 1 && !q || d > 1) { i += 1; d -= 1; if (d == 0) finished = true } else {i = start; finished = true } } else if (d > 0) i += 1 else finished = true } if (i == start) Failure("bad input", in) else { val s = in.source.subSequence(start, i).toString Success((s, Delimited(q, d)), in.drop(i - start)) } } }.named("delimited_depth") private def delimited: Parser[Token] = delimited_depth(Closed) ^? 
  /** parsing **/

  // context of partial line-oriented scans
  abstract class Line_Context
  case object Ignored extends Line_Context
  case object At extends Line_Context
  case class Item_Start(kind: String) extends Line_Context
  case class Item_Open(kind: String, end: String) extends Line_Context
  case class Item(kind: String, end: String, delim: Delimited) extends Line_Context

  case class Delimited(quoted: Boolean, depth: Int)
  val Closed = Delimited(false, 0)

  private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source)
  private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source)


  // See also https://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web
  // module @.

  object Parsers extends RegexParsers
  {
    /* white space and comments */

    override val whiteSpace = "".r

    private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE)
    private val spaces = rep(space)


    /* ignored text */

    private val ignored: Parser[Chunk] =
      rep1("""(?i)([^@]+|@[ \t]*comment)""".r) ^^ {
        case ss => Chunk("", List(Token(Token.Kind.COMMENT, ss.mkString))) }

    private def ignored_line: Parser[(Chunk, Line_Context)] =
      ignored ^^ { case a => (a, Ignored) }


    /* delimited string: outermost "..." or {...} and body with balanced {...} */

    // see also bibtex.web: scan_a_field_token_and_eat_white, scan_balanced_braces
    private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] =
      new Parser[(String, Delimited)]
      {
        require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0, "bad delimiter depth")

        def apply(in: Input) =
        {
          val start = in.offset
          val end = in.source.length

          var i = start
          var q = delim.quoted
          var d = delim.depth
          var finished = false
          while (!finished && i < end) {
            val c = in.source.charAt(i)

            if (c == '"' && d == 0) { i += 1; d = 1; q = true }
            else if (c == '"' && d == 1 && q) { i += 1; d = 0; q = false; finished = true }
            else if (c == '{') { i += 1; d += 1 }
            else if (c == '}') {
              if (d == 1 && !q || d > 1) { i += 1; d -= 1; if (d == 0) finished = true }
              else { i = start; finished = true }
            }
            else if (d > 0) i += 1
            else finished = true
          }
          if (i == start) Failure("bad input", in)
          else {
            val s = in.source.subSequence(start, i).toString
            Success((s, Delimited(q, d)), in.drop(i - start))
          }
        }
      }.named("delimited_depth")

    private def delimited: Parser[Token] =
      delimited_depth(Closed) ^?
        { case (s, delim) if delim == Closed => Token(Token.Kind.STRING, s) }

    private def recover_delimited: Parser[Token] =
      """["{][^@]*""".r ^^ token(Token.Kind.ERROR)

    def delimited_line(ctxt: Item): Parser[(Chunk, Line_Context)] =
      delimited_depth(ctxt.delim) ^^ { case (s, delim1) =>
        (Chunk(ctxt.kind, List(Token(Token.Kind.STRING, s))), ctxt.copy(delim = delim1)) } |
      recover_delimited ^^ { case a => (Chunk(ctxt.kind, List(a)), Ignored) }


    /* other tokens */

    private val at = "@" ^^ keyword

    private val nat = "[0-9]+".r ^^ token(Token.Kind.NAT)

    private val name = """[\x21-\x7f&&[^"#%'(),={}]]+""".r ^^ token(Token.Kind.NAME)

    private val identifier =
      """[\x21-\x7f&&[^"#%'(),={}0-9]][\x21-\x7f&&[^"#%'(),={}]]*""".r

    private val ident = identifier ^^ token(Token.Kind.IDENT)

    val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))


    /* body */

    private val body =
      delimited | (recover_delimited | other_token)

    private def body_line(ctxt: Item) =
      if (ctxt.delim.depth > 0)
        delimited_line(ctxt)
      else
        delimited_line(ctxt) |
        other_token ^^ { case a => (Chunk(ctxt.kind, List(a)), ctxt) } |
        ctxt.end ^^ { case a => (Chunk(ctxt.kind, List(keyword(a))), Ignored) }


    /* items: command or entry */

    private val item_kind =
      identifier ^^ { case a =>
        val kind =
          if (is_command(a)) Token.Kind.COMMAND
          else if (is_entry(a)) Token.Kind.ENTRY
          else Token.Kind.IDENT
        Token(kind, a)
      }

    private val item_begin =
      "{" ^^ { case a => ("}", keyword(a)) } |
      "(" ^^ { case a => (")", keyword(a)) }

    private def item_name(kind: String) =
      kind.toLowerCase match {
        case "preamble" => failure("")
        case "string" => identifier ^^ token(Token.Kind.NAME)
        case _ => name
      }

    private val item_start =
      at ~ spaces ~ item_kind ~ spaces ^^
        { case a ~ b ~ c ~ d => (c.source, List(a) ::: b ::: List(c) ::: d) }

    private val item: Parser[Chunk] =
      (item_start ~ item_begin ~ spaces) into
        { case (kind, a) ~ ((end, b)) ~ c =>
            opt(item_name(kind)) ~ rep(body) ~ opt(end ^^ keyword) ^^ {
              case d ~ e ~ f => Chunk(kind, a ::: List(b) ::: c ::: d.toList ::: e ::: f.toList) } }

    private val recover_item: Parser[Chunk] =
      at ~ "[^@]*".r ^^ { case a ~ b => Chunk("", List(a, Token(Token.Kind.ERROR, b))) }


    /* chunks */

    val chunk: Parser[Chunk] = ignored | (item | recover_item)

    def chunk_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
    {
      ctxt match {
        case Ignored =>
          ignored_line |
          at ^^ { case a => (Chunk("", List(a)), At) }

        case At =>
          space ^^ { case a => (Chunk("", List(a)), ctxt) } |
          item_kind ^^ { case a => (Chunk(a.source, List(a)), Item_Start(a.source)) } |
          recover_item ^^ { case a => (a, Ignored) } |
          ignored_line

        case Item_Start(kind) =>
          space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
          item_begin ^^ { case (end, a) => (Chunk(kind, List(a)), Item_Open(kind, end)) } |
          recover_item ^^ { case a => (a, Ignored) } |
          ignored_line

        case Item_Open(kind, end) =>
          space ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
          item_name(kind) ^^ { case a => (Chunk(kind, List(a)), Item(kind, end, Closed)) } |
          body_line(Item(kind, end, Closed)) |
          ignored_line

        case item_ctxt: Item =>
          body_line(item_ctxt) |
          ignored_line

        case _ => failure("")
      }
    }
  }


  /* parse */

  def parse(input: CharSequence): List[Chunk] =
    Parsers.parseAll(Parsers.rep(Parsers.chunk), Scan.char_reader(input)) match {
      case Parsers.Success(result, _) => result
      case _ => error("Unexpected failure to parse input:\n" + input.toString)
    }
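  /* Editor's sketch (not part of the original sources): parse above handles a
     complete piece of text, while parse_line below scans incrementally by
     threading a Line_Context through successive calls, e.g.

       val chunks = Bibtex.parse("@Article{foo, author = {X}, title = {Y}}")
       val (chunks1, ctxt1) = Bibtex.parse_line("@Article{foo,", Bibtex.Ignored)
       val (chunks2, ctxt2) = Bibtex.parse_line("  author = {X}}", ctxt1)
  */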
  def parse_line(input: CharSequence, context: Line_Context): (List[Chunk], Line_Context) =
  {
    var in: Reader[Char] = Scan.char_reader(input)
    val chunks = new mutable.ListBuffer[Chunk]
    var ctxt = context
    while (!in.atEnd) {
      Parsers.parse(Parsers.chunk_line(ctxt), in) match {
        case Parsers.Success((x, c), rest) => chunks += x; ctxt = c; in = rest
        case Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure to parse input:\n" + rest.source.toString)
      }
    }
    (chunks.toList, ctxt)
  }



  /** HTML output **/

  private val output_styles =
    List(
      "" -> "html-n",
      "plain" -> "html-n",
      "alpha" -> "html-a",
      "named" -> "html-n",
      "paragraph" -> "html-n",
      "unsort" -> "html-u",
      "unsortlist" -> "html-u")

  def html_output(bib: List[Path],
    title: String = "Bibliography",
    body: Boolean = false,
    citations: List[String] = List("*"),
    style: String = "",
    chronological: Boolean = false): String =
  {
    Isabelle_System.with_tmp_dir("bibtex")(tmp_dir =>
      {
        /* database files */

        val bib_files = bib.map(_.drop_ext)
        val bib_names =
        {
          val names0 = bib_files.map(_.file_name)
          if (Library.duplicates(names0).isEmpty) names0
          else names0.zipWithIndex.map({ case (name, i) => (i + 1).toString + "-" + name })
        }

        for ((a, b) <- bib_files zip bib_names) {
          Isabelle_System.copy_file(a.ext("bib"), tmp_dir + Path.basic(b).ext("bib"))
        }


        /* style file */

        val bst =
          output_styles.toMap.get(style) match {
            case Some(base) => base + (if (chronological) "c" else "") + ".bst"
            case None =>
              error("Bad style for bibtex HTML output: " + quote(style) +
                "\n(expected: " + commas_quote(output_styles.map(_._1)) + ")")
          }
        Isabelle_System.copy_file(Path.explode("$BIB2XHTML_HOME/bst") + Path.explode(bst), tmp_dir)


        /* result */

        val in_file = Path.explode("bib.aux")
        val out_file = Path.explode("bib.html")

        File.write(tmp_dir + in_file,
          bib_names.mkString("\\bibdata{", ",", "}\n") +
          citations.map(cite => "\\citation{" + cite + "}\n").mkString)

        Isabelle_System.bash(
          "\"$BIB2XHTML_HOME/main/bib2xhtml.pl\" -B \"$ISABELLE_BIBTEX\"" +
          " -u -s " + Bash.string(proper_string(style) getOrElse "empty") +
          (if (chronological) " -c" else "") +
          (if (title != "") " -h " + Bash.string(title) + " " else "") +
          " " + File.bash_path(in_file) + " " + File.bash_path(out_file),
          cwd = tmp_dir.file).check

        val html = File.read(tmp_dir + out_file)

        if (body) {
          cat_lines(
            split_lines(html).
              dropWhile(line => !line.startsWith("