diff --git a/src/Pure/Admin/build_status.scala b/src/Pure/Admin/build_status.scala --- a/src/Pure/Admin/build_status.scala +++ b/src/Pure/Admin/build_status.scala @@ -1,630 +1,630 @@ /* Title: Pure/Admin/build_status.scala Author: Makarius Present recent build status information from database. */ package isabelle object Build_Status { /* defaults */ val default_target_dir = Path.explode("build_status") val default_image_size = (800, 600) val default_history = 30 def default_profiles: List[Profile] = Jenkins.build_status_profiles ::: Isabelle_Cronjob.build_status_profiles /* data profiles */ sealed case class Profile( description: String, history: Int = 0, afp: Boolean = false, bulky: Boolean = false, sql: String ) { def days(options: Options): Int = options.int("build_log_history") max history def stretch(options: Options): Double = (days(options) max default_history min (default_history * 5)).toDouble / default_history def select( options: Options, columns: List[SQL.Column], only_sessions: Set[String] ): PostgreSQL.Source = { Build_Log.Data.universal_table.select(columns, distinct = true, sql = "WHERE " + Build_Log.Data.pull_date(afp) + " > " + Build_Log.Data.recent_time(days(options)) + " AND " + SQL.member(Build_Log.Data.status.ident, List( Build_Log.Session_Status.finished.toString, Build_Log.Session_Status.failed.toString)) + (if (only_sessions.isEmpty) "" else " AND " + SQL.member(Build_Log.Data.session_name.ident, only_sessions)) + " AND " + SQL.enclose(sql)) } } /* build status */ def build_status(options: Options, progress: Progress = new Progress, profiles: List[Profile] = default_profiles, only_sessions: Set[String] = Set.empty, verbose: Boolean = false, target_dir: Path = default_target_dir, ml_statistics: Boolean = false, image_size: (Int, Int) = default_image_size ): Unit = { val ml_statistics_domain = Iterator(ML_Statistics.heap_fields, ML_Statistics.program_fields, ML_Statistics.tasks_fields, ML_Statistics.workers_fields).flatMap(_._2).toSet val data = read_data(options, progress = progress, profiles = profiles, only_sessions = only_sessions, verbose = verbose, ml_statistics = ml_statistics, ml_statistics_domain = ml_statistics_domain) present_data(data, progress = progress, target_dir = target_dir, image_size = image_size) } /* read data */ sealed case class Data(date: Date, entries: List[Data_Entry]) sealed case class Data_Entry( name: String, hosts: List[String], stretch: Double, sessions: List[Session] ) { def failed_sessions: List[Session] = sessions.filter(_.head.failed).sortBy(_.name) } sealed case class Session( name: String, threads: Int, entries: List[Entry], ml_statistics: ML_Statistics, ml_statistics_date: Long ) { require(entries.nonEmpty, "no entries") lazy val sorted_entries: List[Entry] = entries.sortBy(entry => - entry.date) def head: Entry = sorted_entries.head def order: Long = - head.timing.elapsed.ms def finished_entries: List[Entry] = sorted_entries.filter(_.finished) def finished_entries_size: Int = finished_entries.map(_.date).toSet.size def check_timing: Boolean = finished_entries_size >= 3 def check_heap: Boolean = finished_entries_size >= 3 && finished_entries.forall(entry => entry.maximum_heap > 0 || entry.average_heap > 0 || entry.stored_heap > 0) def make_csv: CSV.File = { val header = List("session_name", "chapter", "pull_date", "afp_pull_date", "isabelle_version", "afp_version", "timing_elapsed", "timing_cpu", "timing_gc", "ml_timing_elapsed", "ml_timing_cpu", "ml_timing_gc", "maximum_code", "average_code", "maximum_stack", "average_stack", 
"maximum_heap", "average_heap", "stored_heap", "status") val date_format = Date.Format("uuuu-MM-dd HH:mm:ss") val records = for (entry <- sorted_entries) yield { CSV.Record(name, entry.chapter, date_format(entry.pull_date), entry.afp_pull_date match { case Some(date) => date_format(date) case None => "" }, entry.isabelle_version, entry.afp_version, entry.timing.elapsed.ms, entry.timing.cpu.ms, entry.timing.gc.ms, entry.ml_timing.elapsed.ms, entry.ml_timing.cpu.ms, entry.ml_timing.gc.ms, entry.maximum_code, entry.average_code, entry.maximum_stack, entry.average_stack, entry.maximum_heap, entry.average_heap, entry.stored_heap, entry.status) } CSV.File(name, header, records) } } sealed case class Entry( chapter: String, pull_date: Date, afp_pull_date: Option[Date], isabelle_version: String, afp_version: String, timing: Timing, ml_timing: Timing, maximum_code: Long, average_code: Long, maximum_stack: Long, average_stack: Long, maximum_heap: Long, average_heap: Long, stored_heap: Long, status: Build_Log.Session_Status.Value, errors: List[String] ) { val date: Long = (afp_pull_date getOrElse pull_date).unix_epoch def finished: Boolean = status == Build_Log.Session_Status.finished def failed: Boolean = status == Build_Log.Session_Status.failed def present_errors(name: String): XML.Body = { if (errors.isEmpty) HTML.text(name + print_version(isabelle_version, afp_version, chapter)) else { HTML.tooltip_errors(HTML.text(name), errors.map(s => HTML.text(Symbol.decode(s)))) :: HTML.text(print_version(isabelle_version, afp_version, chapter)) } } } sealed case class Image(name: String, width: Int, height: Int) { def path: Path = Path.basic(name) } def print_version( isabelle_version: String, afp_version: String = "", chapter: String = AFP.chapter ): String = { val body = proper_string(isabelle_version).map("Isabelle/" + _).toList ::: (if (chapter == AFP.chapter) proper_string(afp_version).map("AFP/" + _) else None).toList if (body.isEmpty) "" else body.mkString(" (", ", ", ")") } def read_data(options: Options, progress: Progress = new Progress, profiles: List[Profile] = default_profiles, only_sessions: Set[String] = Set.empty, ml_statistics: Boolean = false, ml_statistics_domain: String => Boolean = _ => true, verbose: Boolean = false ): Data = { val date = Date.now() var data_hosts = Map.empty[String, Set[String]] var data_stretch = Map.empty[String, Double] var data_entries = Map.empty[String, Map[String, Session]] def get_hosts(data_name: String): Set[String] = data_hosts.getOrElse(data_name, Set.empty) val store = Build_Log.store(options) using(store.open_database()) { db => for (profile <- profiles.sortBy(_.description)) { progress.echo("input " + quote(profile.description)) val afp = profile.afp val columns = List( Build_Log.Data.pull_date(afp = false), Build_Log.Data.pull_date(afp = true), Build_Log.Prop.build_host, Build_Log.Prop.isabelle_version, Build_Log.Prop.afp_version, Build_Log.Settings.ISABELLE_BUILD_OPTIONS, Build_Log.Settings.ML_PLATFORM, Build_Log.Data.session_name, Build_Log.Data.chapter, Build_Log.Data.groups, Build_Log.Data.threads, Build_Log.Data.timing_elapsed, Build_Log.Data.timing_cpu, Build_Log.Data.timing_gc, Build_Log.Data.ml_timing_elapsed, Build_Log.Data.ml_timing_cpu, Build_Log.Data.ml_timing_gc, Build_Log.Data.heap_size, Build_Log.Data.status, Build_Log.Data.errors) ::: (if (ml_statistics) List(Build_Log.Data.ml_statistics) else Nil) val Threads_Option = """threads\s*=\s*(\d+)""".r val sql = profile.select(options, columns, only_sessions) progress.echo_if(verbose, sql) 
db.using_statement(sql) { stmt => val res = stmt.execute_query() while (res.next()) { val session_name = res.string(Build_Log.Data.session_name) val chapter = res.string(Build_Log.Data.chapter) val groups = split_lines(res.string(Build_Log.Data.groups)) val threads = { val threads1 = res.string(Build_Log.Settings.ISABELLE_BUILD_OPTIONS) match { case Threads_Option(Value.Int(i)) => i case _ => 1 } val threads2 = res.get_int(Build_Log.Data.threads).getOrElse(1) threads1 max threads2 } val ml_platform = res.string(Build_Log.Settings.ML_PLATFORM) val ml_platform_64 = ml_platform.startsWith("x86_64-") || ml_platform.startsWith("arm64-") val data_name = profile.description + (if (ml_platform_64) ", 64bit" else "") + (if (threads == 1) "" else ", " + threads + " threads") res.get_string(Build_Log.Prop.build_host).foreach(host => data_hosts += (data_name -> (get_hosts(data_name) + host))) data_stretch += (data_name -> profile.stretch(options)) val isabelle_version = res.string(Build_Log.Prop.isabelle_version) val afp_version = res.string(Build_Log.Prop.afp_version) val ml_stats = ML_Statistics( if (ml_statistics) { Properties.uncompress(res.bytes(Build_Log.Data.ml_statistics), cache = store.cache) } else Nil, domain = ml_statistics_domain, heading = session_name + print_version(isabelle_version, afp_version, chapter)) val entry = Entry( chapter = chapter, pull_date = res.date(Build_Log.Data.pull_date(afp = false)), afp_pull_date = if (afp) res.get_date(Build_Log.Data.pull_date(afp = true)) else None, isabelle_version = isabelle_version, afp_version = afp_version, timing = res.timing( Build_Log.Data.timing_elapsed, Build_Log.Data.timing_cpu, Build_Log.Data.timing_gc), ml_timing = res.timing( Build_Log.Data.ml_timing_elapsed, Build_Log.Data.ml_timing_cpu, Build_Log.Data.ml_timing_gc), maximum_code = ml_stats.maximum(ML_Statistics.CODE_SIZE).toLong, average_code = ml_stats.average(ML_Statistics.CODE_SIZE).toLong, maximum_stack = ml_stats.maximum(ML_Statistics.STACK_SIZE).toLong, average_stack = ml_stats.average(ML_Statistics.STACK_SIZE).toLong, maximum_heap = ml_stats.maximum(ML_Statistics.HEAP_SIZE).toLong, average_heap = ml_stats.average(ML_Statistics.HEAP_SIZE).toLong, stored_heap = ML_Statistics.mem_scale(res.long(Build_Log.Data.heap_size)), status = Build_Log.Session_Status.withName(res.string(Build_Log.Data.status)), errors = Build_Log.uncompress_errors( res.bytes(Build_Log.Data.errors), cache = store.cache)) val sessions = data_entries.getOrElse(data_name, Map.empty) val session = sessions.get(session_name) match { case None => Session(session_name, threads, List(entry), ml_stats, entry.date) case Some(old) => val (ml_stats1, ml_stats1_date) = if (entry.date > old.ml_statistics_date) (ml_stats, entry.date) else (old.ml_statistics, old.ml_statistics_date) Session(session_name, threads, entry :: old.entries, ml_stats1, ml_stats1_date) } if ((!afp || chapter == AFP.chapter) && (!profile.bulky || groups.exists(AFP.groups_bulky.toSet))) { data_entries += (data_name -> (sessions + (session_name -> session))) } } } } } val sorted_entries = (for { (name, sessions) <- data_entries.toList sorted_sessions <- proper_list(sessions.toList.map(_._2).sortBy(_.order)) } yield { val hosts = get_hosts(name).toList.sorted val stretch = data_stretch(name) Data_Entry(name, hosts, stretch, sorted_sessions) }).sortBy(_.name) Data(date, sorted_entries) } /* present data */ def present_data(data: Data, progress: Progress = new Progress, target_dir: Path = default_target_dir, image_size: (Int, Int) = default_image_size 
): Unit = { def clean_name(name: String): String = name.flatMap(c => if (c == ' ' || c == '/') "_" else if (c == ',') "" else c.toString) HTML.write_document(target_dir, "index.html", List(HTML.title("Isabelle build status")), List(HTML.chapter("Isabelle build status"), HTML.par( List(HTML.description( List(HTML.text("status date:") -> HTML.text(data.date.toString))))), HTML.par( List(HTML.itemize(data.entries.map(data_entry => List( HTML.link(clean_name(data_entry.name) + "/index.html", HTML.text(data_entry.name))) ::: (data_entry.failed_sessions match { case Nil => Nil case sessions => HTML.break ::: List(HTML.span(HTML.error_message, HTML.text("Failed sessions:"))) ::: List(HTML.itemize(sessions.map(s => s.head.present_errors(s.name)))) }) )))))) for (data_entry <- data.entries) { val data_name = data_entry.name val (image_width, image_height) = image_size val image_width_stretch = (image_width * data_entry.stretch).toInt progress.echo("output " + quote(data_name)) val dir = Isabelle_System.make_directory(target_dir + Path.basic(clean_name(data_name))) val data_files = (for (session <- data_entry.sessions) yield { val csv_file = session.make_csv csv_file.write(dir) session.name -> csv_file }).toMap val session_plots = Par_List.map((session: Session) => Isabelle_System.with_tmp_file(session.name, "data") { data_file => Isabelle_System.with_tmp_file(session.name, "gnuplot") { gnuplot_file => def plot_name(kind: String): String = session.name + "_" + kind + ".png" File.write(data_file, cat_lines( session.finished_entries.map(entry => List(entry.date.toString, entry.timing.elapsed.minutes.toString, entry.timing.resources.minutes.toString, entry.ml_timing.elapsed.minutes.toString, entry.ml_timing.resources.minutes.toString, entry.maximum_code.toString, entry.average_code.toString, entry.maximum_stack.toString, entry.average_stack.toString, entry.maximum_heap.toString, entry.average_heap.toString, entry.stored_heap.toString).mkString(" ")))) val max_time = (session.finished_entries.foldLeft(0.0) { case (m, entry) => m.max(entry.timing.elapsed.minutes). max(entry.timing.resources.minutes). max(entry.ml_timing.elapsed.minutes). 
max(entry.ml_timing.resources.minutes) } max 0.1) * 1.1 val timing_range = "[0:" + max_time + "]" def gnuplot(plot_name: String, plots: List[String], range: String): Image = { val image = Image(plot_name, image_width_stretch, image_height) File.write(gnuplot_file, """ set terminal png size """ + image.width + "," + image.height + """ set output """ + quote(File.standard_path(dir + image.path)) + """ set xdata time set timefmt "%s" set format x "%d-%b" set xlabel """ + quote(session.name) + """ noenhanced set key left bottom plot [] """ + range + " " + plots.map(s => quote(data_file.implode) + " " + s).mkString(", ") + "\n") val result = Isabelle_System.bash("\"$ISABELLE_GNUPLOT\" " + File.bash_path(gnuplot_file)) if (!result.ok) result.error("Gnuplot failed for " + data_name + "/" + plot_name).check image } val timing_plots = { val plots1 = List( """ using 1:2 smooth sbezier title "elapsed time (smooth)" """, """ using 1:2 smooth csplines title "elapsed time" """) val plots2 = List( """ using 1:3 smooth sbezier title "cpu time (smooth)" """, """ using 1:3 smooth csplines title "cpu time" """) if (session.threads == 1) plots1 else plots1 ::: plots2 } val ml_timing_plots = List( """ using 1:4 smooth sbezier title "ML elapsed time (smooth)" """, """ using 1:4 smooth csplines title "ML elapsed time" """, """ using 1:5 smooth sbezier title "ML cpu time (smooth)" """, """ using 1:5 smooth csplines title "ML cpu time" """) val heap_plots = List( """ using 1:10 smooth sbezier title "heap maximum (smooth)" """, """ using 1:10 smooth csplines title "heap maximum" """, """ using 1:11 smooth sbezier title "heap average (smooth)" """, """ using 1:11 smooth csplines title "heap average" """, """ using 1:12 smooth sbezier title "heap stored (smooth)" """, """ using 1:12 smooth csplines title "heap stored" """) def jfreechart(plot_name: String, fields: ML_Statistics.Fields): Image = { val image = Image(plot_name, image_width, image_height) val chart = session.ml_statistics.chart( - fields._1 + ": " + session.ml_statistics.heading, fields._2) + fields.title + ": " + session.ml_statistics.heading, fields.names) Graphics_File.write_chart_png( (dir + image.path).file, chart, image.width, image.height) image } val images = (if (session.check_timing) List( gnuplot(plot_name("timing"), timing_plots, timing_range), gnuplot(plot_name("ml_timing"), ml_timing_plots, timing_range)) else Nil) ::: (if (session.check_heap) List(gnuplot(plot_name("heap"), heap_plots, "[0:]")) else Nil) ::: (if (session.ml_statistics.content.nonEmpty) List(jfreechart(plot_name("heap_chart"), ML_Statistics.heap_fields), jfreechart(plot_name("program_chart"), ML_Statistics.program_fields)) ::: (if (session.threads > 1) List( jfreechart(plot_name("tasks_chart"), ML_Statistics.tasks_fields), jfreechart(plot_name("workers_chart"), ML_Statistics.workers_fields)) else Nil) else Nil) session.name -> images } }, data_entry.sessions).toMap HTML.write_document(dir, "index.html", List(HTML.title("Isabelle build status for " + data_name)), HTML.chapter("Isabelle build status for " + data_name) :: HTML.par( List(HTML.description( List( HTML.text("status date:") -> HTML.text(data.date.toString), HTML.text("build host:") -> HTML.text(commas(data_entry.hosts)))))) :: HTML.par( List(HTML.itemize( data_entry.sessions.map(session => HTML.link("#session_" + session.name, HTML.text(session.name)) :: HTML.text(" (" + session.head.timing.message_resources + ")"))))) :: data_entry.sessions.flatMap(session => List( HTML.section(HTML.id("session_" + session.name), 
session.name), HTML.par( HTML.description( List( HTML.text("data:") -> List(HTML.link(data_files(session.name).file_name, HTML.text("CSV"))), HTML.text("timing:") -> HTML.text(session.head.timing.message_resources), HTML.text("ML timing:") -> HTML.text(session.head.ml_timing.message_resources)) ::: ML_Statistics.mem_print(session.head.maximum_code).map(s => HTML.text("code maximum:") -> HTML.text(s)).toList ::: ML_Statistics.mem_print(session.head.average_code).map(s => HTML.text("code average:") -> HTML.text(s)).toList ::: ML_Statistics.mem_print(session.head.maximum_stack).map(s => HTML.text("stack maximum:") -> HTML.text(s)).toList ::: ML_Statistics.mem_print(session.head.average_stack).map(s => HTML.text("stack average:") -> HTML.text(s)).toList ::: ML_Statistics.mem_print(session.head.maximum_heap).map(s => HTML.text("heap maximum:") -> HTML.text(s)).toList ::: ML_Statistics.mem_print(session.head.average_heap).map(s => HTML.text("heap average:") -> HTML.text(s)).toList ::: ML_Statistics.mem_print(session.head.stored_heap).map(s => HTML.text("heap stored:") -> HTML.text(s)).toList ::: proper_string(session.head.isabelle_version).map(s => HTML.text("Isabelle version:") -> HTML.text(s)).toList ::: proper_string(session.head.afp_version).map(s => HTML.text("AFP version:") -> HTML.text(s)).toList) :: session_plots.getOrElse(session.name, Nil).map(image => HTML.size(image.width / 2, image.height / 2)(HTML.image(image.name))))))) } } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_status", "present recent build status information from database", Scala_Project.here, { args => var target_dir = default_target_dir var ml_statistics = false var only_sessions = Set.empty[String] var options = Options.init() var image_size = default_image_size var verbose = false val getopts = Getopts(""" Usage: isabelle build_status [OPTIONS] Options are: -D DIR target directory (default """ + default_target_dir + """) -M include full ML statistics -S SESSIONS only given SESSIONS (comma separated) -l DAYS length of relevant history (default """ + options.int("build_log_history") + """) -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -s WxH size of PNG image (default """ + image_size._1 + "x" + image_size._2 + """) -v verbose Present performance statistics from build log database, which is specified via system options build_log_database_host, build_log_database_user, build_log_history etc. """, "D:" -> (arg => target_dir = Path.explode(arg)), "M" -> (_ => ml_statistics = true), "S:" -> (arg => only_sessions = space_explode(',', arg).toSet), "l:" -> (arg => options = options + ("build_log_history=" + arg)), "o:" -> (arg => options = options + arg), "s:" -> (arg => space_explode('x', arg).map(Value.Int.parse) match { case List(w, h) if w > 0 && h > 0 => image_size = (w, h) case _ => error("Error bad PNG image size: " + quote(arg)) }), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress build_status(options, progress = progress, only_sessions = only_sessions, verbose = verbose, target_dir = target_dir, ml_statistics = ml_statistics, image_size = image_size) }) } diff --git a/src/Pure/ML/ml_statistics.scala b/src/Pure/ML/ml_statistics.scala --- a/src/Pure/ML/ml_statistics.scala +++ b/src/Pure/ML/ml_statistics.scala @@ -1,314 +1,314 @@ /* Title: Pure/ML/ml_statistics.scala Author: Makarius ML runtime statistics. 
*/ package isabelle import scala.annotation.tailrec import scala.collection.mutable import scala.collection.immutable.{SortedSet, SortedMap} import scala.swing.{Frame, Component} import org.jfree.data.xy.{XYSeries, XYSeriesCollection} import org.jfree.chart.{JFreeChart, ChartPanel, ChartFactory} import org.jfree.chart.plot.PlotOrientation object ML_Statistics { /* properties */ val Now = new Properties.Double("now") def now(props: Properties.T): Double = Now.unapply(props).get /* memory status */ val Heap_Size = new Properties.Long("size_heap") val Heap_Free = new Properties.Long("size_heap_free_last_GC") val GC_Percent = new Properties.Int("GC_percent") sealed case class Memory_Status(heap_size: Long, heap_free: Long, gc_percent: Int) { def heap_used: Long = (heap_size - heap_free) max 0 def heap_used_fraction: Double = if (heap_size == 0) 0.0 else heap_used.toDouble / heap_size def gc_progress: Option[Double] = if (1 <= gc_percent && gc_percent <= 100) Some((gc_percent - 1) * 0.01) else None } def memory_status(props: Properties.T): Memory_Status = { val heap_size = Heap_Size.get(props) val heap_free = Heap_Free.get(props) val gc_percent = GC_Percent.get(props) Memory_Status(heap_size, heap_free, gc_percent) } /* monitor process */ def monitor(pid: Long, stats_dir: String = "", delay: Time = Time.seconds(0.5), consume: Properties.T => Unit = Console.println ): Unit = { def progress_stdout(line: String): Unit = { val props = Library.space_explode(',', line).flatMap(Properties.Eq.unapply) if (props.nonEmpty) consume(props) } val env_prefix = if (stats_dir.isEmpty) "" else "export POLYSTATSDIR=" + Bash.string(stats_dir) + "\n" Bash.process(env_prefix + "\"$POLYML_EXE\" -q --use src/Pure/ML/ml_statistics.ML --eval " + Bash.string("ML_Statistics.monitor " + ML_Syntax.print_long(pid) + " " + ML_Syntax.print_double(delay.seconds)), cwd = Path.ISABELLE_HOME.file) .result(progress_stdout = progress_stdout, strict = false).check } /* protocol handler */ class Handler extends Session.Protocol_Handler { private var session: Session = null private var monitoring: Future[Unit] = Future.value(()) override def init(session: Session): Unit = synchronized { this.session = session } override def exit(): Unit = synchronized { session = null monitoring.cancel() } private def consume(props: Properties.T): Unit = synchronized { if (session != null) { val props1 = (session.cache.props(props ::: Java_Statistics.jvm_statistics())) session.runtime_statistics.post(Session.Runtime_Statistics(props1)) } } private def ml_statistics(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case Markup.ML_Statistics(pid, stats_dir) => monitoring = Future.thread("ML_statistics") { monitor(pid, stats_dir = stats_dir, consume = consume) } true case _ => false } } override val functions: Session.Protocol_Functions = List(Markup.ML_Statistics.name -> ml_statistics) } /* memory fields (mega bytes) */ def mem_print(x: Long): Option[String] = if (x == 0L) None else Some(x.toString + " M") def mem_scale(x: Long): Long = x / 1024 / 1024 def mem_field_scale(name: String, x: Double): Double = - if (heap_fields._2.contains(name) || program_fields._2.contains(name)) { + if (heap_fields.names.contains(name) || program_fields.names.contains(name)) { mem_scale(x.toLong).toDouble } else x val CODE_SIZE = "size_code" val STACK_SIZE = "size_stacks" val HEAP_SIZE = "size_heap" /* standard fields */ - type Fields = (String, List[String]) + sealed case class Fields(title: String, names: List[String]) val tasks_fields: 
Fields = - ("Future tasks", + Fields("Future tasks", List("tasks_ready", "tasks_pending", "tasks_running", "tasks_passive", "tasks_urgent", "tasks_total")) val workers_fields: Fields = - ("Worker threads", List("workers_total", "workers_active", "workers_waiting")) + Fields("Worker threads", List("workers_total", "workers_active", "workers_waiting")) val GC_fields: Fields = - ("GCs", List("partial_GCs", "full_GCs", "share_passes")) + Fields("GCs", List("partial_GCs", "full_GCs", "share_passes")) val heap_fields: Fields = - ("Heap", List(HEAP_SIZE, "size_allocation", "size_allocation_free", + Fields("Heap", List(HEAP_SIZE, "size_allocation", "size_allocation_free", "size_heap_free_last_full_GC", "size_heap_free_last_GC")) val program_fields: Fields = - ("Program", List("size_code", "size_stacks")) + Fields("Program", List("size_code", "size_stacks")) val threads_fields: Fields = - ("Threads", List("threads_total", "threads_in_ML", "threads_wait_condvar", + Fields("Threads", List("threads_total", "threads_in_ML", "threads_wait_condvar", "threads_wait_IO", "threads_wait_mutex", "threads_wait_signal")) val time_fields: Fields = - ("Time", List("time_elapsed", "time_elapsed_GC", "time_CPU", "time_GC")) + Fields("Time", List("time_elapsed", "time_elapsed_GC", "time_CPU", "time_GC")) val speed_fields: Fields = - ("Speed", List("speed_CPU", "speed_GC")) + Fields("Speed", List("speed_CPU", "speed_GC")) private val time_speed = Map("time_CPU" -> "speed_CPU", "time_GC" -> "speed_GC") val java_heap_fields: Fields = - ("Java heap", List("java_heap_size", "java_heap_used")) + Fields("Java heap", List("java_heap_size", "java_heap_used")) val java_thread_fields: Fields = - ("Java threads", List("java_threads_total", "java_workers_total", "java_workers_active")) + Fields("Java threads", List("java_threads_total", "java_workers_total", "java_workers_active")) val main_fields: List[Fields] = List(heap_fields, tasks_fields, workers_fields) val other_fields: List[Fields] = List(threads_fields, GC_fields, program_fields, time_fields, speed_fields, java_heap_fields, java_thread_fields) val all_fields: List[Fields] = main_fields ::: other_fields /* content interpretation */ final case class Entry(time: Double, data: Map[String, Double]) { def get(field: String): Double = data.getOrElse(field, 0.0) } val empty: ML_Statistics = apply(Nil) def apply( ml_statistics0: List[Properties.T], heading: String = "", domain: String => Boolean = _ => true ): ML_Statistics = { require(ml_statistics0.forall(props => Now.unapply(props).isDefined), "missing \"now\" field") val ml_statistics = ml_statistics0.sortBy(now) val time_start = if (ml_statistics.isEmpty) 0.0 else now(ml_statistics.head) val duration = if (ml_statistics.isEmpty) 0.0 else now(ml_statistics.last) - time_start val fields = SortedSet.empty[String] ++ (for { props <- ml_statistics.iterator (x, _) <- props.iterator if x != Now.name && domain(x) } yield x) val content = { var last_edge = Map.empty[String, (Double, Double, Double)] val result = new mutable.ListBuffer[ML_Statistics.Entry] for (props <- ml_statistics) { val time = now(props) - time_start // rising edges -- relative speed val speeds = (for { (key, value) <- props.iterator key1 <- time_speed.get(key) if domain(key1) } yield { val (x0, y0, s0) = last_edge.getOrElse(key, (0.0, 0.0, 0.0)) val x1 = time val y1 = java.lang.Double.parseDouble(value) val s1 = if (x1 == x0) 0.0 else (y1 - y0) / (x1 - x0) if (y1 > y0) { last_edge += (key -> (x1, y1, s1)) (key1, s1.toString) } else (key1, s0.toString) }).toList 
val data = SortedMap.empty[String, Double] ++ (for { (x, y) <- props.iterator ++ speeds.iterator if x != Now.name && domain(x) z = java.lang.Double.parseDouble(y) if z != 0.0 } yield { (x.intern, mem_field_scale(x, z)) }) result += ML_Statistics.Entry(time, data) } result.toList } new ML_Statistics(heading, fields, content, time_start, duration) } } final class ML_Statistics private( val heading: String, val fields: Set[String], val content: List[ML_Statistics.Entry], val time_start: Double, val duration: Double ) { override def toString: String = if (content.isEmpty) "ML_Statistics.empty" else "ML_Statistics(length = " + content.length + ", fields = " + fields.size + ")" /* content */ def maximum(field: String): Double = content.foldLeft(0.0) { case (m, e) => m max e.get(field) } def average(field: String): Double = { @tailrec def sum(t0: Double, list: List[ML_Statistics.Entry], acc: Double): Double = list match { case Nil => acc case e :: es => val t = e.time sum(t, es, (t - t0) * e.get(field) + acc) } content match { case Nil => 0.0 case List(e) => e.get(field) case e :: es => sum(e.time, es, 0.0) / duration } } /* charts */ def update_data(data: XYSeriesCollection, selected_fields: List[String]): Unit = { data.removeAllSeries() for (field <- selected_fields) { val series = new XYSeries(field) content.foreach(entry => series.add(entry.time, entry.get(field))) data.addSeries(series) } } def chart(title: String, selected_fields: List[String]): JFreeChart = { val data = new XYSeriesCollection update_data(data, selected_fields) ChartFactory.createXYLineChart(title, "time", "value", data, PlotOrientation.VERTICAL, true, true, true) } def chart(fields: ML_Statistics.Fields): JFreeChart = - chart(fields._1, fields._2) + chart(fields.title, fields.names) def show_frames(fields: List[ML_Statistics.Fields] = ML_Statistics.main_fields): Unit = fields.map(chart).foreach(c => GUI_Thread.later { new Frame { iconImage = GUI.isabelle_image() title = heading contents = Component.wrap(new ChartPanel(c)) visible = true } }) } diff --git a/src/Tools/jEdit/src/monitor_dockable.scala b/src/Tools/jEdit/src/monitor_dockable.scala --- a/src/Tools/jEdit/src/monitor_dockable.scala +++ b/src/Tools/jEdit/src/monitor_dockable.scala @@ -1,123 +1,123 @@ /* Title: Tools/jEdit/src/monitor_dockable.scala Author: Makarius Monitor for runtime statistics. 
*/ package isabelle.jedit import isabelle._ import java.awt.BorderLayout import scala.collection.immutable.Queue import scala.swing.TextField import scala.swing.event.ValueChanged import org.jfree.chart.ChartPanel import org.jfree.data.xy.XYSeriesCollection import org.gjt.sp.jedit.View class Monitor_Dockable(view: View, position: String) extends Dockable(view, position) { /* chart data -- owned by GUI thread */ private var statistics = Queue.empty[Properties.T] private var statistics_length = 0 private def add_statistics(stats: Properties.T): Unit = { statistics = statistics.appended(stats) statistics_length += 1 limit_data.text match { case Value.Int(limit) => while (statistics_length > limit) { statistics = statistics.dequeue._2 statistics_length -= 1 } case _ => } } private def clear_statistics(): Unit = { statistics = Queue.empty statistics_length = 0 } - private var data_name = ML_Statistics.all_fields.head._1 + private var data_name = ML_Statistics.all_fields.head.title private val chart = ML_Statistics.empty.chart(null, Nil) private val data = chart.getXYPlot.getDataset.asInstanceOf[XYSeriesCollection] private def update_chart(): Unit = { - ML_Statistics.all_fields.find(_._1 == data_name) match { + ML_Statistics.all_fields.find(_.title == data_name) match { case None => - case Some((_, fields)) => ML_Statistics(statistics.toList).update_data(data, fields) + case Some(fields) => ML_Statistics(statistics.toList).update_data(data, fields.names) } } private val input_delay = Delay.first(PIDE.session.input_delay, gui = true) { update_chart() } private val update_delay = Delay.first(PIDE.session.chart_delay, gui = true) { update_chart() } /* controls */ - private val select_data = new GUI.Selector(ML_Statistics.all_fields.map(p => GUI.Selector.item(p._1))) { + private val select_data = new GUI.Selector(ML_Statistics.all_fields.map(p => GUI.Selector.item(p.title))) { tooltip = "Select visualized data collection" override def changed(): Unit = { data_name = selection.item.toString; update_chart() } } private val limit_data = new TextField("200", 5) { tooltip = "Limit for accumulated data" verifier = { case Value.Int(x) => x > 0 case _ => false } reactions += { case ValueChanged(_) => input_delay.invoke() } } private val reset_data = new GUI.Button("Reset") { tooltip = "Reset accumulated data" override def clicked(): Unit = { clear_statistics(); update_chart() } } private val full_gc = new GUI.Button("GC") { tooltip = "Full garbage collection of ML heap" override def clicked(): Unit = PIDE.session.protocol_command("ML_Heap.full_gc") } private val share_common_data = new GUI.Button("Sharing") { tooltip = "Share common data of ML heap" override def clicked(): Unit = PIDE.session.protocol_command("ML_Heap.share_common_data") } private val controls = Wrap_Panel(List(select_data, limit_data, reset_data, full_gc, share_common_data)) /* layout */ set_content(new ChartPanel(chart)) add(controls.peer, BorderLayout.NORTH) /* main */ private val main = Session.Consumer[Session.Runtime_Statistics](getClass.getName) { stats => add_statistics(stats.props) update_delay.invoke() } override def init(): Unit = { PIDE.session.runtime_statistics += main } override def exit(): Unit = { PIDE.session.runtime_statistics -= main } }
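
A minimal usage sketch of the refactored ML_Statistics.Fields record, using only the API visible in this patch (main_fields, heap_fields, fields.title, fields.names, and the Fields overload of chart); the property lists below are invented sample data for illustration, not output of a real ML run:

  import isabelle._

  // Invented sample statistics; every record must carry a "now" timestamp.
  val props: List[Properties.T] =
    List(
      List("now" -> "0.0", "size_heap" -> "1048576", "tasks_ready" -> "2"),
      List("now" -> "1.0", "size_heap" -> "2097152", "tasks_ready" -> "5"))

  val stats = ML_Statistics(props, heading = "demo")

  // Named access replaces the former tuple projections _1/_2:
  for (fields <- ML_Statistics.main_fields)
    println(fields.title + ": " + fields.names.mkString(", "))

  // The chart overload now takes the Fields record directly:
  val heap_chart = stats.chart(ML_Statistics.heap_fields)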