diff --git a/src/Pure/Isar/keyword.scala b/src/Pure/Isar/keyword.scala --- a/src/Pure/Isar/keyword.scala +++ b/src/Pure/Isar/keyword.scala @@ -1,228 +1,231 @@ /* Title: Pure/Isar/keyword.scala Author: Makarius Isar keyword classification. */ package isabelle object Keyword { /** keyword classification **/ /* kinds */ val DIAG = "diag" val DOCUMENT_HEADING = "document_heading" val DOCUMENT_BODY = "document_body" val DOCUMENT_RAW = "document_raw" val THY_BEGIN = "thy_begin" val THY_END = "thy_end" val THY_DECL = "thy_decl" val THY_DECL_BLOCK = "thy_decl_block" val THY_DEFN = "thy_defn" val THY_STMT = "thy_stmt" val THY_LOAD = "thy_load" val THY_GOAL = "thy_goal" val THY_GOAL_DEFN = "thy_goal_defn" val THY_GOAL_STMT = "thy_goal_stmt" val QED = "qed" val QED_SCRIPT = "qed_script" val QED_BLOCK = "qed_block" val QED_GLOBAL = "qed_global" val PRF_GOAL = "prf_goal" val PRF_BLOCK = "prf_block" val NEXT_BLOCK = "next_block" val PRF_OPEN = "prf_open" val PRF_CLOSE = "prf_close" val PRF_CHAIN = "prf_chain" val PRF_DECL = "prf_decl" val PRF_ASM = "prf_asm" val PRF_ASM_GOAL = "prf_asm_goal" val PRF_SCRIPT = "prf_script" val PRF_SCRIPT_GOAL = "prf_script_goal" val PRF_SCRIPT_ASM_GOAL = "prf_script_asm_goal" val BEFORE_COMMAND = "before_command" val QUASI_COMMAND = "quasi_command" /* command categories */ val vacuous = Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW) val diag = Set(DIAG) val document_heading = Set(DOCUMENT_HEADING) val document_body = Set(DOCUMENT_BODY) val document_raw = Set(DOCUMENT_RAW) val document = Set(DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW) val theory_begin = Set(THY_BEGIN) val theory_end = Set(THY_END) val theory_load = Set(THY_LOAD) val theory = Set(THY_BEGIN, THY_END, THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_DEFN, THY_STMT, THY_GOAL, THY_GOAL_DEFN, THY_GOAL_STMT) val theory_block = Set(THY_BEGIN, THY_DECL_BLOCK) val theory_body = Set(THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_DEFN, THY_STMT, THY_GOAL, THY_GOAL_DEFN, THY_GOAL_STMT) val theory_defn = Set(THY_DEFN, THY_GOAL_DEFN) val prf_script = Set(PRF_SCRIPT) val proof = Set(QED, QED_SCRIPT, QED_BLOCK, QED_GLOBAL, PRF_GOAL, PRF_BLOCK, NEXT_BLOCK, PRF_OPEN, PRF_CLOSE, PRF_CHAIN, PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_SCRIPT, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) val proof_body = Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW, PRF_BLOCK, NEXT_BLOCK, PRF_OPEN, PRF_CLOSE, PRF_CHAIN, PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_SCRIPT, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) + val proof_asm = Set(PRF_ASM, PRF_ASM_GOAL) + val improper = Set(QED_SCRIPT, PRF_SCRIPT, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) + val theory_goal = Set(THY_GOAL, THY_GOAL_DEFN, THY_GOAL_STMT) val proof_goal = Set(PRF_GOAL, PRF_ASM_GOAL, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) val qed = Set(QED, QED_SCRIPT, QED_BLOCK) val qed_global = Set(QED_GLOBAL) val proof_open = proof_goal + PRF_OPEN val proof_close = qed + PRF_CLOSE val proof_enclose = Set(PRF_BLOCK, NEXT_BLOCK, QED_BLOCK, PRF_CLOSE) val close_structure = Set(NEXT_BLOCK, QED_BLOCK, PRF_CLOSE, THY_END) /** keyword tables **/ object Spec { val none: Spec = Spec("") } sealed case class Spec(kind: String, exts: List[String] = Nil, tags: List[String] = Nil) { def is_none: Boolean = kind == "" override def toString: String = kind + (if (exts.isEmpty) "" else " (" + commas_quote(exts) + ")") + (if (tags.isEmpty) "" else tags.map(quote).mkString(" % ", " % ", "")) } object Keywords { def empty: Keywords = new Keywords() } class Keywords private( val kinds: Map[String, String] = Map.empty, val load_commands: 
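/* Example (sketch, not part of the change): the category values above are
   plain Set[String] over kind names, so classification is set membership:

     Keyword.proof_open(Keyword.PRF_OPEN)      // true (proof_goal + PRF_OPEN)
     Keyword.proof_open(Keyword.QED)           // false
     Keyword.close_structure(Keyword.THY_END)  // true
*/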
Map[String, List[String]] = Map.empty) { override def toString: String = { val entries = for ((name, kind) <- kinds.toList.sortBy(_._1)) yield { val exts = load_commands.getOrElse(name, Nil) val kind_decl = if (kind == "") "" else " :: " + quote(kind) + (if (exts.isEmpty) "" else " (" + commas_quote(exts) + ")") quote(name) + kind_decl } entries.mkString("keywords\n ", " and\n ", "") } /* merge */ def is_empty: Boolean = kinds.isEmpty def ++ (other: Keywords): Keywords = if (this eq other) this else if (is_empty) other else { val kinds1 = if (kinds eq other.kinds) kinds else if (kinds.isEmpty) other.kinds else (kinds /: other.kinds) { case (m, e) => if (m.isDefinedAt(e._1)) m else m + e } val load_commands1 = if (load_commands eq other.load_commands) load_commands else if (load_commands.isEmpty) other.load_commands else (load_commands /: other.load_commands) { case (m, e) => if (m.isDefinedAt(e._1)) m else m + e } new Keywords(kinds1, load_commands1) } /* add keywords */ def + (name: String, kind: String = "", exts: List[String] = Nil): Keywords = { val kinds1 = kinds + (name -> kind) val load_commands1 = if (kind == THY_LOAD) { if (!Symbol.iterator(name).forall(Symbol.is_ascii)) error("Bad theory load command " + quote(name)) load_commands + (name -> exts) } else load_commands new Keywords(kinds1, load_commands1) } def add_keywords(header: Thy_Header.Keywords): Keywords = (this /: header) { case (keywords, (name, spec)) => if (spec.is_none) keywords + Symbol.decode(name) + Symbol.encode(name) else keywords + (Symbol.decode(name), spec.kind, spec.exts) + (Symbol.encode(name), spec.kind, spec.exts) } /* command kind */ def is_command(token: Token, check_kind: String => Boolean): Boolean = token.is_command && (kinds.get(token.source) match { case Some(k) => check_kind(k) case None => false }) def is_before_command(token: Token): Boolean = token.is_keyword && kinds.get(token.source) == Some(BEFORE_COMMAND) def is_quasi_command(token: Token): Boolean = token.is_keyword && kinds.get(token.source) == Some(QUASI_COMMAND) def is_indent_command(token: Token): Boolean = token.is_begin_or_command || is_quasi_command(token) /* load commands */ def load_commands_in(text: String): Boolean = load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) }) /* lexicons */ private def make_lexicon(is_minor: Boolean): Scan.Lexicon = (Scan.Lexicon.empty /: kinds)( { case (lex, (name, kind)) => if ((kind == "" || kind == BEFORE_COMMAND || kind == QUASI_COMMAND) == is_minor) lex + name else lex }) lazy val minor: Scan.Lexicon = make_lexicon(true) lazy val major: Scan.Lexicon = make_lexicon(false) } } diff --git a/src/Pure/Isar/token.scala b/src/Pure/Isar/token.scala --- a/src/Pure/Isar/token.scala +++ b/src/Pure/Isar/token.scala @@ -1,347 +1,349 @@ /* Title: Pure/Isar/token.scala Author: Makarius Outer token syntax for Isabelle/Isar. 
*/ package isabelle import scala.collection.mutable import scala.util.parsing.input object Token { /* tokens */ object Kind extends Enumeration { /*immediate source*/ val COMMAND = Value("command") val KEYWORD = Value("keyword") val IDENT = Value("identifier") val LONG_IDENT = Value("long identifier") val SYM_IDENT = Value("symbolic identifier") val VAR = Value("schematic variable") val TYPE_IDENT = Value("type variable") val TYPE_VAR = Value("schematic type variable") val NAT = Value("natural number") val FLOAT = Value("floating-point number") val SPACE = Value("white space") /*delimited content*/ val STRING = Value("string") val ALT_STRING = Value("back-quoted string") val VERBATIM = Value("verbatim text") val CARTOUCHE = Value("text cartouche") val INFORMAL_COMMENT = Value("informal comment") val FORMAL_COMMENT = Value("formal comment") /*special content*/ val ERROR = Value("bad input") val UNPARSED = Value("unparsed input") } /* parsers */ object Parsers extends Parsers trait Parsers extends Scan.Parsers with Comment.Parsers { private def delimited_token: Parser[Token] = { val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x)) val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x)) val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x)) val cmt = comment ^^ (x => Token(Token.Kind.INFORMAL_COMMENT, x)) val formal_cmt = comment_cartouche ^^ (x => Token(Token.Kind.FORMAL_COMMENT, x)) string | (alt_string | (verb | (cart | (cmt | formal_cmt)))) } private def other_token(keywords: Keyword.Keywords): Parser[Token] = { val letdigs1 = many1(Symbol.is_letdig) val sub = one(s => s == Symbol.sub_decoded || s == Symbol.sub) val id = one(Symbol.is_letter) ~ (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^ { case x ~ y => x + y } val nat = many1(Symbol.is_digit) val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z } val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x } val ident = id ~ rep("." ~> id) ^^ { case x ~ Nil => Token(Token.Kind.IDENT, x) case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) } val var_ = "?" 
~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) } val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) } val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) } val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x)) val float = ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x)) val sym_ident = (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^ (x => Token(Token.Kind.SYM_IDENT, x)) val keyword = literal(keywords.minor) ^^ (x => Token(Token.Kind.KEYWORD, x)) ||| literal(keywords.major) ^^ (x => Token(Token.Kind.COMMAND, x)) val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x)) val recover_delimited = (recover_quoted("\"") | (recover_quoted("`") | (recover_verbatim | (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x)) val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x)) space | (recover_delimited | (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) ||| keyword) | bad)) } def token(keywords: Keyword.Keywords): Parser[Token] = delimited_token | other_token(keywords) def token_line(keywords: Keyword.Keywords, ctxt: Scan.Line_Context) : Parser[(Token, Scan.Line_Context)] = { val string = quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) } val alt_string = quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) } val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) } val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) } val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.INFORMAL_COMMENT, x), c) } val formal_cmt = comment_cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.FORMAL_COMMENT, x), c) } val other = other_token(keywords) ^^ { case x => (x, Scan.Finished) } string | (alt_string | (verb | (cart | (cmt | (formal_cmt | other))))) } } /* explode */ def explode(keywords: Keyword.Keywords, inp: CharSequence): List[Token] = Parsers.parseAll(Parsers.rep(Parsers.token(keywords)), Scan.char_reader(inp)) match { case Parsers.Success(tokens, _) => tokens case _ => error("Unexpected failure of tokenizing input:\n" + inp.toString) } def explode_line(keywords: Keyword.Keywords, inp: CharSequence, context: Scan.Line_Context) : (List[Token], Scan.Line_Context) = { var in: input.Reader[Char] = Scan.char_reader(inp) val toks = new mutable.ListBuffer[Token] var ctxt = context while (!in.atEnd) { Parsers.parse(Parsers.token_line(keywords, ctxt), in) match { case Parsers.Success((x, c), rest) => toks += x; ctxt = c; in = rest case Parsers.NoSuccess(_, rest) => error("Unexpected failure of tokenizing input:\n" + rest.source.toString) } } (toks.toList, ctxt) } val newline: Token = explode(Keyword.Keywords.empty, "\n").head /* embedded */ def read_embedded(keywords: Keyword.Keywords, inp: CharSequence): Option[Token] = explode(keywords, inp) match { case List(tok) if tok.is_embedded => Some(tok) case _ => None } /* names */ def read_name(keywords: Keyword.Keywords, inp: CharSequence): Option[Token] = explode(keywords, inp) match { case List(tok) if tok.is_name => Some(tok) case _ => None } def quote_name(keywords: Keyword.Keywords, name: String): String = if (read_name(keywords, name).isDefined) name else quote(name.replace("\"", "\\\"")) /* plain antiquotation (0 or 1 args) */ def read_antiq_arg(keywords: Keyword.Keywords, inp: CharSequence): Option[(String, Option[String])] = 
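/* Example (sketch, not part of the change): explode with an empty keyword
   table yields only generic tokens, while registered keywords become COMMAND
   tokens via the "major" lexicon (every kind except "", BEFORE_COMMAND,
   QUASI_COMMAND -- cf. make_lexicon in keyword.scala):

     Token.explode(Keyword.Keywords.empty, "foo 42")
       // IDENT "foo", SPACE " ", NAT "42"

     val kws = Keyword.Keywords.empty + ("lemma", Keyword.THY_GOAL)
     Token.explode(kws, "lemma xs")
       // COMMAND "lemma", SPACE " ", IDENT "xs"
*/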
explode(keywords, inp).filter(_.is_proper) match { case List(t) if t.is_name => Some(t.content, None) case List(t1, t2) if t1.is_name && t2.is_embedded => Some(t1.content, Some(t2.content)) case _ => None } /* implode */ def implode(toks: List[Token]): String = toks match { case List(tok) => tok.source case _ => toks.map(_.source).mkString } /* token reader */ object Pos { val none: Pos = new Pos(0, 0, "", "") val start: Pos = new Pos(1, 1, "", "") def file(file: String): Pos = new Pos(1, 1, file, "") def id(id: String): Pos = new Pos(0, 1, "", id) val command: Pos = id(Markup.COMMAND) } final class Pos private[Token]( val line: Int, val offset: Symbol.Offset, val file: String, val id: String) extends input.Position { def column = 0 def lineContents = "" def advance(token: Token): Pos = advance(token.source) def advance(source: String): Pos = { var line1 = line var offset1 = offset for (s <- Symbol.iterator(source)) { if (line1 > 0 && Symbol.is_newline(s)) line1 += 1 if (offset1 > 0) offset1 += 1 } if (line1 == line && offset1 == offset) this else new Pos(line1, offset1, file, id) } private def position(end_offset: Symbol.Offset): Position.T = (if (line > 0) Position.Line(line) else Nil) ::: (if (offset > 0) Position.Offset(offset) else Nil) ::: (if (end_offset > 0) Position.End_Offset(end_offset) else Nil) ::: (if (file != "") Position.File(file) else Nil) ::: (if (id != "") Position.Id_String(id) else Nil) def position(): Position.T = position(0) def position(token: Token): Position.T = position(advance(token).offset) def position(source: String): Position.T = position(advance(source).offset) override def toString: String = Position.here(position(), delimited = false) } abstract class Reader extends input.Reader[Token] private class Token_Reader(tokens: List[Token], val pos: Pos) extends Reader { def first: Token = tokens.head def rest: Token_Reader = new Token_Reader(tokens.tail, pos.advance(first)) def atEnd: Boolean = tokens.isEmpty } def reader(tokens: List[Token], start: Token.Pos): Reader = new Token_Reader(tokens, start) /* XML data representation */ val encode: XML.Encode.T[Token] = (tok: Token) => { import XML.Encode._ pair(int, string)(tok.kind.id, tok.source) } val decode: XML.Decode.T[Token] = (body: XML.Body) => { import XML.Decode._ val (k, s) = pair(int, string)(body) Token(Kind(k), s) } } sealed case class Token(kind: Token.Kind.Value, source: String) { def is_command: Boolean = kind == Token.Kind.COMMAND def is_command(name: String): Boolean = kind == Token.Kind.COMMAND && source == name def is_keyword: Boolean = kind == Token.Kind.KEYWORD def is_keyword(name: String): Boolean = kind == Token.Kind.KEYWORD && source == name def is_keyword(name: Char): Boolean = kind == Token.Kind.KEYWORD && source.length == 1 && source(0) == name def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source) def is_ident: Boolean = kind == Token.Kind.IDENT def is_sym_ident: Boolean = kind == Token.Kind.SYM_IDENT def is_string: Boolean = kind == Token.Kind.STRING def is_nat: Boolean = kind == Token.Kind.NAT def is_float: Boolean = kind == Token.Kind.FLOAT def is_name: Boolean = kind == Token.Kind.IDENT || kind == Token.Kind.LONG_IDENT || kind == Token.Kind.SYM_IDENT || kind == Token.Kind.STRING || kind == Token.Kind.NAT def is_embedded: Boolean = is_name || kind == Token.Kind.CARTOUCHE || kind == Token.Kind.VAR || kind == Token.Kind.TYPE_IDENT || kind == Token.Kind.TYPE_VAR def is_text: Boolean = is_embedded || kind == Token.Kind.VERBATIM def is_space: Boolean = kind == 
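/* Example (sketch, not part of the change): Pos.advance counts Isabelle
   symbols, not Java chars, and only advances fields that are enabled (> 0):

     Token.Pos.start.advance("ab\n")  // line 2, offset 4
     Token.Pos.none.advance("ab\n")   // unchanged: line and offset stay 0

   so Pos.id positions (line 0, offset 1) track offsets only.
*/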
Token.Kind.SPACE def is_informal_comment: Boolean = kind == Token.Kind.INFORMAL_COMMENT def is_formal_comment: Boolean = kind == Token.Kind.FORMAL_COMMENT def is_marker: Boolean = kind == Token.Kind.FORMAL_COMMENT && (source.startsWith(Symbol.marker) || source.startsWith(Symbol.marker_decoded)) def is_comment: Boolean = is_informal_comment || is_formal_comment def is_ignored: Boolean = is_space || is_informal_comment def is_proper: Boolean = !is_space && !is_comment def is_error: Boolean = kind == Token.Kind.ERROR def is_unparsed: Boolean = kind == Token.Kind.UNPARSED def is_unfinished: Boolean = is_error && (source.startsWith("\"") || source.startsWith("`") || source.startsWith("{*") || source.startsWith("(*") || source.startsWith(Symbol.open) || source.startsWith(Symbol.open_decoded)) def is_open_bracket: Boolean = is_keyword && Word.open_brackets.exists(is_keyword) def is_close_bracket: Boolean = is_keyword && Word.close_brackets.exists(is_keyword) def is_begin: Boolean = is_keyword("begin") def is_end: Boolean = is_command("end") def is_begin_or_command: Boolean = is_begin || is_command + def symbol_length: Symbol.Offset = Symbol.iterator(source).length + def content: String = if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source) else if (kind == Token.Kind.ALT_STRING) Scan.Parsers.quoted_content("`", source) else if (kind == Token.Kind.VERBATIM) Scan.Parsers.verbatim_content(source) else if (kind == Token.Kind.CARTOUCHE) Scan.Parsers.cartouche_content(source) else if (kind == Token.Kind.INFORMAL_COMMENT) Scan.Parsers.comment_content(source) else if (kind == Token.Kind.FORMAL_COMMENT) Comment.content(source) else source def is_system_name: Boolean = { val s = content is_name && Path.is_wellformed(s) && !s.exists(Symbol.is_ascii_blank) && !Path.is_reserved(s) } } diff --git a/src/Pure/PIDE/document.scala b/src/Pure/PIDE/document.scala --- a/src/Pure/PIDE/document.scala +++ b/src/Pure/PIDE/document.scala @@ -1,1199 +1,1202 @@ /* Title: Pure/PIDE/document.scala Author: Makarius Document as collection of named nodes, each consisting of an editable list of commands, associated with asynchronous execution process. 
*/ package isabelle import scala.collection.mutable object Document { /** document structure **/ /* overlays -- print functions with arguments */ object Overlays { val empty = new Overlays(Map.empty) } final class Overlays private(rep: Map[Node.Name, Node.Overlays]) { def apply(name: Node.Name): Node.Overlays = rep.getOrElse(name, Node.Overlays.empty) private def update(name: Node.Name, f: Node.Overlays => Node.Overlays): Overlays = { val node_overlays = f(apply(name)) new Overlays(if (node_overlays.is_empty) rep - name else rep + (name -> node_overlays)) } def insert(command: Command, fn: String, args: List[String]): Overlays = update(command.node_name, _.insert(command, fn, args)) def remove(command: Command, fn: String, args: List[String]): Overlays = update(command.node_name, _.remove(command, fn, args)) override def toString: String = rep.mkString("Overlays(", ",", ")") } /* document blobs: auxiliary files */ sealed case class Blob(bytes: Bytes, source: String, chunk: Symbol.Text_Chunk, changed: Boolean) { def unchanged: Blob = copy(changed = false) } object Blobs { def apply(blobs: Map[Node.Name, Blob]): Blobs = new Blobs(blobs) val empty: Blobs = apply(Map.empty) } final class Blobs private(blobs: Map[Node.Name, Blob]) { def get(name: Node.Name): Option[Blob] = blobs.get(name) def changed(name: Node.Name): Boolean = get(name) match { case Some(blob) => blob.changed case None => false } override def toString: String = blobs.mkString("Blobs(", ",", ")") } /* document nodes: theories and auxiliary files */ type Edit[A, B] = (Node.Name, Node.Edit[A, B]) type Edit_Text = Edit[Text.Edit, Text.Perspective] type Edit_Command = Edit[Command.Edit, Command.Perspective] object Node { /* header and name */ sealed case class Header( imports_pos: List[(Name, Position.T)] = Nil, keywords: Thy_Header.Keywords = Nil, abbrevs: Thy_Header.Abbrevs = Nil, errors: List[String] = Nil) { + def imports_offset: Map[Int, Name] = + (for { (name, Position.Offset(i)) <- imports_pos } yield i -> name).toMap + def imports: List[Name] = imports_pos.map(_._1) def append_errors(msgs: List[String]): Header = copy(errors = errors ::: msgs) def cat_errors(msg2: String): Header = copy(errors = errors.map(msg1 => Exn.cat_message(msg1, msg2))) } val no_header: Header = Header() def bad_header(msg: String): Header = Header(errors = List(msg)) object Name { val empty: Name = Name("") object Ordering extends scala.math.Ordering[Name] { def compare(name1: Name, name2: Name): Int = name1.node compare name2.node } type Graph[A] = isabelle.Graph[Node.Name, A] def make_graph[A](entries: List[((Name, A), List[Name])]): Graph[A] = Graph.make(entries, symmetric = true)(Ordering) } sealed case class Name(node: String, master_dir: String = "", theory: String = "") { override def hashCode: Int = node.hashCode override def equals(that: Any): Boolean = that match { case other: Name => node == other.node case _ => false } def path: Path = Path.explode(File.standard_path(node)) def master_dir_path: Path = Path.explode(File.standard_path(master_dir)) def is_theory: Boolean = theory.nonEmpty def theory_base_name: String = Long_Name.base_name(theory) override def toString: String = if (is_theory) theory else node def map(f: String => String): Name = copy(f(node), f(master_dir), theory) def map_theory(f: String => String): Name = copy(node, master_dir, f(theory)) def json: JSON.Object.T = JSON.Object("node_name" -> node, "theory_name" -> theory) } sealed case class Entry(name: Node.Name, header: Node.Header) { def map(f: String => String): Entry 
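/* Example (sketch, not part of the change): the new imports_offset inverts
   imports_pos, keyed by the reported offset; imports without an Offset
   property are dropped by the pattern match:

     val a = Node.Name("A.thy")
     Header(imports_pos = List((a, Position.Offset(10)))).imports_offset
       // Map(10 -> a)
*/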
= copy(name = name.map(f)) override def toString: String = name.toString } /* node overlays */ object Overlays { val empty = new Overlays(Multi_Map.empty) } final class Overlays private(rep: Multi_Map[Command, (String, List[String])]) { def commands: Set[Command] = rep.keySet def is_empty: Boolean = rep.isEmpty def dest: List[(Command, (String, List[String]))] = rep.iterator.toList def insert(cmd: Command, fn: String, args: List[String]): Overlays = new Overlays(rep.insert(cmd, (fn, args))) def remove(cmd: Command, fn: String, args: List[String]): Overlays = new Overlays(rep.remove(cmd, (fn, args))) override def toString: String = rep.mkString("Node.Overlays(", ",", ")") } /* edits */ sealed abstract class Edit[A, B] { def foreach(f: A => Unit) { this match { case Edits(es) => es.foreach(f) case _ => } } def is_void: Boolean = this match { case Edits(Nil) => true case _ => false } } case class Blob[A, B](blob: Document.Blob) extends Edit[A, B] case class Edits[A, B](edits: List[A]) extends Edit[A, B] case class Deps[A, B](header: Header) extends Edit[A, B] case class Perspective[A, B](required: Boolean, visible: B, overlays: Overlays) extends Edit[A, B] /* perspective */ type Perspective_Text = Perspective[Text.Edit, Text.Perspective] type Perspective_Command = Perspective[Command.Edit, Command.Perspective] val no_perspective_text: Perspective_Text = Perspective(false, Text.Perspective.empty, Overlays.empty) val no_perspective_command: Perspective_Command = Perspective(false, Command.Perspective.empty, Overlays.empty) def is_no_perspective_text(perspective: Perspective_Text): Boolean = !perspective.required && perspective.visible.is_empty && perspective.overlays.is_empty def is_no_perspective_command(perspective: Perspective_Command): Boolean = !perspective.required && perspective.visible.is_empty && perspective.overlays.is_empty /* commands */ object Commands { def apply(commands: Linear_Set[Command]): Commands = new Commands(commands) val empty: Commands = apply(Linear_Set.empty) def starts(commands: Iterator[Command], offset: Text.Offset = 0) : Iterator[(Command, Text.Offset)] = { var i = offset for (command <- commands) yield { val start = i i += command.length (command, start) } } def starts_pos(commands: Iterator[Command], pos: Token.Pos = Token.Pos.start) : Iterator[(Command, Token.Pos)] = { var p = pos for (command <- commands) yield { val start = p p = (p /: command.span.content)(_.advance(_)) (command, start) } } private val block_size = 256 } final class Commands private(val commands: Linear_Set[Command]) { lazy val load_commands: List[Command] = commands.iterator.filter(cmd => cmd.blobs.nonEmpty).toList private lazy val full_index: (Array[(Command, Text.Offset)], Text.Range) = { val blocks = new mutable.ListBuffer[(Command, Text.Offset)] var next_block = 0 var last_stop = 0 for ((command, start) <- Commands.starts(commands.iterator)) { last_stop = start + command.length while (last_stop + 1 > next_block) { blocks += (command -> start) next_block += Commands.block_size } } (blocks.toArray, Text.Range(0, last_stop)) } private def full_range: Text.Range = full_index._2 def iterator(i: Text.Offset = 0): Iterator[(Command, Text.Offset)] = { if (commands.nonEmpty && full_range.contains(i)) { val (cmd0, start0) = full_index._1(i / Commands.block_size) Node.Commands.starts(commands.iterator(cmd0), start0) dropWhile { case (cmd, start) => start + cmd.length <= i } } else Iterator.empty } } val empty: Node = new Node() } final class Node private( val get_blob: Option[Document.Blob] = 
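/* Worked example (sketch, not part of the change): full_index keeps one
   (command, start) entry per block of block_size = 256 characters, pointing
   at the command covering the block start. With commands of length 100:

     cmd1 = [0,100), cmd2 = [100,200), cmd3 = [200,300)
     blocks: 0 -> (cmd1, 0), 1 -> (cmd3, 200)

     iterator(260)  // enters at full_index._1(260 / 256) = (cmd3, 200)

   dropWhile then skips commands ending at or before the requested offset.
*/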
None, val header: Node.Header = Node.no_header, val syntax: Option[Outer_Syntax] = None, val text_perspective: Text.Perspective = Text.Perspective.empty, val perspective: Node.Perspective_Command = Node.no_perspective_command, _commands: Node.Commands = Node.Commands.empty) { def is_empty: Boolean = get_blob.isEmpty && header == Node.no_header && text_perspective.is_empty && Node.is_no_perspective_command(perspective) && commands.isEmpty def has_header: Boolean = header != Node.no_header override def toString: String = if (is_empty) "empty" else if (get_blob.isDefined) "blob" else "node" def commands: Linear_Set[Command] = _commands.commands def load_commands: List[Command] = _commands.load_commands def load_commands_changed(doc_blobs: Blobs): Boolean = load_commands.exists(_.blobs_changed(doc_blobs)) def init_blob(blob: Blob): Node = new Node(get_blob = Some(blob.unchanged)) def update_header(new_header: Node.Header): Node = new Node(get_blob, new_header, syntax, text_perspective, perspective, _commands) def update_syntax(new_syntax: Option[Outer_Syntax]): Node = new Node(get_blob, header, new_syntax, text_perspective, perspective, _commands) def update_perspective( new_text_perspective: Text.Perspective, new_perspective: Node.Perspective_Command): Node = new Node(get_blob, header, syntax, new_text_perspective, new_perspective, _commands) def edit_perspective: Node.Edit[Text.Edit, Text.Perspective] = Node.Perspective(perspective.required, text_perspective, perspective.overlays) def same_perspective( other_text_perspective: Text.Perspective, other_perspective: Node.Perspective_Command): Boolean = text_perspective == other_text_perspective && perspective.required == other_perspective.required && perspective.visible.same(other_perspective.visible) && perspective.overlays == other_perspective.overlays def update_commands(new_commands: Linear_Set[Command]): Node = if (new_commands eq _commands.commands) this else new Node(get_blob, header, syntax, text_perspective, perspective, Node.Commands(new_commands)) def command_iterator(i: Text.Offset = 0): Iterator[(Command, Text.Offset)] = _commands.iterator(i) def command_iterator(range: Text.Range): Iterator[(Command, Text.Offset)] = command_iterator(range.start) takeWhile { case (_, start) => start < range.stop } def command_start(cmd: Command): Option[Text.Offset] = Node.Commands.starts(commands.iterator).find(_._1 == cmd).map(_._2) def source: String = get_blob match { case Some(blob) => blob.source case None => command_iterator(0).map({ case (cmd, _) => cmd.source }).mkString } } /* development graph */ object Nodes { val empty: Nodes = new Nodes(Graph.empty(Node.Name.Ordering)) } final class Nodes private(graph: Graph[Node.Name, Node]) { def apply(name: Node.Name): Node = graph.default_node(name, Node.empty).get_node(name) def is_suppressed(name: Node.Name): Boolean = { val graph1 = graph.default_node(name, Node.empty) graph1.is_maximal(name) && graph1.get_node(name).is_empty } def purge_suppressed: Option[Nodes] = graph.keys_iterator.filter(is_suppressed).toList match { case Nil => None case del => Some(new Nodes((graph /: del)(_.del_node(_)))) } def + (entry: (Node.Name, Node)): Nodes = { val (name, node) = entry val imports = node.header.imports val graph1 = (graph.default_node(name, Node.empty) /: imports)((g, p) => g.default_node(p, Node.empty)) val graph2 = (graph1 /: graph1.imm_preds(name))((g, dep) => g.del_edge(dep, name)) val graph3 = (graph2 /: imports)((g, dep) => g.add_edge(dep, name)) new Nodes(graph3.map_node(name, _ => node)) } 
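/* Note (sketch, not part of the change): Nodes.+ first deletes all existing
   incoming import edges of the updated name and then re-adds those of the
   new header, so stale dependencies cannot survive an edit: after
   nodes + (b -> node), where node's header imports only A, the graph holds
   exactly the edge A -> B, whatever B imported before.
*/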
def domain: Set[Node.Name] = graph.domain def iterator: Iterator[(Node.Name, Node)] = graph.iterator.map({ case (name, (node, _)) => (name, node) }) def theory_name(theory: String): Option[Node.Name] = graph.keys_iterator.find(name => name.theory == theory) def commands_loading(file_name: Node.Name): List[Command] = (for { (_, node) <- iterator cmd <- node.load_commands.iterator name <- cmd.blobs_names.iterator if name == file_name } yield cmd).toList def descendants(names: List[Node.Name]): List[Node.Name] = graph.all_succs(names) def requirements(names: List[Node.Name]): List[Node.Name] = graph.all_preds_rev(names) def topological_order: List[Node.Name] = graph.topological_order override def toString: String = topological_order.mkString("Nodes(", ",", ")") } /** versioning **/ /* particular document versions */ object Version { val init: Version = new Version() def make(nodes: Nodes): Version = new Version(Document_ID.make(), nodes) def purge_future(versions: Map[Document_ID.Version, Version], future: Future[Version]) : Future[Version] = { if (future.is_finished) { val version = future.join versions.get(version.id) match { case Some(version1) if !(version eq version1) => Future.value(version1) case _ => future } } else future } def purge_suppressed( versions: Map[Document_ID.Version, Version]): Map[Document_ID.Version, Version] = { (versions /: (for ((id, v) <- versions.iterator; v1 <- v.purge_suppressed) yield (id, v1)))(_ + _) } } final class Version private( val id: Document_ID.Version = Document_ID.none, val nodes: Nodes = Nodes.empty) { override def toString: String = "Version(" + id + ")" def purge_suppressed: Option[Version] = nodes.purge_suppressed.map(new Version(id, _)) } /* changes of plain text, eventually resulting in document edits */ object Change { val init: Change = new Change() def make(previous: Future[Version], edits: List[Edit_Text], version: Future[Version]): Change = new Change(Some(previous), edits.reverse, version) } final class Change private( val previous: Option[Future[Version]] = Some(Future.value(Version.init)), val rev_edits: List[Edit_Text] = Nil, val version: Future[Version] = Future.value(Version.init)) { def is_finished: Boolean = (previous match { case None => true case Some(future) => future.is_finished }) && version.is_finished def truncate: Change = new Change(None, Nil, version) def purge(versions: Map[Document_ID.Version, Version]): Option[Change] = { val previous1 = previous.map(Version.purge_future(versions, _)) val version1 = Version.purge_future(versions, version) if ((previous eq previous1) && (version eq version1)) None else Some(new Change(previous1, rev_edits, version1)) } } /* history navigation */ object History { val init: History = new History() } final class History private( val undo_list: List[Change] = List(Change.init)) // non-empty list { override def toString: String = "History(" + undo_list.length + ")" def tip: Change = undo_list.head def + (change: Change): History = new History(change :: undo_list) def prune(check: Change => Boolean, retain: Int): Option[(List[Change], History)] = { val n = undo_list.iterator.zipWithIndex.find(p => check(p._1)).get._2 + 1 val (retained, dropped) = undo_list.splitAt(n max retain) retained.splitAt(retained.length - 1) match { case (prefix, List(last)) => Some(dropped, new History(prefix ::: List(last.truncate))) case _ => None } } def purge(versions: Map[Document_ID.Version, Version]): History = { val undo_list1 = undo_list.map(_.purge(versions)) if (undo_list1.forall(_.isEmpty)) this else new 
History(for ((a, b) <- undo_list1 zip undo_list) yield a.getOrElse(b)) } } /* snapshot */ object Snapshot { val init: Snapshot = State.init.snapshot() } abstract class Snapshot { def state: State def version: Version def is_outdated: Boolean def convert(i: Text.Offset): Text.Offset def revert(i: Text.Offset): Text.Offset def convert(range: Text.Range): Text.Range def revert(range: Text.Range): Text.Range def node_name: Node.Name def node: Node def nodes: List[(Node.Name, Node)] def commands_loading: List[Command] def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range] def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command] def markup_to_XML(range: Text.Range, elements: Markup.Elements): XML.Body def messages: List[(XML.Tree, Position.T)] def exports: List[Export.Entry] def exports_map: Map[String, Export.Entry] def find_command(id: Document_ID.Generic): Option[(Node, Command)] def find_command_position(id: Document_ID.Generic, offset: Symbol.Offset) : Option[Line.Node_Position] def cumulate[A]( range: Text.Range, info: A, elements: Markup.Elements, result: List[Command.State] => (A, Text.Markup) => Option[A], status: Boolean = false): List[Text.Info[A]] def select[A]( range: Text.Range, elements: Markup.Elements, result: List[Command.State] => Text.Markup => Option[A], status: Boolean = false): List[Text.Info[A]] def command_results(range: Text.Range): Command.Results def command_results(command: Command): Command.Results def command_id_map: Map[Document_ID.Generic, Command] } /* model */ trait Session { def resources: Resources } trait Model { def session: Session def is_stable: Boolean def snapshot(): Snapshot def node_name: Node.Name def is_theory: Boolean = node_name.is_theory override def toString: String = node_name.toString def get_text(range: Text.Range): Option[String] def node_required: Boolean def get_blob: Option[Blob] def bibtex_entries: List[Text.Info[String]] def node_edits( node_header: Node.Header, text_edits: List[Text.Edit], perspective: Node.Perspective_Text): List[Edit_Text] = { val edits: List[Node.Edit[Text.Edit, Text.Perspective]] = get_blob match { case None => List( Node.Deps( if (session.resources.session_base.loaded_theory(node_name)) { node_header.append_errors( List("Cannot update finished theory " + quote(node_name.theory))) } else node_header), Node.Edits(text_edits), perspective) case Some(blob) => List(Node.Blob(blob), Node.Edits(text_edits)) } edits.flatMap(edit => if (edit.is_void) None else Some(node_name -> edit)) } } /** global state -- document structure, execution process, editing history **/ type Assign_Update = List[(Document_ID.Command, List[Document_ID.Exec])] // update of exec state assignment object State { class Fail(state: State) extends Exception object Assignment { val init: Assignment = new Assignment() } final class Assignment private( val command_execs: Map[Document_ID.Command, List[Document_ID.Exec]] = Map.empty, val is_finished: Boolean = false) { override def toString: String = "Assignment(" + command_execs.size + "," + is_finished + ")" def check_finished: Assignment = { require(is_finished); this } def unfinished: Assignment = new Assignment(command_execs, false) def assign(update: Assign_Update): Assignment = { require(!is_finished) val command_execs1 = (command_execs /: update) { case (res, (command_id, exec_ids)) => if (exec_ids.isEmpty) res - command_id else res + (command_id -> exec_ids) } new Assignment(command_execs1, true) } } val init: State = 
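/* Example (sketch, not part of the change): an Assign_Update pairs command
   ids with exec ids, and an empty exec list retracts the assignment:

     assignment.assign(List(cmd1 -> List(eval1, print1), cmd2 -> Nil))
       // cmd1 (re)assigned, cmd2 removed; the result is marked finished

   assign also requires !is_finished, so each version is assigned only once.
*/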
State().define_version(Version.init, Assignment.init).assign(Version.init.id, Nil, Nil)._2 } final case class State private( /*reachable versions*/ versions: Map[Document_ID.Version, Version] = Map.empty, /*inlined auxiliary files*/ blobs: Set[SHA1.Digest] = Set.empty, /*static markup from define_command*/ commands: Map[Document_ID.Command, Command.State] = Map.empty, /*dynamic markup from execution*/ execs: Map[Document_ID.Exec, Command.State] = Map.empty, /*command-exec assignment for each version*/ assignments: Map[Document_ID.Version, State.Assignment] = Map.empty, /*commands with markup produced by other commands (imm_succs)*/ commands_redirection: Graph[Document_ID.Command, Unit] = Graph.long, /*explicit (linear) history*/ history: History = History.init, /*intermediate state between remove_versions/removed_versions*/ removing_versions: Boolean = false) { override def toString: String = "State(versions = " + versions.size + ", blobs = " + blobs.size + ", commands = " + commands.size + ", execs = " + execs.size + ", assignments = " + assignments.size + ", commands_redirection = " + commands_redirection.size + ", history = " + history.undo_list.size + ", removing_versions = " + removing_versions + ")" private def fail[A]: A = throw new State.Fail(this) def define_version(version: Version, assignment: State.Assignment): State = { val id = version.id copy(versions = versions + (id -> version), assignments = assignments + (id -> assignment.unfinished)) } def define_blob(digest: SHA1.Digest): State = copy(blobs = blobs + digest) def defined_blob(digest: SHA1.Digest): Boolean = blobs.contains(digest) def define_command(command: Command): State = { val id = command.id copy(commands = commands + (id -> command.init_state)) } def defined_command(id: Document_ID.Command): Boolean = commands.isDefinedAt(id) def the_version(id: Document_ID.Version): Version = versions.getOrElse(id, fail) def the_static_state(id: Document_ID.Command): Command.State = commands.getOrElse(id, fail) def the_dynamic_state(id: Document_ID.Exec): Command.State = execs.getOrElse(id, fail) def the_assignment(version: Version): State.Assignment = assignments.getOrElse(version.id, fail) def lookup_id(id: Document_ID.Generic): Option[Command.State] = commands.get(id) orElse execs.get(id) private def self_id(st: Command.State)(id: Document_ID.Generic): Boolean = id == st.command.id || (execs.get(id) match { case Some(st1) => st1.command.id == st.command.id case None => false }) private def other_id(id: Document_ID.Generic) : Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)] = lookup_id(id).map(st => ((Symbol.Text_Chunk.Id(st.command.id), st.command.chunk))) private def redirection(st: Command.State): Graph[Document_ID.Command, Unit] = (commands_redirection /: st.markups.redirection_iterator)({ case (graph, id) => graph.default_node(id, ()).default_node(st.command.id, ()).add_edge(id, st.command.id) }) def accumulate(id: Document_ID.Generic, message: XML.Elem, xml_cache: XML.Cache) : (Command.State, State) = { execs.get(id) match { case Some(st) => val new_st = st.accumulate(self_id(st), other_id, message, xml_cache) val execs1 = execs + (id -> new_st) (new_st, copy(execs = execs1, commands_redirection = redirection(new_st))) case None => commands.get(id) match { case Some(st) => val new_st = st.accumulate(self_id(st), other_id, message, xml_cache) val commands1 = commands + (id -> new_st) (new_st, copy(commands = commands1, commands_redirection = redirection(new_st))) case None => fail } } } def add_export(id: 
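/* Example (sketch, not part of the change): accumulate dispatches on the id,
   preferring dynamic exec states over static command states and failing on
   unknown ids:

     state.accumulate(exec_id, msg, cache)     // updates execs(exec_id)
     state.accumulate(command_id, msg, cache)  // updates commands(command_id)

   Either way, the redirection graph records markup aimed at other commands.
*/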
Document_ID.Generic, entry: Command.Exports.Entry): (Command.State, State) = { execs.get(id) match { case Some(st) => st.add_export(entry) match { case Some(new_st) => (new_st, copy(execs = execs + (id -> new_st))) case None => fail } case None => commands.get(id) match { case Some(st) => st.add_export(entry) match { case Some(new_st) => (new_st, copy(commands = commands + (id -> new_st))) case None => fail } case None => fail } } } def node_exports(version: Version, node_name: Node.Name): Command.Exports = Command.Exports.merge( for { command <- version.nodes(node_name).commands.iterator st <- command_states(version, command).iterator } yield st.exports) def assign(id: Document_ID.Version, edited: List[String], update: Assign_Update) : ((List[Node.Name], List[Command]), State) = { val version = the_version(id) val edited_set = edited.toSet val edited_nodes = (for { (name, _) <- version.nodes.iterator if edited_set(name.node) } yield name).toList def upd(exec_id: Document_ID.Exec, st: Command.State) : Option[(Document_ID.Exec, Command.State)] = if (execs.isDefinedAt(exec_id)) None else Some(exec_id -> st) val (changed_commands, new_execs) = ((Nil: List[Command], execs) /: update) { case ((commands1, execs1), (command_id, exec)) => val st = the_static_state(command_id) val command = st.command val commands2 = command :: commands1 val execs2 = exec match { case Nil => execs1 case eval_id :: print_ids => execs1 ++ upd(eval_id, st) ++ (for (id <- print_ids; up <- upd(id, command.empty_state)) yield up) } (commands2, execs2) } val new_assignment = the_assignment(version).assign(update) val new_state = copy(assignments = assignments + (id -> new_assignment), execs = new_execs) ((edited_nodes, changed_commands), new_state) } def is_assigned(version: Version): Boolean = assignments.get(version.id) match { case Some(assgn) => assgn.is_finished case None => false } def is_stable(change: Change): Boolean = change.is_finished && is_assigned(change.version.get_finished) def recent_finished: Change = history.undo_list.find(_.is_finished) getOrElse fail def recent_stable: Change = history.undo_list.find(is_stable) getOrElse fail def stable_tip_version: Option[Version] = if (is_stable(history.tip)) Some(history.tip.version.get_finished) else None def continue_history( previous: Future[Version], edits: List[Edit_Text], version: Future[Version]): State = { val change = Change.make(previous, edits, version) copy(history = history + change) } def remove_versions(retain: Int = 0): (List[Version], State) = { history.prune(is_stable, retain) match { case Some((dropped, history1)) => val old_versions = dropped.map(change => change.version.get_finished) val removing = old_versions.nonEmpty val state1 = copy(history = history1, removing_versions = removing) (old_versions, state1) case None => fail } } def removed_versions(removed: List[Document_ID.Version]): State = { val versions1 = Version.purge_suppressed(versions -- removed) val assignments1 = assignments -- removed var blobs1_names = Set.empty[Node.Name] var blobs1 = Set.empty[SHA1.Digest] var commands1 = Map.empty[Document_ID.Command, Command.State] var execs1 = Map.empty[Document_ID.Exec, Command.State] for { (version_id, version) <- versions1.iterator command_execs = assignments1(version_id).command_execs (_, node) <- version.nodes.iterator command <- node.commands.iterator } { for ((name, digest) <- command.blobs_defined) { blobs1_names += name blobs1 += digest } if (!commands1.isDefinedAt(command.id)) commands.get(command.id).foreach(st => commands1 += 
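/* Example (sketch, not part of the change): State.assign reads each exec
   list as an evaluation followed by prints:

     update = List(cmd.id -> List(eval_id, print1, print2))
       // eval_id is mapped to the command's static state,
       // print1/print2 to command.empty_state

   exec ids already present in execs are kept unchanged (upd yields None).
*/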
(command.id -> st)) for (exec_id <- command_execs.getOrElse(command.id, Nil)) { if (!execs1.isDefinedAt(exec_id)) execs.get(exec_id).foreach(st => execs1 += (exec_id -> st)) } } copy( versions = versions1, blobs = blobs1, commands = commands1, execs = execs1, commands_redirection = commands_redirection.restrict(commands1.keySet), assignments = assignments1, history = history.purge(versions1), removing_versions = false) } def command_id_map(version: Version, commands: Iterable[Command]) : Map[Document_ID.Generic, Command] = { require(is_assigned(version)) val assignment = the_assignment(version).check_finished (for { command <- commands.iterator id <- (command.id :: assignment.command_execs.getOrElse(command.id, Nil)).iterator } yield (id -> command)).toMap } def command_maybe_consolidated(version: Version, command: Command): Boolean = { require(is_assigned(version)) try { the_assignment(version).check_finished.command_execs.getOrElse(command.id, Nil) match { case eval_id :: print_ids => the_dynamic_state(eval_id).maybe_consolidated && !print_ids.exists(print_id => the_dynamic_state(print_id).consolidating) case Nil => false } } catch { case _: State.Fail => false } } private def command_states_self(version: Version, command: Command) : List[(Document_ID.Generic, Command.State)] = { require(is_assigned(version)) try { the_assignment(version).check_finished.command_execs.getOrElse(command.id, Nil) .map(id => id -> the_dynamic_state(id)) match { case Nil => fail case res => res } } catch { case _: State.Fail => try { List(command.id -> the_static_state(command.id)) } catch { case _: State.Fail => List(command.id -> command.init_state) } } } def command_states(version: Version, command: Command): List[Command.State] = { val self = command_states_self(version, command) val others = if (commands_redirection.defined(command.id)) { (for { command_id <- commands_redirection.imm_succs(command.id).iterator (id, st) <- command_states_self(version, the_static_state(command_id).command) if !self.exists(_._1 == id) } yield (id, st)).toMap.valuesIterator.toList } else Nil self.map(_._2) ::: others.flatMap(_.redirect(command)) } def command_results(version: Version, command: Command): Command.Results = Command.State.merge_results(command_states(version, command)) def command_markup(version: Version, command: Command, index: Command.Markup_Index, range: Text.Range, elements: Markup.Elements): Markup_Tree = Command.State.merge_markup(command_states(version, command), index, range, elements) def markup_to_XML( version: Version, node_name: Node.Name, range: Text.Range, elements: Markup.Elements): XML.Body = { val node = version.nodes(node_name) if (node_name.is_theory) { val markup_index = Command.Markup_Index.markup (for { command <- node.commands.iterator command_range <- command.range.try_restrict(range).iterator markup = command_markup(version, command, markup_index, command_range, elements) tree <- markup.to_XML(command_range, command.source, elements).iterator } yield tree).toList } else { val node_source = node.source Text.Range(0, node_source.length).try_restrict(range) match { case None => Nil case Some(node_range) => val markup = version.nodes.commands_loading(node_name).headOption match { case None => Markup_Tree.empty case Some(command) => val chunk_name = Symbol.Text_Chunk.File(node_name.node) val markup_index = Command.Markup_Index(false, chunk_name) command_markup(version, command, markup_index, node_range, elements) } markup.to_XML(node_range, node_source, elements) } } } def 
node_initialized(version: Version, name: Node.Name): Boolean = name.is_theory && (version.nodes(name).commands.iterator.find(_.potentially_initialized) match { case None => false case Some(command) => command_states(version, command).headOption.exists(_.initialized) }) def node_maybe_consolidated(version: Version, name: Node.Name): Boolean = name.is_theory && version.nodes(name).commands.reverse.iterator.forall(command_maybe_consolidated(version, _)) def node_consolidated(version: Version, name: Node.Name): Boolean = !name.is_theory || { val it = version.nodes(name).commands.reverse.iterator it.hasNext && command_states(version, it.next).exists(_.consolidated) } // persistent user-view def snapshot(name: Node.Name = Node.Name.empty, pending_edits: List[Text.Edit] = Nil) : Snapshot = { val stable = recent_stable val latest = history.tip /* pending edits and unstable changes */ val rev_pending_changes = for { change <- history.undo_list.takeWhile(_ != stable) (a, edits) <- change.rev_edits if a == name } yield edits val edits = (pending_edits /: rev_pending_changes)({ case (edits, Node.Edits(es)) => es ::: edits case (edits, _) => edits }) lazy val reverse_edits = edits.reverse new Snapshot { /* global information */ val state: State = State.this val version: Version = stable.version.get_finished val is_outdated: Boolean = pending_edits.nonEmpty || latest != stable /* local node content */ def convert(offset: Text.Offset): Text.Offset = (offset /: edits)((i, edit) => edit.convert(i)) def revert(offset: Text.Offset): Text.Offset = (offset /: reverse_edits)((i, edit) => edit.revert(i)) def convert(range: Text.Range): Text.Range = range.map(convert) def revert(range: Text.Range): Text.Range = range.map(revert) val node_name: Node.Name = name val node: Node = version.nodes(name) def nodes: List[(Node.Name, Node)] = (node_name :: node.load_commands.flatMap(_.blobs_names)). 
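/* Note (sketch, not part of the change): convert maps offsets of the stable
   version forward through the pending edits, revert folds over them in
   reverse, so revert(convert(i)) == i for offsets not touched by any edit.
*/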
map(name => (name, version.nodes(name))) val commands_loading: List[Command] = if (node_name.is_theory) Nil else version.nodes.commands_loading(node_name) def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range] = (for { cmd <- node.load_commands.iterator blob_name <- cmd.blobs_names.iterator if pred(blob_name) start <- node.command_start(cmd) } yield convert(cmd.core_range + start)).toList def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command] = if (other_node_name.is_theory) { val other_node = version.nodes(other_node_name) val iterator = other_node.command_iterator(revert(offset) max 0) if (iterator.hasNext) { val (command0, _) = iterator.next other_node.commands.reverse.iterator(command0).find(command => !command.is_ignored) } else other_node.commands.reverse.iterator.find(command => !command.is_ignored) } else version.nodes.commands_loading(other_node_name).headOption def markup_to_XML(range: Text.Range, elements: Markup.Elements): XML.Body = state.markup_to_XML(version, node_name, range, elements) lazy val messages: List[(XML.Tree, Position.T)] = (for { (command, start) <- Document.Node.Commands.starts_pos( node.commands.iterator, Token.Pos.file(node_name.node)) pos = command.span.keyword_pos(start).position(command.span.name) (_, tree) <- state.command_results(version, command).iterator } yield (tree, pos)).toList lazy val exports: List[Export.Entry] = state.node_exports(version, node_name).iterator.map(_._2).toList lazy val exports_map: Map[String, Export.Entry] = (for (entry <- exports.iterator) yield (entry.name, entry)).toMap /* find command */ def find_command(id: Document_ID.Generic): Option[(Node, Command)] = state.lookup_id(id) match { case None => None case Some(st) => val command = st.command val node = version.nodes(command.node_name) if (node.commands.contains(command)) Some((node, command)) else None } def find_command_position(id: Document_ID.Generic, offset: Symbol.Offset) : Option[Line.Node_Position] = for ((node, command) <- find_command(id)) yield { val name = command.node_name.node val sources_iterator = node.commands.iterator.takeWhile(_ != command).map(_.source) ++ (if (offset == 0) Iterator.empty else Iterator.single(command.source(Text.Range(0, command.chunk.decode(offset))))) val pos = (Line.Position.zero /: sources_iterator)(_.advance(_)) Line.Node_Position(name, pos) } /* cumulate markup */ def cumulate[A]( range: Text.Range, info: A, elements: Markup.Elements, result: List[Command.State] => (A, Text.Markup) => Option[A], status: Boolean = false): List[Text.Info[A]] = { val former_range = revert(range).inflate_singularity val (chunk_name, command_iterator) = commands_loading.headOption match { case None => (Symbol.Text_Chunk.Default, node.command_iterator(former_range)) case Some(command) => (Symbol.Text_Chunk.File(node_name.node), Iterator((command, 0))) } val markup_index = Command.Markup_Index(status, chunk_name) (for { (command, command_start) <- command_iterator chunk <- command.chunks.get(chunk_name).iterator states = state.command_states(version, command) res = result(states) markup_range <- (former_range - command_start).try_restrict(chunk.range).iterator markup = Command.State.merge_markup(states, markup_index, markup_range, elements) Text.Info(r0, a) <- markup.cumulate[A](markup_range, info, elements, { case (a, Text.Info(r0, b)) => res(a, Text.Info(convert(r0 + command_start), b)) }).iterator r1 <- convert(r0 + command_start).try_restrict(range).iterator } yield Text.Info(r1, a)).toList } def 
select[A]( range: Text.Range, elements: Markup.Elements, result: List[Command.State] => Text.Markup => Option[A], status: Boolean = false): List[Text.Info[A]] = { def result1(states: List[Command.State]): (Option[A], Text.Markup) => Option[Option[A]] = { val res = result(states) (_: Option[A], x: Text.Markup) => res(x) match { case None => None case some => Some(some) } } for (Text.Info(r, Some(x)) <- cumulate(range, None, elements, result1, status)) yield Text.Info(r, x) } /* command results */ def command_results(range: Text.Range): Command.Results = Command.State.merge_results( select[List[Command.State]](range, Markup.Elements.full, command_states => { case _ => Some(command_states) }).flatMap(_.info)) def command_results(command: Command): Command.Results = state.command_results(version, command) /* command ids: static and dynamic */ def command_id_map: Map[Document_ID.Generic, Command] = state.command_id_map(version, version.nodes(node_name).commands) /* output */ override def toString: String = "Snapshot(node = " + node_name.node + ", version = " + version.id + (if (is_outdated) ", outdated" else "") + ")" } } } } diff --git a/src/Pure/PIDE/resources.ML b/src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML +++ b/src/Pure/PIDE/resources.ML @@ -1,402 +1,393 @@ (* Title: Pure/PIDE/resources.ML Author: Makarius Resources for theories and auxiliary files. *) signature RESOURCES = sig val default_qualifier: string val init_session: - {html_symbols: (string * int) list, - session_positions: (string * Properties.T) list, + {session_positions: (string * Properties.T) list, session_directories: (string * string) list, session_chapters: (string * string) list, bibtex_entries: (string * string list) list, command_timings: Properties.T list, docs: string list, global_theories: (string * string) list, loaded_theories: string list} -> unit val init_session_yxml: string -> unit val init_session_file: Path.T -> unit val finish_session_base: unit -> unit val global_theory: string -> string option val loaded_theory: string -> bool - val html_symbols: unit -> HTML.symbols val check_session: Proof.context -> string * Position.T -> string val session_chapter: string -> string val last_timing: Toplevel.transition -> Time.time val check_doc: Proof.context -> string * Position.T -> string val master_directory: theory -> Path.T val imports_of: theory -> (string * Position.T) list val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory val thy_path: Path.T -> Path.T val theory_qualifier: string -> string val theory_bibtex_entries: string -> string list val find_theory_file: string -> Path.T option val import_name: string -> Path.T -> string -> {node_name: Path.T, master_dir: Path.T, theory_name: string} val check_thy: Path.T -> string -> {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, imports: (string * Position.T) list, keywords: Thy_Header.keywords} val parse_files: string -> (theory -> Token.file list) parser val provide: Path.T * SHA1.digest -> theory -> theory val provide_file: Token.file -> theory -> theory val provide_parse_files: string -> (theory -> Token.file list * theory) parser val loaded_files_current: theory -> bool val check_path: Proof.context -> Path.T option -> string * Position.T -> Path.T val check_file: Proof.context -> Path.T option -> string * Position.T -> Path.T val check_dir: Proof.context -> Path.T option -> string * Position.T -> Path.T end; structure Resources: RESOURCES = struct (* command timings *) type timings = ((string * 
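(* Example (sketch, not part of the change): timings is a two-level table,
   file -> offset -> (command name, accumulated time). get_timings only
   answers when the recorded name matches: the entry at offset 120 of
   "Foo.thy" is used for a transition named "lemma" only if it was recorded
   under the name "lemma". *)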
Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) val empty_timings: timings = Symtab.empty; fun update_timings props = (case Markup.parse_command_timing_properties props of SOME ({file, offset, name}, time) => Symtab.map_default (file, Inttab.empty) (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time))) | NONE => I); fun make_timings command_timings = fold update_timings command_timings empty_timings; fun approximative_id name pos = (case (Position.file_of pos, Position.offset_of pos) of (SOME file, SOME offset) => if name = "" then NONE else SOME {file = file, offset = offset, name = name} | _ => NONE); fun get_timings timings tr = (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of SOME {file, offset, name} => (case Symtab.lookup timings file of SOME offsets => (case Inttab.lookup offsets offset of SOME (name', time) => if name = name' then SOME time else NONE | NONE => NONE) | NONE => NONE) | NONE => NONE) |> the_default Time.zeroTime; (* session base *) val default_qualifier = "Draft"; type entry = {pos: Position.T, serial: serial}; fun make_entry props : entry = {pos = Position.of_properties props, serial = serial ()}; val empty_session_base = - {html_symbols = HTML.no_symbols, - session_positions = []: (string * entry) list, + {session_positions = []: (string * entry) list, session_directories = Symtab.empty: Path.T list Symtab.table, session_chapters = Symtab.empty: string Symtab.table, bibtex_entries = Symtab.empty: string list Symtab.table, timings = empty_timings, docs = []: (string * entry) list, global_theories = Symtab.empty: string Symtab.table, loaded_theories = Symtab.empty: unit Symtab.table}; val global_session_base = Synchronized.var "Sessions.base" empty_session_base; fun init_session - {html_symbols, session_positions, session_directories, session_chapters, + {session_positions, session_directories, session_chapters, bibtex_entries, command_timings, docs, global_theories, loaded_theories} = Synchronized.change global_session_base (fn _ => - {html_symbols = HTML.make_symbols html_symbols, - session_positions = sort_by #1 (map (apsnd make_entry) session_positions), + {session_positions = sort_by #1 (map (apsnd make_entry) session_positions), session_directories = fold_rev (fn (dir, name) => Symtab.cons_list (name, Path.explode dir)) session_directories Symtab.empty, session_chapters = Symtab.make session_chapters, bibtex_entries = Symtab.make bibtex_entries, timings = make_timings command_timings, docs = sort_by #1 (map (apsnd make_entry o rpair []) docs), global_theories = Symtab.make global_theories, loaded_theories = Symtab.make_set loaded_theories}); fun init_session_yxml yxml = let - val (html_symbols, (session_positions, (session_directories, (session_chapters, - (bibtex_entries, (command_timings, (docs, (global_theories, loaded_theories)))))))) = + val (session_positions, (session_directories, (session_chapters, + (bibtex_entries, (command_timings, (docs, (global_theories, loaded_theories))))))) = YXML.parse_body yxml |> let open XML.Decode in - pair (list (pair string int)) - (pair (list (pair string properties)) - (pair (list (pair string string)) (pair (list (pair string string)) - (pair (list (pair string (list string))) (pair (list properties) - (pair (list string) (pair (list (pair string string)) (list string)))))))) + (pair (list (pair string properties)) + (pair (list (pair string string)) (pair (list (pair string string)) + (pair (list (pair string (list string))) (pair (list properties) + 
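(* Note (sketch, not part of the change): this decoder must mirror the
   producer on the Isabelle/Scala side (Resources, not part of this diff);
   dropping html_symbols here assumes the corresponding
   "pair (list (pair string int))" wrapper is removed from the encoder as
   well. *)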
(pair (list string) (pair (list (pair string string)) (list string)))))))) end; in init_session - {html_symbols = html_symbols, - session_positions = session_positions, + {session_positions = session_positions, session_directories = session_directories, session_chapters = session_chapters, bibtex_entries = bibtex_entries, command_timings = command_timings, docs = docs, global_theories = global_theories, loaded_theories = loaded_theories} end; fun init_session_file path = init_session_yxml (File.read path) before File.rm path; fun finish_session_base () = Synchronized.change global_session_base (fn {global_theories, loaded_theories, ...} => - {html_symbols = HTML.no_symbols, - session_positions = [], + {session_positions = [], session_directories = Symtab.empty, session_chapters = Symtab.empty, bibtex_entries = Symtab.empty, timings = empty_timings, docs = [], global_theories = global_theories, loaded_theories = loaded_theories}); fun get_session_base f = f (Synchronized.value global_session_base); fun global_theory a = Symtab.lookup (get_session_base #global_theories) a; fun loaded_theory a = Symtab.defined (get_session_base #loaded_theories) a; -fun html_symbols () = get_session_base #html_symbols; - fun check_name which kind markup ctxt arg = Completion.check_item kind markup (get_session_base which |> sort_by #1) ctxt arg; val check_session = check_name #session_positions "session" (fn (name, {pos, serial}) => Markup.entity Markup.sessionN name |> Markup.properties (Position.entity_properties_of false serial pos)); fun session_chapter name = the_default "Unsorted" (Symtab.lookup (get_session_base #session_chapters) name); fun last_timing tr = get_timings (get_session_base #timings) tr; val check_doc = check_name #docs "documentation" (Markup.doc o #1); (* manage source files *) type files = {master_dir: Path.T, (*master directory of theory source*) imports: (string * Position.T) list, (*source specification of imports*) provided: (Path.T * SHA1.digest) list}; (*source path, digest*) fun make_files (master_dir, imports, provided): files = {master_dir = master_dir, imports = imports, provided = provided}; structure Files = Theory_Data ( type T = files; val empty = make_files (Path.current, [], []); val extend = I; fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) = let val provided' = Library.merge (op =) (provided1, provided2) in make_files (master_dir, imports, provided') end ); fun map_files f = Files.map (fn {master_dir, imports, provided} => make_files (f (master_dir, imports, provided))); val master_directory = #master_dir o Files.get; val imports_of = #imports o Files.get; fun begin_theory master_dir {name, imports, keywords} parents = Theory.begin_theory name parents |> map_files (fn _ => (Path.explode (Path.implode_symbolic master_dir), imports, [])) |> Thy_Header.add_keywords keywords; (* theory files *) val thy_path = Path.ext "thy"; fun theory_qualifier theory = (case global_theory theory of SOME qualifier => qualifier | NONE => Long_Name.qualifier theory); fun theory_name qualifier theory = if Long_Name.is_qualified theory orelse is_some (global_theory theory) then theory else Long_Name.qualify qualifier theory; fun theory_bibtex_entries theory = Symtab.lookup_list (get_session_base #bibtex_entries) (theory_qualifier theory); fun find_theory_file thy_name = let val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name)); val session = theory_qualifier thy_name; val dirs = Symtab.lookup_list (get_session_base #session_directories) 
session; in dirs |> get_first (fn dir => let val path = dir + thy_file in if File.is_file path then SOME path else NONE end) end; fun make_theory_node node_name theory = {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory}; fun loaded_theory_node theory = {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory}; fun import_name qualifier dir s = let val theory = theory_name qualifier (Thy_Header.import_name s); fun theory_node () = make_theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))) theory; in if not (Thy_Header.is_base_name s) then theory_node () else if loaded_theory theory then loaded_theory_node theory else (case find_theory_file theory of SOME node_name => make_theory_node node_name theory | NONE => if Long_Name.is_qualified s then loaded_theory_node theory else theory_node ()) end; fun check_file dir file = File.check_file (File.full_path dir file); fun check_thy dir thy_name = let val thy_base_name = Long_Name.base_name thy_name; val master_file = (case find_theory_file thy_name of SOME path => check_file Path.current path | NONE => check_file dir (thy_path (Path.basic thy_base_name))); val text = File.read master_file; val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_base_name <> name andalso error ("Bad theory name " ^ quote name ^ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos); in {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} end; (* load files *) fun parse_files cmd = Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy => (case Token.get_files tok of [] => let val keywords = Thy_Header.get_keywords thy; val master_dir = master_directory thy; val pos = Token.pos_of tok; val src_paths = Keyword.command_files keywords cmd (Path.explode name); in map (Command.read_file master_dir pos) src_paths end | files => map Exn.release files)); fun provide (src_path, id) = map_files (fn (master_dir, imports, provided) => if AList.defined (op =) provided src_path then error ("Duplicate use of source file: " ^ Path.print src_path) else (master_dir, imports, (src_path, id) :: provided)); fun provide_file (file: Token.file) = provide (#src_path file, #digest file); fun provide_parse_files cmd = parse_files cmd >> (fn files => fn thy => let val fs = files thy; val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; in (fs, thy') end); fun load_file thy src_path = let val full_path = check_file (master_directory thy) src_path; val text = File.read full_path; val id = SHA1.digest text; in ((full_path, id), text) end; fun loaded_files_current thy = #provided (Files.get thy) |> forall (fn (src_path, id) => (case try (load_file thy) src_path of NONE => false | SOME ((_, id'), _) => id = id')); (* formal check *) fun formal_check check_file ctxt opt_dir (name, pos) = let fun err msg = error (msg ^ Position.here pos); val _ = Context_Position.report ctxt pos Markup.language_path; val dir = (case opt_dir of SOME dir => dir | NONE => master_directory (Proof_Context.theory_of ctxt)); val path = dir + Path.explode name handle ERROR msg => err msg; val _ = Path.expand path handle ERROR msg => err msg; val _ = Context_Position.report ctxt pos (Markup.path (Path.implode_symbolic path)); val _ = check_file path handle ERROR msg => err msg; in path end; val check_path = formal_check I; val check_file = formal_check File.check_file; val check_dir = 
formal_check File.check_dir; (* antiquotations *) local fun document_antiq (check: Proof.context -> Path.T option -> string * Position.T -> Path.T) = Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => let val _ = check ctxt NONE (name, pos); val latex = Latex.string (Latex.output_ascii_breakable "/" name); in Latex.enclose_block "\\isatt{" "}" [latex] end); fun ML_antiq check = Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, (name, pos)) => check ctxt (SOME Path.current) (name, pos) |> ML_Syntax.print_path); in val _ = Theory.setup (Thy_Output.antiquotation_verbatim_embedded \<^binding>\session\ (Scan.lift Parse.embedded_position) check_session #> Thy_Output.antiquotation_verbatim_embedded \<^binding>\doc\ (Scan.lift Parse.embedded_position) check_doc #> Thy_Output.antiquotation_raw_embedded \<^binding>\path\ (document_antiq check_path) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\file\ (document_antiq check_file) (K I) #> Thy_Output.antiquotation_raw_embedded \<^binding>\dir\ (document_antiq check_dir) (K I) #> ML_Antiquotation.value_embedded \<^binding>\path\ (ML_antiq check_path) #> ML_Antiquotation.value_embedded \<^binding>\file\ (ML_antiq check_file) #> ML_Antiquotation.value_embedded \<^binding>\dir\ (ML_antiq check_dir) #> ML_Antiquotation.value_embedded \<^binding>\path_binding\ (Scan.lift (Parse.position Parse.path) >> (ML_Syntax.print_path_binding o Path.explode_binding)) #> ML_Antiquotation.value \<^binding>\master_dir\ (Args.theory >> (ML_Syntax.print_path o master_directory))); end; end; diff --git a/src/Pure/PIDE/resources.scala b/src/Pure/PIDE/resources.scala --- a/src/Pure/PIDE/resources.scala +++ b/src/Pure/PIDE/resources.scala @@ -1,448 +1,446 @@ /* Title: Pure/PIDE/resources.scala Author: Makarius Resources for theories and auxiliary files. 
*/ package isabelle import scala.util.parsing.input.Reader import java.io.{File => JFile} class Resources( val sessions_structure: Sessions.Structure, val session_base: Sessions.Base, val log: Logger = No_Logger, command_timings: List[Properties.T] = Nil) { resources => /* init session */ def init_session_yxml: String = { import XML.Encode._ YXML.string_of_body( - pair(list(pair(string, int)), pair(list(pair(string, properties)), pair(list(pair(string, string)), pair(list(pair(string, string)), pair(list(pair(string, list(string))), pair(list(properties), pair(list(string), - pair(list(pair(string, string)), list(string)))))))))( - (Symbol.codes, + pair(list(pair(string, string)), list(string))))))))( (sessions_structure.session_positions, (sessions_structure.dest_session_directories, (sessions_structure.session_chapters, (sessions_structure.bibtex_entries, (command_timings, (session_base.doc_names, (session_base.global_theories.toList, - session_base.loaded_theories.keys)))))))))) + session_base.loaded_theories.keys))))))))) } /* file formats */ def make_theory_name(name: Document.Node.Name): Option[Document.Node.Name] = File_Format.registry.get(name).flatMap(_.make_theory_name(resources, name)) def make_theory_content(thy_name: Document.Node.Name): Option[String] = File_Format.registry.get_theory(thy_name).flatMap(_.make_theory_content(resources, thy_name)) def make_preview(snapshot: Document.Snapshot): Option[Presentation.Preview] = File_Format.registry.get(snapshot.node_name).flatMap(_.make_preview(snapshot)) def is_hidden(name: Document.Node.Name): Boolean = !name.is_theory || name.theory == Sessions.root_name || File_Format.registry.is_theory(name) def thy_path(path: Path): Path = path.ext("thy") /* file-system operations */ def append(dir: String, source_path: Path): String = (Path.explode(dir) + source_path).expand.implode def append(node_name: Document.Node.Name, source_path: Path): String = append(node_name.master_dir, source_path) def make_theory_node(dir: String, file: Path, theory: String): Document.Node.Name = { val node = append(dir, file) val master_dir = append(dir, file.dir) Document.Node.Name(node, master_dir, theory) } def loaded_theory_node(theory: String): Document.Node.Name = Document.Node.Name(theory, "", theory) /* source files of Isabelle/ML bootstrap */ def source_file(raw_name: String): Option[String] = { if (Path.is_wellformed(raw_name)) { if (Path.is_valid(raw_name)) { def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None val path = Path.explode(raw_name) val path1 = if (path.is_absolute || path.is_current) check(path) else { check(Path.explode("~~/src/Pure") + path) orElse (if (Isabelle_System.getenv("ML_SOURCES") == "") None else check(Path.explode("$ML_SOURCES") + path)) } Some(File.platform_path(path1 getOrElse path)) } else None } else Some(raw_name) } /* theory files */ def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name): () => List[Path] = { val (is_utf8, raw_text) = with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString)) () => { if (syntax.load_commands_in(raw_text)) { val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text)) val spans = syntax.parse_spans(text) val dir = Path.explode(name.master_dir) spans.iterator.flatMap(Command.span_files(syntax, _)._1). 
map(a => (dir + Path.explode(a)).expand).toList } else Nil } } def pure_files(syntax: Outer_Syntax): List[Path] = { val pure_dir = Path.explode("~~/src/Pure") val roots = for { (name, _) <- Thy_Header.ml_roots } yield (pure_dir + Path.explode(name)).expand val files = for { (path, (_, theory)) <- roots zip Thy_Header.ml_roots file <- loaded_files(syntax, Document.Node.Name(path.implode, path.dir.implode, theory))() } yield file roots ::: files } def theory_name(qualifier: String, theory: String): String = if (Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory)) theory else Long_Name.qualify(qualifier, theory) def find_theory_node(theory: String): Option[Document.Node.Name] = { val thy_file = thy_path(Path.basic(Long_Name.base_name(theory))) val session = session_base.theory_qualifier(theory) val dirs = sessions_structure.get(session) match { case Some(info) => info.dirs case None => Nil } dirs.collectFirst({ case dir if (dir + thy_file).is_file => make_theory_node("", dir + thy_file, theory) }) } def import_name(qualifier: String, dir: String, s: String): Document.Node.Name = { val theory = theory_name(qualifier, Thy_Header.import_name(s)) def theory_node = make_theory_node(dir, thy_path(Path.explode(s)), theory) if (!Thy_Header.is_base_name(s)) theory_node else if (session_base.loaded_theory(theory)) loaded_theory_node(theory) else { find_theory_node(theory) match { case Some(node_name) => node_name case None => if (Long_Name.is_qualified(s)) loaded_theory_node(theory) else theory_node } } } def import_name(name: Document.Node.Name, s: String): Document.Node.Name = import_name(session_base.theory_qualifier(name), name.master_dir, s) def import_name(info: Sessions.Info, s: String): Document.Node.Name = import_name(info.name, info.dir.implode, s) def find_theory(file: JFile): Option[Document.Node.Name] = { for { qualifier <- session_base.session_directories.get(File.canonical(file).getParentFile) theory_base <- proper_string(Thy_Header.theory_name(file.getName)) theory = theory_name(qualifier, theory_base) theory_node <- find_theory_node(theory) if File.eq(theory_node.path.file, file) } yield theory_node } def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = { val context_session = session_base.theory_qualifier(context_name) val context_dir = try { Some(context_name.master_dir_path) } catch { case ERROR(_) => None } (for { (session, (info, _)) <- sessions_structure.imports_graph.iterator dir <- (if (session == context_session) context_dir.toList else info.dirs).iterator theory <- Thy_Header.try_read_dir(dir).iterator if Completion.completed(s)(theory) } yield { if (session == context_session || session_base.global_theories.isDefinedAt(theory)) theory else Long_Name.qualify(session, theory) }).toList.sorted } def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A = { val path = name.path if (path.is_file) using(Scan.byte_reader(path.file))(f) else if (name.node == name.theory) error("Cannot load theory " + quote(name.theory)) else error ("Cannot load theory file " + path) } def check_thy_reader(node_name: Document.Node.Name, reader: Reader[Char], start: Token.Pos = Token.Pos.command, strict: Boolean = true): Document.Node.Header = { if (node_name.is_theory && reader.source.length > 0) { try { val header = Thy_Header.read(reader, start, strict) val base_name = node_name.theory_base_name if (Long_Name.is_qualified(header.name)) { error("Bad theory name " + quote(header.name) + " with qualification" + 
Position.here(header.pos)) } if (base_name != header.name) { error("Bad theory name " + quote(header.name) + " for file " + thy_path(Path.basic(base_name)) + Position.here(header.pos) + Completion.report_theories(header.pos, List(base_name))) } val imports_pos = header.imports_pos.map({ case (s, pos) => val name = import_name(node_name, s) if (Sessions.exclude_theory(name.theory_base_name)) error("Bad theory name " + quote(name.theory_base_name) + Position.here(pos)) (name, pos) }) Document.Node.Header(imports_pos, header.keywords, header.abbrevs) } catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) } } else Document.Node.no_header } def check_thy(name: Document.Node.Name, start: Token.Pos = Token.Pos.command, strict: Boolean = true): Document.Node.Header = with_thy_reader(name, check_thy_reader(name, _, start, strict)) /* special header */ def special_header(name: Document.Node.Name): Option[Document.Node.Header] = { val imports = if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name)) else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP)) else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE)) else Nil if (imports.isEmpty) None else Some(Document.Node.Header(imports.map((_, Position.none)))) } /* blobs */ def undefined_blobs(nodes: Document.Nodes): List[Document.Node.Name] = (for { (node_name, node) <- nodes.iterator if !session_base.loaded_theory(node_name) cmd <- node.load_commands.iterator name <- cmd.blobs_undefined.iterator } yield name).toList /* document changes */ def parse_change( reparse_limit: Int, previous: Document.Version, doc_blobs: Document.Blobs, edits: List[Document.Edit_Text], consolidate: List[Document.Node.Name]): Session.Change = Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate) def commit(change: Session.Change) { } /* theory and file dependencies */ def dependencies( thys: List[(Document.Node.Name, Position.T)], progress: Progress = new Progress): Dependencies[Unit] = Dependencies.empty[Unit].require_thys((), thys, progress = progress) def session_dependencies(info: Sessions.Info, progress: Progress = new Progress) : Dependencies[Options] = { (Dependencies.empty[Options] /: info.theories)({ case (dependencies, (options, thys)) => dependencies.require_thys(options, for { (thy, pos) <- thys } yield (import_name(info, thy), pos), progress = progress) }) } object Dependencies { def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty) private def show_path(names: List[Document.Node.Name]): String = names.map(name => quote(name.theory)).mkString(" via ") private def cycle_msg(names: List[Document.Node.Name]): String = "Cyclic dependency of " + show_path(names) private def required_by(initiators: List[Document.Node.Name]): String = if (initiators.isEmpty) "" else "\n(required by " + show_path(initiators.reverse) + ")" } final class Dependencies[A] private( rev_entries: List[Document.Node.Entry], seen: Map[Document.Node.Name, A]) { private def cons(entry: Document.Node.Entry): Dependencies[A] = new Dependencies[A](entry :: rev_entries, seen) def require_thy(adjunct: A, thy: (Document.Node.Name, Position.T), initiators: List[Document.Node.Name] = Nil, progress: Progress = new Progress): Dependencies[A] = { val (name, pos) = thy def message: String = "The error(s) above occurred for theory " + quote(name.theory) + Dependencies.required_by(initiators) + Position.here(pos) if (seen.isDefinedAt(name)) 
this else { val dependencies1 = new Dependencies[A](rev_entries, seen + (name -> adjunct)) if (session_base.loaded_theory(name)) dependencies1 else { try { if (initiators.contains(name)) error(Dependencies.cycle_msg(initiators)) progress.expose_interrupt() val header = try { check_thy(name, Token.Pos.file(name.node)).cat_errors(message) } catch { case ERROR(msg) => cat_error(msg, message) } val entry = Document.Node.Entry(name, header) dependencies1.require_thys(adjunct, header.imports_pos, initiators = name :: initiators, progress = progress).cons(entry) } catch { case e: Throwable => dependencies1.cons(Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e)))) } } } } def require_thys(adjunct: A, thys: List[(Document.Node.Name, Position.T)], progress: Progress = new Progress, initiators: List[Document.Node.Name] = Nil): Dependencies[A] = (this /: thys)(_.require_thy(adjunct, _, progress = progress, initiators = initiators)) def entries: List[Document.Node.Entry] = rev_entries.reverse def theories: List[Document.Node.Name] = entries.map(_.name) def theories_adjunct: List[(Document.Node.Name, A)] = theories.map(name => (name, seen(name))) def errors: List[String] = entries.flatMap(_.header.errors) def check_errors: Dependencies[A] = errors match { case Nil => this case errs => error(cat_lines(errs)) } lazy val theory_graph: Document.Node.Name.Graph[Unit] = { val regular = theories.toSet val irregular = (for { entry <- entries.iterator imp <- entry.header.imports if !regular(imp) } yield imp).toSet Document.Node.Name.make_graph( irregular.toList.map(name => ((name, ()), Nil)) ::: entries.map(entry => ((entry.name, ()), entry.header.imports))) } lazy val loaded_theories: Graph[String, Outer_Syntax] = (session_base.loaded_theories /: entries)({ case (graph, entry) => val name = entry.name.theory val imports = entry.header.imports.map(_.theory) val graph1 = (graph /: (name :: imports))(_.default_node(_, Outer_Syntax.empty)) val graph2 = (graph1 /: imports)(_.add_edge(_, name)) val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node) val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header graph2.map_node(name, _ => syntax) }) def loaded_files(pure: Boolean): List[(String, List[Path])] = { val loaded_files = theories.map(_.theory) zip Par_List.map((e: () => List[Path]) => e(), theories.map(name => resources.loaded_files(loaded_theories.get_node(name.theory), name))) if (pure) { val pure_files = resources.pure_files(overall_syntax) loaded_files.map({ case (name, files) => (name, if (name == Thy_Header.PURE) pure_files ::: files else files) }) } else loaded_files } def imported_files: List[Path] = { val base_theories = loaded_theories.all_preds(theories.map(_.theory)). filter(session_base.loaded_theories.defined) base_theories.map(theory => session_base.known_theories(theory).name.path) ::: base_theories.flatMap(session_base.known_loaded_files(_)) } lazy val overall_syntax: Outer_Syntax = Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node)) override def toString: String = entries.toString } } diff --git a/src/Pure/ROOT.ML b/src/Pure/ROOT.ML --- a/src/Pure/ROOT.ML +++ b/src/Pure/ROOT.ML @@ -1,355 +1,354 @@ (* Title: Pure/ROOT.ML Author: Makarius Main entry point for the Isabelle/Pure bootstrap process. Note: When this file is open in the Prover IDE, the ML files of Isabelle/Pure can be explored interactively. 
This is a separate copy of Pure within Pure: it does not affect the running logic session. *) chapter "Isabelle/Pure bootstrap"; ML_file "ML/ml_name_space.ML"; section "Bootstrap phase 0: Poly/ML setup"; ML_file "ML/ml_init.ML"; ML_file "ML/ml_system.ML"; ML_file "System/distribution.ML"; ML_file "General/basics.ML"; ML_file "General/symbol_explode.ML"; ML_file "Concurrent/multithreading.ML"; ML_file "Concurrent/unsynchronized.ML"; ML_file "Concurrent/synchronized.ML"; ML_file "Concurrent/counter.ML"; ML_file "ML/ml_heap.ML"; ML_file "ML/ml_profiling.ML"; ML_file "ML/ml_print_depth0.ML"; ML_file "ML/ml_pretty.ML"; ML_file "ML/ml_compiler0.ML"; section "Bootstrap phase 1: towards ML within position context"; subsection "Library of general tools"; ML_file "library.ML"; ML_file "General/print_mode.ML"; ML_file "General/alist.ML"; ML_file "General/table.ML"; ML_file "General/random.ML"; ML_file "General/value.ML"; ML_file "General/properties.ML"; ML_file "General/output.ML"; ML_file "PIDE/markup.ML"; ML_file "General/utf8.ML"; ML_file "General/scan.ML"; ML_file "General/source.ML"; ML_file "General/symbol.ML"; ML_file "General/position.ML"; ML_file "General/symbol_pos.ML"; ML_file "General/input.ML"; ML_file "General/comment.ML"; ML_file "General/antiquote.ML"; ML_file "ML/ml_lex.ML"; ML_file "ML/ml_compiler1.ML"; section "Bootstrap phase 2: towards ML within Isar context"; subsection "Library of general tools"; ML_file "General/integer.ML"; ML_file "General/stack.ML"; ML_file "General/queue.ML"; ML_file "General/heap.ML"; ML_file "General/same.ML"; ML_file "General/ord_list.ML"; ML_file "General/balanced_tree.ML"; ML_file "General/linear_set.ML"; ML_file "General/buffer.ML"; ML_file "General/pretty.ML"; ML_file "General/rat.ML"; ML_file "PIDE/xml.ML"; ML_file "General/path.ML"; ML_file "General/url.ML"; ML_file "System/bash_syntax.ML"; ML_file "General/file.ML"; ML_file "General/long_name.ML"; ML_file "General/binding.ML"; ML_file "General/socket_io.ML"; ML_file "General/seq.ML"; ML_file "General/timing.ML"; ML_file "General/sha1.ML"; ML_file "PIDE/byte_message.ML"; ML_file "PIDE/yxml.ML"; ML_file "PIDE/protocol_message.ML"; ML_file "PIDE/document_id.ML"; ML_file "General/change_table.ML"; ML_file "General/graph.ML"; ML_file "System/options.ML"; subsection "Fundamental structures"; ML_file "name.ML"; ML_file "term.ML"; ML_file "context.ML"; ML_file "config.ML"; ML_file "context_position.ML"; ML_file "soft_type_system.ML"; subsection "Concurrency within the ML runtime"; ML_file "ML/exn_properties.ML"; ML_file_no_debug "ML/exn_debugger.ML"; ML_file "Concurrent/thread_data_virtual.ML"; ML_file "Concurrent/isabelle_thread.ML"; ML_file "Concurrent/single_assignment.ML"; ML_file "Concurrent/par_exn.ML"; ML_file "Concurrent/task_queue.ML"; ML_file "Concurrent/future.ML"; ML_file "Concurrent/event_timer.ML"; ML_file "Concurrent/timeout.ML"; ML_file "Concurrent/lazy.ML"; ML_file "Concurrent/par_list.ML"; ML_file "Concurrent/mailbox.ML"; ML_file "Concurrent/cache.ML"; ML_file "PIDE/active.ML"; ML_file "Thy/export.ML"; subsection "Inner syntax"; ML_file "Syntax/type_annotation.ML"; ML_file "Syntax/term_position.ML"; ML_file "Syntax/lexicon.ML"; ML_file "Syntax/ast.ML"; ML_file "Syntax/syntax_ext.ML"; ML_file "Syntax/parser.ML"; ML_file "Syntax/syntax_trans.ML"; ML_file "Syntax/mixfix.ML"; ML_file "Syntax/printer.ML"; ML_file "Syntax/syntax.ML"; subsection "Core of tactical proof system"; ML_file "term_ord.ML"; ML_file "term_subst.ML"; ML_file "General/completion.ML"; ML_file "General/name_space.ML"; 
ML_file "sorts.ML"; ML_file "type.ML"; ML_file "logic.ML"; ML_file "Syntax/simple_syntax.ML"; ML_file "net.ML"; ML_file "item_net.ML"; ML_file "envir.ML"; ML_file "consts.ML"; ML_file "term_xml.ML"; ML_file "primitive_defs.ML"; ML_file "sign.ML"; ML_file "defs.ML"; ML_file "term_sharing.ML"; ML_file "pattern.ML"; ML_file "unify.ML"; ML_file "theory.ML"; ML_file "proofterm.ML"; ML_file "thm.ML"; ML_file "more_pattern.ML"; ML_file "more_unify.ML"; ML_file "more_thm.ML"; ML_file "facts.ML"; ML_file "thm_name.ML"; ML_file "global_theory.ML"; ML_file "pure_thy.ML"; ML_file "drule.ML"; ML_file "morphism.ML"; ML_file "variable.ML"; ML_file "conv.ML"; ML_file "goal_display.ML"; ML_file "tactical.ML"; ML_file "search.ML"; ML_file "tactic.ML"; ML_file "raw_simplifier.ML"; ML_file "conjunction.ML"; ML_file "assumption.ML"; subsection "Isar -- Intelligible Semi-Automated Reasoning"; (*ML support and global execution*) ML_file "ML/ml_syntax.ML"; ML_file "ML/ml_env.ML"; ML_file "ML/ml_options.ML"; ML_file "ML/ml_print_depth.ML"; ML_file_no_debug "Isar/runtime.ML"; ML_file "PIDE/execution.ML"; ML_file "ML/ml_compiler.ML"; ML_file "skip_proof.ML"; ML_file "goal.ML"; (*outer syntax*) ML_file "Isar/keyword.ML"; ML_file "Isar/token.ML"; ML_file "Isar/parse.ML"; ML_file "Thy/document_source.ML"; ML_file "Thy/thy_header.ML"; ML_file "Thy/document_marker.ML"; (*proof context*) ML_file "Isar/object_logic.ML"; ML_file "Isar/rule_cases.ML"; ML_file "Isar/auto_bind.ML"; ML_file "type_infer.ML"; ML_file "Syntax/local_syntax.ML"; ML_file "Isar/proof_context.ML"; ML_file "type_infer_context.ML"; ML_file "Syntax/syntax_phases.ML"; ML_file "Isar/args.ML"; (*theory specifications*) ML_file "Isar/local_defs.ML"; ML_file "Isar/local_theory.ML"; ML_file "Isar/entity.ML"; ML_file "PIDE/command_span.ML"; ML_file "Thy/thy_element.ML"; ML_file "Thy/markdown.ML"; -ML_file "Thy/html.ML"; ML_file "Thy/latex.ML"; (*ML with context and antiquotations*) ML_file "ML/ml_context.ML"; ML_file "ML/ml_antiquotation.ML"; ML_file "ML/ml_compiler2.ML"; ML_file "ML/ml_pid.ML"; section "Bootstrap phase 3: towards theory Pure and final ML toplevel setup"; (*basic proof engine*) ML_file "par_tactical.ML"; ML_file "context_tactic.ML"; ML_file "Isar/proof_display.ML"; ML_file "Isar/attrib.ML"; ML_file "Isar/context_rules.ML"; ML_file "Isar/method.ML"; ML_file "Isar/proof.ML"; ML_file "Isar/element.ML"; ML_file "Isar/obtain.ML"; ML_file "Isar/subgoal.ML"; ML_file "Isar/calculation.ML"; (*local theories and targets*) ML_file "Isar/locale.ML"; ML_file "Isar/generic_target.ML"; ML_file "Isar/bundle.ML"; ML_file "Isar/overloading.ML"; ML_file "axclass.ML"; ML_file "Isar/class.ML"; ML_file "Isar/named_target.ML"; ML_file "Isar/expression.ML"; ML_file "Isar/interpretation.ML"; ML_file "Isar/class_declaration.ML"; ML_file "Isar/target_context.ML"; ML_file "Isar/experiment.ML"; ML_file "simplifier.ML"; ML_file "Tools/plugin.ML"; (*executable theory content*) ML_file "Isar/code.ML"; (*specifications*) ML_file "Isar/spec_rules.ML"; ML_file "Isar/specification.ML"; ML_file "Isar/parse_spec.ML"; ML_file "Isar/typedecl.ML"; (*toplevel transactions*) ML_file "Isar/proof_node.ML"; ML_file "Isar/toplevel.ML"; (*proof term operations*) ML_file "Proof/proof_rewrite_rules.ML"; ML_file "Proof/proof_syntax.ML"; ML_file "Proof/proof_checker.ML"; ML_file "Proof/extraction.ML"; (*Isabelle system*) ML_file "System/bash.ML"; ML_file "System/isabelle_system.ML"; (*theory documents*) ML_file "Thy/term_style.ML"; ML_file "Isar/outer_syntax.ML"; ML_file 
"ML/ml_antiquotations.ML"; ML_file "Thy/document_antiquotation.ML"; ML_file "Thy/thy_output.ML"; ML_file "Thy/document_antiquotations.ML"; ML_file "General/graph_display.ML"; ML_file "pure_syn.ML"; ML_file "PIDE/command.ML"; ML_file "PIDE/query_operation.ML"; ML_file "PIDE/resources.ML"; ML_file "Thy/thy_info.ML"; ML_file "thm_deps.ML"; ML_file "Thy/export_theory.ML"; ML_file "Thy/sessions.ML"; ML_file "PIDE/session.ML"; ML_file "PIDE/document.ML"; (*theory and proof operations*) ML_file "Isar/isar_cmd.ML"; subsection "Isabelle/Isar system"; ML_file "System/command_line.ML"; ML_file "System/message_channel.ML"; ML_file "System/isabelle_process.ML"; ML_file "System/scala.ML"; ML_file "System/scala_compiler.ML"; ML_file "Thy/bibtex.ML"; ML_file "PIDE/protocol.ML"; ML_file "General/output_primitives_virtual.ML"; subsection "Miscellaneous tools and packages for Pure Isabelle"; ML_file "ML/ml_pp.ML"; ML_file "ML/ml_thms.ML"; ML_file "ML/ml_file.ML"; ML_file "Tools/build.ML"; ML_file "Tools/named_thms.ML"; ML_file "Tools/print_operation.ML"; ML_file "Tools/rail.ML"; ML_file "Tools/rule_insts.ML"; ML_file "Tools/thy_deps.ML"; ML_file "Tools/class_deps.ML"; ML_file "Tools/find_theorems.ML"; ML_file "Tools/find_consts.ML"; ML_file "Tools/simplifier_trace.ML"; ML_file_no_debug "Tools/debugger.ML"; ML_file "Tools/named_theorems.ML"; ML_file "Tools/jedit.ML"; ML_file "Tools/ghc.ML"; ML_file "Tools/generated_files.ML" diff --git a/src/Pure/Thy/html.ML b/src/Pure/Thy/html.ML deleted file mode 100644 --- a/src/Pure/Thy/html.ML +++ /dev/null @@ -1,142 +0,0 @@ -(* Title: Pure/Thy/html.ML - Author: Makarius - -HTML presentation elements. -*) - -signature HTML = -sig - type symbols - val make_symbols: (string * int) list -> symbols - val no_symbols: symbols - val present_span: symbols -> Keyword.keywords -> Command_Span.span -> Output.output - type text = string - val name: symbols -> string -> text - val keyword: symbols -> string -> text - val command: symbols -> string -> text - val href_name: string -> string -> string - val href_path: Url.T -> string -> string - val begin_document: symbols -> string -> text - val end_document: text -end; - -structure HTML: HTML = -struct - - -(* common markup *) - -fun span "" = ("", "") - | span class = ("", ""); - -val enclose_span = uncurry enclose o span; - -val hidden = enclose_span Markup.hiddenN; - - -(* symbol output *) - -abstype symbols = Symbols of string Symtab.table -with - -fun make_symbols codes = - let - val mapping = - map (apsnd (fn c => "&#" ^ Value.print_int c ^ ";")) codes @ - [("'", "'"), - ("\<^bsub>", hidden "⇘" ^ ""), - ("\<^esub>", hidden "⇙" ^ ""), - ("\<^bsup>", hidden "⇗" ^ ""), - ("\<^esup>", hidden "⇖" ^ "")]; - in Symbols (fold Symtab.update mapping Symtab.empty) end; - -val no_symbols = make_symbols []; - -fun output_sym (Symbols tab) s = - (case Symtab.lookup tab s of - SOME x => x - | NONE => XML.text s); - -end; - -local - -fun output_markup (bg, en) symbols s1 s2 = - hidden s1 ^ enclose bg en (output_sym symbols s2); - -val output_sub = output_markup ("", ""); -val output_sup = output_markup ("", ""); -val output_bold = output_markup (span "bold"); - -fun output_syms _ [] result = implode (rev result) - | output_syms symbols (s1 :: rest) result = - let - val (s2, ss) = (case rest of [] => ("", []) | s2 :: ss => (s2, ss)); - val (s, r) = - if s1 = "\<^sub>" then (output_sub symbols "⇩" s2, ss) - else if s1 = "\<^sup>" then (output_sup symbols "⇧" s2, ss) - else if s1 = "\<^bold>" then (output_bold symbols "❙" s2, ss) - else 
diff --git a/src/Pure/Thy/html.ML
deleted file mode 100644
--- a/src/Pure/Thy/html.ML
+++ /dev/null
@@ -1,142 +0,0 @@
-(*  Title:      Pure/Thy/html.ML
-    Author:     Makarius
-
-HTML presentation elements.
-*)
-
-signature HTML =
-sig
-  type symbols
-  val make_symbols: (string * int) list -> symbols
-  val no_symbols: symbols
-  val present_span: symbols -> Keyword.keywords -> Command_Span.span -> Output.output
-  type text = string
-  val name: symbols -> string -> text
-  val keyword: symbols -> string -> text
-  val command: symbols -> string -> text
-  val href_name: string -> string -> string
-  val href_path: Url.T -> string -> string
-  val begin_document: symbols -> string -> text
-  val end_document: text
-end;
-
-structure HTML: HTML =
-struct
-
-
-(* common markup *)
-
-fun span "" = ("", "")
-  | span class = ("<span class=" ^ quote class ^ ">", "</span>");
-
-val enclose_span = uncurry enclose o span;
-
-val hidden = enclose_span Markup.hiddenN;
-
-
-(* symbol output *)
-
-abstype symbols = Symbols of string Symtab.table
-with
-
-fun make_symbols codes =
-  let
-    val mapping =
-      map (apsnd (fn c => "&#" ^ Value.print_int c ^ ";")) codes @
-      [("'", "&#39;"),
-       ("\<^bsub>", hidden "&#8664;" ^ "<sub>"),
-       ("\<^esub>", hidden "&#8665;" ^ "</sub>"),
-       ("\<^bsup>", hidden "&#8663;" ^ "<sup>"),
-       ("\<^esup>", hidden "&#8662;" ^ "</sup>")];
-  in Symbols (fold Symtab.update mapping Symtab.empty) end;
-
-val no_symbols = make_symbols [];
-
-fun output_sym (Symbols tab) s =
-  (case Symtab.lookup tab s of
-    SOME x => x
-  | NONE => XML.text s);
-
-end;
-
-local
-
-fun output_markup (bg, en) symbols s1 s2 =
-  hidden s1 ^ enclose bg en (output_sym symbols s2);
-
-val output_sub = output_markup ("<sub>", "</sub>");
-val output_sup = output_markup ("<sup>", "</sup>");
-val output_bold = output_markup (span "bold");
-
-fun output_syms _ [] result = implode (rev result)
-  | output_syms symbols (s1 :: rest) result =
-      let
-        val (s2, ss) = (case rest of [] => ("", []) | s2 :: ss => (s2, ss));
-        val (s, r) =
-          if s1 = "\<^sub>" then (output_sub symbols "&#8681;" s2, ss)
-          else if s1 = "\<^sup>" then (output_sup symbols "&#8679;" s2, ss)
-          else if s1 = "\<^bold>" then (output_bold symbols "&#10073;" s2, ss)
-          else (output_sym symbols s1, rest);
-      in output_syms symbols r (s :: result) end;
-
-in
-
-fun output symbols str = output_syms symbols (Symbol.explode str) [];
-
-end;
-
-
-(* presentation *)
-
-fun present_span symbols keywords =
-  let
-    fun present_markup (name, props) =
-      (case Properties.get props Markup.kindN of
-        SOME kind =>
-          if kind = Markup.commandN orelse kind = Markup.keywordN then enclose_span kind else I
-      | NONE => I) #> enclose_span name;
-    fun present_token tok =
-      fold_rev present_markup (Token.markups keywords tok)
-        (output symbols (Token.unparse tok));
-  in implode o map present_token o Command_Span.content end;
-
-
-
-(** HTML markup **)
-
-type text = string;
-
-
-(* atoms *)
-
-val name = enclose "<span class=\"name\">" "</span>" oo output;
-val keyword = enclose "<span class=\"keyword\">" "</span>" oo output;
-val command = enclose "<span class=\"command\">" "</span>" oo output;
-
-
-(* misc *)
-
-fun href_name s txt = "<a href=" ^ quote s ^ ">" ^ txt ^ "</a>";
-fun href_path path txt = href_name (Url.implode path) txt;
-
-
-(* document *)
-
-fun begin_document symbols title =
-  XML.header ^
-  "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n" ^
-  "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n" ^
-  "<head>\n" ^
-  "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n" ^
-  "<title>" ^ output symbols (title ^ " (" ^ Distribution.version ^ ")") ^ "</title>\n" ^
-  "<link media=\"all\" rel=\"stylesheet\" type=\"text/css\" href=\"isabelle.css\"/>\n" ^
-  "</head>\n" ^
-  "\n" ^
-  "<body>\n" ^
-  "<div class=\"head\">" ^
-  "<h1>" ^ output symbols title ^ "</h1>\n";
-
-val end_document = "\n</div>\n</body>\n</html>\n";
-
-end;
\n\n\n"; - -end; diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala --- a/src/Pure/Thy/presentation.scala +++ b/src/Pure/Thy/presentation.scala @@ -1,589 +1,649 @@ /* Title: Pure/Thy/present.scala Author: Makarius Theory presentation: HTML and LaTeX documents. */ package isabelle import scala.collection.immutable.SortedMap object Presentation { /* document variants */ object Document_Variant { def parse(name: String, tags: String): Document_Variant = Document_Variant(name, Library.space_explode(',', tags)) def parse(opt: String): Document_Variant = Library.space_explode('=', opt) match { case List(name) => Document_Variant(name, Nil) case List(name, tags) => parse(name, tags) case _ => error("Malformed document variant: " + quote(opt)) } } sealed case class Document_Variant(name: String, tags: List[String], sources: String = "") { def print_tags: String = tags.mkString(",") def print: String = if (tags.isEmpty) name else name + "=" + print_tags def path: Path = Path.basic(name) def latex_sty: String = Library.terminate_lines( tags.map(tag => tag.toList match { case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}" case '-' :: cs => "\\isadroptag{" + cs.mkString + "}" case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}" case cs => "\\isakeeptag{" + cs.mkString + "}" })) def set_sources(s: String): Document_Variant = copy(sources = s) } /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val tags = SQL.Column.string("tags") val sources = SQL.Column.string("sources") val pdf = SQL.Column.bytes("pdf") val table = SQL.Table("isabelle_documents", List(session_name, name, tags, sources, pdf)) def where_equal(session_name: String, name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_document_variants(db: SQL.Database, session_name: String): List[Document_Variant] = { val select = Data.table.select(List(Data.name, Data.tags, Data.sources), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => { val name = res.string(Data.name) val tags = res.string(Data.tags) val sources = res.string(Data.sources) Document_Variant.parse(name, tags).set_sources(sources) }).toList) } def read_document(db: SQL.Database, session_name: String, name: String) : Option[(Document_Variant, Bytes)] = { val select = Data.table.select(sql = Data.where_equal(session_name, name)) db.using_statement(select)(stmt => { val res = stmt.execute_query() if (res.next()) { val name = res.string(Data.name) val tags = res.string(Data.tags) val sources = res.string(Data.sources) val pdf = res.bytes(Data.pdf) Some(Document_Variant.parse(name, tags).set_sources(sources) -> pdf) } else None }) } def write_document(db: SQL.Database, session_name: String, doc: Document_Variant, pdf: Bytes) { db.using_statement(Data.table.insert())(stmt => { stmt.string(1) = session_name stmt.string(2) = doc.name stmt.string(3) = doc.print_tags stmt.string(4) = doc.sources stmt.bytes(5) = pdf stmt.execute() }) } /* presentation context */ object Context { val none: Context = new Context { def enabled: Boolean = false } val standard: Context = new Context { def enabled: Boolean = true } def dir(path: Path): Context = new Context { def enabled: Boolean = true override def dir(store: Sessions.Store): Path = path } def make(s: String): Context = if (s == ":") standard else 
dir(Path.explode(s)) } abstract class Context private { def enabled: Boolean def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info def dir(store: Sessions.Store): Path = store.presentation_dir def dir(store: Sessions.Store, info: Sessions.Info): Path = dir(store) + Path.basic(info.chapter) + Path.basic(info.name) } /* maintain chapter index -- NOT thread-safe */ private val sessions_path = Path.basic(".sessions") private def read_sessions(dir: Path): List[(String, String)] = { val path = dir + sessions_path if (path.is_file) { import XML.Decode._ list(pair(string, string))(Symbol.decode_yxml(File.read(path))) } else Nil } private def write_sessions(dir: Path, sessions: List[(String, String)]) { import XML.Encode._ File.write(dir + sessions_path, YXML.string_of_body(list(pair(string, string))(sessions))) } def update_chapter_index(browser_info: Path, chapter: String, new_sessions: List[(String, String)]) { val dir = Isabelle_System.make_directory(browser_info + Path.basic(chapter)) val sessions0 = try { read_sessions(dir) } catch { case _: XML.Error => Nil } val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList write_sessions(dir, sessions) val title = "Isabelle/" + chapter + " sessions" HTML.write_document(dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), HTML.chapter(title) :: (if (sessions.isEmpty) Nil else List(HTML.div("sessions", List(HTML.description( sessions.map({ case (name, description) => val descr = Symbol.trim_blank_lines(description) (List(HTML.link(name + "/index.html", HTML.text(name))), if (descr == "") Nil else HTML.break ::: List(HTML.pre(HTML.text(descr)))) }))))))) } def make_global_index(browser_info: Path) { if (!(browser_info + Path.explode("index.html")).is_file) { Isabelle_System.make_directory(browser_info) File.copy(Path.explode("~~/lib/logo/isabelle.gif"), browser_info + Path.explode("isabelle.gif")) File.write(browser_info + Path.explode("index.html"), File.read(Path.explode("~~/lib/html/library_index_header.template")) + File.read(Path.explode("~~/lib/html/library_index_content.template")) + File.read(Path.explode("~~/lib/html/library_index_footer.template"))) } } /* present session */ val session_graph_path = Path.explode("session_graph.pdf") val readme_path = Path.basic("README.html") def html_name(name: Document.Node.Name): String = name.theory_base_name + ".html" def document_html_name(name: Document.Node.Name): String = "document/" + html_name(name) - def theory_link(name: Document.Node.Name): XML.Tree = - HTML.link(html_name(name), HTML.text(name.theory_base_name)) + def token_markups(keywords: Keyword.Keywords, tok: Token): List[String] = { + if (keywords.is_command(tok, Keyword.theory_end)) + List(Markup.KEYWORD2, Markup.KEYWORD) + else if (keywords.is_command(tok, Keyword.proof_asm)) + List(Markup.KEYWORD3, Markup.COMMAND) + else if (keywords.is_command(tok, Keyword.improper)) + List(Markup.KEYWORD1, Markup.IMPROPER, Markup.COMMAND) + else if (tok.is_command) List(Markup.KEYWORD1, Markup.COMMAND) + else if (tok.is_delimiter) List(Markup.DELIMITER, Markup.KEYWORD) + else if (tok.is_keyword) List(Markup.KEYWORD2, Markup.KEYWORD) + else if (tok.is_comment) List(Markup.COMMENT) + else { + tok.kind match { + case Token.Kind.VAR => List(Markup.VAR) + case Token.Kind.TYPE_IDENT => List(Markup.TFREE) + case Token.Kind.TYPE_VAR => List(Markup.TVAR) + case Token.Kind.STRING => List(Markup.STRING) + case Token.Kind.ALT_STRING => List(Markup.ALT_STRING) + case Token.Kind.VERBATIM => 
List(Markup.VERBATIM) + case Token.Kind.CARTOUCHE => List(Markup.CARTOUCHE) + case _ => Nil + } + } + } def session_html( session: String, deps: Sessions.Deps, store: Sessions.Store, presentation: Context): Path = { val info = deps.sessions_structure(session) val options = info.options val base = deps(session) val session_dir = presentation.dir(store, info) val session_fonts = Isabelle_System.make_directory(session_dir + Path.explode("fonts")) for (entry <- Isabelle_Fonts.fonts(hidden = true)) File.copy(entry.path, session_fonts) Bytes.write(session_dir + session_graph_path, graphview.Graph_File.make_pdf(options, base.session_graph_display)) val links = { val deps_link = HTML.link(session_graph_path, HTML.text("theory dependencies")) val readme_links = if ((info.dir + readme_path).is_file) { File.copy(info.dir + readme_path, session_dir + readme_path) List(HTML.link(readme_path, HTML.text("README"))) } else Nil val document_links = for (doc <- info.documents) yield HTML.link(doc.path.pdf, HTML.text(doc.name)) Library.separate(HTML.break ::: HTML.nl, (deps_link :: readme_links ::: document_links). map(link => HTML.text("View ") ::: List(link))).flatten } + def theory_link(name1: Document.Node.Name, body: XML.Body): XML.Tree = + { + val session1 = base.theory_qualifier(name1) + val info1 = deps.sessions_structure(session1) + val prefix = + if (session == session1) "" + else if (info.chapter == info1.chapter) "../" + session1 + "/" + else "../../" + info1.chapter + "/" + session1 + "/" + HTML.link(prefix + html_name(name1), body) + } + val theories = - using(Export.open_database_context(deps.sessions_structure, store))(context => - for { - name <- base.session_theories - entry <- context.try_entry(session, name.theory, document_html_name(name)) - } yield name -> entry.uncompressed(cache = store.xz_cache)) + for (name <- base.session_theories) + yield { + val syntax = base.theory_syntax(name) + val keywords = syntax.keywords + val spans = syntax.parse_spans(Symbol.decode(File.read(name.path))) - val theories_body = - HTML.itemize(for ((name, _) <- theories) yield List(theory_link(name))) + val thy_body = + { + val imports_offset = base.known_theories(name.theory).header.imports_offset + var token_offset = 1 + spans.flatMap(span => + { + val is_init = span.is_kind(keywords, Keyword.theory_begin, false) + span.content.flatMap(tok => + { + val text = HTML.text(tok.source) + val item = + if (is_init && imports_offset.isDefinedAt(token_offset)) { + List(theory_link(imports_offset(token_offset), text)) + } + else text + + token_offset += tok.symbol_length + + (token_markups(keywords, tok) :\ item)( + { case (c, body) => List(HTML.span(c, body)) }) + }) + }) + } + + val title = "Theory " + name.theory_base_name + HTML.write_document(session_dir, html_name(name), + List(HTML.title(title)), + HTML.div("head", List(HTML.chapter(title))) :: List(HTML.pre("source", thy_body))) + + List(HTML.link(html_name(name), HTML.text(name.theory_base_name))) + } val title = "Session " + session HTML.write_document(session_dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), HTML.div("head", List(HTML.chapter(title), HTML.par(links))) :: (if (theories.isEmpty) Nil - else List(HTML.div("theories", List(HTML.section("Theories"), theories_body))))) - - for ((name, html) <- theories) Bytes.write(session_dir + Path.basic(html_name(name)), html) + else List(HTML.div("theories", List(HTML.section("Theories"), HTML.itemize(theories)))))) session_dir } /** preview **/ sealed case class Preview(title: 
String, content: String) def preview( resources: Resources, snapshot: Document.Snapshot, plain_text: Boolean = false, fonts_url: String => String = HTML.fonts_url()): Preview = { require(!snapshot.is_outdated) def output_document(title: String, body: XML.Body): String = HTML.output_document( List( HTML.style(HTML.fonts_css(fonts_url) + "\n\n" + File.read(HTML.isabelle_css)), HTML.title(title)), List(HTML.source(body)), css = "", structural = false) val name = snapshot.node_name if (plain_text) { val title = "File " + quote(name.path.file_name) val content = output_document(title, HTML.text(snapshot.node.source)) Preview(title, content) } else { resources.make_preview(snapshot) match { case Some(preview) => preview case None => val title = if (name.is_theory) "Theory " + quote(name.theory_base_name) else "File " + quote(name.path.file_name) val content = output_document(title, pide_document(snapshot)) Preview(title, content) } } } /* PIDE document */ private val document_elements = Rendering.foreground_elements ++ Rendering.text_color_elements ++ Rendering.markdown_elements + Markup.NUMERAL + Markup.COMMENT + Markup.LANGUAGE private val div_elements = Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name, HTML.descr.name) private def html_div(html: XML.Body): Boolean = html exists { case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body) case XML.Text(_) => false } private def html_class(c: String, html: XML.Body): XML.Tree = if (html.forall(_ == HTML.no_text)) HTML.no_text else if (html_div(html)) HTML.div(c, html) else HTML.span(c, html) private def make_html(xml: XML.Body): XML.Body = xml map { case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(Markup.Language.DOCUMENT)), body) => html_class(Markup.Language.DOCUMENT, make_html(body)) case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => HTML.par(make_html(body)) case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => HTML.item(make_html(body)) case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), _) => HTML.no_text case XML.Elem(Markup.Markdown_List(kind), body) => if (kind == Markup.ENUMERATE) HTML.enum(make_html(body)) else HTML.list(make_html(body)) case XML.Elem(markup, body) => val name = markup.name val html = markup.properties match { case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD => List(html_class(kind, make_html(body))) case _ => make_html(body) } Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match { case Some(c) => html_class(c.toString, html) case None => html_class(name, html) } case XML.Text(text) => XML.Text(Symbol.decode(text)) } def pide_document(snapshot: Document.Snapshot): XML.Body = make_html(snapshot.markup_to_XML(Text.Range.full, document_elements)) /** build documents **/ val session_tex_path = Path.explode("session.tex") def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" def document_tex_name(name: Document.Node.Name): String = "document/" + tex_name(name) def build_documents( session: String, deps: Sessions.Deps, store: Sessions.Store, progress: Progress = new Progress, verbose: Boolean = false, verbose_latex: Boolean = false): List[(Document_Variant, Bytes)] = { /* session info */ val info = deps.sessions_structure(session) val options = info.options val base = deps(session) val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display) /* prepare document directory */ lazy val tex_files = using(Export.open_database_context(deps.sessions_structure, 
store))(context => for (name <- base.session_theories ::: base.document_theories) yield { val entry = context.entry(session, name.theory, document_tex_name(name)) Path.basic(tex_name(name)) -> entry.uncompressed(cache = store.xz_cache) } ) def prepare_dir1(dir: Path, doc: Document_Variant): (Path, String) = { val doc_dir = dir + Path.basic(doc.name) Isabelle_System.make_directory(doc_dir) Isabelle_System.bash("isabelle latex -o sty", cwd = doc_dir.file).check File.write(doc_dir + Path.explode("isabelletags.sty"), doc.latex_sty) for ((base_dir, src) <- info.document_files) File.copy_base(info.dir + base_dir, src, doc_dir) File.write(doc_dir + session_tex_path, Library.terminate_lines( base.session_theories.map(name => "\\input{" + tex_name(name) + "}"))) for ((path, tex) <- tex_files) Bytes.write(doc_dir + path, tex) val root1 = "root_" + doc.name val root = if ((doc_dir + Path.explode(root1).tex).is_file) root1 else "root" (doc_dir, root) } def prepare_dir2(dir: Path, doc: Document_Variant): Unit = { val doc_dir = dir + Path.basic(doc.name) // non-deterministic, but irrelevant Bytes.write(doc_dir + session_graph_path, graph_pdf) } /* produce documents */ val doc_output = info.document_output val documents = for (doc <- info.documents) yield { Isabelle_System.with_tmp_dir("document")(tmp_dir => { progress.echo_document("Building document " + session + "/" + doc.name + " ...") val start = Time.now() // prepare sources val (doc_dir, root) = prepare_dir1(tmp_dir, doc) val digests = File.find_files(doc_dir.file, follow_links = true).map(SHA1.digest) val sources = SHA1.digest_set(digests).toString prepare_dir2(tmp_dir, doc) doc_output.foreach(prepare_dir1(_, doc)) doc_output.foreach(prepare_dir2(_, doc)) // old document from database val old_document = using(store.open_database(session))(db => for { document@(doc, pdf) <- read_document(db, session, doc.name) if doc.sources == sources } yield { Bytes.write(doc_dir + doc.path.pdf, pdf) document }) old_document getOrElse { // bash scripts def root_bash(ext: String): String = Bash.string(root + "." + ext) def latex_bash(fmt: String = "pdf", ext: String = "tex"): String = "isabelle latex -o " + Bash.string(fmt) + " " + Bash.string(root + "." + ext) def bash(items: String*): Process_Result = progress.bash(items.mkString(" && "), cwd = doc_dir.file, echo = verbose_latex, watchdog = Time.seconds(0.5)) // prepare document val result = if ((doc_dir + Path.explode("build")).is_file) { bash("./build pdf " + Bash.string(doc.name)) } else { bash( latex_bash(), "{ [ ! -f " + root_bash("bib") + " ] || " + latex_bash("bbl") + "; }", "{ [ ! 
-f " + root_bash("idx") + " ] || " + latex_bash("idx") + "; }", latex_bash(), latex_bash()) } // result val root_pdf = Path.basic(root).pdf val result_path = doc_dir + root_pdf if (!result.ok) { cat_error( Library.trim_line(result.err), cat_lines(Latex.latex_errors(doc_dir, root) ::: Bibtex.bibtex_errors(doc_dir, root)), "Failed to build document " + quote(doc.name)) } else if (!result_path.is_file) { error("Bad document result: expected to find " + root_pdf) } else { val stop = Time.now() val timing = stop - start progress.echo_document("Finished document " + session + "/" + doc.name + " (" + timing.message_hms + " elapsed time)") doc.set_sources(sources) -> Bytes.read(result_path) } } }) } for (dir <- doc_output; (doc, pdf) <- documents) { val path = dir + doc.path.pdf Bytes.write(path, pdf) progress.echo_document("Document at " + path.absolute) } documents } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", args => { var verbose_latex = false var dirs: List[Path] = Nil var options = Options.init() var verbose_build = false val getopts = Getopts( """ Usage: isabelle document [OPTIONS] SESSION Options are: -O set option "document_output", relative to current directory -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. """, "O:" -> (arg => options += ("document_output=" + Path.explode(arg).absolute.implode)), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) if (deps.sessions_structure(session).document_output.isEmpty) { progress.echo_warning("No document_output") } build_documents(session, deps, store, progress = progress, verbose = true, verbose_latex = verbose_latex) } }) } diff --git a/src/Pure/Thy/sessions.scala b/src/Pure/Thy/sessions.scala --- a/src/Pure/Thy/sessions.scala +++ b/src/Pure/Thy/sessions.scala @@ -1,1425 +1,1428 @@ /* Title: Pure/Thy/sessions.scala Author: Makarius Cumulative session information. 
*/ package isabelle import java.io.{File => JFile} import java.nio.ByteBuffer import java.nio.channels.FileChannel import java.nio.file.StandardOpenOption import scala.collection.{SortedSet, SortedMap} import scala.collection.mutable object Sessions { /* session and theory names */ val root_name: String = "ROOT" val theory_name: String = "Pure.Sessions" val UNSORTED = "Unsorted" val DRAFT = "Draft" def is_pure(name: String): Boolean = name == Thy_Header.PURE def exclude_session(name: String): Boolean = name == "" || name == DRAFT def exclude_theory(name: String): Boolean = name == root_name || name == "README" || name == "index" || name == "bib" /* base info and source dependencies */ object Base { def bootstrap( session_directories: Map[JFile, String], global_theories: Map[String, String]): Base = Base( session_directories = session_directories, global_theories = global_theories, overall_syntax = Thy_Header.bootstrap_syntax) } sealed case class Base( pos: Position.T = Position.none, doc_names: List[String] = Nil, session_directories: Map[JFile, String] = Map.empty, global_theories: Map[String, String] = Map.empty, session_theories: List[Document.Node.Name] = Nil, document_theories: List[Document.Node.Name] = Nil, loaded_theories: Graph[String, Outer_Syntax] = Graph.string, used_theories: List[(Document.Node.Name, Options)] = Nil, known_theories: Map[String, Document.Node.Entry] = Map.empty, known_loaded_files: Map[String, List[Path]] = Map.empty, overall_syntax: Outer_Syntax = Outer_Syntax.empty, imported_sources: List[(Path, SHA1.Digest)] = Nil, sources: List[(Path, SHA1.Digest)] = Nil, session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph, errors: List[String] = Nil) { override def toString: String = "Sessions.Base(loaded_theories = " + loaded_theories.size + ", used_theories = " + used_theories.length + ")" def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory) def loaded_theory(name: String): Boolean = loaded_theories.defined(name) def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory) def loaded_theory_syntax(name: String): Option[Outer_Syntax] = if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] = loaded_theory_syntax(name.theory) + def theory_syntax(name: Document.Node.Name): Outer_Syntax = + loaded_theory_syntax(name) getOrElse overall_syntax + def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax = nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax } sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base]) { override def toString: String = "Sessions.Deps(" + sessions_structure + ")" def is_empty: Boolean = session_bases.isEmpty def apply(name: String): Base = session_bases(name) def get(name: String): Option[Base] = session_bases.get(name) def imported_sources(name: String): List[SHA1.Digest] = session_bases(name).imported_sources.map(_._2) def sources(name: String): List[SHA1.Digest] = session_bases(name).sources.map(_._2) def errors: List[String] = (for { (name, base) <- session_bases.iterator if base.errors.nonEmpty } yield cat_lines(base.errors) + "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos) ).toList def check_errors: Deps = errors match { case Nil => this case errs => 
error(cat_lines(errs)) } } def deps(sessions_structure: Structure, progress: Progress = new Progress, inlined_files: Boolean = false, verbose: Boolean = false, list_files: Boolean = false, check_keywords: Set[String] = Set.empty): Deps = { var cache_sources = Map.empty[JFile, SHA1.Digest] def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = { for { path <- paths file = path.file if cache_sources.isDefinedAt(file) || file.isFile } yield { cache_sources.get(file) match { case Some(digest) => (path, digest) case None => val digest = SHA1.digest(file) cache_sources = cache_sources + (file -> digest) (path, digest) } } } val doc_names = Doc.doc_names() val bootstrap = Sessions.Base.bootstrap( sessions_structure.session_directories, sessions_structure.global_theories) val session_bases = (Map("" -> bootstrap) /: sessions_structure.imports_topological_order)({ case (session_bases, session_name) => progress.expose_interrupt() val info = sessions_structure(session_name) try { val deps_base = info.deps_base(session_bases) val resources = new Resources(sessions_structure, deps_base) if (verbose || list_files) { val groups = if (info.groups.isEmpty) "" else info.groups.mkString(" (", " ", ")") progress.echo("Session " + info.chapter + "/" + info.name + groups) } val dependencies = resources.session_dependencies(info) val overall_syntax = dependencies.overall_syntax val session_theories = dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name) val theory_files = dependencies.theories.map(_.path) val (loaded_files, loaded_files_errors) = try { if (inlined_files) (dependencies.loaded_files(Sessions.is_pure(info.name)), Nil) else (Nil, Nil) } catch { case ERROR(msg) => (Nil, List(msg)) } val session_files = (theory_files ::: loaded_files.flatMap(_._2) ::: info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand) val imported_files = if (inlined_files) dependencies.imported_files else Nil if (list_files) progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _))) if (check_keywords.nonEmpty) { Check_Keywords.check_keywords( progress, overall_syntax.keywords, check_keywords, theory_files) } val session_graph_display: Graph_Display.Graph = { def session_node(name: String): Graph_Display.Node = Graph_Display.Node("[" + name + "]", "session." + name) def node(name: Document.Node.Name): Graph_Display.Node = { val qualifier = deps_base.theory_qualifier(name) if (qualifier == info.name) Graph_Display.Node(name.theory_base_name, "theory." 
+ name.theory) else session_node(qualifier) } val required_sessions = dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory)) .map(theory => deps_base.theory_qualifier(theory)) .filter(name => name != info.name && sessions_structure.defined(name)) val required_subgraph = sessions_structure.imports_graph .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet) .transitive_closure .restrict(required_sessions.toSet) .transitive_reduction_acyclic val graph0 = (Graph_Display.empty_graph /: required_subgraph.topological_order)( { case (g, session) => val a = session_node(session) val bs = required_subgraph.imm_preds(session).toList.map(session_node) ((g /: (a :: bs))(_.default_node(_, Nil)) /: bs)(_.add_edge(_, a)) }) (graph0 /: dependencies.entries)( { case (g, entry) => val a = node(entry.name) val bs = entry.header.imports.map(node).filterNot(_ == a) ((g /: (a :: bs))(_.default_node(_, Nil)) /: bs)(_.add_edge(_, a)) }) } val known_theories = (deps_base.known_theories /: dependencies.entries.iterator.map(entry => entry.name.theory -> entry))(_ + _) val known_loaded_files = deps_base.known_loaded_files ++ loaded_files val import_errors = { val known_sessions = sessions_structure.imports_requirements(List(session_name)).toSet for { name <- dependencies.theories qualifier = deps_base.theory_qualifier(name) if !known_sessions(qualifier) } yield "Bad import of theory " + quote(name.toString) + ": need to include sessions " + quote(qualifier) + " in ROOT" } val document_errors = info.document_theories.flatMap( { case (thy, pos) => val parent_sessions = if (sessions_structure.build_graph.defined(session_name)) { sessions_structure.build_requirements(List(session_name)) } else Nil def err(msg: String): Option[String] = Some(msg + " " + quote(thy) + Position.here(pos)) known_theories.get(thy).map(_.name) match { case None => err("Unknown document theory") case Some(name) => val qualifier = deps_base.theory_qualifier(name) if (session_theories.contains(name)) { err("Redundant document theory from this session:") } else if (parent_sessions.contains(qualifier)) None else if (dependencies.theories.contains(name)) None else err("Document theory from other session not imported properly:") } }) val document_theories = info.document_theories.map({ case (thy, _) => known_theories(thy).name }) val dir_errors = { val ok = info.dirs.map(_.canonical_file).toSet val bad = (for { name <- session_theories.iterator path = name.master_dir_path if !ok(path.canonical_file) path1 = File.relative_path(info.dir.canonical, path).getOrElse(path) } yield (path1, name)).toList val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted val errs1 = for { (path1, name) <- bad } yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString) val errs2 = if (bad_dirs.isEmpty) Nil else List("Implicit use of session directories: " + commas(bad_dirs)) val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p val errs4 = (for { name <- session_theories.iterator name1 <- resources.find_theory_node(name.theory) if name.node != name1.node } yield "Incoherent theory file import:\n " + name.path + " vs. 
\n " + name1.path) .toList errs1 ::: errs2 ::: errs3 ::: errs4 } val sources_errors = for (p <- session_files if !p.is_file) yield "No such file: " + p val path_errors = try { Path.check_case_insensitive(session_files ::: imported_files); Nil } catch { case ERROR(msg) => List(msg) } val bibtex_errors = try { info.bibtex_entries; Nil } catch { case ERROR(msg) => List(msg) } val base = Base( pos = info.pos, doc_names = doc_names, session_directories = sessions_structure.session_directories, global_theories = sessions_structure.global_theories, session_theories = session_theories, document_theories = document_theories, loaded_theories = dependencies.loaded_theories, used_theories = dependencies.theories_adjunct, known_theories = known_theories, known_loaded_files = known_loaded_files, overall_syntax = overall_syntax, imported_sources = check_sources(imported_files), sources = check_sources(session_files), session_graph_display = session_graph_display, errors = dependencies.errors ::: loaded_files_errors ::: import_errors ::: document_errors ::: dir_errors ::: sources_errors ::: path_errors ::: bibtex_errors) session_bases + (info.name -> base) } catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred in session " + quote(info.name) + Position.here(info.pos)) } }) Deps(sessions_structure, session_bases) } /* base info */ sealed case class Base_Info( session: String, sessions_structure: Structure, errors: List[String], base: Base, infos: List[Info]) { def check: Base_Info = if (errors.isEmpty) this else error(cat_lines(errors)) } def base_info(options: Options, session: String, progress: Progress = new Progress, dirs: List[Path] = Nil, include_sessions: List[String] = Nil, session_ancestor: Option[String] = None, session_requirements: Boolean = false): Base_Info = { val full_sessions = load_structure(options, dirs = dirs) val selected_sessions = full_sessions.selection(Selection(sessions = session :: session_ancestor.toList)) val info = selected_sessions(session) val ancestor = session_ancestor orElse info.parent val (session1, infos1) = if (session_requirements && ancestor.isDefined) { val deps = Sessions.deps(selected_sessions, progress = progress) val base = deps(session) val ancestor_loaded = deps.get(ancestor.get) match { case Some(ancestor_base) if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) => ancestor_base.loaded_theories.defined _ case _ => error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session)) } val required_theories = for { thy <- base.loaded_theories.keys if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session } yield thy if (required_theories.isEmpty) (ancestor.get, Nil) else { val other_name = info.name + "_requirements(" + ancestor.get + ")" Isabelle_System.isabelle_tmp_prefix() (other_name, List( make_info(info.options, dir_selected = false, dir = Path.explode("$ISABELLE_TMP_PREFIX"), chapter = info.chapter, Session_Entry( pos = info.pos, name = other_name, groups = info.groups, path = ".", parent = ancestor, description = "Required theory imports from other sessions", options = Nil, imports = info.deps, directories = Nil, theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))), document_theories = Nil, document_files = Nil, export_files = Nil)))) } } else (session, Nil) val full_sessions1 = if (infos1.isEmpty) full_sessions else load_structure(options, dirs = dirs, infos = infos1) val selected_sessions1 = full_sessions1.selection(Selection(sessions = session1 :: 
session :: include_sessions)) val deps1 = Sessions.deps(selected_sessions1, progress = progress) Base_Info(session1, full_sessions1, deps1.errors, deps1(session1), infos1) } /* cumulative session info */ sealed case class Info( name: String, chapter: String, dir_selected: Boolean, pos: Position.T, groups: List[String], dir: Path, parent: Option[String], description: String, directories: List[Path], options: Options, imports: List[String], theories: List[(Options, List[(String, Position.T)])], global_theories: List[String], document_theories: List[(String, Position.T)], document_files: List[(Path, Path)], export_files: List[(Path, Int, List[String])], meta_digest: SHA1.Digest) { def deps: List[String] = parent.toList ::: imports def deps_base(session_bases: String => Base): Base = { val parent_base = session_bases(parent.getOrElse("")) val imports_bases = imports.map(session_bases) parent_base.copy( known_theories = (parent_base.known_theories /: (for { base <- imports_bases.iterator (_, entry) <- base.known_theories.iterator } yield (entry.name.theory -> entry)))(_ + _), known_loaded_files = (parent_base.known_loaded_files /: imports_bases.iterator.map(_.known_loaded_files))(_ ++ _)) } def dirs: List[Path] = dir :: directories def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale")) def document_enabled: Boolean = options.string("document") match { case "" | "false" => false case "pdf" => true case doc => error("Bad document specification " + quote(doc)) } def documents_variants: List[Presentation.Document_Variant] = { val variants = Library.space_explode(':', options.string("document_variants")). map(Presentation.Document_Variant.parse) val dups = Library.duplicates(variants.map(_.name)) if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups)) variants } def documents: List[Presentation.Document_Variant] = { val variants = documents_variants if (!document_enabled || document_files.isEmpty) Nil else variants } def document_output: Option[Path] = options.string("document_output") match { case "" => None case s => Some(dir + Path.explode(s)) } def browser_info: Boolean = options.bool("browser_info") lazy val bibtex_entries: List[Text.Info[String]] = (for { (document_dir, file) <- document_files.iterator if Bibtex.is_bibtex(file.file_name) info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator } yield info).toList def record_proofs: Boolean = options.int("record_proofs") >= 2 def is_afp: Boolean = chapter == AFP.chapter def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains) } def make_info(options: Options, dir_selected: Boolean, dir: Path, chapter: String, entry: Session_Entry): Info = { try { val name = entry.name if (exclude_session(name)) error("Bad session name") if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session") if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session") val session_path = dir + Path.explode(entry.path) val directories = entry.directories.map(dir => session_path + Path.explode(dir)) val session_options = options ++ entry.options val theories = entry.theories.map({ case (opts, thys) => (session_options ++ opts, thys.map({ case ((thy, pos), _) => if (exclude_theory(thy)) error("Bad theory name " + quote(thy) + Position.here(pos)) else (thy, pos) })) }) val global_theories = for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global } yield { val thy_name = Path.explode(thy).file_name if (Long_Name.is_qualified(thy_name)) 
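/* Editorial note: global theory names must be unqualified base names, e.g.
   "Main" is acceptable, while a dotted long name such as "HOL.Main" is
   detected by Long_Name.is_qualified and rejected below. */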
error("Bad qualified name for global theory " + quote(thy_name) + Position.here(pos)) else thy_name } val conditions = theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted. map(x => (x, Isabelle_System.getenv(x) != "")) val document_files = entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) }) val export_files = entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) }) val meta_digest = SHA1.digest( (name, chapter, entry.parent, entry.directories, entry.options, entry.imports, entry.theories_no_position, conditions, entry.document_theories_no_position, entry.document_files) .toString) Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path, entry.parent, entry.description, directories, session_options, entry.imports, theories, global_theories, entry.document_theories, document_files, export_files, meta_digest) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session entry " + quote(entry.name) + Position.here(entry.pos)) } } object Selection { val empty: Selection = Selection() val all: Selection = Selection(all_sessions = true) def session(session: String): Selection = Selection(sessions = List(session)) } sealed case class Selection( requirements: Boolean = false, all_sessions: Boolean = false, base_sessions: List[String] = Nil, exclude_session_groups: List[String] = Nil, exclude_sessions: List[String] = Nil, session_groups: List[String] = Nil, sessions: List[String] = Nil) { def ++ (other: Selection): Selection = Selection( requirements = requirements || other.requirements, all_sessions = all_sessions || other.all_sessions, base_sessions = Library.merge(base_sessions, other.base_sessions), exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups), exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions), session_groups = Library.merge(session_groups, other.session_groups), sessions = Library.merge(sessions, other.sessions)) } def make(infos: List[Info]): Structure = { def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Traversable[String]) : Graph[String, Info] = { def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = { if (!g.defined(parent)) error("Bad " + kind + " session " + quote(parent) + " for " + quote(name) + Position.here(pos)) try { g.add_edge_acyclic(parent, name) } catch { case exn: Graph.Cycles[_] => error(cat_lines(exn.cycles.map(cycle => "Cyclic session dependency of " + cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos)) } } (graph /: graph.iterator) { case (g, (name, (info, _))) => (g /: edges(info))(add_edge(info.pos, name, _, _)) } } val info_graph = (Graph.string[Info] /: infos) { case (graph, info) => if (graph.defined(info.name)) error("Duplicate session " + quote(info.name) + Position.here(info.pos) + Position.here(graph.get_node(info.name).pos)) else graph.new_node(info.name, info) } val build_graph = add_edges(info_graph, "parent", _.parent) val imports_graph = add_edges(build_graph, "imports", _.imports) val session_positions: List[(String, Position.T)] = (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList val session_directories: Map[JFile, String] = (Map.empty[JFile, String] /: (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) dir <- info.dirs.iterator } yield (info, dir)))({ case (dirs, (info, dir)) => val session = 
info.name val canonical_dir = dir.canonical_file dirs.get(canonical_dir) match { case Some(session1) => val info1 = info_graph.get_node(session1) error("Duplicate use of directory " + dir + "\n for session " + quote(session1) + Position.here(info1.pos) + "\n vs. session " + quote(session) + Position.here(info.pos)) case None => dirs + (canonical_dir -> session) } }) val global_theories: Map[String, String] = (Thy_Header.bootstrap_global_theories.toMap /: (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) thy <- info.global_theories.iterator } yield (info, thy)))({ case (global, (info, thy)) => val qualifier = info.name global.get(thy) match { case Some(qualifier1) if qualifier != qualifier1 => error("Duplicate global theory " + quote(thy) + Position.here(info.pos)) case _ => global + (thy -> qualifier) } }) new Structure( session_positions, session_directories, global_theories, build_graph, imports_graph) } final class Structure private[Sessions]( val session_positions: List[(String, Position.T)], val session_directories: Map[JFile, String], val global_theories: Map[String, String], val build_graph: Graph[String, Info], val imports_graph: Graph[String, Info]) { sessions_structure => def dest_session_directories: List[(String, String)] = for ((file, session) <- session_directories.toList) yield (File.standard_path(file), session) lazy val chapters: SortedMap[String, List[Info]] = (SortedMap.empty[String, List[Info]] /: build_graph.iterator)( { case (chs, (_, (info, _))) => chs + (info.chapter -> (info :: chs.getOrElse(info.chapter, Nil))) }) def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph) def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph) def defined(name: String): Boolean = imports_graph.defined(name) def apply(name: String): Info = imports_graph.get_node(name) def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def check_sessions(names: List[String]) { val bad_sessions = SortedSet(names.filterNot(defined): _*).toList if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions)) } def check_sessions(sel: Selection): Unit = check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions) private def selected(graph: Graph[String, Info], sel: Selection): List[String] = { check_sessions(sel) val select_group = sel.session_groups.toSet val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions) val selected0 = if (sel.all_sessions) graph.keys else { (for { (name, (info, _)) <- graph.iterator if info.dir_selected || select_session(name) || graph.get_node(name).groups.exists(select_group) } yield name).toList } if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList else selected0 } def selection(sel: Selection): Structure = { check_sessions(sel) val excluded = { val exclude_group = sel.exclude_session_groups.toSet val exclude_group_sessions = (for { (name, (info, _)) <- imports_graph.iterator if imports_graph.get_node(name).groups.exists(exclude_group) } yield name).toList imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet } def restrict(graph: Graph[String, Info]): Graph[String, Info] = { val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded) graph.restrict(graph.all_preds(sessions).toSet) } new Structure( 
session_positions, session_directories, global_theories, restrict(build_graph), restrict(imports_graph)) } def selection(session: String): Structure = selection(Selection.session(session)) def selection_deps( selection: Selection, progress: Progress = new Progress, loading_sessions: Boolean = false, inlined_files: Boolean = false, verbose: Boolean = false): Deps = { val deps = Sessions.deps(sessions_structure.selection(selection), progress = progress, inlined_files = inlined_files, verbose = verbose) if (loading_sessions) { val selection_size = deps.sessions_structure.build_graph.size if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...") } deps } def build_selection(sel: Selection): List[String] = selected(build_graph, sel) def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss) def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss) def build_topological_order: List[String] = build_graph.topological_order def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel) def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss) def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss) def imports_topological_order: List[String] = imports_graph.topological_order def bibtex_entries: List[(String, List[String])] = build_topological_order.flatMap(name => apply(name).bibtex_entries match { case Nil => None case entries => Some(name -> entries.map(_.info)) }) def session_chapters: List[(String, String)] = imports_topological_order.map(name => name -> apply(name).chapter) override def toString: String = imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")") } /* parser */ val ROOT: Path = Path.explode("ROOT") val ROOTS: Path = Path.explode("ROOTS") private val CHAPTER = "chapter" private val SESSION = "session" private val IN = "in" private val DESCRIPTION = "description" private val DIRECTORIES = "directories" private val OPTIONS = "options" private val SESSIONS = "sessions" private val THEORIES = "theories" private val GLOBAL = "global" private val DOCUMENT_THEORIES = "document_theories" private val DOCUMENT_FILES = "document_files" private val EXPORT_FILES = "export_files" val root_syntax: Outer_Syntax = Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN + (CHAPTER, Keyword.THY_DECL) + (SESSION, Keyword.THY_DECL) + (DESCRIPTION, Keyword.QUASI_COMMAND) + (DIRECTORIES, Keyword.QUASI_COMMAND) + (OPTIONS, Keyword.QUASI_COMMAND) + (SESSIONS, Keyword.QUASI_COMMAND) + (THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_FILES, Keyword.QUASI_COMMAND) + (EXPORT_FILES, Keyword.QUASI_COMMAND) abstract class Entry sealed case class Chapter(name: String) extends Entry sealed case class Session_Entry( pos: Position.T, name: String, groups: List[String], path: String, parent: Option[String], description: String, options: List[Options.Spec], imports: List[String], directories: List[String], theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])], document_theories: List[(String, Position.T)], document_files: List[(String, String)], export_files: List[(String, Int, List[String])]) extends Entry { def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] = theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) }) def document_theories_no_position: List[String] = document_theories.map(_._1) } private object Parser 
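/* Editorial sketch (illustrative, not from the original changeset): a ROOT
   entry in the concrete syntax accepted by this parser, with hypothetical
   session, theory and file names:

     chapter Examples

     session Demo (main) in "demo" = HOL +
       description "An illustrative session entry"
       options [document = pdf, document_output = "output"]
       sessions "HOL-Library"
       directories "src"
       theories [quick_and_dirty = false]
         Demo_Theory (global)
       document_theories Demo_Docs
       document_files (in "document") "root.tex"
       export_files (in "export") [2] "*:**"
*/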
extends Options.Parser { private val chapter: Parser[Chapter] = { val chapter_name = atom("chapter name", _.is_name) command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) } } private val session_entry: Parser[Session_Entry] = { val option = option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) } val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]") val theory_entry = position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) } val theories = $$$(THEORIES) ~! ((options | success(Nil)) ~ rep1(theory_entry)) ^^ { case _ ~ (x ~ y) => (x, y) } val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x } val document_theories = $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x } val document_files = $$$(DOCUMENT_FILES) ~! ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) } val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0) val export_files = $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ prune ~ rep1(embedded)) ^^ { case _ ~ (x ~ y ~ z) => (x, y, z) } command(SESSION) ~! (position(session_name) ~ (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~ ($$$("=") ~! (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~ (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~ rep(theories) ~ (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~ (rep(document_files) ^^ (x => x.flatten)) ~ rep(export_files)))) ^^ { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l))) => Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l) } } def parse_root(path: Path): List[Entry] = { val toks = Token.explode(root_syntax.keywords, File.read(path)) val start = Token.Pos.file(path.implode) parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match { case Success(result, _) => result case bad => error(bad.toString) } } } def parse_root(path: Path): List[Entry] = Parser.parse_root(path) def parse_root_entries(path: Path): List[Session_Entry] = for (entry <- Parser.parse_root(path) if entry.isInstanceOf[Session_Entry]) yield entry.asInstanceOf[Session_Entry] def read_root(options: Options, select: Boolean, path: Path): List[Info] = { var entry_chapter = UNSORTED val infos = new mutable.ListBuffer[Info] parse_root(path).foreach { case Chapter(name) => entry_chapter = name case entry: Session_Entry => infos += make_info(options, select, path.dir, entry_chapter, entry) } infos.toList } def parse_roots(roots: Path): List[String] = { for { line <- split_lines(File.read(roots)) if !(line == "" || line.startsWith("#")) } yield line } /* load sessions from certain directories */ private def is_session_dir(dir: Path): Boolean = (dir + ROOT).is_file || (dir + ROOTS).is_file private def check_session_dir(dir: Path): Path = if (is_session_dir(dir)) File.pwd() + dir.expand else error("Bad session root directory: " + dir.toString) def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = { val default_dirs = Isabelle_System.components().filter(is_session_dir) for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) } yield (select, 
dir.canonical) } def load_structure(options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Info] = Nil): Structure = { def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] = load_root(select, dir) ::: load_roots(select, dir) def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] = { val root = dir + ROOT if (root.is_file) List((select, root)) else Nil } def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] = { val roots = dir + ROOTS if (roots.is_file) { for { entry <- parse_roots(roots) dir1 = try { check_session_dir(dir + Path.explode(entry)) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString) } res <- load_dir(select, dir1) } yield res } else Nil } val roots = for { (select, dir) <- directories(dirs, select_dirs) res <- load_dir(select, check_session_dir(dir)) } yield res val unique_roots = ((Map.empty[JFile, (Boolean, Path)] /: roots) { case (m, (select, path)) => val file = path.canonical_file m.get(file) match { case None => m + (file -> (select, path)) case Some((select1, path1)) => m + (file -> (select1 || select, path1)) } }).toList.map(_._2) make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions", args => { var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var requirements = false var exclude_session_groups: List[String] = Nil var all_sessions = false var dirs: List[Path] = Nil var session_groups: List[String] = Nil var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle sessions [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -R refer to requirements of selected sessions -X NAME exclude sessions from group NAME and all descendants -a select all sessions -d DIR include session directory -g NAME select session group NAME -x NAME exclude session NAME and all descendants Explore the structure of Isabelle sessions and print result names in topological order (on stdout). 
""", "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "R" -> (_ => requirements = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val options = Options.init() val selection = Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions) val sessions_structure = load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection) for (name <- sessions_structure.imports_topological_order) { Output.writeln(name, stdout = true) } }) /** heap file with SHA1 digest **/ private val sha1_prefix = "SHA1:" def read_heap_digest(heap: Path): Option[String] = { if (heap.is_file) { using(FileChannel.open(heap.file.toPath, StandardOpenOption.READ))(file => { val len = file.size val n = sha1_prefix.length + SHA1.digest_length if (len >= n) { file.position(len - n) val buf = ByteBuffer.allocate(n) var i = 0 var m = 0 do { m = file.read(buf) if (m != -1) i += m } while (m != -1 && n > i) if (i == n) { val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset) val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset) if (prefix == sha1_prefix) Some(s) else None } else None } else None }) } else None } def write_heap_digest(heap: Path): String = read_heap_digest(heap) match { case None => val s = SHA1.digest(heap).rep File.append(heap, sha1_prefix + s) s case Some(s) => s } /** persistent store **/ object Session_Info { val session_name = SQL.Column.string("session_name").make_primary_key // Build_Log.Session_Info val session_timing = SQL.Column.bytes("session_timing") val command_timings = SQL.Column.bytes("command_timings") val theory_timings = SQL.Column.bytes("theory_timings") val ml_statistics = SQL.Column.bytes("ml_statistics") val task_statistics = SQL.Column.bytes("task_statistics") val errors = SQL.Column.bytes("errors") val build_log_columns = List(session_name, session_timing, command_timings, theory_timings, ml_statistics, task_statistics, errors) // Build.Session_Info val sources = SQL.Column.string("sources") val input_heaps = SQL.Column.string("input_heaps") val output_heap = SQL.Column.string("output_heap") val return_code = SQL.Column.int("return_code") val build_columns = List(sources, input_heaps, output_heap, return_code) val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns) } def store(options: Options): Store = new Store(options) class Store private[Sessions](val options: Options) { store => override def toString: String = "Store(output_dir = " + output_dir.absolute + ")" val xml_cache: XML.Cache = XML.make_cache() val xz_cache: XZ.Cache = XZ.make_cache() /* directories */ val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER") val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER") def system_heaps: Boolean = options.bool("system_heaps") val output_dir: Path = if (system_heaps) system_output_dir else user_output_dir val input_dirs: List[Path] = if (system_heaps) List(system_output_dir) else 
List(user_output_dir, system_output_dir) def presentation_dir: Path = if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM") else Path.explode("$ISABELLE_BROWSER_INFO") /* file names */ def heap(name: String): Path = Path.basic(name) def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db") def log(name: String): Path = Path.basic("log") + Path.basic(name) def log_gz(name: String): Path = log(name).ext("gz") def output_heap(name: String): Path = output_dir + heap(name) def output_database(name: String): Path = output_dir + database(name) def output_log(name: String): Path = output_dir + log(name) def output_log_gz(name: String): Path = output_dir + log_gz(name) def prepare_output_dir() { Isabelle_System.make_directory(output_dir + Path.basic("log")) } /* heap */ def find_heap(name: String): Option[Path] = input_dirs.map(_ + heap(name)).find(_.is_file) def find_heap_digest(name: String): Option[String] = find_heap(name).flatMap(read_heap_digest) def the_heap(name: String): Path = find_heap(name) getOrElse error("Missing heap image for session " + quote(name) + " -- expected in:\n" + cat_lines(input_dirs.map(dir => " " + dir.expand.implode))) /* database */ def database_server: Boolean = options.bool("build_database_server") def open_database_server(): SQL.Database = PostgreSQL.open_database( user = options.string("build_database_user"), password = options.string("build_database_password"), database = options.string("build_database_name"), host = options.string("build_database_host"), port = options.int("build_database_port"), ssh = options.proper_string("build_database_ssh_host").map(ssh_host => SSH.open_session(options, host = ssh_host, user = options.string("build_database_ssh_user"), port = options.int("build_database_ssh_port"))), ssh_close = true) def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] = { def check(db: SQL.Database): Option[SQL.Database] = if (output || session_info_exists(db)) Some(db) else { db.close; None } if (database_server) check(open_database_server()) else if (output) Some(SQLite.open_database(output_database(name))) else { (for { dir <- input_dirs.view path = dir + database(name) if path.is_file db <- check(SQLite.open_database(path)) } yield db).headOption } } def open_database(name: String, output: Boolean = false): SQL.Database = try_open_database(name, output = output) getOrElse error("Missing build database for session " + quote(name)) def clean_output(name: String): (Boolean, Boolean) = { val relevant_db = database_server && { try_open_database(name) match { case Some(db) => try { db.transaction { val relevant_db = session_info_defined(db, name) init_session_info(db, name) relevant_db } } finally { db.close } case None => false } } val del = for { dir <- (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir)) file <- List(heap(name), database(name), log(name), log_gz(name)) path = dir + file if path.is_file } yield path.file.delete val relevant = relevant_db || del.nonEmpty val ok = del.forall(b => b) (relevant, ok) } /* SQL database content */ def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes = db.using_statement(Session_Info.table.select(List(column), Session_Info.session_name.where_equal(name)))(stmt => { val res = stmt.execute_query() if (!res.next()) Bytes.empty else res.bytes(column) }) def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] = Properties.uncompress( read_bytes(db, name, column), 
cache = xz_cache, xml_cache = Some(xml_cache)) /* session info */ def init_session_info(db: SQL.Database, name: String) { db.transaction { db.create_table(Session_Info.table) db.using_statement( Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute) db.create_table(Export.Data.table) db.using_statement( Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute) db.create_table(Presentation.Data.table) db.using_statement( Presentation.Data.table.delete( Presentation.Data.session_name.where_equal(name)))(_.execute) } } def session_info_exists(db: SQL.Database): Boolean = { val tables = db.tables tables.contains(Session_Info.table.name) && tables.contains(Export.Data.table.name) } def session_info_defined(db: SQL.Database, name: String): Boolean = db.transaction { session_info_exists(db) && { db.using_statement( Session_Info.table.select(List(Session_Info.session_name), Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next()) } } def write_session_info( db: SQL.Database, name: String, build_log: Build_Log.Session_Info, build: Build.Session_Info) { db.transaction { db.using_statement(Session_Info.table.insert())(stmt => { stmt.string(1) = name stmt.bytes(2) = Properties.encode(build_log.session_timing) stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = xz_cache) stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = xz_cache) stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = xz_cache) stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = xz_cache) stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = xz_cache) stmt.string(8) = build.sources stmt.string(9) = cat_lines(build.input_heaps) stmt.string(10) = build.output_heap getOrElse "" stmt.int(11) = build.return_code stmt.execute() }) } } def read_session_timing(db: SQL.Database, name: String): Properties.T = Properties.decode(read_bytes(db, name, Session_Info.session_timing), Some(xml_cache)) def read_command_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.command_timings) def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.theory_timings) def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.ml_statistics) def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.task_statistics) def read_errors(db: SQL.Database, name: String): List[String] = Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = xz_cache) def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] = { if (db.tables.contains(Session_Info.table.name)) { db.using_statement(Session_Info.table.select(Session_Info.build_columns, Session_Info.session_name.where_equal(name)))(stmt => { val res = stmt.execute_query() if (!res.next()) None else { Some( Build.Session_Info( res.string(Session_Info.sources), split_lines(res.string(Session_Info.input_heaps)), res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) }, res.int(Session_Info.return_code))) } }) } else None } } } diff --git a/src/Pure/Thy/thy_info.ML b/src/Pure/Thy/thy_info.ML --- a/src/Pure/Thy/thy_info.ML +++ b/src/Pure/Thy/thy_info.ML @@ -1,522 +1,470 @@ (* Title: Pure/Thy/thy_info.ML Author: Makarius Global theory info database, with auto-loading according to theory 
and file dependencies, and presentation of theory documents. *) signature THY_INFO = sig type presentation_context = {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T, segments: Thy_Output.segment list} val adjust_pos_properties: presentation_context -> Position.T -> Properties.T val apply_presentation: presentation_context -> theory -> unit val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory val get_names: unit -> string list val lookup_theory: string -> theory option val get_theory: string -> theory val master_directory: string -> Path.T val remove_thy: string -> unit val use_theories: Options.T -> string -> (string * Position.T) list -> unit val use_thy: string -> unit val script_thy: Position.T -> string -> theory -> theory val register_thy: theory -> unit val finish: unit -> unit end; structure Thy_Info: THY_INFO = struct (** theory presentation **) (* artefact names *) fun document_name ext thy = Path.explode_binding0 ("document/" ^ Context.theory_name thy ^ "." ^ ext); -val document_html_name = document_name "html"; val document_tex_name = document_name "tex"; -fun html_name thy = Path.basic (Context.theory_name thy ^ ".html"); - - -(* theory as HTML *) - -local - -fun get_session_chapter thy = - let - val session = Resources.theory_qualifier (Context.theory_long_name thy); - val chapter = Resources.session_chapter session; - in (session, chapter) end; - -fun theory_link thy thy' = - let - val ((session, chapter), (session', chapter')) = apply2 get_session_chapter (thy, thy'); - val link = html_name thy'; - in - if session = session' then link - else if chapter = chapter' then Path.parent + Path.basic session' + link - else Path.parent + Path.parent + Path.basic chapter' + Path.basic session' + link - end; - -fun theory_document symbols A Bs body = - HTML.begin_document symbols ("Theory " ^ A) ^ "\n" ^ - HTML.command symbols "theory" ^ " " ^ HTML.name symbols A ^ "
\n" ^ - (if null Bs then "" - else - HTML.keyword symbols "imports" ^ " " ^ - space_implode " " (map (uncurry HTML.href_path o apsnd (HTML.name symbols)) Bs) ^ "
\n") ^ - "\n\n
\n
" ::
-  body @ ["
\n", HTML.end_document]; - -in - -fun export_html thy spans = - let - val name = Context.theory_name thy; - val parents = - Theory.parents_of thy |> map (fn thy' => - (Url.File (theory_link thy thy'), Context.theory_name thy')); - - val symbols = Resources.html_symbols (); - val keywords = Thy_Header.get_keywords thy; - val body = map (HTML.present_span symbols keywords) spans; - val html = XML.blob (theory_document symbols name parents body); - in Export.export thy (document_html_name thy) html end - -end; - (* hook for consolidated theory *) type presentation_context = {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T, segments: Thy_Output.segment list}; fun adjust_pos_properties (context: presentation_context) pos = Position.offset_properties_of (#adjust_pos context pos) @ Position.id_properties_of pos; structure Presentation = Theory_Data ( type T = ((presentation_context -> theory -> unit) * stamp) list; val empty = []; val extend = I; fun merge data : T = Library.merge (eq_snd op =) data; ); fun apply_presentation (context: presentation_context) thy = ignore (Presentation.get thy |> Par_List.map (fn (f, _) => f context thy)); fun add_presentation f = Presentation.map (cons (f, stamp ())); val _ = Theory.setup (add_presentation (fn {options, file_pos, segments, ...} => fn thy => - (export_html thy (map #span segments); if exists (Toplevel.is_skipped_proof o #state) segments then () else let val body = Thy_Output.present_thy options thy segments; in if Options.string options "document" = "false" then () else let val thy_name = Context.theory_name thy; val document_tex_name = document_tex_name thy; val latex = Latex.isabelle_body thy_name body; val output = [Latex.output_text latex, Latex.output_positions file_pos latex]; in Export.export thy document_tex_name (XML.blob output) end - end))); + end)); (** thy database **) (* messages *) val show_path = space_implode " via " o map quote; fun cycle_msg names = "Cyclic dependency of " ^ show_path names; (* derived graph operations *) fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess)); fun new_entry name parents entry = String_Graph.new_node (name, entry) #> add_deps name parents; (* global thys *) type deps = {master: (Path.T * SHA1.digest), (*master dependencies for thy file*) imports: (string * Position.T) list}; (*source specification of imports (partially qualified)*) fun make_deps master imports : deps = {master = master, imports = imports}; fun master_dir_deps (d: deps option) = the_default Path.current (Option.map (Path.dir o #1 o #master) d); local val global_thys = Synchronized.var "Thy_Info.thys" (String_Graph.empty: (deps option * theory option) String_Graph.T); in fun get_thys () = Synchronized.value global_thys; fun change_thys f = Synchronized.change global_thys f; end; fun get_names () = String_Graph.topological_order (get_thys ()); (* access thy *) fun lookup thys name = try (String_Graph.get_node thys) name; fun lookup_thy name = lookup (get_thys ()) name; fun get thys name = (case lookup thys name of SOME thy => thy | NONE => error ("Theory loader: nothing known about theory " ^ quote name)); fun get_thy name = get (get_thys ()) name; (* access deps *) val lookup_deps = Option.map #1 o lookup_thy; val master_directory = master_dir_deps o #1 o get_thy; (* access theory *) fun lookup_theory name = (case lookup_thy name of SOME (_, SOME theory) => SOME theory | _ => NONE); fun get_theory name 
= (case lookup_theory name of SOME theory => theory | _ => error ("Theory loader: undefined entry for theory " ^ quote name)); val get_imports = Resources.imports_of o get_theory; (** thy operations **) (* remove *) fun remove name thys = (case lookup thys name of NONE => thys | SOME (NONE, _) => error ("Cannot update finished theory " ^ quote name) | SOME _ => let val succs = String_Graph.all_succs thys [name]; val _ = writeln ("Theory loader: removing " ^ commas_quote succs); in fold String_Graph.del_node succs thys end); val remove_thy = change_thys o remove; (* update *) fun update deps theory thys = let val name = Context.theory_long_name theory; val parents = map Context.theory_long_name (Theory.parents_of theory); val thys' = remove name thys; val _ = map (get thys') parents; in new_entry name parents (SOME deps, SOME theory) thys' end; fun update_thy deps theory = change_thys (update deps theory); (* scheduling loader tasks *) datatype result = Result of {theory: theory, exec_id: Document_ID.exec, present: unit -> unit, commit: unit -> unit, weight: int}; fun theory_result theory = Result {theory = theory, exec_id = Document_ID.none, present = I, commit = I, weight = 0}; fun result_theory (Result {theory, ...}) = theory; fun result_present (Result {present, ...}) = present; fun result_commit (Result {commit, ...}) = commit; fun result_ord (Result {weight = i, ...}, Result {weight = j, ...}) = int_ord (j, i); fun join_theory (Result {theory, exec_id, ...}) = let val _ = Execution.join [exec_id]; val res = Exn.capture Thm.consolidate_theory theory; val exns = maps Task_Queue.group_status (Execution.peek exec_id); in res :: map Exn.Exn exns end; datatype task = Task of string list * (theory list -> result) | Finished of theory; fun task_finished (Task _) = false | task_finished (Finished _) = true; fun task_parents deps (parents: string list) = map (the o AList.lookup (op =) deps) parents; val schedule_seq = String_Graph.schedule (fn deps => fn (_, task) => (case task of Task (parents, body) => let val result = body (task_parents deps parents); val _ = Par_Exn.release_all (join_theory result); val _ = result_present result (); val _ = result_commit result (); in result_theory result end | Finished thy => thy)) #> ignore; val schedule_futures = Thread_Attributes.uninterruptible (fn _ => fn tasks => let val futures = tasks |> String_Graph.schedule (fn deps => fn (name, task) => (case task of Task (parents, body) => (singleton o Future.forks) {name = "theory:" ^ name, group = NONE, deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true} (fn () => (case filter (not o can Future.join o #2) deps of [] => body (map (result_theory o Future.join) (task_parents deps parents)) | bad => error ("Failed to load theory " ^ quote name ^ " (unresolved " ^ commas_quote (map #1 bad) ^ ")"))) | Finished theory => Future.value (theory_result theory))); val results1 = futures |> maps (fn future => (case Future.join_result future of Exn.Res result => join_theory result | Exn.Exn exn => [Exn.Exn exn])); val results2 = futures |> map_filter (Exn.get_res o Future.join_result) |> sort result_ord |> Par_List.map (fn result => Exn.capture (result_present result) ()); (* FIXME more precise commit order (!?) *) val results3 = futures |> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ()); (* FIXME avoid global Execution.reset (!??) 
*) val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ())); val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4); in () end); (* eval theory *) fun excursion keywords master_dir init elements = let fun prepare_span st span = Command_Span.content span |> Command.read keywords (Command.read_thy st) master_dir init ([], ~1) |> (fn tr => Toplevel.timing (Resources.last_timing tr) tr); fun element_result span_elem (st, _) = let val elem = Thy_Element.map_element (prepare_span st) span_elem; val (results, st') = Toplevel.element_result keywords elem st; val pos' = Toplevel.pos_of (Thy_Element.last_element elem); in (results, (st', pos')) end; val (results, (end_state, end_pos)) = fold_map element_result elements (Toplevel.init_toplevel (), Position.none); val thy = Toplevel.end_theory end_pos end_state; in (results, thy) end; fun eval_thy options update_time master_dir header text_pos text parents = let val (name, _) = #name header; val keywords = fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents (Keyword.add_keywords (#keywords header) Keyword.empty_keywords); val spans = Outer_Syntax.parse_spans (Token.explode keywords text_pos text); val elements = Thy_Element.parse_elements keywords spans; fun init () = Resources.begin_theory master_dir header parents; val (results, thy) = cond_timeit true ("theory " ^ quote name) (fn () => excursion keywords master_dir init elements); fun present () = let val segments = (spans ~~ maps Toplevel.join_results results) |> map (fn (span, (tr, st')) => {span = span, command = tr, state = st'}); val context: presentation_context = {options = options, file_pos = text_pos, adjust_pos = I, segments = segments}; in apply_presentation context thy end; in (thy, present, size text) end; (* require_thy -- checking database entries wrt. 
the file-system *) local fun required_by _ [] = "" | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")"; fun load_thy options initiators update_time deps text (name, pos) keywords parents = let val _ = remove_thy name; val _ = writeln ("Loading theory " ^ quote name ^ required_by " " initiators); val _ = Output.try_protocol_message (Markup.loading_theory name) []; val {master = (thy_path, _), imports} = deps; val dir = Path.dir thy_path; val header = Thy_Header.make (name, pos) imports keywords; val _ = (imports ~~ parents) |> List.app (fn ((_, pos), thy) => Context_Position.reports_global thy [(pos, Theory.get_markup thy)]); val exec_id = Document_ID.make (); val _ = Execution.running Document_ID.none exec_id [] orelse raise Fail ("Failed to register execution: " ^ Document_ID.print exec_id); val timing_start = Timing.start (); val text_pos = Position.put_id (Document_ID.print exec_id) (Path.position thy_path); val (theory, present, weight) = eval_thy options update_time dir header text_pos text (if name = Context.PureN then [Context.the_global_context ()] else parents); val timing_result = Timing.result timing_start; val timing_props = [Markup.theory_timing, (Markup.nameN, name)]; val _ = Output.try_protocol_message (timing_props @ Markup.timing_properties timing_result) [] fun commit () = update_thy deps theory; in Result {theory = theory, exec_id = exec_id, present = present, commit = commit, weight = weight} end; fun check_thy_deps dir name = (case lookup_deps name of SOME NONE => (true, NONE, Position.none, get_imports name, []) | NONE => let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end | SOME (SOME {master, ...}) => let val {master = master', text = text', theory_pos = theory_pos', imports = imports', keywords = keywords'} = Resources.check_thy dir name; val deps' = SOME (make_deps master' imports', text'); val current = #2 master = #2 master' andalso (case lookup_theory name of NONE => false | SOME theory => Resources.loaded_files_current theory); in (current, deps', theory_pos', imports', keywords') end); in fun require_thys options initiators qualifier dir strs tasks = fold_map (require_thy options initiators qualifier dir) strs tasks |>> forall I and require_thy options initiators qualifier dir (s, require_pos) tasks = let val {master_dir, theory_name, ...} = Resources.import_name qualifier dir s; in (case try (String_Graph.get_node tasks) theory_name of SOME task => (task_finished task, tasks) | NONE => let val _ = member (op =) initiators theory_name andalso error (cycle_msg initiators); val (current, deps, theory_pos, imports, keywords) = check_thy_deps master_dir theory_name handle ERROR msg => cat_error msg ("The error(s) above occurred for theory " ^ quote theory_name ^ Position.here require_pos ^ required_by "\n" initiators); val qualifier' = Resources.theory_qualifier theory_name; val dir' = dir + master_dir_deps (Option.map #1 deps); val parents = map (#theory_name o Resources.import_name qualifier' dir' o #1) imports; val (parents_current, tasks') = require_thys options (theory_name :: initiators) qualifier' dir' imports tasks; val all_current = current andalso parents_current; val task = if all_current then Finished (get_theory theory_name) else (case deps of NONE => raise Fail "Malformed deps" | SOME (dep, text) => let val update_time = serial (); val load = load_thy options initiators update_time dep text 
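(* Editorial note: load_thy is only partially applied at this point -- the
   final parents argument is supplied later, when schedule_seq or
   schedule_futures runs the resulting Task with its parent theories. *)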
(theory_name, theory_pos) keywords; in Task (parents, load) end); val tasks'' = new_entry theory_name parents task tasks'; in (all_current, tasks'') end) end; end; (* use theories *) fun use_theories options qualifier imports = let val (_, tasks) = require_thys options [] qualifier Path.current imports String_Graph.empty in if Multithreading.max_threads () > 1 then schedule_futures tasks else schedule_seq tasks end; fun use_thy name = use_theories (Options.default ()) Resources.default_qualifier [(name, Position.none)]; (* toplevel scripting -- without maintaining database *) fun script_thy pos txt thy = let val trs = Outer_Syntax.parse_text thy (K thy) pos txt; val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs); val end_state = fold (Toplevel.command_exception true) trs (Toplevel.init_toplevel ()); in Toplevel.end_theory end_pos end_state end; (* register theory *) fun register_thy theory = let val name = Context.theory_long_name theory; val {master, ...} = Resources.check_thy (Resources.master_directory theory) name; val imports = Resources.imports_of theory; in change_thys (fn thys => let val thys' = remove name thys; val _ = writeln ("Registering theory " ^ quote name); in update (make_deps master imports) theory thys' end) end; (* finish all theories *) fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry))); end; fun use_thy name = Runtime.toplevel_program (fn () => Thy_Info.use_thy name);
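Editorial appendix (a hypothetical sketch, not part of the changeset): reading
build information back from a finished session via the persistent store in
sessions.scala above; "HOL" is an illustrative session name, and open_database
fails if the session has not been built:

  val options = Options.init()
  val store = Sessions.store(options)
  using(store.open_database("HOL"))(db =>
    store.read_theory_timings(db, "HOL").foreach(props => println(props)))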