diff --git a/src/Pure/Isar/keyword.scala b/src/Pure/Isar/keyword.scala --- a/src/Pure/Isar/keyword.scala +++ b/src/Pure/Isar/keyword.scala @@ -1,229 +1,229 @@ /* Title: Pure/Isar/keyword.scala Author: Makarius Isar keyword classification. */ package isabelle object Keyword { /** keyword classification **/ /* kinds */ val DIAG = "diag" val DOCUMENT_HEADING = "document_heading" val DOCUMENT_BODY = "document_body" val DOCUMENT_RAW = "document_raw" val THY_BEGIN = "thy_begin" val THY_END = "thy_end" val THY_DECL = "thy_decl" val THY_DECL_BLOCK = "thy_decl_block" val THY_DEFN = "thy_defn" val THY_STMT = "thy_stmt" val THY_LOAD = "thy_load" val THY_GOAL = "thy_goal" val THY_GOAL_DEFN = "thy_goal_defn" val THY_GOAL_STMT = "thy_goal_stmt" val QED = "qed" val QED_SCRIPT = "qed_script" val QED_BLOCK = "qed_block" val QED_GLOBAL = "qed_global" val PRF_GOAL = "prf_goal" val PRF_BLOCK = "prf_block" val NEXT_BLOCK = "next_block" val PRF_OPEN = "prf_open" val PRF_CLOSE = "prf_close" val PRF_CHAIN = "prf_chain" val PRF_DECL = "prf_decl" val PRF_ASM = "prf_asm" val PRF_ASM_GOAL = "prf_asm_goal" val PRF_SCRIPT = "prf_script" val PRF_SCRIPT_GOAL = "prf_script_goal" val PRF_SCRIPT_ASM_GOAL = "prf_script_asm_goal" val BEFORE_COMMAND = "before_command" val QUASI_COMMAND = "quasi_command" /* command categories */ val vacuous = Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW) val diag = Set(DIAG) val document_heading = Set(DOCUMENT_HEADING) val document_body = Set(DOCUMENT_BODY) val document_raw = Set(DOCUMENT_RAW) val document = Set(DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW) val theory_begin = Set(THY_BEGIN) val theory_end = Set(THY_END) val theory_load = Set(THY_LOAD) val theory = Set(THY_BEGIN, THY_END, THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_DEFN, THY_STMT, THY_GOAL, THY_GOAL_DEFN, THY_GOAL_STMT) val theory_block = Set(THY_BEGIN, THY_DECL_BLOCK) val theory_body = Set(THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_DEFN, THY_STMT, THY_GOAL, THY_GOAL_DEFN, THY_GOAL_STMT) val theory_defn = Set(THY_DEFN, THY_GOAL_DEFN) val prf_script = Set(PRF_SCRIPT) val proof = Set(QED, QED_SCRIPT, QED_BLOCK, QED_GLOBAL, PRF_GOAL, PRF_BLOCK, NEXT_BLOCK, PRF_OPEN, PRF_CLOSE, PRF_CHAIN, PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_SCRIPT, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) val proof_body = Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW, PRF_BLOCK, NEXT_BLOCK, PRF_OPEN, PRF_CLOSE, PRF_CHAIN, PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_SCRIPT, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) val proof_asm = Set(PRF_ASM, PRF_ASM_GOAL) val improper = Set(QED_SCRIPT, PRF_SCRIPT, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) val theory_goal = Set(THY_GOAL, THY_GOAL_DEFN, THY_GOAL_STMT) val proof_goal = Set(PRF_GOAL, PRF_ASM_GOAL, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL) val qed = Set(QED, QED_SCRIPT, QED_BLOCK) val qed_global = Set(QED_GLOBAL) val proof_open = proof_goal + PRF_OPEN val proof_close = qed + PRF_CLOSE val proof_enclose = Set(PRF_BLOCK, NEXT_BLOCK, QED_BLOCK, PRF_CLOSE) val close_structure = Set(NEXT_BLOCK, QED_BLOCK, PRF_CLOSE, THY_END) /** keyword tables **/ - object Spec + sealed case class Spec( + kind: String = "", + load_command: String = "", + tags: List[String] = Nil) { - val none: Spec = Spec("") - } - sealed case class Spec(kind: String, load_command: String = "", tags: List[String] = Nil) - { - def is_none: Boolean = kind == "" - override def toString: String = kind + (if (load_command.isEmpty) "" else " (" + quote(load_command) + ")") + (if (tags.isEmpty) "" else tags.map(quote).mkString(" % ", " % ", "")) + + def map(f: String => 
String): Spec = + copy(kind = f(kind), load_command = f(load_command), tags = tags.map(f)) } object Keywords { def empty: Keywords = new Keywords() } class Keywords private( val kinds: Map[String, String] = Map.empty, val load_commands: Map[String, String] = Map.empty) { override def toString: String = { val entries = for ((name, kind) <- kinds.toList.sortBy(_._1)) yield { val load_decl = load_commands.get(name) match { case Some(load_command) => " (" + quote(load_command) + ")" case None => "" } val kind_decl = if (kind == "") "" else " :: " + quote(kind) + load_decl quote(name) + kind_decl } entries.mkString("keywords\n ", " and\n ", "") } /* merge */ def is_empty: Boolean = kinds.isEmpty def ++ (other: Keywords): Keywords = if (this eq other) this else if (is_empty) other else { val kinds1 = if (kinds eq other.kinds) kinds else if (kinds.isEmpty) other.kinds else (kinds /: other.kinds) { case (m, e) => if (m.isDefinedAt(e._1)) m else m + e } val load_commands1 = if (load_commands eq other.load_commands) load_commands else if (load_commands.isEmpty) other.load_commands else (load_commands /: other.load_commands) { case (m, e) => if (m.isDefinedAt(e._1)) m else m + e } new Keywords(kinds1, load_commands1) } /* add keywords */ def + (name: String, kind: String = "", load_command: String = ""): Keywords = { val kinds1 = kinds + (name -> kind) val load_commands1 = if (kind == THY_LOAD) { if (!Symbol.iterator(name).forall(Symbol.is_ascii)) error("Bad theory load command " + quote(name)) load_commands + (name -> load_command) } else load_commands new Keywords(kinds1, load_commands1) } def add_keywords(header: Thy_Header.Keywords): Keywords = (this /: header) { case (keywords, (name, spec)) => - if (spec.is_none) + if (spec.kind.isEmpty) keywords + Symbol.decode(name) + Symbol.encode(name) else keywords + (Symbol.decode(name), spec.kind, spec.load_command) + (Symbol.encode(name), spec.kind, spec.load_command) } /* command kind */ def is_command(token: Token, check_kind: String => Boolean): Boolean = token.is_command && (kinds.get(token.source) match { case Some(k) => check_kind(k) case None => false }) def is_before_command(token: Token): Boolean = token.is_keyword && kinds.get(token.source) == Some(BEFORE_COMMAND) def is_quasi_command(token: Token): Boolean = token.is_keyword && kinds.get(token.source) == Some(QUASI_COMMAND) def is_indent_command(token: Token): Boolean = token.is_begin_or_command || is_quasi_command(token) /* lexicons */ private def make_lexicon(is_minor: Boolean): Scan.Lexicon = (Scan.Lexicon.empty /: kinds)( { case (lex, (name, kind)) => if ((kind == "" || kind == BEFORE_COMMAND || kind == QUASI_COMMAND) == is_minor) lex + name else lex }) lazy val minor: Scan.Lexicon = make_lexicon(true) lazy val major: Scan.Lexicon = make_lexicon(false) } } diff --git a/src/Pure/PIDE/protocol.scala b/src/Pure/PIDE/protocol.scala --- a/src/Pure/PIDE/protocol.scala +++ b/src/Pure/PIDE/protocol.scala @@ -1,429 +1,429 @@ /* Title: Pure/PIDE/protocol.scala Author: Makarius Protocol message formats for interactive proof documents. 
*/ package isabelle object Protocol { /* markers for inlined messages */ val Loading_Theory_Marker = Protocol_Message.Marker("loading_theory") val Meta_Info_Marker = Protocol_Message.Marker("meta_info") val Command_Timing_Marker = Protocol_Message.Marker("command_timing") val Theory_Timing_Marker = Protocol_Message.Marker("theory_timing") val Session_Timing_Marker = Protocol_Message.Marker("session_timing") val ML_Statistics_Marker = Protocol_Message.Marker("ML_statistics") val Task_Statistics_Marker = Protocol_Message.Marker("task_statistics") val Error_Message_Marker = Protocol_Message.Marker("error_message") /* batch build */ object Loading_Theory { def unapply(props: Properties.T): Option[(Document.Node.Name, Document_ID.Exec)] = (props, props, props) match { case (Markup.Name(name), Position.File(file), Position.Id(id)) if Path.is_wellformed(file) => val master_dir = Path.explode(file).dir.implode Some((Document.Node.Name(file, master_dir, name), id)) case _ => None } } /* document editing */ object Commands_Accepted { def unapply(text: String): Option[List[Document_ID.Command]] = try { Some(space_explode(',', text).map(Value.Long.parse)) } catch { case ERROR(_) => None } val message: XML.Elem = XML.elem(Markup.STATUS, List(XML.elem(Markup.ACCEPTED))) } object Assign_Update { def unapply(text: String) : Option[(Document_ID.Version, List[String], Document.Assign_Update)] = { try { import XML.Decode._ def decode_upd(body: XML.Body): (Long, List[Long]) = space_explode(',', string(body)).map(Value.Long.parse) match { case a :: bs => (a, bs) case _ => throw new XML.XML_Body(body) } Some(triple(long, list(string), list(decode_upd))(Symbol.decode_yxml(text))) } catch { case ERROR(_) => None case _: XML.Error => None } } } object Removed { def unapply(text: String): Option[List[Document_ID.Version]] = try { import XML.Decode._ Some(list(long)(Symbol.decode_yxml(text))) } catch { case ERROR(_) => None case _: XML.Error => None } } /* command timing */ object Command_Timing { def unapply(props: Properties.T): Option[(Properties.T, Document_ID.Generic, isabelle.Timing)] = props match { case Markup.Command_Timing(args) => (args, args) match { case (Position.Id(id), Markup.Timing_Properties(timing)) => Some((args, id, timing)) case _ => None } case _ => None } } /* theory timing */ object Theory_Timing { def unapply(props: Properties.T): Option[(String, isabelle.Timing)] = props match { case Markup.Theory_Timing(args) => (args, args) match { case (Markup.Name(name), Markup.Timing_Properties(timing)) => Some((name, timing)) case _ => None } case _ => None } } /* result messages */ def is_message(pred: XML.Elem => Boolean, body: XML.Body): Boolean = body match { case List(elem: XML.Elem) => pred(elem) case _ => false } def is_result(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.RESULT, _), _) => true case _ => false } def is_tracing(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.TRACING, _), _) => true case XML.Elem(Markup(Markup.TRACING_MESSAGE, _), _) => true case _ => false } def is_state(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.STATE, _), _) => true case XML.Elem(Markup(Markup.STATE_MESSAGE, _), _) => true case _ => false } def is_information(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.INFORMATION, _), _) => true case XML.Elem(Markup(Markup.INFORMATION_MESSAGE, _), _) => true case _ => false } def is_writeln(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.WRITELN, _), _) => true case 
XML.Elem(Markup(Markup.WRITELN_MESSAGE, _), _) => true case _ => false } def is_warning(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.WARNING, _), _) => true case XML.Elem(Markup(Markup.WARNING_MESSAGE, _), _) => true case _ => false } def is_legacy(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.LEGACY, _), _) => true case XML.Elem(Markup(Markup.LEGACY_MESSAGE, _), _) => true case _ => false } def is_error(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.ERROR, _), _) => true case XML.Elem(Markup(Markup.ERROR_MESSAGE, _), _) => true case _ => false } def is_inlined(msg: XML.Tree): Boolean = !(is_result(msg) || is_tracing(msg) || is_state(msg)) def is_exported(msg: XML.Tree): Boolean = is_writeln(msg) || is_warning(msg) || is_legacy(msg) || is_error(msg) def message_text(body: XML.Body, margin: Double = Pretty.default_margin, breakgain: Double = Pretty.default_breakgain, metric: Pretty.Metric = Pretty.Default_Metric): String = { val text = Pretty.string_of(Protocol_Message.expose_no_reports(body), margin = margin, breakgain = breakgain, metric = metric) if (is_message(is_warning, body) || is_message(is_legacy, body)) Output.warning_prefix(text) else if (is_message(is_error, body)) Output.error_prefix(text) else text } /* export */ object Export { sealed case class Args( id: Option[String] = None, serial: Long = 0L, theory_name: String, name: String, executable: Boolean = false, compress: Boolean = true, strict: Boolean = true) { def compound_name: String = isabelle.Export.compound_name(theory_name, name) } def unapply(props: Properties.T): Option[Args] = props match { case List( (Markup.FUNCTION, Markup.EXPORT), (Markup.ID, id), (Markup.SERIAL, Value.Long(serial)), (Markup.THEORY_NAME, theory_name), (Markup.NAME, name), (Markup.EXECUTABLE, Value.Boolean(executable)), (Markup.COMPRESS, Value.Boolean(compress)), (Markup.STRICT, Value.Boolean(strict))) => Some(Args(proper_string(id), serial, theory_name, name, executable, compress, strict)) case _ => None } } /* breakpoints */ object ML_Breakpoint { def unapply(tree: XML.Tree): Option[Long] = tree match { case XML.Elem(Markup(Markup.ML_BREAKPOINT, Markup.Serial(breakpoint)), _) => Some(breakpoint) case _ => None } } /* dialogs */ object Dialog_Args { def unapply(props: Properties.T): Option[(Document_ID.Generic, Long, String)] = (props, props, props) match { case (Position.Id(id), Markup.Serial(serial), Markup.Result(result)) => Some((id, serial, result)) case _ => None } } object Dialog { def unapply(tree: XML.Tree): Option[(Document_ID.Generic, Long, String)] = tree match { case XML.Elem(Markup(Markup.DIALOG, Dialog_Args(id, serial, result)), _) => Some((id, serial, result)) case _ => None } } object Dialog_Result { def apply(id: Document_ID.Generic, serial: Long, result: String): XML.Elem = { val props = Position.Id(id) ::: Markup.Serial(serial) XML.Elem(Markup(Markup.RESULT, props), List(XML.Text(result))) } def unapply(tree: XML.Tree): Option[String] = tree match { case XML.Elem(Markup(Markup.RESULT, _), List(XML.Text(result))) => Some(result) case _ => None } } } trait Protocol { /* protocol commands */ def protocol_command_raw(name: String, args: List[Bytes]): Unit def protocol_command_args(name: String, args: List[String]) def protocol_command(name: String, args: String*): Unit /* options */ def options(opts: Options): Unit = protocol_command("Prover.options", Symbol.encode_yxml(opts.encode)) /* resources */ def init_session(resources: Resources): Unit = 
protocol_command("Prover.init_session", resources.init_session_yxml) /* interned items */ def define_blob(digest: SHA1.Digest, bytes: Bytes): Unit = protocol_command_raw("Document.define_blob", List(Bytes(digest.toString), bytes)) private def encode_command(command: Command): (String, String, String, String, List[String]) = { import XML.Encode._ val blobs_yxml = { val encode_blob: T[Exn.Result[Command.Blob]] = variant(List( { case Exn.Res(Command.Blob(a, b, c)) => (Nil, triple(string, string, option(string))( (a.node, b.implode, c.map(p => p._1.toString)))) }, { case Exn.Exn(e) => (Nil, string(Exn.message(e))) })) Symbol.encode_yxml(pair(list(encode_blob), int)(command.blobs, command.blobs_index)) } val toks_yxml = { val encode_tok: T[Token] = (tok => pair(int, int)((tok.kind.id, Symbol.length(tok.source)))) Symbol.encode_yxml(list(encode_tok)(command.span.content)) } val toks_sources = command.span.content.map(tok => Symbol.encode(tok.source)) (Document_ID(command.id), Symbol.encode(command.span.name), blobs_yxml, toks_yxml, toks_sources) } def define_command(command: Command) { val (command_id, name, blobs_yxml, toks_yxml, toks_sources) = encode_command(command) protocol_command_args( "Document.define_command", command_id :: name :: blobs_yxml :: toks_yxml :: toks_sources) } def define_commands(commands: List[Command]) { protocol_command_args("Document.define_commands", commands.map(command => { import XML.Encode._ val (command_id, name, blobs_yxml, toks_yxml, toks_sources) = encode_command(command) val body = pair(string, pair(string, pair(string, pair(string, list(string)))))( command_id, (name, (blobs_yxml, (toks_yxml, toks_sources)))) YXML.string_of_body(body) })) } def define_commands_bulk(commands: List[Command]) { val (irregular, regular) = commands.partition(command => YXML.detect(command.source)) irregular.foreach(define_command) regular match { case Nil => case List(command) => define_command(command) case _ => define_commands(regular) } } /* execution */ def discontinue_execution(): Unit = protocol_command("Document.discontinue_execution") def cancel_exec(id: Document_ID.Exec): Unit = protocol_command("Document.cancel_exec", Document_ID(id)) /* document versions */ def update(old_id: Document_ID.Version, new_id: Document_ID.Version, edits: List[Document.Edit_Command], consolidate: List[Document.Node.Name]) { val consolidate_yxml = { import XML.Encode._ Symbol.encode_yxml(list(string)(consolidate.map(_.node))) } val edits_yxml = { import XML.Encode._ def id: T[Command] = (cmd => long(cmd.id)) def encode_edit(name: Document.Node.Name) : T[Document.Node.Edit[Command.Edit, Command.Perspective]] = variant(List( { case Document.Node.Edits(a) => (Nil, list(pair(option(id), option(id)))(a)) }, { case Document.Node.Deps(header) => val master_dir = File.standard_url(name.master_dir) val imports = header.imports.map(_.node) - val keywords = header.keywords.map({ case (a, Keyword.Spec(b, _, c)) => (a, (b, c)) }) + val keywords = header.keywords.map({ case (a, spec) => (a, (spec.kind, spec.tags)) }) (Nil, pair(string, pair(string, pair(list(string), pair(list(pair(string, pair(string, list(string)))), list(string)))))( (master_dir, (name.theory, (imports, (keywords, header.errors)))))) }, { case Document.Node.Perspective(a, b, c) => (bool_atom(a) :: b.commands.map(cmd => long_atom(cmd.id)), list(pair(id, pair(string, list(string))))(c.dest)) })) edits.map({ case (name, edit) => Symbol.encode_yxml(pair(string, encode_edit(name))(name.node, edit)) }) } 
protocol_command_args("Document.update", Document_ID(old_id) :: Document_ID(new_id) :: consolidate_yxml :: edits_yxml) } def remove_versions(versions: List[Document.Version]) { val versions_yxml = { import XML.Encode._ Symbol.encode_yxml(list(long)(versions.map(_.id))) } protocol_command("Document.remove_versions", versions_yxml) } /* dialog via document content */ def dialog_result(serial: Long, result: String): Unit = protocol_command("Document.dialog_result", Value.Long(serial), result) } diff --git a/src/Pure/Thy/thy_header.scala b/src/Pure/Thy/thy_header.scala --- a/src/Pure/Thy/thy_header.scala +++ b/src/Pure/Thy/thy_header.scala @@ -1,243 +1,241 @@ /* Title: Pure/Thy/thy_header.scala Author: Makarius Static theory header information. */ package isabelle import scala.util.parsing.input.Reader import scala.util.matching.Regex object Thy_Header { /* bootstrap keywords */ type Keywords = List[(String, Keyword.Spec)] type Abbrevs = List[(String, String)] val CHAPTER = "chapter" val SECTION = "section" val SUBSECTION = "subsection" val SUBSUBSECTION = "subsubsection" val PARAGRAPH = "paragraph" val SUBPARAGRAPH = "subparagraph" val TEXT = "text" val TXT = "txt" val TEXT_RAW = "text_raw" val THEORY = "theory" val IMPORTS = "imports" val KEYWORDS = "keywords" val ABBREVS = "abbrevs" val AND = "and" val BEGIN = "begin" val bootstrap_header: Keywords = List( - ("%", Keyword.Spec.none), - ("(", Keyword.Spec.none), - (")", Keyword.Spec.none), - (",", Keyword.Spec.none), - ("::", Keyword.Spec.none), - ("=", Keyword.Spec.none), - (AND, Keyword.Spec.none), - (BEGIN, Keyword.Spec(Keyword.QUASI_COMMAND)), - (IMPORTS, Keyword.Spec(Keyword.QUASI_COMMAND)), - (KEYWORDS, Keyword.Spec(Keyword.QUASI_COMMAND)), - (ABBREVS, Keyword.Spec(Keyword.QUASI_COMMAND)), - (CHAPTER, Keyword.Spec(Keyword.DOCUMENT_HEADING)), - (SECTION, Keyword.Spec(Keyword.DOCUMENT_HEADING)), - (SUBSECTION, Keyword.Spec(Keyword.DOCUMENT_HEADING)), - (SUBSUBSECTION, Keyword.Spec(Keyword.DOCUMENT_HEADING)), - (PARAGRAPH, Keyword.Spec(Keyword.DOCUMENT_HEADING)), - (SUBPARAGRAPH, Keyword.Spec(Keyword.DOCUMENT_HEADING)), - (TEXT, Keyword.Spec(Keyword.DOCUMENT_BODY)), - (TXT, Keyword.Spec(Keyword.DOCUMENT_BODY)), - (TEXT_RAW, Keyword.Spec(Keyword.DOCUMENT_RAW)), - (THEORY, Keyword.Spec(Keyword.THY_BEGIN, tags = List("theory"))), - ("ML", Keyword.Spec(Keyword.THY_DECL, tags = List("ML")))) + ("%", Keyword.Spec()), + ("(", Keyword.Spec()), + (")", Keyword.Spec()), + (",", Keyword.Spec()), + ("::", Keyword.Spec()), + ("=", Keyword.Spec()), + (AND, Keyword.Spec()), + (BEGIN, Keyword.Spec(kind = Keyword.QUASI_COMMAND)), + (IMPORTS, Keyword.Spec(kind = Keyword.QUASI_COMMAND)), + (KEYWORDS, Keyword.Spec(kind = Keyword.QUASI_COMMAND)), + (ABBREVS, Keyword.Spec(kind = Keyword.QUASI_COMMAND)), + (CHAPTER, Keyword.Spec(kind = Keyword.DOCUMENT_HEADING)), + (SECTION, Keyword.Spec(kind = Keyword.DOCUMENT_HEADING)), + (SUBSECTION, Keyword.Spec(kind = Keyword.DOCUMENT_HEADING)), + (SUBSUBSECTION, Keyword.Spec(kind = Keyword.DOCUMENT_HEADING)), + (PARAGRAPH, Keyword.Spec(kind = Keyword.DOCUMENT_HEADING)), + (SUBPARAGRAPH, Keyword.Spec(kind = Keyword.DOCUMENT_HEADING)), + (TEXT, Keyword.Spec(kind = Keyword.DOCUMENT_BODY)), + (TXT, Keyword.Spec(kind = Keyword.DOCUMENT_BODY)), + (TEXT_RAW, Keyword.Spec(kind = Keyword.DOCUMENT_RAW)), + (THEORY, Keyword.Spec(kind = Keyword.THY_BEGIN, tags = List("theory"))), + ("ML", Keyword.Spec(kind = Keyword.THY_DECL, tags = List("ML")))) private val bootstrap_keywords = Keyword.Keywords.empty.add_keywords(bootstrap_header) 
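The bootstrap_header hunk above replaces the removed Keyword.Spec companion object (with its Spec.none sentinel and is_none test) by a case class whose fields all default to empty: Keyword.Spec() now stands for "no specification", emptiness is checked via spec.kind.isEmpty, and call sites pass kind, load_command and tags by name. A minimal standalone sketch of that pattern follows; it is not part of the patch, and the SpecDemo object and printed values are illustrative only.

// Standalone sketch (not Isabelle code): all-default fields replace a separate
// "none" sentinel; map applies one function to every string field.
object SpecDemo {
  sealed case class Spec(
    kind: String = "",
    load_command: String = "",
    tags: List[String] = Nil) {
    def map(f: String => String): Spec =
      copy(kind = f(kind), load_command = f(load_command), tags = tags.map(f))
  }

  def main(args: Array[String]): Unit = {
    val none = Spec()                              // plays the role of the old Spec.none
    val thy_begin = Spec(kind = "thy_begin", tags = List("theory"))
    println(none.kind.isEmpty)                     // true -- replaces the old is_none test
    println(thy_begin.map(_.toUpperCase))          // Spec(THY_BEGIN,,List(THEORY))
  }
}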
val bootstrap_syntax: Outer_Syntax = Outer_Syntax.empty.add_keywords(bootstrap_header) /* file name vs. theory name */ val PURE = "Pure" val ML_BOOTSTRAP = "ML_Bootstrap" val ml_roots = List("ROOT0.ML" -> "ML_Root0", "ROOT.ML" -> "ML_Root") val bootstrap_thys = List(PURE, ML_BOOTSTRAP).map(a => a -> ("Bootstrap_" + a)) val bootstrap_global_theories = (Sessions.root_name :: (ml_roots ::: bootstrap_thys).map(_._2)).map(_ -> PURE) private val Thy_File_Name = new Regex(""".*?([^/\\:]+)\.thy""") private val Split_File_Name = new Regex("""(.*?)[/\\]*([^/\\:]+)""") private val File_Name = new Regex(""".*?([^/\\:]+)""") def is_base_name(s: String): Boolean = s != "" && !s.exists("/\\:".contains(_)) def split_file_name(s: String): Option[(String, String)] = s match { case Split_File_Name(s1, s2) => Some((s1, s2)) case _ => None } def import_name(s: String): String = s match { case File_Name(name) if !name.endsWith(".thy") => name case _ => error("Malformed theory import: " + quote(s)) } def theory_name(s: String): String = s match { case Thy_File_Name(name) => bootstrap_name(name) case File_Name(name) => if (name == Sessions.root_name) name else ml_roots.collectFirst({ case (a, b) if a == name => b }).getOrElse("") case _ => "" } def is_ml_root(theory: String): Boolean = ml_roots.exists({ case (_, b) => b == theory }) def is_bootstrap(theory: String): Boolean = bootstrap_thys.exists({ case (_, b) => b == theory }) def bootstrap_name(theory: String): String = bootstrap_thys.collectFirst({ case (a, b) if a == theory => b }).getOrElse(theory) def try_read_dir(dir: Path): List[String] = { val files = try { File.read_dir(dir) } catch { case ERROR(_) => Nil } for { Thy_File_Name(name) <- files } yield name } /* parser */ trait Parser extends Parse.Parser { val header: Parser[Thy_Header] = { def load_command = ($$$("(") ~! (name <~ $$$(")")) ^^ { case _ ~ x => x }) | success("") def load_command_spec(kind: String) = (if (kind == Keyword.THY_LOAD) load_command else success("")) ^^ (x => (kind, x)) val keyword_spec = (atom("outer syntax keyword specification", _.is_name) >> load_command_spec) ~ tags ^^ - { case (x, y) ~ z => Keyword.Spec(x, y, z) } + { case (x, y) ~ z => Keyword.Spec(kind = x, load_command = y, tags = z) } val keyword_decl = rep1(string) ~ opt($$$("::") ~! keyword_spec ^^ { case _ ~ x => x }) ^^ - { case xs ~ y => xs.map((_, y.getOrElse(Keyword.Spec.none))) } + { case xs ~ y => xs.map((_, y.getOrElse(Keyword.Spec()))) } val keyword_decls = keyword_decl ~ rep($$$(AND) ~! keyword_decl ^^ { case _ ~ x => x }) ^^ { case xs ~ yss => (xs :: yss).flatten } val abbrevs = rep1sep(rep1(text) ~ ($$$("=") ~! rep1(text)), $$$("and")) ^^ { case res => for ((as ~ (_ ~ bs)) <- res; a <- as; b <- bs) yield (a, b) } val args = position(theory_name) ~ (opt($$$(IMPORTS) ~! rep1(position(theory_name))) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~ (opt($$$(KEYWORDS) ~! keyword_decls) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~ (opt($$$(ABBREVS) ~! 
abbrevs) ^^
          { case None => Nil case Some(_ ~ xs) => xs }) ~
-       $$$(BEGIN) ^^
-       { case (name, pos) ~ imports ~ keywords ~ abbrevs ~ _ =>
-           val f = Symbol.decode _
-           Thy_Header((f(name), pos),
-             imports.map({ case (a, b) => (f(a), b) }),
-             keywords.map({ case (a, Keyword.Spec(b, c, d)) =>
-               (f(a), Keyword.Spec(f(b), f(c), d.map(f))) }),
-             abbrevs.map({ case (a, b) => (f(a), f(b)) }))
-       }
+       $$$(BEGIN) ^^ { case a ~ b ~ c ~ d ~ _ => Thy_Header(a, b, c, d).map(Symbol.decode) }

      val heading =
        (command(CHAPTER) | command(SECTION) | command(SUBSECTION) | command(SUBSUBSECTION) |
          command(PARAGRAPH) | command(SUBPARAGRAPH) | command(TEXT) | command(TXT) |
          command(TEXT_RAW)) ~
        annotation ~! document_source

      (rep(heading) ~ command(THEORY) ~ annotation) ~! args ^^ { case _ ~ x => x }
    }
  }


  /* read -- lazy scanning */

  private def read_tokens(reader: Reader[Char], strict: Boolean): (List[Token], List[Token]) =
  {
    val token = Token.Parsers.token(bootstrap_keywords)
    def make_tokens(in: Reader[Char]): Stream[Token] =
      token(in) match {
        case Token.Parsers.Success(tok, rest) => tok #:: make_tokens(rest)
        case _ => Stream.empty
      }

    val all_tokens = make_tokens(reader)
    val drop_tokens =
      if (strict) Nil
      else all_tokens.takeWhile(tok => !tok.is_command(Thy_Header.THEORY)).toList

    val tokens = all_tokens.drop(drop_tokens.length)
    val tokens1 = tokens.takeWhile(tok => !tok.is_begin).toList
    val tokens2 = tokens.dropWhile(tok => !tok.is_begin).headOption.toList

    (drop_tokens, tokens1 ::: tokens2)
  }

  private object Parser extends Parser
  {
    def parse_header(tokens: List[Token], pos: Token.Pos): Thy_Header =
      parse(commit(header), Token.reader(tokens, pos)) match {
        case Success(result, _) => result
        case bad => error(bad.toString)
      }
  }

  def read(reader: Reader[Char], start: Token.Pos, strict: Boolean = true): Thy_Header =
  {
    val (_, tokens0) = read_tokens(reader, true)
    val text = Scan.reader_decode_utf8(reader, Token.implode(tokens0))
    val (drop_tokens, tokens) = read_tokens(Scan.char_reader(text), strict)

    val pos = (start /: drop_tokens)(_.advance(_))
    Parser.parse_header(tokens, pos)
  }
}


sealed case class Thy_Header(
  name_pos: (String, Position.T),
  imports_pos: List[(String, Position.T)],
  keywords: Thy_Header.Keywords,
  abbrevs: Thy_Header.Abbrevs)
{
  def name: String = name_pos._1
  def pos: Position.T = name_pos._2
  def imports: List[String] = imports_pos.map(_._1)
+
+  def map(f: String => String): Thy_Header =
+    Thy_Header((f(name), pos),
+      imports_pos.map({ case (a, b) => (f(a), b) }),
+      keywords.map({ case (a, spec) => (f(a), spec.map(f)) }),
+      abbrevs.map({ case (a, b) => (f(a), f(b)) }))
}
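The new Thy_Header.map (together with Keyword.Spec.map from the first file) lets the header parser build the result once and decode all symbols in a single pass, which is what the simplified $$$(BEGIN) action Thy_Header(a, b, c, d).map(Symbol.decode) relies on. Below is a minimal standalone sketch of that map-everything combinator on a cut-down header type; HeaderMapDemo, Header and the sample data are hypothetical, not the Isabelle API.

// Standalone sketch (simplified, not the Isabelle API): one String => String
// function is pushed through every string field of the header, mirroring
// Thy_Header(a, b, c, d).map(Symbol.decode) in the parser above.
object HeaderMapDemo {
  case class Spec(kind: String = "", tags: List[String] = Nil) {
    def map(f: String => String): Spec = copy(kind = f(kind), tags = tags.map(f))
  }
  case class Header(name: String, imports: List[String], keywords: List[(String, Spec)]) {
    def map(f: String => String): Header =
      Header(f(name), imports.map(f),
        keywords.map({ case (a, spec) => (f(a), spec.map(f)) }))
  }

  def main(args: Array[String]): Unit = {
    val header =
      Header("scratch", List("Main"), List(("ML", Spec(kind = "thy_decl", tags = List("ML")))))
    // Decoding (here: toUpperCase as a stand-in) happens in one traversal of the structure.
    println(header.map(_.toUpperCase))
  }
}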