diff --git a/src/Pure/Isar/outer_syntax.scala b/src/Pure/Isar/outer_syntax.scala
--- a/src/Pure/Isar/outer_syntax.scala
+++ b/src/Pure/Isar/outer_syntax.scala
@@ -1,214 +1,210 @@
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle

import scala.collection.mutable


object Outer_Syntax
{
  /* syntax */

  val empty: Outer_Syntax = new Outer_Syntax()

  def merge(syns: List[Outer_Syntax]): Outer_Syntax = (empty /: syns)(_ ++ _)


  /* string literals */

  def quote_string(str: String): String =
  {
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= c.asInstanceOf[Int].toString
        }
        else result += c
      }
      else result ++= s
    }
    result += '"'
    result.toString
  }
}

final class Outer_Syntax private(
  val keywords: Keyword.Keywords = Keyword.Keywords.empty,
  val rev_abbrevs: Thy_Header.Abbrevs = Nil,
  val language_context: Completion.Language_Context = Completion.Language_Context.outer,
  val has_tokens: Boolean = true)
{
  /** syntax content **/

  override def toString: String = keywords.toString


  /* keywords */

  def + (name: String, kind: String = "", load_command: String = ""): Outer_Syntax =
    new Outer_Syntax(
      keywords + (name, kind, load_command), rev_abbrevs, language_context, has_tokens = true)

  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
    (this /: keywords) {
      case (syntax, (name, spec)) =>
        syntax +
          (Symbol.decode(name), spec.kind, spec.load_command) +
          (Symbol.encode(name), spec.kind, spec.load_command)
    }


  /* abbrevs */

  def abbrevs: Thy_Header.Abbrevs = rev_abbrevs.reverse

  def add_abbrevs(new_abbrevs: Thy_Header.Abbrevs): Outer_Syntax =
    if (new_abbrevs.isEmpty) this
    else {
      val rev_abbrevs1 = Library.distinct(new_abbrevs) reverse_::: rev_abbrevs
      new Outer_Syntax(keywords, rev_abbrevs1, language_context, has_tokens)
    }


  /* completion */

  private lazy val completion: Completion =
  {
    val completion_keywords = (keywords.minor.iterator ++ keywords.major.iterator).toList
    val completion_abbrevs =
      completion_keywords.flatMap(name =>
        (if (Keyword.theory_block.contains(keywords.kinds(name)))
          List((name, name + "\nbegin\n\u0007\nend"))
         else Nil) :::
        (if (Completion.Word_Parsers.is_word(name)) List((name, name)) else Nil)) :::
      abbrevs.flatMap(
        { case (a, b) =>
            val a1 = Symbol.decode(a)
            val a2 = Symbol.encode(a)
            val b1 = Symbol.decode(b)
            List((a1, b1), (a2, b1)) })
    Completion.make(completion_keywords, completion_abbrevs)
  }

  def complete(
    history: Completion.History,
    unicode: Boolean,
    explicit: Boolean,
    start: Text.Offset,
    text: CharSequence,
    caret: Int,
    context: Completion.Language_Context): Option[Completion.Result] =
  {
    completion.complete(history, unicode, explicit, start, text, caret, context)
  }


  /* build */

  def + (header: Document.Node.Header): Outer_Syntax =
    add_keywords(header.keywords).add_abbrevs(header.abbrevs)

  def ++ (other: Outer_Syntax): Outer_Syntax =
    if (this eq other) this
    else if (this eq Outer_Syntax.empty) other
    else {
      val keywords1 = keywords ++ other.keywords
      val rev_abbrevs1 = Library.merge(rev_abbrevs, other.rev_abbrevs)
      if ((keywords eq keywords1) && (rev_abbrevs eq rev_abbrevs1)) this
      else new Outer_Syntax(keywords1, rev_abbrevs1, language_context, has_tokens)
    }


  /* load commands */

  def load_command(name: String): Option[String] = keywords.load_commands.get(name)

  def has_load_commands(text: String): Boolean =
    keywords.load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })


  /* language context */

  def set_language_context(context: Completion.Language_Context): Outer_Syntax =
    new Outer_Syntax(keywords, rev_abbrevs, context, has_tokens)

  def no_tokens: Outer_Syntax =
  {
    require(keywords.is_empty)
    new Outer_Syntax(
      rev_abbrevs = rev_abbrevs,
      language_context = language_context,
      has_tokens = false)
  }



  /** parsing **/

  /* command spans */

  def parse_spans(toks: List[Token]): List[Command_Span.Span] =
  {
    val result = new mutable.ListBuffer[Command_Span.Span]
    val content = new mutable.ListBuffer[Token]
    val ignored = new mutable.ListBuffer[Token]

-   var start = 0
    def ship(content: List[Token])
    {
      val kind =
        if (content.forall(_.is_ignored)) Command_Span.Ignored_Span
        else if (content.exists(_.is_error)) Command_Span.Malformed_Span
        else
          content.find(_.is_command) match {
            case None => Command_Span.Malformed_Span
            case Some(cmd) =>
              val name = cmd.source
              val offset =
                (0 /: content.takeWhile(_ != cmd)) {
                  case (i, tok) => i + Symbol.length(tok.source) }
              val end_offset = offset + Symbol.length(name)
              val range = Text.Range(offset, end_offset) + 1
-             val pos = Position.Range(range)
-             val abs_pos = Position.Range(range + start)
-             Command_Span.Command_Span(name, pos, abs_pos)
+             Command_Span.Command_Span(name, Position.Range(range))
          }
-     for (tok <- content) start += Symbol.length(tok.source)
      result += Command_Span.Span(kind, content)
    }

    def flush()
    {
      if (content.nonEmpty) { ship(content.toList); content.clear }
      if (ignored.nonEmpty) { ship(ignored.toList); ignored.clear }
    }

    for (tok <- toks) {
      if (tok.is_ignored) ignored += tok
      else if (keywords.is_before_command(tok) ||
        tok.is_command &&
          (!content.exists(keywords.is_before_command) || content.exists(_.is_command)))
      { flush(); content += tok }
      else { content ++= ignored; ignored.clear; content += tok }
    }
    flush()

    result.toList
  }

  def parse_spans(input: CharSequence): List[Command_Span.Span] =
    parse_spans(Token.explode(keywords, input))
}
diff --git a/src/Pure/PIDE/command.scala b/src/Pure/PIDE/command.scala
--- a/src/Pure/PIDE/command.scala
+++ b/src/Pure/PIDE/command.scala
@@ -1,651 +1,651 @@
/*  Title:      Pure/PIDE/command.scala
    Author:     Fabian Immler, TU Munich
    Author:     Makarius

Prover commands with accumulated results from execution.
*/ package isabelle import scala.collection.immutable.SortedMap object Command { /* blobs */ object Blob { def read_file(name: Document.Node.Name, src_path: Path): Blob = { val bytes = Bytes.read(name.path) val chunk = Symbol.Text_Chunk(bytes.text) Blob(name, src_path, Some((bytes.sha1_digest, chunk))) } } sealed case class Blob( name: Document.Node.Name, src_path: Path, content: Option[(SHA1.Digest, Symbol.Text_Chunk)]) { def read_file: Bytes = Bytes.read(name.path) def chunk_file: Symbol.Text_Chunk.File = Symbol.Text_Chunk.File(name.node) } object Blobs_Info { val none: Blobs_Info = Blobs_Info(Nil) def errors(msgs: List[String]): Blobs_Info = Blobs_Info(msgs.map(msg => Exn.Exn[Blob](ERROR(msg)))) } sealed case class Blobs_Info(blobs: List[Exn.Result[Blob]], index: Int = -1) /** accumulated results from prover **/ /* results */ object Results { type Entry = (Long, XML.Elem) val empty: Results = new Results(SortedMap.empty) def make(args: TraversableOnce[Results.Entry]): Results = (empty /: args)(_ + _) def merge(args: TraversableOnce[Results]): Results = (empty /: args)(_ ++ _) } final class Results private(private val rep: SortedMap[Long, XML.Elem]) { def is_empty: Boolean = rep.isEmpty def defined(serial: Long): Boolean = rep.isDefinedAt(serial) def get(serial: Long): Option[XML.Elem] = rep.get(serial) def iterator: Iterator[Results.Entry] = rep.iterator def + (entry: Results.Entry): Results = if (defined(entry._1)) this else new Results(rep + entry) def ++ (other: Results): Results = if (this eq other) this else if (rep.isEmpty) other else (this /: other.iterator)(_ + _) override def hashCode: Int = rep.hashCode override def equals(that: Any): Boolean = that match { case other: Results => rep == other.rep case _ => false } override def toString: String = iterator.mkString("Results(", ", ", ")") } /* exports */ object Exports { type Entry = (Long, Export.Entry) val empty: Exports = new Exports(SortedMap.empty) def merge(args: TraversableOnce[Exports]): Exports = (empty /: args)(_ ++ _) } final class Exports private(private val rep: SortedMap[Long, Export.Entry]) { def is_empty: Boolean = rep.isEmpty def iterator: Iterator[Exports.Entry] = rep.iterator def + (entry: Exports.Entry): Exports = if (rep.isDefinedAt(entry._1)) this else new Exports(rep + entry) def ++ (other: Exports): Exports = if (this eq other) this else if (rep.isEmpty) other else (this /: other.iterator)(_ + _) override def hashCode: Int = rep.hashCode override def equals(that: Any): Boolean = that match { case other: Exports => rep == other.rep case _ => false } override def toString: String = iterator.mkString("Exports(", ", ", ")") } /* markups */ object Markup_Index { val markup: Markup_Index = Markup_Index(false, Symbol.Text_Chunk.Default) def blob(blob: Blob): Markup_Index = Markup_Index(false, blob.chunk_file) } sealed case class Markup_Index(status: Boolean, chunk_name: Symbol.Text_Chunk.Name) object Markups { type Entry = (Markup_Index, Markup_Tree) val empty: Markups = new Markups(Map.empty) def init(markup: Markup_Tree): Markups = new Markups(Map(Markup_Index.markup -> markup)) def make(args: TraversableOnce[Entry]): Markups = (empty /: args)(_ + _) def merge(args: TraversableOnce[Markups]): Markups = (empty /: args)(_ ++ _) } final class Markups private(private val rep: Map[Markup_Index, Markup_Tree]) { def is_empty: Boolean = rep.isEmpty def apply(index: Markup_Index): Markup_Tree = rep.getOrElse(index, Markup_Tree.empty) def add(index: Markup_Index, markup: Text.Markup): Markups = new Markups(rep + (index 
-> (this(index) + markup))) def + (entry: Markups.Entry): Markups = { val (index, tree) = entry new Markups(rep + (index -> (this(index).merge(tree, Text.Range.full, Markup.Elements.full)))) } def ++ (other: Markups): Markups = if (this eq other) this else if (rep.isEmpty) other else (this /: other.rep.iterator)(_ + _) def redirection_iterator: Iterator[Document_ID.Generic] = for (Markup_Index(_, Symbol.Text_Chunk.Id(id)) <- rep.keysIterator) yield id def redirect(other_id: Document_ID.Generic): Markups = { val rep1 = (for { (Markup_Index(status, Symbol.Text_Chunk.Id(id)), markup) <- rep.iterator if other_id == id } yield (Markup_Index(status, Symbol.Text_Chunk.Default), markup)).toMap if (rep1.isEmpty) Markups.empty else new Markups(rep1) } override def hashCode: Int = rep.hashCode override def equals(that: Any): Boolean = that match { case other: Markups => rep == other.rep case _ => false } override def toString: String = rep.iterator.mkString("Markups(", ", ", ")") } /* state */ object State { def get_result(states: List[State], serial: Long): Option[XML.Elem] = states.find(st => st.results.defined(serial)).map(st => st.results.get(serial).get) def get_result_proper(states: List[State], props: Properties.T): Option[Results.Entry] = for { serial <- Markup.Serial.unapply(props) elem <- get_result(states, serial) if elem.body.nonEmpty } yield serial -> elem def merge_results(states: List[State]): Results = Results.merge(states.map(_.results)) def merge_exports(states: List[State]): Exports = Exports.merge(states.map(_.exports)) def merge_markups(states: List[State]): Markups = Markups.merge(states.map(_.markups)) def merge_markup(states: List[State], index: Markup_Index, range: Text.Range, elements: Markup.Elements): Markup_Tree = Markup_Tree.merge(states.map(_.markup(index)), range, elements) def merge(command: Command, states: List[State]): State = State(command, states.flatMap(_.status), merge_results(states), merge_exports(states), merge_markups(states)) } sealed case class State( command: Command, status: List[Markup] = Nil, results: Results = Results.empty, exports: Exports = Exports.empty, markups: Markups = Markups.empty) { def initialized: Boolean = status.exists(markup => markup.name == Markup.INITIALIZED) def consolidating: Boolean = status.exists(markup => markup.name == Markup.CONSOLIDATING) def consolidated: Boolean = status.exists(markup => markup.name == Markup.CONSOLIDATED) lazy val maybe_consolidated: Boolean = { var touched = false var forks = 0 var runs = 0 for (markup <- status) { markup.name match { case Markup.FORKED => touched = true; forks += 1 case Markup.JOINED => forks -= 1 case Markup.RUNNING => touched = true; runs += 1 case Markup.FINISHED => runs -= 1 case _ => } } touched && forks == 0 && runs == 0 } lazy val document_status: Document_Status.Command_Status = { val warnings = if (results.iterator.exists(p => Protocol.is_warning(p._2) || Protocol.is_legacy(p._2))) List(Markup(Markup.WARNING, Nil)) else Nil val errors = if (results.iterator.exists(p => Protocol.is_error(p._2))) List(Markup(Markup.ERROR, Nil)) else Nil Document_Status.Command_Status.make((warnings ::: errors ::: status).iterator) } def markup(index: Markup_Index): Markup_Tree = markups(index) def redirect(other_command: Command): Option[State] = { val markups1 = markups.redirect(other_command.id) if (markups1.is_empty) None else Some(new State(other_command, markups = markups1)) } private def add_status(st: Markup): State = copy(status = st :: status) private def add_result(entry: 
Results.Entry): State = copy(results = results + entry) def add_export(entry: Exports.Entry): Option[State] = if (command.node_name.theory == entry._2.theory_name) Some(copy(exports = exports + entry)) else None private def add_markup( status: Boolean, chunk_name: Symbol.Text_Chunk.Name, m: Text.Markup): State = { val markups1 = if (status || Document_Status.Command_Status.liberal_elements(m.info.name)) markups.add(Markup_Index(true, chunk_name), m) else markups copy(markups = markups1.add(Markup_Index(false, chunk_name), m)) } def accumulate( self_id: Document_ID.Generic => Boolean, other_id: (Document.Node.Name, Document_ID.Generic) => Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)], message: XML.Elem, cache: XML.Cache): State = message match { case XML.Elem(Markup(Markup.STATUS, _), msgs) => if (command.span.is_theory) this else { (this /: msgs)((state, msg) => msg match { case elem @ XML.Elem(markup, Nil) => state. add_status(markup). add_markup(true, Symbol.Text_Chunk.Default, Text.Info(command.core_range, elem)) case _ => Output.warning("Ignored status message: " + msg) state }) } case XML.Elem(Markup(Markup.REPORT, atts0), msgs) => (this /: msgs)((state, msg) => { def bad(): Unit = Output.warning("Ignored report message: " + msg) msg match { case XML.Elem(Markup(name, atts), args) => command.reported_position(atts) orElse command.reported_position(atts0) match { case Some((id, chunk_name, target_range)) => val target = if (self_id(id) && command.chunks.isDefinedAt(chunk_name)) Some((chunk_name, command.chunks(chunk_name))) else if (chunk_name == Symbol.Text_Chunk.Default) other_id(command.node_name, id) else None (target, target_range) match { case (Some((target_name, target_chunk)), Some(symbol_range)) if !symbol_range.is_singularity => target_chunk.incorporate(symbol_range) match { case Some(range) => val props = atts.filterNot(Markup.position_property) val elem = cache.elem(XML.Elem(Markup(name, props), args)) state.add_markup(false, target_name, Text.Info(range, elem)) case None => bad(); state } case _ => // silently ignore excessive reports state } case _ => bad(); state } case _ => bad(); state } }) case XML.Elem(Markup(name, props), body) => props match { case Markup.Serial(i) => val markup_message = cache.elem(XML.Elem(Markup(Markup.message(name), props), body)) val message_markup = cache.elem(XML.elem(Markup(name, props.filter(p => p._1 == Markup.SERIAL)))) var st = add_result(i -> markup_message) if (Protocol.is_inlined(message)) { for { (chunk_name, chunk) <- command.chunks.iterator range <- command.message_positions(self_id, chunk_name, chunk, message) } st = st.add_markup(false, chunk_name, Text.Info(range, message_markup)) } st case _ => Output.warning("Ignored message without serial number: " + message) this } } } /** static content **/ /* make commands */ def apply( id: Document_ID.Command, node_name: Document.Node.Name, blobs_info: Blobs_Info, span: Command_Span.Span): Command = { val (source, span1) = span.compact_source new Command(id, node_name, blobs_info, span1, source, Results.empty, Markups.empty) } val empty: Command = Command(Document_ID.none, Document.Node.Name.empty, Blobs_Info.none, Command_Span.empty) def unparsed( source: String, theory: Boolean = false, id: Document_ID.Command = Document_ID.none, node_name: Document.Node.Name = Document.Node.Name.empty, blobs_info: Blobs_Info = Blobs_Info.none, results: Results = Results.empty, markups: Markups = Markups.empty): Command = { val (source1, span1) = Command_Span.unparsed(source, theory).compact_source 
new Command(id, node_name, blobs_info, span1, source1, results, markups) } def text(source: String): Command = unparsed(source) def rich_text(id: Document_ID.Command, results: Results, body: XML.Body): Command = unparsed(XML.content(body), id = id, results = results, markups = Markups.init(Markup_Tree.from_XML(body))) /* edits and perspective */ type Edit = (Option[Command], Option[Command]) object Perspective { val empty: Perspective = Perspective(Nil) } sealed case class Perspective(commands: List[Command]) // visible commands in canonical order { def is_empty: Boolean = commands.isEmpty def same(that: Perspective): Boolean = { val cmds1 = this.commands val cmds2 = that.commands require(!cmds1.exists(_.is_undefined)) require(!cmds2.exists(_.is_undefined)) cmds1.length == cmds2.length && (cmds1.iterator zip cmds2.iterator).forall({ case (c1, c2) => c1.id == c2.id }) } } /* blobs: inlined errors and auxiliary files */ def blobs_info( resources: Resources, syntax: Outer_Syntax, get_blob: Document.Node.Name => Option[Document.Blob], can_import: Document.Node.Name => Boolean, node_name: Document.Node.Name, span: Command_Span.Span): Blobs_Info = { span.name match { // inlined errors case Thy_Header.THEORY => val reader = span.content_reader val header = resources.check_thy(node_name, span.content_reader) val imports_pos = header.imports_pos val raw_imports = try { val read_imports = Thy_Header.read(node_name, reader).imports.map(_._1) if (imports_pos.length == read_imports.length) read_imports else error("") } catch { case _: Throwable => List.fill(header.imports.length)("") } val errors = for { ((import_name, pos), s) <- imports_pos zip raw_imports if !can_import(import_name) } yield { val completion = if (Thy_Header.is_base_name(s)) resources.complete_import_name(node_name, s) else Nil "Bad theory import " + Markup.Path(import_name.node).markup(quote(import_name.toString)) + Position.here(pos) + Completion.report_theories(pos, completion) } Blobs_Info.errors(errors) // auxiliary files case _ => val loaded_files = span.loaded_files(syntax) val blobs = loaded_files.files.map(file => (Exn.capture { val src_path = Path.explode(file) val name = Document.Node.Name(resources.append(node_name, src_path)) val content = get_blob(name).map(blob => (blob.bytes.sha1_digest, blob.chunk)) Blob(name, src_path, content) }).user_error) Blobs_Info(blobs, index = loaded_files.index) } } def build_blobs_info( syntax: Outer_Syntax, node_name: Document.Node.Name, load_commands: List[Command_Span.Span]): Blobs_Info = { val blobs = for { span <- load_commands file <- span.loaded_files(syntax).files } yield { (Exn.capture { val dir = node_name.master_dir_path val src_path = Path.explode(file) val name = Document.Node.Name((dir + src_path).expand.implode_symbolic) Blob.read_file(name, src_path) }).user_error } Blobs_Info(blobs) } } final class Command private( val id: Document_ID.Command, val node_name: Document.Node.Name, val blobs_info: Command.Blobs_Info, val span: Command_Span.Span, val source: String, val init_results: Command.Results, val init_markups: Command.Markups) { override def toString: String = id + "/" + span.kind.toString /* classification */ def is_proper: Boolean = span.kind.isInstanceOf[Command_Span.Command_Span] def is_ignored: Boolean = span.kind == Command_Span.Ignored_Span def is_undefined: Boolean = id == Document_ID.none val is_unparsed: Boolean = span.content.exists(_.is_unparsed) val is_unfinished: Boolean = span.content.exists(_.is_unfinished) def potentially_initialized: Boolean = 
    span.name == Thy_Header.THEORY


  /* blobs */

  def blobs: List[Exn.Result[Command.Blob]] = blobs_info.blobs
  def blobs_index: Int = blobs_info.index

  def blobs_ok: Boolean =
    blobs.forall({ case Exn.Res(_) => true case _ => false })

  def blobs_names: List[Document.Node.Name] =
    for (Exn.Res(blob) <- blobs) yield blob.name

  def blobs_undefined: List[Document.Node.Name] =
    for (Exn.Res(blob) <- blobs if blob.content.isEmpty) yield blob.name

  def blobs_defined: List[(Document.Node.Name, SHA1.Digest)] =
    for (Exn.Res(blob) <- blobs; (digest, _) <- blob.content) yield (blob.name, digest)

  def blobs_changed(doc_blobs: Document.Blobs): Boolean =
    blobs.exists({ case Exn.Res(blob) => doc_blobs.changed(blob.name) case _ => false })


  /* source chunks */

  val chunk: Symbol.Text_Chunk = Symbol.Text_Chunk(source)

  val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] =
    ((Symbol.Text_Chunk.Default -> chunk) ::
      (for (Exn.Res(blob) <- blobs; (_, file) <- blob.content)
        yield blob.chunk_file -> file)).toMap

  def length: Int = source.length
  def range: Text.Range = chunk.range

  val core_range: Text.Range =
    Text.Range(0,
      (length /: span.content.reverse.iterator.takeWhile(_.is_ignored))(_ - _.source.length))

  def source(range: Text.Range): String = range.substring(source)


  /* theory parents */

  def theory_parents(resources: Resources): List[Document.Node.Name] =
    if (span.name == Thy_Header.THEORY) {
      try {
        val header = Thy_Header.read(node_name, span.content_reader)
        for ((s, _) <- header.imports)
        yield {
          try { resources.import_name(node_name, s) }
          catch { case ERROR(_) => Document.Node.Name.empty }
        }
      }
      catch { case ERROR(_) => Nil }
    }
    else Nil


  /* reported positions */

  def reported_position(pos: Position.T)
    : Option[(Document_ID.Generic, Symbol.Text_Chunk.Name, Option[Symbol.Range])] =
  {
    pos match {
      case Position.Id(id) =>
        val chunk_name =
          pos match {
            case Position.File(name) if name != node_name.node =>
              Symbol.Text_Chunk.File(name)
            case _ => Symbol.Text_Chunk.Default
          }
        Some((id, chunk_name, Position.Range.unapply(pos)))
      case _ => None
    }
  }

  def message_positions(
    self_id: Document_ID.Generic => Boolean,
    chunk_name: Symbol.Text_Chunk.Name,
    chunk: Symbol.Text_Chunk,
    message: XML.Elem): Set[Text.Range] =
  {
    def elem(props: Properties.T, set: Set[Text.Range]): Set[Text.Range] =
      reported_position(props) match {
        case Some((id, name, reported_range)) if self_id(id) && name == chunk_name =>
          val opt_range =
            reported_range orElse {
              if (name == Symbol.Text_Chunk.Default)
-               Position.Range.unapply(span.absolute_position)
+               Position.Range.unapply(span.position)
              else None
            }
          opt_range match {
            case Some(symbol_range) =>
              chunk.incorporate(symbol_range) match {
                case Some(range) => set + range
                case _ => set
              }
            case None => set
          }
        case _ => set
      }
    def tree(set: Set[Text.Range], t: XML.Tree): Set[Text.Range] =
      t match {
        case XML.Wrapped_Elem(Markup(name, props), _, body) =>
          body.foldLeft(if (Rendering.position_elements(name)) elem(props, set) else set)(tree)
        case XML.Elem(Markup(name, props), body) =>
          body.foldLeft(if (Rendering.position_elements(name)) elem(props, set) else set)(tree)
        case XML.Text(_) => set
      }

    val set = tree(Set.empty, message)
    if (set.isEmpty) elem(message.markup.properties, set)
    else set
  }


  /* accumulated results */

  val init_state: Command.State =
    Command.State(this, results = init_results, markups = init_markups)

  val empty_state: Command.State = Command.State(this)
}
diff --git a/src/Pure/PIDE/command_span.scala b/src/Pure/PIDE/command_span.scala
--- a/src/Pure/PIDE/command_span.scala
+++ b/src/Pure/PIDE/command_span.scala
@@ -1,149 +1,146 @@
/*  Title:      Pure/PIDE/command_span.scala
    Author:     Makarius

Syntactic representation of command spans.
*/

package isabelle

import scala.collection.mutable
import scala.util.parsing.input.CharSequenceReader


object Command_Span
{
  /* loaded files */

  object Loaded_Files
  {
    val none: Loaded_Files = Loaded_Files(Nil, -1)
  }
  sealed case class Loaded_Files(files: List[String], index: Int)

  class Load_Command(val name: String) extends Isabelle_System.Service
  {
    override def toString: String = name

    def extensions: List[String] = Nil

    def loaded_files(tokens: List[(Token, Int)]): Loaded_Files =
      tokens.collectFirst({ case (t, i) if t.is_embedded => (t.content, i) }) match {
        case Some((file, i)) =>
          extensions match {
            case Nil => Loaded_Files(List(file), i)
            case exts => Loaded_Files(exts.map(ext => file + "." + ext), i)
          }
        case None => Loaded_Files.none
      }
  }

  lazy val load_commands: List[Load_Command] =
    new Load_Command("") :: Isabelle_System.make_services(classOf[Load_Command])


  /* span kind */

  sealed abstract class Kind
  {
    override def toString: String =
      this match {
-       case Command_Span(name, _, _) => proper_string(name) getOrElse "<command>"
+       case Command_Span(name, _) => proper_string(name) getOrElse "<command>"
        case Ignored_Span => "<ignored>"
        case Malformed_Span => "<malformed>"
        case Theory_Span => "<theory>"
      }
  }

- case class Command_Span(name: String, pos: Position.T, abs_pos: Position.T) extends Kind
+ case class Command_Span(name: String, pos: Position.T) extends Kind
  case object Ignored_Span extends Kind
  case object Malformed_Span extends Kind
  case object Theory_Span extends Kind


  /* span */

  sealed case class Span(kind: Kind, content: List[Token])
  {
    def is_theory: Boolean = kind == Theory_Span

    def name: String =
      kind match { case k: Command_Span => k.name case _ => "" }

    def position: Position.T =
      kind match { case k: Command_Span => k.pos case _ => Position.none }

-   def absolute_position: Position.T =
-     kind match { case k: Command_Span => k.abs_pos case _ => Position.none }
-
    def keyword_pos(start: Token.Pos): Token.Pos =
      kind match {
        case _: Command_Span =>
          (start /: content.iterator.takeWhile(tok => !tok.is_command))(_.advance(_))
        case _ => start
      }

    def is_kind(keywords: Keyword.Keywords, pred: String => Boolean, other: Boolean): Boolean =
      keywords.kinds.get(name) match {
        case Some(k) => pred(k)
        case None => other
      }

    def is_begin: Boolean = content.exists(_.is_begin)
    def is_end: Boolean = content.exists(_.is_end)

    def content_reader: CharSequenceReader = Scan.char_reader(Token.implode(content))

    def length: Int = (0 /: content)(_ + _.source.length)

    def compact_source: (String, Span) =
    {
      val source = Token.implode(content)
      val content1 = new mutable.ListBuffer[Token]
      var i = 0
      for (Token(kind, s) <- content) {
        val n = s.length
        val s1 = source.substring(i, i + n)
        content1 += Token(kind, s1)
        i += n
      }
      (source, Span(kind, content1.toList))
    }

    def clean_arguments: List[(Token, Int)] =
    {
      if (name.nonEmpty) {
        def clean(toks: List[(Token, Int)]): List[(Token, Int)] =
          toks match {
            case (t1, i1) :: (t2, i2) :: rest =>
              if (t1.is_keyword && t1.source == "%" && t2.is_name) clean(rest)
              else (t1, i1) :: clean((t2, i2) :: rest)
            case _ => toks
          }
        clean(content.zipWithIndex.filter({ case (t, _) => t.is_proper }))
          .dropWhile({ case (t, _) => !t.is_command })
          .dropWhile({ case (t, _) => t.is_command })
      }
      else Nil
    }

    def is_load_command(syntax: Outer_Syntax): Boolean =
      syntax.load_command(name).isDefined

    def loaded_files(syntax: Outer_Syntax): Loaded_Files =
      syntax.load_command(name) match {
        case None => Loaded_Files.none
        case Some(a) =>
          load_commands.find(_.name == a) match {
            case Some(load_command) => load_command.loaded_files(clean_arguments)
            case None => error("Undefined load command function: " + a)
          }
      }
  }

  val empty: Span = Span(Ignored_Span, Nil)

  def unparsed(source: String, theory: Boolean): Span =
  {
    val kind = if (theory) Theory_Span else Malformed_Span
    Span(kind, List(Token(Token.Kind.UNPARSED, source)))
  }
}
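
Note on the net effect of the three hunks: the `abs_pos` field is dropped from `Command_Span.Command_Span`, `Outer_Syntax.parse_spans` no longer threads a running `start` offset to precompute absolute positions, and `Command.message_positions` now falls back on `span.position` instead of the deleted `span.absolute_position`. The snippet below is only a minimal, self-contained sketch of that datatype change, not Isabelle's actual API; the stub `Pos` type and the `shift` helper are hypothetical stand-ins illustrating how a caller can still derive an absolute position from the span-relative one when it needs it.

// Simplified model of the patched Command_Span.Kind (stub types, not the real
// isabelle.Position.T / Token machinery).
object CommandSpanSketch {
  // Stub for a position: a half-open symbol range relative to one command span.
  final case class Pos(start: Int, stop: Int) {
    // Hypothetical helper: what the removed abs_pos used to store, now derived
    // on demand by shifting the relative range by the span's document offset.
    def shift(offset: Int): Pos = Pos(start + offset, stop + offset)
  }

  sealed trait Kind
  // After the patch only the span-relative position is kept in the datatype.
  final case class Command_Span(name: String, pos: Pos) extends Kind
  case object Ignored_Span extends Kind
  case object Malformed_Span extends Kind

  // Mirrors Span.position in the patch: non-command kinds carry no position.
  def position(kind: Kind): Option[Pos] =
    kind match {
      case Command_Span(_, pos) => Some(pos)
      case _ => None
    }

  def main(args: Array[String]): Unit = {
    val span = Command_Span("lemma", Pos(0, 5))
    // A consumer that still wants document-absolute coordinates applies the
    // span's start offset itself instead of reading a cached abs_pos field.
    println(position(span).map(_.shift(42)))  // prints Some(Pos(42,47))
  }
}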