Merge branch 'porting_to_Isabelle2021-1'

This commit is contained in:
Achim D. Brucker 2022-03-26 18:17:57 +00:00
commit 5ae72e1103
27 changed files with 774 additions and 1536 deletions

View File

@ -6,9 +6,9 @@ DOF_LATEST_DOI="10.5281/zenodo.4625176"
DOF_GENERIC_DOI="10.5281/zenodo.3370482"
#
# Isabelle and AFP Configuration
ISABELLE_VERSION="Isabelle2021: February 2021"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2021/"
AFP_DATE="afp-2021-03-09"
ISABELLE_VERSION="Isabelle2021-1"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2021-1/"
AFP_DATE="afp-2021-12-28"
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
#
# Isabelle/DOF Repository Configuration

View File

@ -9,6 +9,9 @@ It may also contain additional tools and script that are useful for preparing a
### Latest Build
* [document.pdf](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/document.pdf)
* [browser_info](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/browser_info/Unsorted/Isabelle_DOF/)
* [aux files](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/)
* lualatex
* [browser_info](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/lualatex/browser_info/Unsorted/Isabelle_DOF/)
* [aux files](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/lualatex/)
* pdflatex
* [browser_info](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/Isabelle_DOF/)
* [aux files](https://artifacts.logicalhacking.com/ci-confidential/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)

View File

@ -2,13 +2,13 @@ pipeline:
build:
image: docker.io/logicalhacking/isabelle2021
commands:
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
- mkdir -p $ARTIFACT_DIR
- export `isabelle getenv ISABELLE_HOME_USER`
- echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
- ./install
- isabelle build -D . -o browser_info
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
- cp output/document.pdf $ARTIFACT_DIR
- cd $ARTIFACT_DIR
- cd ..
- ln -s * latest
@ -24,4 +24,7 @@ pipeline:
from_secret: artifacts_ssh
user: artifacts
matrix:
LATEX:
- pdflatex
- lualatex

View File

@ -1,32 +0,0 @@
# Copyright (c) 2019 Achim D. Brucker
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
FROM logicalhacking/lh-docker-isabelle:isabelle2021
WORKDIR /home/isabelle
COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy
RUN Isabelle/bin/isabelle build -b Functional-Automata

255
NOTES.thy Normal file
View File

@ -0,0 +1,255 @@
chapter \<open>Notes on Isabelle/DOF for Isabelle2021-1\<close>
theory NOTES
imports Main
begin
section \<open>Isabelle/DOF component setup\<close>
subsection \<open>Terminology\<close>
text \<open>
\<^item> The concept of \<^emph>\<open>Isabelle system component\<close> is explained in \<^doc>\<open>system\<close>
section 1.1.1; see also \<^tool>\<open>components\<close> explained in section 7.3.
For example:
\<^verbatim>\<open>isabelle components -l\<close>
\<^item> In the private terminology of Burkhart, the word "component" has a
different meaning: a tool implemented in Isabelle/ML that happens to
declare context data (many ML tools do that, it is not very special,
similar to defining a \<^verbatim>\<open>datatype\<close> or \<^verbatim>\<open>structure\<close> in ML).
\<close>
subsection \<open>Isabelle/DOF as component\<close>
text \<open>
Formal Isabelle/DOF component setup happens here:
\<^item> \<^path>\<open>$ISABELLE_HOME_USER/etc/components\<close>
\<^item> A suitable directory entry in the file registers our component to
existing Isabelle installation; it also activates the session
directory tree starting at \<^file>\<open>$ISABELLE_DOF_HOME/ROOTS\<close>.
\<^item> Alternative: use \<^path>\<open>$ISABELLE_HOME/Admin/build_release\<close> with
option \<^verbatim>\<open>-c\<close> to produce a derived Isabelle distribution that bundles
our component for end-users (maybe even with AFP entries).
\<^item> \<^file>\<open>$ISABELLE_DOF_HOME/etc/settings\<close>
\<^item> This provides a pervasive Bash process environment (variables,
shell functions etc.). It may refer to \<^verbatim>\<open>$COMPONENT\<close> for the
component root, e.g. to retain it in variable \<^dir>\<open>$ISABELLE_DOF_HOME\<close>.
\<^item> Historically, it used to be the main configuration area, but today
we see more and more alternatives, e.g. system options or services in
Isabelle/Scala (see below).
\<^item> \<^file>\<open>$ISABELLE_DOF_HOME/etc/options\<close>
\<^item> options declared as \<^verbatim>\<open>public\<close> appear in the Isabelle/jEdit dialog
\<^action>\<open>plugin-options\<close> (according to their \<^verbatim>\<open>section\<close>)
\<^item> all options (public and non-public) are available for command-line
usage, e.g. \<^verbatim>\<open>isabelle build -o dof_url="..."\<close>
\<^item> access to options in Isabelle/ML:
\<^item> implicit (for the running ML session)
\<^ML>\<open>Options.default_string \<^system_option>\<open>dof_url\<close>\<close>
\<^item> explicit (e.g. for each theories section in
\<^file>\<open>$ISABELLE_HOME/src/Pure/Tools/build.ML\<close>):
\<^ML>\<open>fn options => Options.string options \<^system_option>\<open>dof_url\<close>\<close>
\<^item> access in Isabelle/Scala is always explicit; the initial options
should be created only once and passed around as explicit argument:
\<^scala>\<open>{
val options = isabelle.Options.init();
options.string("dof_url");
}\<close>
Note: there are no antiquotations in Isabelle/Scala, so the literal
string \<^scala>\<open>"dof_url"\<close> is unchecked.
\<close>
section \<open>Document preparation in Isabelle/ML\<close>
subsection \<open>Session presentation hook\<close>
text \<open>
\<^ML>\<open>Build.add_hook\<close> allows to install a global session presentation
hook. It is used e.g. in Isabelle/Mirabelle to analyze all loaded
theories via Sledgehammer and other tools. Isabelle/DOF could use it to
"wrap-up" the whole session, to check if all document constraints hold
(cf. "monitors").
\<close>
subsection \<open>Theory presentation hook\<close>
text \<open>
\<^ML>\<open>Thy_Info.add_presentation\<close> installs a hook to be invoked at the
end of successful loading of theories; the provided context
\<^ML_type>\<open>Thy_Info.presentation_context\<close> provides access to
\<^ML_type>\<open>Options.T\<close> and \<^ML_type>\<open>Document_Output.segment\<close> with
command-spans and semantic states.
An example is regular Latex output in
\<^file>\<open>$ISABELLE_HOME/src/Pure/Thy/thy_info.ML\<close> where \<^ML>\<open>Export.export\<close>
is used to produce export artifacts in the session build database, for
retrieval via Isabelle/Scala.
\<close>
subsection \<open>Document commands\<close>
text \<open>
Isar toplevel commands now support a uniform concept for
\<^ML_type>\<open>Toplevel.presentation\<close>, but the exported interfaces are
limited to commands that do not change the semantic state: see
\<^ML>\<open>Toplevel.present\<close> and \<^ML>\<open>Toplevel.present_local_theory\<close>.
Since \<^verbatim>\<open>Toplevel.present_theory\<close> is missing in Isabelle2021-1, we use a
workaround with an alternative presentation hook: it exports
\<^verbatim>\<open>document/latex_dof\<close> files instead of regular \<^verbatim>\<open>document/latex_dof\<close>.
\<close>
subsection \<open>Document content\<close>
text \<open>
XML is now used uniformly (sometimes as inlined YXML). The meaning of
markup elements and properties is defined in
\<^scala_type>\<open>isabelle.Latex.Output\<close> (or user-defined subclasses).
\<close>
section \<open>Isabelle/Scala services\<close>
subsection \<open>Isabelle/DOF/Scala module\<close>
text \<open>
\<^item> \<^file>\<open>$ISABELLE_DOF_HOME/etc/build.props\<close> is the main specification for
the Isabelle/DOF/Scala module. It is built on the spot as required, e.g.
for \<^verbatim>\<open>isabelle scala\<close> or \<^verbatim>\<open>isabelle jedit\<close>; an alternative is to invoke
\<^verbatim>\<open>isabelle scala_build\<close> manually. See also \<^doc>\<open>system\<close>, chapter 5,
especially section 5.2.
\<^item> \<^verbatim>\<open>isabelle scala_project\<close> helps to develop Isabelle/Scala tools with
proper IDE support, notably IntelliJ IDEA: the generated project uses
Maven. See also \<^doc>\<open>system\<close>, section 5.2.3.
\<^item> Command-line tools should be always implemented in Scala; old-fashioned
shell scripts are no longer required (and more difficult to implement
properly). Only a few low-level tools are outside the Scala environment,
e.g. \<^verbatim>\<open>isabelle getenv\<close>. Add-on components should always use a name
prefix for their tools, e.g. \<^verbatim>\<open>isabelle dof_mkroot\<close> as defined in
\<^file>\<open>$ISABELLE_DOF_HOME/src/scala/dof_mkroot.scala\<close>.
\<close>
subsection \<open>Document preparation\<close>
text \<open>
\<^item> \<^scala_type>\<open>isabelle.Document_Build.Engine\<close> is the main entry-point
for user-defined document preparation; existing templates and examples
are defined in the same module \<^file>\<open>~~/src/Pure/Thy/document_build.scala\<close>.
There are two stages:
\<^enum> \<^verbatim>\<open>prepare_directory\<close>: populate the document output directory (e.g.
copy static document files, collect generated document sources from the
session build database).
\<^enum> \<^verbatim>\<open>build_document\<close>: produce the final PDF within the document output
directory (e.g. via standard LaTeX tools).
See also \<^system_option>\<open>document_build\<close> as explained in \<^doc>\<open>system\<close>,
section 3.3.
\<close>
section \<open>Miscellaneous NEWS and Notes\<close>
text \<open>
\<^item> Document preparation: there are many new options etc. that might help
to fine-tune DOF output, e.g. \<^system_option>\<open>document_comment_latex\<close>.
\<^item> ML: Theory_Data / Generic_Data: "val extend = I" has been removed;
obsolete since Isabelle2021.
\<^item> ML: \<^ML>\<open>Thm.instantiate\<close> and related operations now use explicit
abstract types for the instantiate, see \<^file>\<open>~~/src/Pure/term_items.ML\<close>
\<^item> ML: new antiquotation "instantiate" allows to instantiate formal
entities (types, terms, theorems) with values given ML. For example:
\<^ML>\<open>fn (A, B) =>
\<^instantiate>\<open>'a = A and 'b = B in typ \<open>('a \<times> 'b) list\<close>\<close>\<close>
\<^ML>\<open>fn A =>
\<^instantiate>\<open>'a = A in
lemma (schematic) \<open>x = y \<Longrightarrow> y = x\<close> for x y :: 'a by simp\<close>\<close>
\<^item> ML: new antiquotations for type constructors and term constants. For
example:
\<^ML>\<open>\<^Type>\<open>nat\<close>\<close>
\<^ML>\<open>fn (A, B) => \<^Type>\<open>fun A B\<close>\<close>
\<^ML>\<open>\<^Type_fn>\<open>fun A B => \<open>(A, B)\<close>\<close>\<close>
\<^ML>\<open>fn (A, B) => \<^Const>\<open>conj for A B\<close>\<close>
\<^ML>\<open>\<^Const_fn>\<open>conj for A B => \<open>(A, B)\<close>\<close>\<close>
\<^ML>\<open>fn t =>
case t of
\<^Const_>\<open>plus T for x y\<close> => ("+", T, x, y)
| \<^Const_>\<open>minus T for x y\<close> => ("-", T, x, y)
| \<^Const_>\<open>times T for x y\<close> => ("*", T, x, y)\<close>
Note: do not use unchecked things like
\<^ML>\<open>Const ("List.list.Nil", Type ("Nat.nat", []))\<close>
\<^item> ML: antiquotations "try" and "can" operate directly on the given ML
expression, in contrast to functions "try" and "can" that modify
application of a function.
Note: instead of semantically ill-defined "handle _ => ...", use
something like this:
\<^ML>\<open>
fn (x, y) =>
(case \<^try>\<open>x div y\<close> of
SOME z => z
| NONE => 0)
\<close>
\<^ML>\<open>
fn (x, y) => \<^try>\<open>x div y\<close> |> the_default 0
\<close>
\<close>
text \<open>Adhoc examples:\<close>
ML \<open>
fun mk_plus x y = \<^Const>\<open>plus \<^Type>\<open>nat\<close> for x y\<close>;
fn \<^Const_>\<open>plus \<^Type>\<open>nat\<close> for \<^Const_>\<open>Groups.zero \<^Type>\<open>nat\<close>\<close> y\<close> => y;
\<close>
ML \<open>
fn (A, B) =>
\<^instantiate>\<open>A and B in term \<open>A \<and> B \<longrightarrow> B \<and> A\<close>\<close>;
fn (A, B) =>
\<^instantiate>\<open>A and B in lemma \<open>A \<and> B \<longrightarrow> B \<and> A\<close> by simp\<close>;
\<close>
end
(* :maxLineLen=75: *)

View File

@ -5,26 +5,15 @@ Isabelle/DOF allows for both conventional typesetting as well as formal
development. The manual for [Isabelle/DOF 1.1.0/Isabelle2021 is available
online.](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2021.pdf)
## Running Isabelle/DOF using Docker
As an alternative to installing Isabelle/DOF locally, the latest official release Isabelle/DOF
is also available on Docker Hub. Thus, if you have Docker installed and your installation of
Docker supports X11 application, you can start Isabelle/DOF as follows:
```console
foo@bar:~$ docker run -ti --rm -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix \
logicalhacking/isabelle_dof-1.1.0_isabelle2021 isabelle jedit
```
## Pre-requisites
Isabelle/DOF has two major pre-requisites:
* **Isabelle:** Isabelle/DOF requires [Isabelle 2021](http://isabelle.in.tum.de/website-Isabelle2021/).
Please download the Isabelle 2021 distribution for your operating
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2021/).
* **Isabelle:** Isabelle/DOF requires [Isabelle 2021-1](http://isabelle.in.tum.de/website-Isabelle2021-1/).
Please download the Isabelle 2021-1 distribution for your operating
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2021-1/).
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
[TeX Live 2021](https://www.tug.org/texlive/) with all available updates applied.
[TeX Live 2022](https://www.tug.org/texlive/) with all available updates applied.
## Installation
@ -39,7 +28,7 @@ one), the full path to the ``isabelle`` command needs to be passed as
using the ``--isabelle`` command line argument of the ``install`` script:
```console
foo@bar:~$ ./install --isabelle /usr/local/Isabelle2021/bin/isabelle
foo@bar:~$ ./install --isabelle /usr/local/Isabelle2021-1/bin/isabelle
```
For further command line options of the installer, please use the

12
etc/build.props Normal file
View File

@ -0,0 +1,12 @@
title = Isabelle/DOF
module = $ISABELLE_HOME_USER/DOF/isabelle_dof.jar
no_build = false
requirements = \
env:ISABELLE_SCALA_JAR
sources = \
src/scala/dof_document_build.scala \
src/scala/dof_mkroot.scala \
src/scala/dof_tools.scala
services = \
isabelle_dof.DOF_Tools \
isabelle_dof.DOF_Document_Build$Engine

12
etc/options Normal file
View File

@ -0,0 +1,12 @@
(* :mode=isabelle-options: *)
section "Isabelle/DOF"
public option dof_url : string = "https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
-- "Isabelle/DOF source repository"
option dof_artifact_dir : string = "releases/Isabelle_DOF/Isabelle_DOF"
-- "Isabelle/DOF release artifacts"
option dof_artifact_host : string = "artifacts.logicalhacking.com"

3
etc/settings Normal file
View File

@ -0,0 +1,3 @@
# -*- shell-script -*- :mode=shellscript:
ISABELLE_DOF_HOME="$COMPONENT"

View File

@ -1,5 +1,5 @@
session "mini_odo" = "Isabelle_DOF" +
options [document = pdf, document_output = "output"]
options [document = pdf, document_output = "output", document_build = dof]
theories
"mini_odo"
document_files

View File

@ -1,5 +1,6 @@
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", quick_and_dirty = true]
options [document = pdf, document_output = "output", document_build = dof,
quick_and_dirty = true]
theories
IsaDofApplications
document_files

View File

@ -1,5 +1,5 @@
session "2020-iFM-csp" = "Isabelle_DOF" +
options [document = pdf, document_output = "output"]
options [document = pdf, document_output = "output", document_build = dof]
theories
"paper"
document_files

View File

@ -1,5 +1,6 @@
session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", quick_and_dirty = true]
options [document = pdf, document_output = "output", document_build = dof,
quick_and_dirty = true]
theories
"Isabelle_DOF-Manual"
document_files

View File

@ -1,2 +1,2 @@
Isabelle_DOF-Manual
TR_my_commented_isabelle
#TR_my_commented_isabelle

View File

@ -1,5 +1,6 @@
session "TR_MyCommentedIsabelle" = "Isabelle_DOF" +
options [document = pdf, document_output = "output",quick_and_dirty = true]
options [document = pdf, document_output = "output", document_build = dof,
quick_and_dirty = true]
theories
"TR_MyCommentedIsabelle"
document_files

View File

@ -612,18 +612,19 @@ subsection\<open>More operations on types\<close>
text\<open>
\<^item> \<^ML>\<open>Term_Subst.map_types_same : (typ -> typ) -> term -> term\<close>
\<^item> \<^ML>\<open>Term_Subst.map_aterms_same : (term -> term) -> term -> term\<close>
\<^item> \<^ML>\<open>Term_Subst.instantiate: ((indexname * sort) * typ) list * ((indexname * typ) * term) list
-> term -> term\<close>
\<^item> \<^ML>\<open>Term_Subst.instantiateT: ((indexname * sort) * typ) list -> typ -> typ\<close>
\<^item> \<^ML>\<open>Term_Subst.generalizeT: string list -> int -> typ -> typ\<close>
\<^item> \<^ML>\<open>Term_Subst.instantiate: typ TVars.table * term Vars.table -> term -> term\<close>
\<^item> \<^ML>\<open>Term_Subst.instantiateT: typ TVars.table -> typ -> typ\<close>
\<^item> \<^ML>\<open>Term_Subst.generalizeT: Names.set -> int -> typ -> typ\<close>
this is the standard type generalisation function !!!
only type-frees in the string-list were taken into account.
\<^item> \<^ML>\<open>Term_Subst.generalize: string list * string list -> int -> term -> term\<close>
\<^item> \<^ML>\<open>Term_Subst.generalize: Names.set * Names.set -> int -> term -> term\<close>
this is the standard term generalisation function !!!
only type-frees and frees in the string-lists were taken
into account.
\<close>
text \<open>Apparently, a bizarre conversion between the old-style interface and
the new-style \<^ML>\<open>tyenv\<close> is necessary. See the following example.\<close>
ML\<open>
@ -794,11 +795,11 @@ text\<open> We come now to the very heart of the LCF-Kernel of Isabelle, which
\<^item> \<^ML>\<open> Thm.forall_intr: cterm -> thm -> thm\<close>
\<^item> \<^ML>\<open> Thm.forall_elim: cterm -> thm -> thm\<close>
\<^item> \<^ML>\<open> Thm.transfer : theory -> thm -> thm\<close>
\<^item> \<^ML>\<open> Thm.generalize: string list * string list -> int -> thm -> thm\<close>
\<^item> \<^ML>\<open> Thm.instantiate: ((indexname*sort)*ctyp)list * ((indexname*typ)*cterm) list -> thm -> thm\<close>
\<^item> \<^ML>\<open> Thm.generalize: Names.set * Names.set -> int -> thm -> thm\<close>
\<^item> \<^ML>\<open> Thm.instantiate: ctyp TVars.table * cterm Vars.table -> thm -> thm\<close>
\<close>
text\<open> They reflect the Pure logic depicted in a number of presentations such as
text\<open> They reflect the Pure logic depicted in a number of presentations such as
M. Wenzel, \<^emph>\<open>Parallel Proof Checking in Isabelle/Isar\<close>, PLMMS 2009, or simiular papers.
Notated as logical inference rules, these operations were presented as follows:
\<close>
@ -909,14 +910,10 @@ high-level component (more low-level components such as \<^ML>\<open>Global_Theo
exist) for definitions and axiomatizations is here:
\<close>
text\<open>
\<^item> \<^ML>\<open>Specification.definition: (binding * typ option * mixfix) option ->
(binding * typ option * mixfix) list -> term list -> Attrib.binding * term ->
local_theory -> (term * (string * thm)) * local_theory\<close>
\<^item> \<^ML>\<open>Specification.definition': (binding * typ option * mixfix) option ->
(binding * typ option * mixfix) list -> term list -> Attrib.binding * term ->
bool -> local_theory -> (term * (string * thm)) * local_theory\<close>
\<^item> \<^ML>\<open>Specification.definition_cmd: (binding * string option * mixfix) option ->
(binding * string option * mixfix) list -> string list -> Attrib.binding * string ->
bool -> local_theory -> (term * (string * thm)) * local_theory\<close>
@ -1186,8 +1183,8 @@ text\<open> The extensibility of Isabelle as a system framework depends on a num
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
\<^item> \<^ML>\<open>Toplevel.exit: Toplevel.transition -> Toplevel.transition\<close>
\<^item> \<^ML>\<open>Toplevel.ignored: Position.T -> Toplevel.transition\<close>
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
(Toplevel.state -> unit) -> Toplevel.transition -> Toplevel.transition\<close>
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
(Toplevel.state -> Latex.text) -> Toplevel.transition -> Toplevel.transition\<close>
\<close>
subsection*[cmdbinding::technical] \<open>Toplevel Transaction Management in the Isar-Engine\<close>
@ -1856,8 +1853,6 @@ Common Isar Syntax
Common Isar Syntax
\<^item>\<^ML>\<open>Args.embedded_token : Token.T parser\<close>
\<^item>\<^ML>\<open>Args.embedded_inner_syntax: string parser\<close>
\<^item>\<^ML>\<open>Args.embedded_input : Input.source parser\<close>
\<^item>\<^ML>\<open>Args.embedded : string parser\<close>
\<^item>\<^ML>\<open>Args.embedded_position: (string * Position.T) parser\<close>

48
install
View File

@ -45,8 +45,7 @@ print_help()
echo " --help, -h display this help message"
echo " --isabelle, -i isabelle isabelle command used for installation"
echo " (default: $ISABELLE)"
echo " --skip-patch-and-afp, -s skip installation of Isabelle/DOF patch for"
echo " Isabelle and required AFP entries. "
echo " --skip-afp, -s skip installation of AFP entries. "
echo " USE AT YOUR OWN RISK (default: $SKIP)"
}
@ -106,9 +105,8 @@ check_afp_entries() {
mkdir -p .afp
if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then
for e in $missing; do
echo " Registering $e in $ISABELLE_HOME_USER/ROOTS"
touch $ISABELLE_HOME_USER/ROOTS
grep -q $PWD/.afp/$AFP_DATE/thys/$e $ISABELLE_HOME_USER/ROOTS || echo "$PWD/.afp/$AFP_DATE/thys/$e" >> $ISABELLE_HOME_USER/ROOTS
echo " Registering $e"
$ISABELLE components -u "$PWD/.afp/$AFP_DATE/thys/$e"
done
echo " AFP installation successful."
else
@ -122,38 +120,6 @@ check_afp_entries() {
fi
}
check_isa_dof_patch() {
echo "* Check availability of Isabelle/DOF patch:"
src="src/patches/thy_output.ML"
dst="$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"
if command -v cmp > /dev/null 2>&1 && cmp -s "$src" "$dst" ; then
echo " Success: latest Isabelle/DOF patch already applied"
if isabelle process -e 'Thy_Output.set_meta_args_parser' &> /dev/null ; then
true
else
echo " Warning: Isabelle/HOL needs to be rebuild to activate patch."
fi
else
command -v cmp >/dev/null 2>&1 || echo " Warning: cmp not available, cannot check if patch is already applied."
echo " Warning: Isabelle/DOF patch is not available or outdated."
echo " Trying to patch system ...."
if [ ! -f "$dst.backup-by-isadof-installer" ]; then
cp -f "$dst" "$dst.backup-by-isadof-installer" || true;
fi
if (cp -f $src $dst) &> /dev/null; then
echo " Applied patch successfully, Isabelle/HOL will be rebuilt during"
echo " the next start of Isabelle."
else
echo " FAILURE: Could not apply Isabelle/DOF patch."
echo " Please copy $src to $dst, e.g.:"
echo " cp -f $src $dst"
echo " and rebuild Isabelle/HOL."
exit_error
fi
fi
}
check_old_installation(){
echo "* Searching for existing installation:"
if [[ -d "$ISABELLE_HOME_USER/DOF" ]]; then
@ -222,8 +188,7 @@ install_and_register(){
sed -i -e "s|<isadofurl>|$DOF_URL|" $ISABELLE_HOME_USER/DOF/*/*
LTX_VERSION="$DATE $DOF_VERSION/$ISABELLE_SHORT_VERSION"
sed -i -e "s|<isadofltxversion>|$LTX_VERSION|" $ISABELLE_HOME_USER/DOF/*/*
touch $ISABELLE_HOME_USER/ROOTS
grep -q $PWD'$' $ISABELLE_HOME_USER/ROOTS || echo "$PWD" >> $ISABELLE_HOME_USER/ROOTS
$ISABELLE components -u "$PWD"
}
@ -235,7 +200,7 @@ do
--isabelle|-i)
ISABELLE="$2";
shift;;
--skip-patch-and-afp|-s)
--skip-afp|-s)
SKIP="true";;
--help|-h)
print_help
@ -260,9 +225,8 @@ echo "Isabelle/DOF Installer"
echo "======================"
check_isabelle_version
if [ "$SKIP" = "true" ]; then
echo "* Warning: skipping installation of Isabelle patch and AFP entries."
echo "* Warning: skipping installation of AFP entries."
else
check_isa_dof_patch
check_afp_entries
fi
check_old_installation

View File

@ -106,60 +106,32 @@ in
fun enriched_formal_statement_command ncid (S: (string * string) list) =
let fun transform_attr doc_attrs = (map (fn(cat,tag) => ((cat,@{here}),tag)) S) @
(("formal_results",@{here}),"([]::thm list)")::doc_attrs
in fn md => fn margs => fn thy =>
in fn margs => fn thy =>
gen_enriched_document_cmd {inline=true}
(transform_cid thy ncid) transform_attr md margs thy
(transform_cid thy ncid) transform_attr margs thy
end;
fun enriched_document_cmd_exp ncid (S: (string * string) list) =
(* expands ncid into supertype-check. *)
let fun transform_attr attrs = (map (fn(cat,tag) => ((cat,@{here}),tag)) S) @ attrs
in fn md => fn margs => fn thy =>
gen_enriched_document_cmd {inline=true} (transform_cid thy ncid) transform_attr md margs thy
in fn margs => fn thy =>
gen_enriched_document_cmd {inline=true} (transform_cid thy ncid) transform_attr margs thy
end;
end (* local *)
val _ =
Outer_Syntax.command ("title*", @{here}) "section heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd NONE {markdown = false} ))) ;
fun heading_command (name, pos) descr level =
ODL_Command_Parser.document_command (name, pos) descr
{markdown = false, body = true} (enriched_text_element_cmd level);
val _ =
Outer_Syntax.command ("subtitle*", @{here}) "section heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd NONE {markdown = false} )));
val _ =
Outer_Syntax.command ("chapter*", @{here}) "section heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd (SOME(SOME 0)) {markdown = false} )));
val _ =
Outer_Syntax.command ("section*", @{here}) "section heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd (SOME(SOME 1)) {markdown = false} )));
val _ =
Outer_Syntax.command ("subsection*", @{here}) "subsection heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd (SOME(SOME 2)) {markdown = false} )));
val _ =
Outer_Syntax.command ("subsubsection*", @{here}) "subsubsection heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd (SOME(SOME 3)) {markdown = false} )));
val _ =
Outer_Syntax.command ("paragraph*", @{here}) "paragraph heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd (SOME(SOME 4)) {markdown = false} )));
val _ =
Outer_Syntax.command ("subparagraph*", @{here}) "subparagraph heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_text_element_cmd (SOME(SOME 5)) {markdown = false} )));
val _ = heading_command ("title*", @{here}) "section heading" NONE;
val _ = heading_command ("subtitle*", @{here}) "section heading" NONE;
val _ = heading_command ("chapter*", @{here}) "section heading" (SOME (SOME 0));
val _ = heading_command ("section*", @{here}) "section heading" (SOME (SOME 1));
val _ = heading_command ("subsection*", @{here}) "subsection heading" (SOME (SOME 2));
val _ = heading_command ("subsubsection*", @{here}) "subsubsection heading" (SOME (SOME 3));
val _ = heading_command ("paragraph*", @{here}) "paragraph heading" (SOME (SOME 4));
val _ = heading_command ("subparagraph*", @{here}) "subparagraph heading" (SOME (SOME 5));
end
end
@ -206,24 +178,13 @@ print_doc_classes
subsection\<open>Ontological Macros\<close>
ML\<open> local open ODL_Command_Parser in
ML\<open>
(* *********************************************************************** *)
(* Ontological Macro Command Support *)
(* *********************************************************************** *)
val _ =
Outer_Syntax.command ("figure*", @{here}) "figure"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (Onto_Macros.enriched_text_element_cmd NONE {markdown = false} )));
val _ =
Outer_Syntax.command ("side_by_side_figure*", @{here}) "multiple figures"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (Onto_Macros.enriched_text_element_cmd NONE {markdown = false} )));
end
val _ = Onto_Macros.heading_command ("figure*", @{here}) "figure" NONE;
val _ = Onto_Macros.heading_command ("side_by_side_figure*", @{here}) "multiple figures" NONE;
\<close>
(*<*)
@ -247,22 +208,22 @@ section\<open>Tables\<close>
ML\<open>
local
fun mk_line st1 st2 [a] = [a @ [Latex.string st2]]
|mk_line st1 st2 (a::S) = [a @ [Latex.string st1]] @ mk_line st1 st2 S;
fun mk_line st1 st2 [a] = [a @ Latex.string st2]
|mk_line st1 st2 (a::S) = [a @ Latex.string st1] @ mk_line st1 st2 S;
fun table_antiquotation name =
Thy_Output.antiquotation_raw_embedded name
Document_Output.antiquotation_raw_embedded name
(Scan.repeat1(Scan.repeat1(Scan.lift Args.cartouche_input)))
(fn ctxt =>
(fn content:Input.source list list =>
let fun check _ = () (* ToDo *)
val _ = check content
in content
|> (map(map (Thy_Output.output_document ctxt {markdown = false})
|> (map(map (Document_Output.output_document ctxt {markdown = false})
#> mk_line "&" "\\\\"
#> List.concat )
#> List.concat)
|> Latex.enclose_block "\\table[allerhandquatsch]{" "}"
|> XML.enclose "\\table[allerhandquatsch]{" "}"
end
)
);

View File

@ -75,9 +75,7 @@ val schemeN = "_scheme"
(* derived from: theory_markup *)
fun docref_markup_gen refN def name id pos =
if id = 0 then Markup.empty
else
Markup.properties (Position.entity_properties_of def id pos)
(Markup.entity refN name); (* or better store the thy-name as property ? ? ? *)
else Position.make_entity_markup {def = def} id refN (name, pos); (* or better store the thy-name as property ? ? ? *)
val docref_markup = docref_markup_gen docrefN
@ -1210,8 +1208,6 @@ fun document_command markdown (loc, txt) =
*)
ML\<open>
structure ODL_Command_Parser =
struct
@ -1221,7 +1217,6 @@ type meta_args_t = (((string * Position.T) *
(string * Position.T) option)
* ((string * Position.T) * string) list)
val semi = Scan.option (Parse.$$$ ";");
val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore);
val improper = Scan.many is_improper; (* parses white-space and comments *)
@ -1332,7 +1327,8 @@ fun calc_update_term thy cid_long (S:(string * Position.T * string * term)list)
let val cid_ty = cid_2_cidType cid_long thy
val generalize_term = Term.map_types (generalize_typ 0)
fun toString t = Syntax.string_of_term (Proof_Context.init_global thy) t
fun instantiate_term S t = Term_Subst.map_types_same (Term_Subst.instantiateT S) (t)
fun instantiate_term S t =
Term_Subst.map_types_same (Term_Subst.instantiateT (TVars.make S)) (t)
fun read_assn (lhs, pos:Position.T, opr, rhs) term =
let val info_opt = DOF_core.get_attribute_info cid_long (Long_Name.base_name lhs) thy
val (ln,lnt,lnu,lnut) = case info_opt of
@ -1560,74 +1556,6 @@ fun update_instance_command (((oid:string,pos),cid_pos),
end
(* old version :
fun gen_enriched_document_command {inline=is_inline} cid_transform attr_transform
markdown
(((((oid,pos),cid_pos), doc_attrs) : meta_args_t,
xstring_opt:(xstring * Position.T) option),
toks:Input.source)
: theory -> theory =
let val toplvl = Toplevel.theory_toplevel
fun check thy = let val ctxt = Toplevel.presentation_context (toplvl thy);
val pos = Input.pos_of toks;
val _ = Context_Position.reports ctxt
[(pos, Markup.language_document(Input.is_delimited toks)),
(pos, Markup.plain_text)];
in thy end
in
(* ... level-attribute information management *)
( create_and_check_docitem {is_monitor=false}
{is_inline=is_inline}
oid pos (cid_transform cid_pos)
(attr_transform doc_attrs)
(* checking and markup generation *)
#> check )
(* Thanks Frederic Tuong for the hints concerning toplevel ! ! ! *)
end;
*)
fun gen_enriched_document_cmd {inline=is_inline} cid_transform attr_transform
markdown
(((((oid,pos),cid_pos), doc_attrs) : meta_args_t,
xstring_opt:(xstring * Position.T) option),
toks:Input.source)
: theory -> theory =
let val toplvl = Toplevel.theory_toplevel
(* as side-effect, generates markup *)
fun check_n_tex_text thy = let val ctxt = Toplevel.presentation_context (toplvl thy);
val pos = Input.pos_of toks;
val _ = Context_Position.reports ctxt
[(pos, Markup.language_document (Input.is_delimited toks)),
(pos, Markup.plain_text)];
val text = Thy_Output.output_document
(Proof_Context.init_global thy)
markdown toks
(* val file = {path = Path.make [oid ^ "_snippet.tex"],
pos = @{here},
content = Latex.output_text text}
val _ = Generated_Files.write_file (Path.make ["latex_test"])
file
val _ = writeln (Latex.output_text text) *)
in thy end
(* ... generating the level-attribute syntax *)
in
(* ... level-attribute information management *)
( create_and_check_docitem
{is_monitor = false} {is_inline = is_inline}
oid pos (cid_transform cid_pos) (attr_transform doc_attrs)
(* checking and markup generation *)
#> check_n_tex_text )
(* Thanks Frederic Tuong for the hints concerning toplevel ! ! ! *)
end;
(* General criticism : attributes like "level" were treated here in the kernel instead of dragging
them out into the COL -- bu *)
@ -1677,8 +1605,193 @@ fun close_monitor_command (args as (((oid:string,pos),cid_pos),
|> delete_monitor_entry
end
(* Core Command Definitions *)
fun meta_args_2_latex thy ((((lab, _), cid_opt), attr_list) : meta_args_t) =
(* for the moment naive, i.e. without textual normalization of
attribute names and adapted term printing *)
let val l = "label = "^ (enclose "{" "}" lab)
(* val _ = writeln("meta_args_2_string lab:"^ lab ^":"^ (@{make_string } cid_opt) ) *)
val cid_long = case cid_opt of
NONE => (case DOF_core.get_object_global lab thy of
NONE => DOF_core.default_cid
| SOME X => #cid X)
| SOME(cid,_) => DOF_core.parse_cid_global thy cid
(* val _ = writeln("meta_args_2_string cid_long:"^ cid_long ) *)
val cid_txt = "type = " ^ (enclose "{" "}" cid_long);
fun ltx_of_term _ _ (Const ("List.list.Cons", @{typ "char \<Rightarrow> char list \<Rightarrow> char list"}) $ t1 $ t2)
= HOLogic.dest_string (Const ("List.list.Cons", @{typ "char \<Rightarrow> char list \<Rightarrow> char list"}) $ t1 $ t2)
| ltx_of_term _ _ (Const ("List.list.Nil", _)) = ""
| ltx_of_term _ _ (@{term "numeral :: _ \<Rightarrow> _"} $ t) = Int.toString(HOLogic.dest_numeral t)
| ltx_of_term ctx encl ((Const ("List.list.Cons", _) $ t1) $ t2) =
let val inner = (case t2 of
Const ("List.list.Nil", _) => (ltx_of_term ctx true t1)
| _ => ((ltx_of_term ctx false t1)^", " ^(ltx_of_term ctx false t2))
)
in if encl then enclose "{" "}" inner else inner end
| ltx_of_term _ _ (Const ("Option.option.None", _)) = ""
| ltx_of_term ctxt _ (Const ("Option.option.Some", _)$t) = ltx_of_term ctxt true t
| ltx_of_term ctxt _ t = ""^(Sledgehammer_Util.hackish_string_of_term ctxt t)
fun ltx_of_term_dbg ctx encl term = let
val t_str = ML_Syntax.print_term term
handle (TERM _) => "Exception TERM in ltx_of_term_dbg (print_term)"
val ltx = ltx_of_term ctx encl term
val _ = writeln("<STRING>"^(Sledgehammer_Util.hackish_string_of_term ctx term)^"</STRING>")
val _ = writeln("<LTX>"^ltx^"</LTX>")
val _ = writeln("<TERM>"^t_str^"</TERM>")
in ltx end
fun markup2string s = String.concat (List.filter (fn c => c <> Symbol.DEL)
(Symbol.explode (YXML.content_of s)))
fun ltx_of_markup ctxt s = let
val term = (Syntax.check_term ctxt o Syntax.parse_term ctxt) s
val str_of_term = ltx_of_term ctxt true term
handle _ => "Exception in ltx_of_term"
in
str_of_term
end
fun toLong n = #long_name(the(DOF_core.get_attribute_info cid_long (markup2string n) thy))
val ctxt = Proof_Context.init_global thy
val actual_args = map (fn ((lhs,_),rhs) => (toLong lhs, ltx_of_markup ctxt rhs))
attr_list
val default_args = map (fn (b,_,t) => (toLong (Long_Name.base_name ( Sign.full_name thy b)),
ltx_of_term ctxt true t))
(DOF_core.get_attribute_defaults cid_long thy)
val default_args_filtered = filter (fn (a,_) => not (exists (fn b => b = a)
(map (fn (c,_) => c) actual_args))) default_args
val str_args = map (fn (lhs,rhs) => lhs^" = "^(enclose "{" "}" rhs))
(actual_args@default_args_filtered)
val label_and_type = String.concat [ l, ",", cid_txt]
val str_args = label_and_type::str_args
in
Latex.string (enclose "[" "]" (String.concat [ label_and_type, ", args={", (commas str_args), "}"]))
end
(* level-attribute information management *)
fun gen_enriched_document_cmd {inline} cid_transform attr_transform
((((oid,pos),cid_pos), doc_attrs) : meta_args_t) : theory -> theory =
create_and_check_docitem {is_monitor = false} {is_inline = inline}
oid pos (cid_transform cid_pos) (attr_transform doc_attrs);
(* markup reports and document output *)
(* {markdown = true} sets the parsing process such that in the text-core
markdown elements are accepted. *)
fun document_output {markdown: bool, markup: Latex.text -> Latex.text} meta_args text ctxt =
let
val thy = Proof_Context.theory_of ctxt;
val _ = Context_Position.reports ctxt (Document_Output.document_reports text);
val output_meta = meta_args_2_latex thy meta_args;
val output_text = Document_Output.output_document ctxt {markdown = markdown} text;
in markup (output_meta @ output_text) end;
fun document_output_reports name {markdown, body} meta_args text ctxt =
let
val pos = Input.pos_of text;
val _ =
Context_Position.reports ctxt
[(pos, Markup.language_document (Input.is_delimited text)),
(pos, Markup.plain_text)];
fun markup xml =
let val m = if body then Markup.latex_body else Markup.latex_heading
in [XML.Elem (m (Latex.output_name name), xml)] end;
in document_output {markdown = markdown, markup = markup} meta_args text ctxt end;
(* document output commands *)
local
(* alternative presentation hook (workaround for missing Toplevel.present_theory) *)
structure Document_Commands = Theory_Data
(
type T = (string * (meta_args_t -> Input.source -> Proof.context -> Latex.text)) list;
val empty = [];
val merge = AList.merge (op =) (K true);
);
fun get_document_command thy name =
AList.lookup (op =) (Document_Commands.get thy) name;
fun document_segment (segment: Document_Output.segment) =
(case #span segment of
Command_Span.Span (Command_Span.Command_Span (name, _), _) =>
(case try Toplevel.theory_of (#state segment) of
SOME thy => get_document_command thy name
| _ => NONE)
| _ => NONE);
fun present_segment (segment: Document_Output.segment) =
(case document_segment segment of
SOME pr =>
let
val {span, command = tr, prev_state = st, state = st'} = segment;
val src = Command_Span.content (#span segment) |> filter_out Document_Source.is_improper;
val parse = attributes -- Parse.document_source;
fun present ctxt =
let val (meta_args, text) = #1 (Token.syntax (Scan.lift parse) src ctxt);
in pr meta_args text ctxt end;
val tr' =
Toplevel.empty
|> Toplevel.name (Toplevel.name_of tr)
|> Toplevel.position (Toplevel.pos_of tr)
|> Toplevel.present (Toplevel.presentation_context #> present);
val st'' = Toplevel.command_exception false tr' st'
handle Runtime.EXCURSION_FAIL (exn, _) => Exn.reraise exn;
val FIXME =
Toplevel.setmp_thread_position tr (fn () =>
writeln ("present_segment" ^ Position.here (Toplevel.pos_of tr) ^ "\n" ^
XML.string_of (XML.Elem (Markup.empty, the_default [] (Toplevel.output_of st'))) ^ "\n---\n" ^
XML.string_of (XML.Elem (Markup.empty, the_default [] (Toplevel.output_of st''))))) ()
in {span = span, command = tr, prev_state = st, state = st''} end
| _ => segment);
val _ =
Theory.setup (Thy_Info.add_presentation (fn {options, segments, ...} => fn thy =>
if exists (Toplevel.is_skipped_proof o #state) segments then ()
else
let
val segments' = map present_segment segments;
val body = Document_Output.present_thy options thy segments';
in
if Options.string options "document" = "false" orelse
forall (is_none o document_segment) segments' then ()
else
let
val thy_name = Context.theory_name thy;
val latex = Latex.isabelle_body thy_name body;
in Export.export thy \<^path_binding>\<open>document/latex_dof\<close> latex end
end));
in
fun document_command (name, pos) descr mark cmd =
(Outer_Syntax.command (name, pos) descr
(attributes -- Parse.document_source >>
(fn (meta_args, text) =>
Toplevel.theory (fn thy =>
let
val thy' = cmd meta_args thy;
val _ =
(case get_document_command thy' name of
SOME pr => ignore (pr meta_args text (Proof_Context.init_global thy'))
| NONE => ());
in thy' end)));
(Theory.setup o Document_Commands.map)
(AList.update (op =) (name, document_output_reports name mark)));
end;
(* Core Command Definitions *)
val _ =
Outer_Syntax.command @{command_keyword "open_monitor*"}
@ -1696,19 +1809,16 @@ val _ =
"update meta-attributes of an instance of a document class"
(attributes_upd >> (Toplevel.theory o update_instance_command));
val _ =
Outer_Syntax.command ("text*", @{here}) "formal comment (primary style)"
(attributes -- Parse.opt_target -- Parse.document_source
>> (Toplevel.theory o (gen_enriched_document_cmd {inline=true} I I {markdown = true} )));
document_command ("text*", @{here}) "formal comment (primary style)"
{markdown = true, body = true} (gen_enriched_document_cmd {inline=true} I I);
(* This is just a stub at present *)
val _ =
Outer_Syntax.command ("text-macro*", @{here}) "formal comment macro"
(attributes -- Parse.opt_target -- Parse.document_source
>> (Toplevel.theory o (gen_enriched_document_cmd {inline=false} (* declare as macro *)
I I {markdown = true} )));
document_command ("text-macro*", @{here}) "formal comment macro"
{markdown = true, body = true}
(gen_enriched_document_cmd {inline=false} (* declare as macro *) I I);
val _ =
Outer_Syntax.command @{command_keyword "declare_reference*"}
@ -1865,14 +1975,13 @@ val _ =
>> (fn (meta_args_opt, eval_args ) => pass_trans_to_value_cmd meta_args_opt eval_args));
val _ = Theory.setup
(Thy_Output.antiquotation_pretty_source_embedded \<^binding>\<open>value*\<close>
(Document_Output.antiquotation_pretty_source_embedded \<^binding>\<open>value*\<close>
(Scan.lift opt_evaluator -- Term_Style.parse -- Args.term)
(fn ctxt => fn ((name, style), t) =>
Thy_Output.pretty_term ctxt (style (value_select name ctxt t)))
#> add_evaluator (\<^binding>\<open>simp\<close>, Code_Simp.dynamic_value) #> snd
#> add_evaluator (\<^binding>\<open>nbe\<close>, Nbe.dynamic_value) #> snd
#> add_evaluator (\<^binding>\<open>code\<close>, Code_Evaluation.dynamic_value_strict)
#> snd);
(fn ctxt => fn ((name, style), t) =>
Document_Output.pretty_term ctxt (style (value_select name ctxt t)))
#> add_evaluator (\<^binding>\<open>simp\<close>, Code_Simp.dynamic_value) #> snd
#> add_evaluator (\<^binding>\<open>nbe\<close>, Nbe.dynamic_value) #> snd
#> add_evaluator (\<^binding>\<open>code\<close>, Code_Evaluation.dynamic_value_strict) #> snd);
end;
\<close>
@ -1985,17 +2094,6 @@ end
end
\<close>
ML\<open> (* Setting in thy_output.ML a parser for the syntactic handling of the meta-informations of
text elements - so text*[m<meta-info>]\<open> ... dfgdfg .... \<close> *)
val _ = Thy_Output.set_meta_args_parser
(fn thy => (Scan.optional (Document_Source.improper
|-- ODL_Command_Parser.attributes
>> ODL_LTX_Converter.meta_args_2_string thy) "")); \<close>
section\<open> Syntax for Ontological Antiquotations (the '' View'' Part II) \<close>
@ -2003,7 +2101,7 @@ ML\<open>
structure OntoLinkParser =
struct
val basic_entity = Thy_Output.antiquotation_pretty_source
val basic_entity = Document_Output.antiquotation_pretty_source
: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory;
fun check_and_mark ctxt cid_decl (str:{strict_checking: bool}) {inline=inline_req} pos name =
@ -2055,20 +2153,19 @@ fun pretty_docitem_antiquotation_generic cid_decl ctxt ({unchecked, define}, src
val inline = Config.get ctxt Document_Antiquotation.thy_output_display
val _ = check_and_mark ctxt cid_decl {strict_checking = not unchecked}
{inline = inline} pos str
val enc = Latex.enclose_block
in
(case (define,inline) of
(true,false) => enc("\\csname isaDof.label\\endcsname[type={"^cid_decl^"}] {")"}"
|(false,false)=> enc("\\csname isaDof.ref\\endcsname[type={"^cid_decl^"}] {")"}"
|(true,true) => enc("\\csname isaDof.macroDef\\endcsname[type={"^cid_decl^"}]{")"}"
|(false,true) => enc("\\csname isaDof.macroExp\\endcsname[type={"^cid_decl^"}]{")"}"
(true,false) => XML.enclose("\\csname isaDof.label\\endcsname[type={"^cid_decl^"}] {")"}"
|(false,false)=> XML.enclose("\\csname isaDof.ref\\endcsname[type={"^cid_decl^"}] {")"}"
|(true,true) => XML.enclose("\\csname isaDof.macroDef\\endcsname[type={"^cid_decl^"}]{")"}"
|(false,true) => XML.enclose("\\csname isaDof.macroExp\\endcsname[type={"^cid_decl^"}]{")"}"
)
[Latex.text (Input.source_content src)]
(Latex.text (Input.source_content src))
end
fun docitem_antiquotation bind cid =
Thy_Output.antiquotation_raw bind docitem_antiquotation_parser
Document_Output.antiquotation_raw bind docitem_antiquotation_parser
(pretty_docitem_antiquotation_generic cid);
@ -2107,7 +2204,7 @@ ML\<open>
structure AttributeAccess =
struct
val basic_entity = Thy_Output.antiquotation_pretty_source
val basic_entity = Document_Output.antiquotation_pretty_source
: binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory;
fun symbex_attr_access0 ctxt proj_term term =
@ -2195,13 +2292,13 @@ fun compute_cid_repr ctxt cid pos =
local
fun pretty_attr_access_style ctxt (style, ((oid,pos),(attr,pos'))) =
Thy_Output.pretty_term ctxt (style (compute_attr_access (Context.Proof ctxt)
Document_Output.pretty_term ctxt (style (compute_attr_access (Context.Proof ctxt)
attr oid pos pos'));
fun pretty_trace_style ctxt (style, (oid,pos)) =
Thy_Output.pretty_term ctxt (style (compute_attr_access (Context.Proof ctxt)
Document_Output.pretty_term ctxt (style (compute_attr_access (Context.Proof ctxt)
"trace" oid pos pos));
fun pretty_cid_style ctxt (style, (cid,pos)) =
Thy_Output.pretty_term ctxt (style (compute_cid_repr ctxt cid pos));
Document_Output.pretty_term ctxt (style (compute_cid_repr ctxt cid pos));
in
val _ = Theory.setup
@ -2221,7 +2318,7 @@ end
text\<open> Note that the functions \<^verbatim>\<open>basic_entities\<close> and \<^verbatim>\<open>basic_entity\<close> in
@{ML_structure AttributeAccess} are copied from
@{file "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"} \<close>
@{file "$ISABELLE_HOME/src/Pure/Thy/document_output.ML"} \<close>
section\<open> Syntax for Ontologies (the '' View'' Part III) \<close>
@ -2262,7 +2359,15 @@ fun read_fields raw_fields ctxt =
val trace_attr = ((Binding.make("trace",@{here}), "(doc_class rexp \<times> string) list",Mixfix.NoSyn),
SOME "[]"): ((binding * string * mixfix) * string option)
fun def_cmd (decl, spec, prems, params) = #2 oo Specification.definition' decl params prems spec
fun def_cmd (decl, spec, prems, params) lthy =
let
val ((lhs as Free (x, T), _), lthy') = Specification.definition decl params prems spec lthy;
val lhs' = Morphism.term (Local_Theory.target_morphism lthy') lhs;
val _ =
Proof_Display.print_consts true (Position.thread_data ()) lthy'
(Frees.defined (Frees.build (Frees.add_frees lhs'))) [(x, T)]
in lthy' end
fun meta_eq_const T = Const (\<^const_name>\<open>Pure.eq\<close>, T --> T --> propT);
fun mk_meta_eq (t, u) = meta_eq_const (fastype_of t) $ t $ u;
@ -2270,7 +2375,7 @@ fun define_cond binding f_sty cond_suffix read_cond (ctxt:local_theory) =
let val bdg = Binding.suffix_name cond_suffix binding
val eq = mk_meta_eq(Free(Binding.name_of bdg, f_sty),read_cond)
val args = (SOME(bdg,NONE,NoSyn), (Binding.empty_atts,eq),[],[])
in def_cmd args true ctxt end
in def_cmd args ctxt end
fun define_inv cid_long ((lbl, pos), inv) thy =
let val bdg = Binding.make (lbl,pos)
@ -2433,34 +2538,34 @@ ML\<open>
structure DOF_lib =
struct
fun define_shortcut name latexshcut =
Thy_Output.antiquotation_raw name (Scan.succeed ())
Document_Output.antiquotation_raw name (Scan.succeed ())
(fn _ => fn () => Latex.string latexshcut)
(* This is a generalization of the Isabelle2020 function "control_antiquotation" from
document_antiquotations.ML. (Thanks Makarius!) *)
fun define_macro name s1 s2 reportNtest =
Thy_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input)
Document_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input)
(fn ctxt =>
fn src => let val () = reportNtest ctxt src
in src |> Latex.enclose_block s1 s2
o Thy_Output.output_document ctxt {markdown = false}
in src |> XML.enclose s1 s2
o Document_Output.output_document ctxt {markdown = false}
end);
local (* hide away really strange local construction *)
fun enclose_body2 front body1 middle body2 post =
(if front = "" then [] else [Latex.string front]) @ body1 @
(if middle = "" then [] else [Latex.string middle]) @ body2 @
(if post = "" then [] else [Latex.string post]);
(if front = "" then [] else Latex.string front) @ body1 @
(if middle = "" then [] else Latex.string middle) @ body2 @
(if post = "" then [] else Latex.string post);
in
fun define_macro2 name front middle post reportNtest1 reportNtest2 =
Thy_Output.antiquotation_raw_embedded name (Scan.lift ( Args.cartouche_input
Document_Output.antiquotation_raw_embedded name (Scan.lift ( Args.cartouche_input
-- Args.cartouche_input))
(fn ctxt =>
fn (src1,src2) => let val () = reportNtest1 ctxt src1
val () = reportNtest2 ctxt src2
val T1 = Thy_Output.output_document ctxt {markdown = false} src1
val T2 = Thy_Output.output_document ctxt {markdown = false} src2
in Latex.block(enclose_body2 front T1 middle T2 post)
val T1 = Document_Output.output_document ctxt {markdown = false} src1
val T2 = Document_Output.output_document ctxt {markdown = false} src2
in enclose_body2 front T1 middle T2 post
end);
end
@ -2488,8 +2593,8 @@ fun prepare_text ctxt =
fun string_2_text_antiquotation ctxt text =
prepare_text ctxt text
|> Thy_Output.output_source ctxt
|> Thy_Output.isabelle ctxt
|> Document_Output.output_source ctxt
|> Document_Output.isabelle ctxt
fun string_2_theory_text_antiquotation ctxt text =
let
@ -2497,12 +2602,12 @@ fun string_2_theory_text_antiquotation ctxt text =
in
prepare_text ctxt text
|> Token.explode0 keywords
|> maps (Thy_Output.output_token ctxt)
|> Thy_Output.isabelle ctxt
|> maps (Document_Output.output_token ctxt)
|> Document_Output.isabelle ctxt
end
fun gen_text_antiquotation name reportNcheck compile =
Thy_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input)
Document_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input)
(fn ctxt => fn text:Input.source =>
let
val _ = reportNcheck ctxt text;
@ -2556,15 +2661,15 @@ fun environment_delim name =
("%\n\\begin{" ^ Latex.output_name name ^ "}\n",
"\n\\end{" ^ Latex.output_name name ^ "}");
fun environment_block name = environment_delim name |-> Latex.enclose_body #> Latex.block;
fun environment_block name = environment_delim name |-> XML.enclose;
fun enclose_env verbatim ctxt block_env body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then if verbatim
then environment_block block_env [body]
else Latex.environment_block block_env [body]
else Latex.block ([Latex.string ("\\inline"^block_env ^"{")] @ [body] @ [ Latex.string ("}")]);
then environment_block block_env body
else Latex.environment block_env body
else XML.enclose ("\\inline"^block_env ^"{") "}" body;
end
\<close>

View File

@ -1,5 +1,5 @@
session "Isabelle_DOF" = "Functional-Automata" +
options [document = pdf, document_output = "output"]
options [document = pdf, document_output = "output", document_build = dof]
sessions
"Regular-Sets"
directories

View File

@ -47,23 +47,15 @@ doc_class abstract =
ML\<open>
local open ODL_Command_Parser in
val _ = Outer_Syntax.command ("abstract*", @{here}) "Textual Definition"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (Onto_Macros.enriched_document_cmd_exp
(SOME "abstract")
[]
{markdown = true} )));
val _ =
ODL_Command_Parser.document_command ("abstract*", @{here}) "Textual Definition"
{markdown = true, body = true}
(Onto_Macros.enriched_document_cmd_exp (SOME "abstract") []);
val _ = Outer_Syntax.command ("author*", @{here}) "Textual Definition"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (Onto_Macros.enriched_document_cmd_exp
(SOME "author")
[]
{markdown = true} )));
end
val _ =
ODL_Command_Parser.document_command ("author*", @{here}) "Textual Definition"
{markdown = true, body = true}
(Onto_Macros.enriched_document_cmd_exp (SOME "author") []);
\<close>
text\<open>Scholarly Paper is oriented towards the classical domains in science:
@ -298,45 +290,41 @@ setup\<open>Theorem_default_class_setup\<close>
ML\<open> local open ODL_Command_Parser in
(* {markdown = true} sets the parsing process such that in the text-core
markdown elements are accepted. *)
val _ =
ODL_Command_Parser.document_command ("Definition*", @{here}) "Textual Definition"
{markdown = true, body = true}
(fn meta_args => fn thy =>
let
val ddc = Config.get_global thy Definition_default_class
val use_Definition_default = SOME(((ddc = "") ? (K "math_content")) ddc)
in
Onto_Macros.enriched_formal_statement_command
use_Definition_default [("mcc","defn")] meta_args thy
end);
val _ = let fun use_Definition_default thy =
let val ddc = Config.get_global thy Definition_default_class
in (SOME(((ddc = "") ? (K "math_content")) ddc)) end
in Outer_Syntax.command ("Definition*", @{here}) "Textual Definition"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (fn args => fn thy =>
Onto_Macros.enriched_formal_statement_command
(use_Definition_default thy)
[("mcc","defn")]
{markdown = true} args thy)))
end;
val _ =
ODL_Command_Parser.document_command ("Lemma*", @{here}) "Textual Lemma Outline"
{markdown = true, body = true}
(fn meta_args => fn thy =>
let
val ddc = Config.get_global thy Definition_default_class
val use_Lemma_default = SOME(((ddc = "") ? (K "math_content")) ddc)
in
Onto_Macros.enriched_formal_statement_command
use_Lemma_default [("mcc","lem")] meta_args thy
end);
val _ = let fun use_Lemma_default thy =
let val ddc = Config.get_global thy Definition_default_class
in (SOME(((ddc = "") ? (K "math_content")) ddc)) end
in Outer_Syntax.command ("Lemma*", @{here}) "Textual Lemma Outline"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (fn args => fn thy =>
Onto_Macros.enriched_formal_statement_command
(use_Lemma_default thy)
[("mcc","lem")]
{markdown = true} args thy)))
end;
val _ = let fun use_Theorem_default thy =
let val ddc = Config.get_global thy Definition_default_class
in (SOME(((ddc = "") ? (K "math_content")) ddc)) end
in Outer_Syntax.command ("Theorem*", @{here}) "Textual Theorem Outline"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (fn args => fn thy =>
Onto_Macros.enriched_formal_statement_command
(use_Theorem_default thy)
[("mcc","thm")]
{markdown = true} args thy)))
end;
val _ =
ODL_Command_Parser.document_command ("Theorem*", @{here}) "Textual Theorem Outline"
{markdown = true, body = true}
(fn meta_args => fn thy =>
let
val ddc = Config.get_global thy Definition_default_class
val use_Theorem_default = SOME(((ddc = "") ? (K "math_content")) ddc)
in
Onto_Macros.enriched_formal_statement_command
use_Theorem_default [("mcc","thm")] meta_args thy
end);
end
\<close>

View File

@ -1,566 +0,0 @@
(* Title: Pure/Thy/thy_output.ML
Author: Makarius
Theory document output.
*)
signature THY_OUTPUT =
sig
val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
val check_comments: Proof.context -> Symbol_Pos.T list -> unit
val output_token: Proof.context -> Token.T -> Latex.text list
val output_source: Proof.context -> string -> Latex.text list
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
val present_thy: Options.T -> theory -> segment list -> Latex.text list
val pretty_term: Proof.context -> term -> Pretty.T
val pretty_thm: Proof.context -> thm -> Pretty.T
val lines: Latex.text list -> Latex.text list
val items: Latex.text list -> Latex.text list
val isabelle: Proof.context -> Latex.text list -> Latex.text
val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
val typewriter: Proof.context -> string -> Latex.text
val verbatim: Proof.context -> string -> Latex.text
val source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text
val pretty: Proof.context -> Pretty.T -> Latex.text
val pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text
val pretty_items: Proof.context -> Pretty.T list -> Latex.text
val pretty_items_source: Proof.context -> {embedded: bool} -> Token.src ->
Pretty.T list -> Latex.text
val antiquotation_pretty:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_source:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_source_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_raw:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
val antiquotation_raw_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
val antiquotation_verbatim:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
val antiquotation_verbatim_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
end;
structure Thy_Output: THY_OUTPUT =
struct
(* output document source *)
val output_symbols = single o Latex.symbols_output;
fun output_comment ctxt (kind, syms) =
(case kind of
Comment.Comment =>
Input.cartouche_content syms
|> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
{markdown = false}
|> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
| Comment.Cancel =>
Symbol_Pos.cartouche_content syms
|> output_symbols
|> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
| Comment.Latex => [Latex.symbols (Symbol_Pos.cartouche_content syms)]
| Comment.Marker => [])
and output_comment_document ctxt (comment, syms) =
(case comment of
SOME kind => output_comment ctxt (kind, syms)
| NONE => [Latex.symbols syms])
and output_document_text ctxt syms =
Comment.read_body syms |> maps (output_comment_document ctxt)
and output_document ctxt {markdown} source =
let
val pos = Input.pos_of source;
val syms = Input.source_explode source;
val output_antiquotes =
maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
fun output_line line =
(if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
output_antiquotes (Markdown.line_content line);
fun output_block (Markdown.Par lines) =
Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
| output_block (Markdown.List {kind, body, ...}) =
Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
in
if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
then
let
val ants = Antiquote.parse_comments pos syms;
val reports = Antiquote.antiq_reports ants;
val blocks = Markdown.read_antiquotes ants;
val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
in output_blocks blocks end
else
let
val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
val reports = Antiquote.antiq_reports ants;
val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
in output_antiquotes ants end
end;
(* output tokens with formal comments *)
local
val output_symbols_antiq =
(fn Antiquote.Text syms => output_symbols syms
| Antiquote.Control {name = (name, _), body, ...} =>
Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
output_symbols body
| Antiquote.Antiq {body, ...} =>
Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
fun output_comment_symbols ctxt {antiq} (comment, syms) =
(case (comment, antiq) of
(NONE, false) => output_symbols syms
| (NONE, true) =>
Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
|> maps output_symbols_antiq
| (SOME comment, _) => output_comment ctxt (comment, syms));
fun output_body ctxt antiq bg en syms =
Comment.read_body syms
|> maps (output_comment_symbols ctxt {antiq = antiq})
|> Latex.enclose_body bg en;
in
fun output_token ctxt tok =
let
fun output antiq bg en =
output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
in
(case Token.kind_of tok of
Token.Comment NONE => []
| Token.Comment (SOME Comment.Marker) => []
| Token.Command => output false "\\isacommand{" "}"
| Token.Keyword =>
if Symbol.is_ascii_identifier (Token.content_of tok)
then output false "\\isakeyword{" "}"
else output false "" ""
| Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
| Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
| Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
| Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
| _ => output false "" "")
end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
fun output_source ctxt s =
output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
fun check_comments ctxt =
Comment.read_body #> List.app (fn (comment, syms) =>
let
val pos = #1 (Symbol_Pos.range syms);
val _ =
comment |> Option.app (fn kind =>
Context_Position.reports ctxt (map (pair pos) (Comment.kind_markups kind)));
val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
end;
(** present theory source **)
(* presentation tokens *)
datatype token =
Ignore
| Token of Token.T
| Heading of string * Input.source
| Body of string * Input.source
| Raw of Input.source;
fun token_with pred (Token tok) = pred tok
| token_with _ _ = false;
val white_token = token_with Document_Source.is_white;
val white_comment_token = token_with Document_Source.is_white_comment;
val blank_token = token_with Token.is_blank;
val newline_token = token_with Token.is_newline;
fun present_token ctxt tok =
(case tok of
Ignore => []
| Token tok => output_token ctxt tok
| Heading (cmd, source) =>
Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
(output_document ctxt {markdown = false} source)
| Body (cmd, source) =>
[Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
| Raw source =>
Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
(* command spans *)
type command = string * Position.T; (*name, position*)
type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*)
(* A command span: the command, four source partitions
(prefix whitespace, main tokens, trailing white/comment tokens, appendix),
and whether the span ends in a newline token. *)
datatype span = Span of command * (source * source * source * source) * bool;
(* Partition the token list of one command into the four source sections
above. The combinator chain peels off: leading white tokens (prefix),
trailing white tokens (suffix1), then leading white-comment tokens of the
remainder (suffix2), and finally a single newline token (appendix); the
newline flag is used later by present_span to place continuation output. *)
fun make_span cmd src =
let
fun chop_newline (tok :: toks) =
if newline_token (fst tok) then ([tok], toks, true)
else ([], tok :: toks, false)
| chop_newline [] = ([], [], false);
val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
src
|> chop_prefix (white_token o fst)
||>> chop_suffix (white_token o fst)
||>> chop_prefix (white_comment_token o fst)
||> chop_newline;
in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
(* present spans *)
local
fun err_bad_nesting here =
error ("Bad nesting of commands in presentation" ^ here);
fun edge which f (x: string option, y) =
if x = y then I
else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
(* Determine the document tag for the current command while maintaining a
stack of taggings that mirrors the nesting of toplevel blocks: when the
toplevel level increases, copies of the current tagging are pushed (one per
level); when it decreases, entries are popped; a pop past the bottom of the
stack signals bad command nesting in the presentation. Level-neutral
commands outside proofs leave the stack unchanged. *)
fun document_tag cmd_pos state state' tagging_stack =
let
val ctxt' = Toplevel.presentation_context state';
val nesting = Toplevel.level state' - Toplevel.level state;
val (tagging, taggings) = tagging_stack;
val (tag', tagging') = Document_Source.update_tagging ctxt' tagging;
val tagging_stack' =
if nesting = 0 andalso not (Toplevel.is_proof state) then tagging_stack
else if nesting >= 0 then (tagging', replicate nesting tagging @ taggings)
else
(case drop (~ nesting - 1) taggings of
tg :: tgs => (tg, tgs)
| [] => err_bad_nesting (Position.here cmd_pos));
in (tag', tagging_stack') end;
(* Parse one entry of the document_tags option:
"%tag" yields a default tag applying to all commands (first component),
"cmd%tag" yields a tag for one specific command (second component);
any other shape is rejected as a user error. *)
fun read_tag s =
let val fields = space_explode "%" s in
(case fields of
["", b] => (SOME b, NONE)
| [a, b] => (NONE, SOME (a, b))
| _ => error ("Bad document_tags specification: " ^ quote s))
end;
in
(* Build the command-tag function from the document_tags system option.
The option is parsed once (comma-separated read_tag entries) into default
tags and per-command tags. The resulting function computes the active tag
for a command: the first applicable tag wins, with per-command entries
taking precedence over keyword tags and defaults; a special case tags the
final theory "end" with "theory". With no applicable tags, vacuous commands
inside proofs inherit the current active tag, otherwise the tag is reset. *)
fun make_command_tag options keywords =
let
val document_tags =
map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
val document_tags_default = map_filter #1 document_tags;
val document_tags_command = map_filter #2 document_tags;
in
fn cmd_name => fn state => fn state' => fn active_tag =>
let
val keyword_tags =
if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
else Keyword.command_tags keywords cmd_name;
val command_tags =
the_list (AList.lookup (op =) document_tags_command cmd_name) @
keyword_tags @ document_tags_default;
val active_tag' =
(case command_tags of
default_tag :: _ => SOME default_tag
| [] =>
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
then active_tag
else NONE);
in active_tag' end
end;
(* Present one command span, threading the presentation state
(tagging stack, active tag, newline flag, accumulated LaTeX in reverse
order, and a continuation for the span's trailing sources). The emission
order matters: close the previous tag/delimiter, flush the pending
continuation, then open the new delimiter/tag around this span's prefix and
main sources. Trailing sources (#3, #4) are deferred into present_cont so
they can be emitted either before or after the tag boundary, depending on
whether the previous span ended with a newline. *)
fun present_span command_tag span state state'
(tagging_stack, active_tag, newline, latex, present_cont) =
let
val ctxt' = Toplevel.presentation_context state';
val present = fold (fn (tok, (flag, 0)) =>
fold cons (present_token ctxt' tok)
#> cons (Latex.string flag)
| _ => I);
val Span ((cmd_name, cmd_pos), srcs, span_newline) = span;
val (tag', tagging_stack') = document_tag cmd_pos state state' tagging_stack;
val active_tag' =
if is_some tag' then Option.map #1 tag'
else command_tag cmd_name state state' active_tag;
val edge = (active_tag, active_tag');
val newline' =
if is_none active_tag' then span_newline else newline;
val latex' =
latex
|> end_tag edge
|> close_delim (fst present_cont) edge
|> snd present_cont
|> open_delim (present (#1 srcs)) edge
|> begin_tag edge
|> present (#2 srcs);
val present_cont' =
if newline then (present (#3 srcs), present (#4 srcs))
else (I, present (#3 srcs) #> present (#4 srcs));
in (tagging_stack', active_tag', newline', latex', present_cont') end;
fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
if not (null tags) then err_bad_nesting " at end of theory"
else
latex
|> end_tag (active_tag, NONE)
|> close_delim (fst present_cont) (active_tag, NONE)
|> snd present_cont;
end;
(* present_thy *)
local
val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";
val opt_newline = Scan.option (Scan.one Token.is_newline);
val ignore =
Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
>> pair (d + 1)) ||
Scan.depend (fn d => Scan.one Token.is_end_ignore --|
(if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
>> pair (d - 1));
val locale =
Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |--
Parse.!!!
(Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")")));
in
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};
fun present_thy options thy (segments: segment list) =
let
val keywords = Thy_Header.get_keywords thy;
(* tokens *)
val ignored = Scan.state --| ignore
>> (fn d => (NONE, (Ignore, ("", d))));
fun markup pred mk flag = Scan.peek (fn d =>
Document_Source.improper |--
Parse.position (Scan.one (fn tok =>
Token.is_command tok andalso pred keywords (Token.content_of tok))) --
(Document_Source.annotation |--
Parse.!!!! ((Document_Source.improper -- locale -- Document_Source.improper) |--
Parse.document_source --| Document_Source.improper_end))
>> (fn ((tok, pos'), source) =>
let val name = Token.content_of tok
in (SOME (name, pos'), (mk (name, source), (flag, d))) end));
val command = Scan.peek (fn d =>
Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] --
Scan.one Token.is_command --| Document_Source.annotation
>> (fn (cmd_mod, cmd) =>
map (fn tok => (NONE, (Token tok, ("", d)))) cmd_mod @
[(SOME (Token.content_of cmd, Token.pos_of cmd),
(Token cmd, (markup_false, d)))]));
val cmt = Scan.peek (fn d =>
Scan.one Document_Source.is_black_comment >> (fn tok => (NONE, (Token tok, ("", d)))));
val other = Scan.peek (fn d =>
Parse.not_eof >> (fn tok => (NONE, (Token tok, ("", d)))));
val tokens =
(ignored ||
markup Keyword.is_document_heading Heading markup_true ||
markup Keyword.is_document_body Body markup_true ||
markup Keyword.is_document_raw (Raw o #2) "") >> single ||
command ||
(cmt || other) >> single;
(* spans *)
val is_eof = fn (_, (Token x, _)) => Token.is_eof x | _ => false;
val stopper = Scan.stopper (K (NONE, (Token Token.eof, ("", 0)))) is_eof;
val cmd = Scan.one (is_some o fst);
val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
val white_comments = Scan.many (white_comment_token o fst o snd);
val blank = Scan.one (blank_token o fst o snd);
val newline = Scan.one (newline_token o fst o snd);
val before_cmd =
Scan.option (newline -- white_comments) --
Scan.option (newline -- white_comments) --
Scan.option (blank -- white_comments) -- cmd;
val span =
Scan.repeat non_cmd -- cmd --
Scan.repeat (Scan.unless before_cmd non_cmd) --
Scan.option (newline >> (single o snd))
>> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
val spans = segments
|> maps (Command_Span.content o #span)
|> drop_suffix Token.is_space
|> Source.of_list
|> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
|> Source.source stopper (Scan.error (Scan.bulk span))
|> Source.exhaust;
val command_results =
segments |> map_filter (fn {command, state, ...} =>
if Toplevel.is_ignored command then NONE else SOME (command, state));
(* present commands *)
val command_tag = make_command_tag options keywords;
fun present_command tr span st st' =
Toplevel.setmp_thread_position tr (present_span command_tag span st st');
fun present _ [] = I
| present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
in
if length command_results = length spans then
(([], []), NONE, true, [], (I, I))
|> present (Toplevel.init_toplevel ()) (spans ~~ command_results)
|> present_trailer
|> rev
else error "Messed-up outer syntax for presentation"
end;
end;
(** standard output operations **)
(* pretty printing *)
(* Pretty-print a term, first augmenting the context with the term itself
(declares its variables) so that it displays consistently. *)
fun pretty_term ctxt t =
Syntax.pretty_term (Proof_Context.augment t ctxt) t;
(* Pretty-print a theorem as its full proposition. *)
fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
(* default output *)
(* Join LaTeX text chunks with explicit Isabelle newline macros. *)
val lines = separate (Latex.string "\\isanewline%\n");
(* Join LaTeX text chunks as separated display items. *)
val items = separate (Latex.string "\\isasep\\isanewline%\n");
(* Wrap body in the "isabelle" display environment, or inline \isa{...},
depending on the thy_output_display configuration. *)
fun isabelle ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabelle" body
else Latex.block (Latex.enclose_body "\\isa{" "}" body);
(* Typewriter-font variant of isabelle: "isabellett" environment or
inline \isatt{...}. *)
fun isabelle_typewriter ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabellett" body
else Latex.block (Latex.enclose_body "\\isatt{" "}" body);
(* Typeset a plain ASCII string in typewriter font (output escaped). *)
fun typewriter ctxt s =
isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];
(* Verbatim output: in display mode indent the whole text once; in inline
mode typeset each line separately and rejoin with \isanewline. *)
fun verbatim ctxt =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
fun token_source ctxt {embedded} tok =
if Token.is_kind Token.Cartouche tok andalso embedded andalso
not (Config.get ctxt Document_Antiquotation.thy_output_source_cartouche)
then Token.content_of tok
else Token.unparse tok;
fun is_source ctxt =
Config.get ctxt Document_Antiquotation.thy_output_source orelse
Config.get ctxt Document_Antiquotation.thy_output_source_cartouche;
fun source ctxt embedded =
Token.args_of_src
#> map (token_source ctxt embedded #> Document_Antiquotation.prepare_lines ctxt)
#> space_implode " "
#> output_source ctxt
#> isabelle ctxt;
fun pretty ctxt =
Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;
fun pretty_source ctxt embedded src prt =
if is_source ctxt then source ctxt embedded src else pretty ctxt prt;
fun pretty_items ctxt =
map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;
fun pretty_items_source ctxt embedded src prts =
if is_source ctxt then source ctxt embedded src else pretty_items ctxt prts;
(* antiquotation variants *)
local
fun gen_setup name embedded =
if embedded
then Document_Antiquotation.setup_embedded name
else Document_Antiquotation.setup name;
fun gen_antiquotation_pretty name embedded scan f =
gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));
fun gen_antiquotation_pretty_source name embedded scan f =
gen_setup name embedded scan
(fn {context = ctxt, source = src, argument = x} =>
pretty_source ctxt {embedded = embedded} src (f ctxt x));
fun gen_antiquotation_raw name embedded scan f =
gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => f ctxt x);
fun gen_antiquotation_verbatim name embedded scan f =
gen_antiquotation_raw name embedded scan (fn ctxt => verbatim ctxt o f ctxt);
in
fun antiquotation_pretty name = gen_antiquotation_pretty name false;
fun antiquotation_pretty_embedded name = gen_antiquotation_pretty name true;
fun antiquotation_pretty_source name = gen_antiquotation_pretty_source name false;
fun antiquotation_pretty_source_embedded name = gen_antiquotation_pretty_source name true;
fun antiquotation_raw name = gen_antiquotation_raw name false;
fun antiquotation_raw_embedded name = gen_antiquotation_raw name true;
fun antiquotation_verbatim name = gen_antiquotation_verbatim name false;
fun antiquotation_verbatim_embedded name = gen_antiquotation_verbatim name true;
end;
end;

View File

@ -1,590 +0,0 @@
(* Title: Pure/Thy/thy_output.ML
Author: Makarius
Theory document output.
This is a modified/patched version that supports Isabelle/DOF.
*)
signature THY_OUTPUT =
sig
val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
val check_comments: Proof.context -> Symbol_Pos.T list -> unit
val output_token: Proof.context -> Token.T -> Latex.text list
val output_source: Proof.context -> string -> Latex.text list
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
val present_thy: Options.T -> theory -> segment list -> Latex.text list
val set_meta_args_parser : (theory -> string parser) -> unit
val pretty_term: Proof.context -> term -> Pretty.T
val pretty_thm: Proof.context -> thm -> Pretty.T
val lines: Latex.text list -> Latex.text list
val items: Latex.text list -> Latex.text list
val isabelle: Proof.context -> Latex.text list -> Latex.text
val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
val typewriter: Proof.context -> string -> Latex.text
val verbatim: Proof.context -> string -> Latex.text
val source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text
val pretty: Proof.context -> Pretty.T -> Latex.text
val pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text
val pretty_items: Proof.context -> Pretty.T list -> Latex.text
val pretty_items_source: Proof.context -> {embedded: bool} -> Token.src ->
Pretty.T list -> Latex.text
val antiquotation_pretty:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_source:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_source_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_raw:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
val antiquotation_raw_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
val antiquotation_verbatim:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
val antiquotation_verbatim_embedded:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
end;
structure Thy_Output: THY_OUTPUT =
struct
(* output document source *)
val output_symbols = single o Latex.symbols_output;
fun output_comment ctxt (kind, syms) =
(case kind of
Comment.Comment =>
Input.cartouche_content syms
|> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
{markdown = false}
|> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
| Comment.Cancel =>
Symbol_Pos.cartouche_content syms
|> output_symbols
|> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
| Comment.Latex => [Latex.symbols (Symbol_Pos.cartouche_content syms)]
| Comment.Marker => [])
and output_comment_document ctxt (comment, syms) =
(case comment of
SOME kind => output_comment ctxt (kind, syms)
| NONE => [Latex.symbols syms])
and output_document_text ctxt syms =
Comment.read_body syms |> maps (output_comment_document ctxt)
and output_document ctxt {markdown} source =
let
val pos = Input.pos_of source;
val syms = Input.source_explode source;
val output_antiquotes =
maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
fun output_line line =
(if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
output_antiquotes (Markdown.line_content line);
fun output_block (Markdown.Par lines) =
Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
| output_block (Markdown.List {kind, body, ...}) =
Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
in
if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
then
let
val ants = Antiquote.parse_comments pos syms;
val reports = Antiquote.antiq_reports ants;
val blocks = Markdown.read_antiquotes ants;
val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
in output_blocks blocks end
else
let
val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
val reports = Antiquote.antiq_reports ants;
val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
in output_antiquotes ants end
end;
(* output tokens with formal comments *)
local
val output_symbols_antiq =
(fn Antiquote.Text syms => output_symbols syms
| Antiquote.Control {name = (name, _), body, ...} =>
Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
output_symbols body
| Antiquote.Antiq {body, ...} =>
Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
fun output_comment_symbols ctxt {antiq} (comment, syms) =
(case (comment, antiq) of
(NONE, false) => output_symbols syms
| (NONE, true) =>
Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
|> maps output_symbols_antiq
| (SOME comment, _) => output_comment ctxt (comment, syms));
fun output_body ctxt antiq bg en syms =
Comment.read_body syms
|> maps (output_comment_symbols ctxt {antiq = antiq})
|> Latex.enclose_body bg en;
in
fun output_token ctxt tok =
let
fun output antiq bg en =
output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
in
(case Token.kind_of tok of
Token.Comment NONE => []
| Token.Comment (SOME Comment.Marker) => []
| Token.Command => output false "\\isacommand{" "}"
| Token.Keyword =>
if Symbol.is_ascii_identifier (Token.content_of tok)
then output false "\\isakeyword{" "}"
else output false "" ""
| Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
| Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
| Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
| Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
| _ => output false "" "")
end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
fun output_source ctxt s =
output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
fun check_comments ctxt =
Comment.read_body #> List.app (fn (comment, syms) =>
let
val pos = #1 (Symbol_Pos.range syms);
val _ =
comment |> Option.app (fn kind =>
Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.kind_markups kind)));
val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
end;
(** present theory source **)
(* presentation tokens *)
datatype token =
Ignore
| Token of Token.T
| Heading of string * string * Input.source
| Body of string * string * Input.source
| Raw of Input.source;
fun token_with pred (Token tok) = pred tok
| token_with _ _ = false;
val white_token = token_with Document_Source.is_white;
val white_comment_token = token_with Document_Source.is_white_comment;
val blank_token = token_with Token.is_blank;
val newline_token = token_with Token.is_newline;
fun present_token ctxt tok =
(case tok of
Ignore => []
| Token tok => output_token ctxt tok
| Heading (cmd, meta_args, source) =>
Latex.enclose_body ("%\n\\isamarkup" ^ cmd (* ^ meta_args *) ^ "{") "%\n}\n"
(output_document ctxt {markdown = false} source)
| Body (cmd, meta_args, source) =>
[Latex.environment_block
("isamarkup" ^ cmd (* ^ meta_args*))
(Latex.string meta_args :: output_document ctxt {markdown = true} source)]
| Raw source =>
Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
(* command spans *)
type command = string * Position.T; (*name, position*)
type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*)
datatype span = Span of command * (source * source * source * source) * bool;
fun make_span cmd src =
let
fun chop_newline (tok :: toks) =
if newline_token (fst tok) then ([tok], toks, true)
else ([], tok :: toks, false)
| chop_newline [] = ([], [], false);
val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
src
|> chop_prefix (white_token o fst)
||>> chop_suffix (white_token o fst)
||>> chop_prefix (white_comment_token o fst)
||> chop_newline;
in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
(* present spans *)
local
fun err_bad_nesting here =
error ("Bad nesting of commands in presentation" ^ here);
fun edge which f (x: string option, y) =
if x = y then I
else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
fun document_tag cmd_pos state state' tagging_stack =
let
val ctxt' = Toplevel.presentation_context state';
val nesting = Toplevel.level state' - Toplevel.level state;
val (tagging, taggings) = tagging_stack;
val (tag', tagging') = Document_Source.update_tagging ctxt' tagging;
val tagging_stack' =
if nesting = 0 andalso not (Toplevel.is_proof state) then tagging_stack
else if nesting >= 0 then (tagging', replicate nesting tagging @ taggings)
else
(case drop (~ nesting - 1) taggings of
tg :: tgs => (tg, tgs)
| [] => err_bad_nesting (Position.here cmd_pos));
in (tag', tagging_stack') end;
fun read_tag s =
(case space_explode "%" s of
["", b] => (SOME b, NONE)
| [a, b] => (NONE, SOME (a, b))
| _ => error ("Bad document_tags specification: " ^ quote s));
in
fun make_command_tag options keywords =
let
val document_tags =
map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
val document_tags_default = map_filter #1 document_tags;
val document_tags_command = map_filter #2 document_tags;
in
fn cmd_name => fn state => fn state' => fn active_tag =>
let
val keyword_tags =
if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
else Keyword.command_tags keywords cmd_name;
val command_tags =
the_list (AList.lookup (op =) document_tags_command cmd_name) @
keyword_tags @ document_tags_default;
val active_tag' =
(case command_tags of
default_tag :: _ => SOME default_tag
| [] =>
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
then active_tag
else NONE);
in active_tag' end
end;
fun present_span command_tag span state state'
(tagging_stack, active_tag, newline, latex, present_cont) =
let
val ctxt' = Toplevel.presentation_context state';
val present = fold (fn (tok, (flag, 0)) =>
fold cons (present_token ctxt' tok)
#> cons (Latex.string flag)
| _ => I);
val Span ((cmd_name, cmd_pos), srcs, span_newline) = span;
val (tag', tagging_stack') = document_tag cmd_pos state state' tagging_stack;
val active_tag' =
if is_some tag' then Option.map #1 tag'
else command_tag cmd_name state state' active_tag;
val edge = (active_tag, active_tag');
val newline' =
if is_none active_tag' then span_newline else newline;
val latex' =
latex
|> end_tag edge
|> close_delim (fst present_cont) edge
|> snd present_cont
|> open_delim (present (#1 srcs)) edge
|> begin_tag edge
|> present (#2 srcs);
val present_cont' =
if newline then (present (#3 srcs), present (#4 srcs))
else (I, present (#3 srcs) #> present (#4 srcs));
in (tagging_stack', active_tag', newline', latex', present_cont') end;
fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
if not (null tags) then err_bad_nesting " at end of theory"
else
latex
|> end_tag (active_tag, NONE)
|> close_delim (fst present_cont) (active_tag, NONE)
|> snd present_cont;
end;
(* present_thy *)
local
val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";
val opt_newline = Scan.option (Scan.one Token.is_newline);
val ignore =
Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
>> pair (d + 1)) ||
Scan.depend (fn d => Scan.one Token.is_end_ignore --|
(if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
>> pair (d - 1));
val locale =
Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |--
Parse.!!!
(Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")")));
(*
val meta_args_parser_hook = Synchronized.var "meta args parser hook"
((fn thy => fn s => ("",s)): theory -> string parser);
*)
val meta_args_parser_hook = Unsynchronized.ref ((fn _ => fn s => ("",s)): theory -> string parser);
in
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};
fun present_thy options thy (segments: segment list) =
let
val keywords = Thy_Header.get_keywords thy;
(* tokens *)
val ignored = Scan.state --| ignore
>> (fn d => (NONE, (Ignore, ("", d))));
fun markup pred mk flag = Scan.peek (fn d =>
Document_Source.improper
|-- Parse.position (Scan.one (fn tok => Token.is_command tok andalso
pred keywords (Token.content_of tok)))
-- (Document_Source.annotation |--
(Parse.!!!!
( ( (!meta_args_parser_hook thy))
-- ( (Document_Source.improper -- locale -- Document_Source.improper)
|-- Parse.document_source )
--| Document_Source.improper_end)
)
)
>> (fn ((tok, pos'), (meta_args, source)) =>
let val name = Token.content_of tok
in (SOME (name, pos'), (mk (name, meta_args , source), (flag, d))) end));
val command = Scan.peek (fn d =>
Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] --
Scan.one Token.is_command --| Document_Source.annotation
>> (fn (cmd_mod, cmd) =>
map (fn tok => (NONE, (Token tok, ("", d)))) cmd_mod @
[(SOME (Token.content_of cmd, Token.pos_of cmd),
(Token cmd, (markup_false, d)))]));
val cmt = Scan.peek (fn d =>
Scan.one Document_Source.is_black_comment >> (fn tok => (NONE, (Token tok, ("", d)))));
val other = Scan.peek (fn d =>
Parse.not_eof >> (fn tok => (NONE, (Token tok, ("", d)))));
val tokens =
(ignored ||
markup Keyword.is_document_heading Heading markup_true ||
markup Keyword.is_document_body Body markup_true ||
markup Keyword.is_document_raw (Raw o #3) "") >> single ||
command ||
(cmt || other) >> single;
(* spans *)
val is_eof = fn (_, (Token x, _)) => Token.is_eof x | _ => false;
val stopper = Scan.stopper (K (NONE, (Token Token.eof, ("", 0)))) is_eof;
val cmd = Scan.one (is_some o fst);
val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
val white_comments = Scan.many (white_comment_token o fst o snd);
val blank = Scan.one (blank_token o fst o snd);
val newline = Scan.one (newline_token o fst o snd);
val before_cmd =
Scan.option (newline -- white_comments) --
Scan.option (newline -- white_comments) --
Scan.option (blank -- white_comments) -- cmd;
val span =
Scan.repeat non_cmd -- cmd --
Scan.repeat (Scan.unless before_cmd non_cmd) --
Scan.option (newline >> (single o snd))
>> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
val spans = segments
|> maps (Command_Span.content o #span)
|> drop_suffix Token.is_space
|> Source.of_list
|> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
|> Source.source stopper (Scan.error (Scan.bulk span))
|> Source.exhaust;
val command_results =
segments |> map_filter (fn {command, state, ...} =>
if Toplevel.is_ignored command then NONE else SOME (command, state));
(* present commands *)
val command_tag = make_command_tag options keywords;
fun present_command tr span st st' =
Toplevel.setmp_thread_position tr (present_span command_tag span st st');
fun present _ [] = I
| present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
in
if length command_results = length spans then
(([], []), NONE, true, [], (I, I))
|> present (Toplevel.init_toplevel ()) (spans ~~ command_results)
|> present_trailer
|> rev
else error "Messed-up outer syntax for presentation"
end;
(* Install a new parser into the global meta-args hook used by present_thy.
NOTE(review): prints a diagnostic message on every call, and assigns an
Unsynchronized.ref, so concurrent callers could race -- confirm this is
only invoked during sequential setup. *)
fun set_meta_args_parser f =
let
val _ = writeln "Meta-args parser set to new value"
in
(meta_args_parser_hook := f)
end
end;
(** standard output operations **)
(* pretty printing *)
fun pretty_term ctxt t =
Syntax.pretty_term (Proof_Context.augment t ctxt) t;
fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
(* default output *)
val lines = separate (Latex.string "\\isanewline%\n");
val items = separate (Latex.string "\\isasep\\isanewline%\n");
fun isabelle ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabelle" body
else Latex.block (Latex.enclose_body "\\isa{" "}" body);
fun isabelle_typewriter ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabellett" body
else Latex.block (Latex.enclose_body "\\isatt{" "}" body);
fun typewriter ctxt s =
isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];
fun verbatim ctxt =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
fun token_source ctxt {embedded} tok =
if Token.is_kind Token.Cartouche tok andalso embedded andalso
not (Config.get ctxt Document_Antiquotation.thy_output_source_cartouche)
then Token.content_of tok
else Token.unparse tok;
fun is_source ctxt =
Config.get ctxt Document_Antiquotation.thy_output_source orelse
Config.get ctxt Document_Antiquotation.thy_output_source_cartouche;
fun source ctxt embedded =
Token.args_of_src
#> map (token_source ctxt embedded #> Document_Antiquotation.prepare_lines ctxt)
#> space_implode " "
#> output_source ctxt
#> isabelle ctxt;
fun pretty ctxt =
Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;
fun pretty_source ctxt embedded src prt =
if is_source ctxt then source ctxt embedded src else pretty ctxt prt;
fun pretty_items ctxt =
map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;
fun pretty_items_source ctxt embedded src prts =
if is_source ctxt then source ctxt embedded src else pretty_items ctxt prts;
(* antiquotation variants *)
local
fun gen_setup name embedded =
if embedded
then Document_Antiquotation.setup_embedded name
else Document_Antiquotation.setup name;
fun gen_antiquotation_pretty name embedded scan f =
gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));
fun gen_antiquotation_pretty_source name embedded scan f =
gen_setup name embedded scan
(fn {context = ctxt, source = src, argument = x} =>
pretty_source ctxt {embedded = embedded} src (f ctxt x));
fun gen_antiquotation_raw name embedded scan f =
gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => f ctxt x);
fun gen_antiquotation_verbatim name embedded scan f =
gen_antiquotation_raw name embedded scan (fn ctxt => verbatim ctxt o f ctxt);
in
fun antiquotation_pretty name = gen_antiquotation_pretty name false;
fun antiquotation_pretty_embedded name = gen_antiquotation_pretty name true;
fun antiquotation_pretty_source name = gen_antiquotation_pretty_source name false;
fun antiquotation_pretty_source_embedded name = gen_antiquotation_pretty_source name true;
fun antiquotation_raw name = gen_antiquotation_raw name false;
fun antiquotation_raw_embedded name = gen_antiquotation_raw name true;
fun antiquotation_verbatim name = gen_antiquotation_verbatim name false;
fun antiquotation_verbatim_embedded name = gen_antiquotation_verbatim name true;
end;
end;

View File

@ -0,0 +1,52 @@
/* Author: Makarius
Build theory document (PDF) from session database.
*/
package isabelle_dof
import isabelle._
/* Isabelle/DOF document build engine: registered under the name "dof" and
driven through a build script rather than the default bash steps. */
object DOF_Document_Build
{
class Engine extends Document_Build.Bash_Engine("dof")
{
// Use the session's build script instead of the engine's built-in steps.
override def use_build_script: Boolean = true
/* Prepare the document directory as usual, then additionally materialize
the DOF-specific LaTeX exports ("<document_latex>_dof") of all document
theories as .tex files inside the document directory. */
override def prepare_directory(
context: Document_Build.Context,
dir: Path,
doc: Document_Build.Document_Variant): Document_Build.Directory =
{
val latex_output = new Latex_Output(context.options)
val directory = context.prepare_directory(dir, doc, latex_output)
// produced by alternative presentation hook (workaround for missing Toplevel.present_theory)
for (name <- context.document_theories) {
val path = Path.basic(Document_Build.tex_name(name))
val xml =
YXML.parse_body(context.get_export(name.theory, Export.DOCUMENT_LATEX + "_dof").text)
// Theories without a DOF export produce empty XML and are skipped.
if (xml.nonEmpty) {
File.Content(path, xml).output(latex_output(_, file_pos = path.implode_symbolic))
.write(directory.doc_dir)
}
}
directory
}
}
/* LaTeX output variant: renders environments with \begin{...}/\end{...}
on their own lines, keeping any optional argument on the \begin line. */
class Latex_Output(options: Options) extends Latex.Output(options)
{
override def latex_environment(
name: String,
body: Latex.Text,
optional_argument: String = ""): Latex.Text =
{
XML.enclose(
"\n\\begin{" + name + "}" + optional_argument + "\n",
"\n\\end{" + name + "}", body)
}
}
}

View File

@ -0,0 +1,69 @@
/* Author: Makarius
Prepare session root directory for Isabelle/DOF.
*/
package isabelle_dof
import isabelle._
/* Prepare a session root directory for Isabelle/DOF. */
object DOF_Mkroot
{
  /** mkroot **/

  /* Create the session root directory; currently delegates to the generic
     Isabelle Mkroot tool with the given parameters.
     session_name: alternative session name (default: directory base name);
     session_dir: target directory (default: current directory);
     init_repos: initialize a Mercurial repository and add generated files;
     title/author: LaTeX notation (defaults chosen by Mkroot). */
  def mkroot(
    session_name: String = "",
    session_dir: Path = Path.current,
    session_parent: String = "",
    init_repos: Boolean = false,
    title: String = "",
    author: String = "",
    progress: Progress = new Progress): Unit =
  {
    Mkroot.mkroot(session_name = session_name, session_dir = session_dir,
      session_parent = session_parent, init_repos = init_repos,
      title = title, author = author, progress = progress)
  }


  /** Isabelle tool wrapper **/

  val isabelle_tool = Isabelle_Tool("dof_mkroot", "prepare session root directory for Isabelle/DOF",
    Scala_Project.here, args =>
    {
      var author = ""
      var init_repos = false
      var title = ""
      var session_name = ""

      val getopts = Getopts("""
Usage: isabelle dof_mkroot [OPTIONS] [DIRECTORY]
Options are:
-A LATEX provide author in LaTeX notation (default: user name)
-I init Mercurial repository and add generated files
-T LATEX provide title in LaTeX notation (default: session name)
-n NAME alternative session name (default: directory base name)
Prepare session root directory (default: current directory).
""",
        "A:" -> (arg => author = arg),
        "I" -> (_ => init_repos = true),  // flag option: the argument is unused
        "T:" -> (arg => title = arg),
        "n:" -> (arg => session_name = arg))

      val more_args = getopts(args)

      // At most one positional argument: the target directory.
      val session_dir =
        more_args match {
          case Nil => Path.current
          case List(dir) => Path.explode(dir)
          case _ => getopts.usage()
        }

      mkroot(session_name = session_name, session_dir = session_dir, init_repos = init_repos,
        author = author, title = title, progress = new Console_Progress)
    })
}

13
src/scala/dof_tools.scala Normal file
View File

@ -0,0 +1,13 @@
/* Author: Makarius
Isabelle/DOF command-line tools.
*/
package isabelle_dof
import isabelle._
/* Registry of Isabelle/DOF command-line tools for the Isabelle/Scala
   tool loader. */
class DOF_Tools extends Isabelle_Scala_Tools(DOF_Mkroot.isabelle_tool)

View File

@ -126,12 +126,11 @@ rm -f *.aux
sed -i -e 's/<@>/@/g' *.tex
$ISABELLE_TOOL latex -o sty "root.tex" && \
$ISABELLE_TOOL latex -o "$OUTFORMAT" "root.tex" && \
{ [ ! -f "$ROOT_NAME.bib" ] || $ISABELLE_TOOL latex -o bbl "root.tex"; } && \
{ [ ! -f "$ROOT_NAME.idx" ] || $ISABELLE_TOOL latex -o idx "root.tex"; } && \
$ISABELLE_TOOL latex -o "$OUTFORMAT" "root.tex" && \
$ISABELLE_TOOL latex -o "$OUTFORMAT" "root.tex"
$ISABELLE_PDFLATEX root && \
{ [ ! -f "$ROOT_NAME.bib" ] || $ISABELLE_BIBTEX root; } && \
{ [ ! -f "$ROOT_NAME.idx" ] || $ISABELLE_MAKEINDEX root; } && \
$ISABELLE_PDFLATEX root && \
$ISABELLE_PDFLATEX root
MISSING_CITATIONS=`grep 'Warning.*Citation' root.log | awk '{print $5}' | sort -u`
if [ "$MISSING_CITATIONS" != "" ]; then