Merged changes from master.

This commit is contained in:
Achim D. Brucker 2019-08-12 10:02:30 +01:00
commit c5d54e8528
165 changed files with 7010 additions and 3843 deletions

3
.ci/Jenkinsfile vendored
View File

@ -10,7 +10,7 @@ pipeline {
}
}
stage('Check Docker') {
when { changeset "patches/*" }
when { changeset "src/patches/*" }
steps {
sh 'cp patches/thy_output.ML .ci/isabelle4isadof/'
sh 'docker build --no-cache -t logicalhacking:isabelle4dof-2018 .ci/isabelle4isadof'
@ -19,6 +19,7 @@ pipeline {
}
stage('Build Isabelle/DOF') {
steps {
sh 'find -type d -name "output" -exec rm -rf {} \\; || true'
sh 'docker run -v $PWD:/DOF logicalhacking:isabelle4dof-2018 sh -c "cd /DOF && ./install && isabelle build -D ."'
}
}

View File

@ -24,9 +24,9 @@
#
# SPDX-License-Identifier: BSD-2-Clause
FROM logicalhacking:isabelle2018
FROM logicalhacking:isabelle2019
WORKDIR /home/isabelle
COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy
RUN Isabelle/bin/isabelle build -s -b Functional-Automata
RUN Isabelle/bin/isabelle build -b Functional-Automata

8
.config Normal file
View File

@ -0,0 +1,8 @@
DOF_VERSION="Unreleased"
ISABELLE_VERSION="Isabelle2018: August 2018"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
DOF_URL="https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
DOF_ARTIFACT_URL="https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF"
AFP_DATE="afp-2019-06-04"
AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/"$AFP_DATE".tar.gz"

1
.gitignore vendored
View File

@ -1,3 +1,4 @@
output
*.template.sty
.afp
*~

View File

@ -1,6 +1,6 @@
Copyright (C) 2018-2019 The University of Sheffield
2019-2019 The University of Exeter
2018-2019 The University of Paris-Sud
2018-2019 The University of Paris-Saclay
All rights reserved.
Redistribution and use in source and binary forms, with or without

144
README.md
View File

@ -1,62 +1,44 @@
# Isabelle/DOF: Document Preparation Setup
# [Isabelle/DOF](https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF): Document Preparation Setup
Isabelle/DOF is a novel Document Ontology Framework on top of
Isabelle. Isabelle/DOF allows for both conventional typesetting as
well as formal development.
Isabelle/DOF is a novel Document Ontology Framework on top of Isabelle.
Isabelle/DOF allows for both conventional typesetting as well as formal
development.
## Prerequisites
## Pre-requisites
Isabelle/DOF requires [Isabelle 2018](http://isabelle.in.tum.de/website-Isabelle2018/).
Please download the Isabelle 2018 distribution for your operating
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2018/).
Isabelle/DOF has two major pre-requisites:
## Installation
* **Isabelle:** Isabelle/DOF requires [Isabelle 2019](http://isabelle.in.tum.de/website-Isabelle2019/).
Please download the Isabelle 2019 distribution for your operating
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2019/).
* **LaTeX:** Isabelle/DOF requires a modern pdfTeX-engine supporting the \expanded{}-primitive. This
is, for example, included in the [TeXLive 2019](https://www.tug.org/texlive/) (or later)
distribution. Please follow the [TeXLive installation instructions](https://www.tug.org/texlive/acquire-netinstall.html)
for installing TeXLive.
### Quick Installation Guide
## Installation
In most cases, the DOF-plugin can be installed as follows:
```console
```console
foo@bar:~$ ./install
```
If a specific Isabelle version should be used (i.e., not the default
one), the full path to the ``isabelle`` command needs to be passed as
using the ``-i`` command line argument of the ``install`` script:
```console
foo@bar:~$ ./install -i /usr/local/Isabelle2018/bin/isabelle
If a specific Isabelle version should be used (i.e., not the default
one), the full path to the ``isabelle`` command needs to be passed as
using the ``--isabelle`` command line argument of the ``install`` script:
```console
foo@bar:~$ ./install --isabelle /usr/local/Isabelle2019/bin/isabelle
```
For further command line options of the installer, please use the
For further command line options of the installer, please use the
built-in help:
```console
foo@bar:~$ ./install -h
```console
foo@bar:~$ ./install --help
```
### What The Installer Actually Does
The installer will
* apply a patch to Isabelle that is necessary to use Isabelle/DOF.
If this patch installation fails, you need to manually replace
the file ``Isabelle2018/src/Pure/Thy/thy_output.ML`` in the Isabelle
distribution with the file ``patches/thy_output.ML`` from the
Isabelle/DOF distribution:
```console
cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/
```
* install required entries from the [AFP](https://www.isa-afp.org). If this
installation fails, you need to manually install the AFP for Isabelle 2018 as follows:
Download the [AFP for Isabelle 2018](https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz)
and follow the [instructions for installing the AFP as Isabelle
component](https://www.isa-afp.org/using.html). If you have extracted
the AFP archive into the directory to `/home/myself/afp`, you should
run the following command to make the AFP session `ROOTS` available to
Isabelle:
```console
echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2018/ROOTS
```
* install the Isabelle/DOF-plugin into the Isabelle user directory
(the exact location depends on the Isabelle version).
* check that the AFP has been installed successfully.
## Usage
### Opening an Example
@ -71,7 +53,7 @@ isabelle jedit -d . -l Isabelle_DOF examples/scholarly_paper/2018_cicm/IsaDofApp
This will open an example of a scientific paper using the pre-compiled
session ``Isabelle_DOF``, i.e., you will not be able to edit the
ontology definitions. If you want to edit the ontology definition,
just open the theory file with the default HOL session:
just open the theory file with the default HOL session:
```console
isabelle jedit -d . -l HOL examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
@ -79,68 +61,70 @@ isabelle jedit -d . -l HOL examples/scholarly_paper/2018_cicm/IsaDofApplications
While this gives you more flexibility, it might "clutter" your editing
experience, as a lot of internal theories are loaded into Isabelle's
editor.
editor.
### Creating a New Project
The DOF-plugin provides an alternative to Isabelle's ``mkroot`` command.
Isabelle projects that use DOF need to be created using
```console
foo@bar:~$ isabelle DOF_mkroot -d
```console
foo@bar:~$ isabelle mkroot_DOF
```
The ``DOF_mkroot`` command takes the same parameter as the standard
``mkroot`` command of Isabelle. Thereafter, the normal Isabelle
command for building documents can be used.
The ``mkroot_DOF`` command takes the same parameter as the standard
``mkroot`` command of Isabelle. Thereafter, the normal Isabelle
command for building documents can be used.
Using the ``-o`` option, different ontology setups can be
selected and using the ``-t`` option, different LaTeX setups
can be selected (use ``-h`` to obtain a list of all installed setups):
```console
foo@bar:~$ isabelle DOF_mkroot -h
can be selected. For example,
Usage: isabelle DOF_mkroot [OPTIONS] [DIR]
```console
foo@bar:~$ isabelle mkroot_DOF -o scholarly_paper -t scrartcl
```
creates a setup using the scholarly_paper ontology and the article
class from the KOMA-Script bundle.
The help (option ``-h``) show a list of all supported ontologies and
document templates:
```console
foo@bar:~$ isabelle mkroot_DOF -h
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
Options are:
-h print this help text and exit
-d enable document preparation
-n NAME alternative session name (default: DIR base name)
-o ONTOLOGY (default: core)
-o ONTOLOGY (default: scholarly_paper)
Available ontologies:
* cenelec_50128
* core
* mathex
* scholarly_paper
-t TEMPLATE (default: DEFAULT_TEMPLATE)
-t TEMPLATE (default: scrartcl)
Available document templates:
* lncs
* scrartcl
* scrreprt
* scrreprt-modern
Prepare session root DIR (default: current directory).
```
For example,
```console
foo@bar:~$ isabelle DOF_mkroot -d -o scholarly_paper -t lncs
```
creates a setup using the scholarly_paper ontology and Springer's
LNCS LaTeX class as document class. Note that the generated setup
does not include the actual ``llncs.cls`` file. This is due to
license restrictions. You need to obtain the file from Springer's
website and either copy it in you ``texmf`` directory or the ``root``
folder. In the latter case, you also need to add it in the ``ROOT`` file
as dependency.
## Team
Main contacts:
* [Achim D. Brucker](http://www.brucker.ch/)
* [Burkhart Wolff](https://www.lri.fr/~wolff/)
### Contributors
* Idir Ait-Sadoune
* Paolo Crisafulli
* Idir Ait-Sadoune
* Paolo Crisafulli
* Chantal Keller
## License
@ -153,12 +137,16 @@ SPDX-License-Identifier: BSD-2-Clause
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart
Wolff. [Using The Isabelle Ontology Framework: Linking the Formal
with the Informal]({https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
In Conference on Intelligent Computer Mathematics (CICM). Lecture
with the Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
In Conference on Intelligent Computer Mathematics (CICM). Lecture
Notes in Computer Science (11006), Springer-Verlag, 2018.
* Achim D. Brucker and Burkhart Wolff. [Isabelle/DOF: Design and
Implementation](https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf).
In Software Engineering and Formal Methods (SEFM). Lecture Notes in
Computer Science, Springer-Verlag, 2019.
## Master Repository
The master git repository for this project is hosted
The master git repository for this project is hosted
<https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.

12
ROOT
View File

@ -1,12 +0,0 @@
session "Isabelle_DOF" = "Functional-Automata" +
options [document = pdf, document_output = "output"]
sessions
"Regular-Sets"
theories
Isa_DOF
"ontologies/Conceptual"
"ontologies/CENELEC_50128"
"ontologies/scholarly_paper"
"ontologies/technical_report"
"ontologies/mathex_onto"

1
ROOTS
View File

@ -1 +1,2 @@
src
examples

View File

@ -1,26 +0,0 @@
# Isabelle_DOF: Document Preparation Setup
This directory contains the LaTeX setup for Isabelle's
document generation system.
## Tips and Tricks
During debugging of LaTeX errors, it can be very helpful to use
more than 79 characters for error messages (otherwise, long errors
are truncated):
``` bash
max_print_line=200 error_line=200 half_error_line=100 pdflatex root.tex
```
## Team
Main contacts:
* [Achim D. Brucker](http://www.brucker.ch/)
* [Burkhart Wolff](https://www.lri.fr/~wolff/)
## License
This project is licensed under a 2-clause BSD license.
SPDX-License-Identifier: BSD-2-Clause

View File

@ -1,19 +0,0 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
\NeedsTeXFormat{LaTeX2e}\relax
\ProvidesPackage{DOF-cenelec_50128}
[0000/00/00 Unreleased v0.0.0+%
Document-Type Support Framework for Isabelle (CENELEC 50128).]
\RequirePackage{DOF-COL}

View File

@ -6,3 +6,10 @@ session "mini_odo" = "Isabelle_DOF" +
"isadof.cfg"
"preamble.tex"
"build"
"root.bib"
"root.mst"
"lstisadof.sty"
"figures/df-numerics-encshaft.png"
"figures/odometer.jpeg"
"figures/three-phase-odo.pdf"
"figures/wheel-df.png"

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
@ -34,7 +34,7 @@ if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 407 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

View File

@ -0,0 +1,3 @@
Template: scrreprt-modern
Ontology: technical_report
Ontology: cenelec_50128

View File

@ -60,33 +60,67 @@
\def\endlstdelim{\texttt{\textbf{\color{black!60}#2}}\egroup}%
\ttfamily\textbf{\color{black!60}#1}\bgroup\rmfamily\color{#3}\aftergroup\endlstdelim%
}
\newcommand{\subscr}[1]{\ensuremath{_{\mbox{#1}}}}
\newcommand{\supscr}[1]{\ensuremath{^{\mbox{#1}}}}
\newcommand{\subscr}[1]{\ensuremath{_{\text{#1}}}}
\newcommand{\supscr}[1]{\ensuremath{^{\text{#1}}}}
\lstdefinestyle{ISAR}{%
language=%
,basicstyle=\ttfamily%
,showspaces=false%
,showlines=false%
,columns=flexible%
,keepspaces
,mathescape=false,
,morecomment=[s]{(*}{*)}%
% ,moredelim=*[s][\rmfamily]{\{*}{*\}}%
,moredelim = **[is][\beginlstdelim{\{*}{*\}}{black}]{\{*}{*\}}
,showstringspaces=false%
,moredelim=*[is][\supscr]{\\<^bsup>}{\\<^esup>}%
,moredelim=*[is][\supscr]{<bsup>}{<esup>}%
,moredelim=*[is][\subscr]{<bsub>}{<esub>}%
,literate={%
{...}{\,\ldots\,}3%
{\\<Open>}{\ensuremath{\isacartoucheopen}}1%
{\\at}{@}1%
{\\<Close>}{\ensuremath{\isacartoucheclose}}1%
{<Open>}{\ensuremath{\isacartoucheopen}}1%
{<open>}{\ensuremath{\isacartoucheopen}}1%
{<@>}{@}1%
{"}{}0%
{~}{\ }1%
{::}{:\!:}1%
{<Close>}{\ensuremath{\isacartoucheclose}}1%
{<close>}{\ensuremath{\isacartoucheclose}}1%
{\\<Gamma>}{\ensuremath{\Gamma}}1%
{\\<times>}{\ensuremath{\times}}1%
{\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1%
{\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1%
{\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1%
{\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1%
{\\<equiv>}{\ensuremath{\equiv}}1%
{\\<Rightarrow>}{\ensuremath{\Rightarrow}}1%
{\\<rightarrow>}{\ensuremath{\rightarrow}}1%
{\\<longrightarrow>}{\ensuremath{\rightarrow}}1%
{\\<and>}{\ensuremath{\land}}1%
{\\<or>}{\ensuremath{\lor}}1%
{\\<lfloor>}{\ensuremath{\lfloor}}1%
{\\<rfloor>}{\ensuremath{\rfloor}}1%
%{\\<lparr>}{\ensuremath{\lparr}}1%
%{\\<rparr>}{\ensuremath{\rparr}}1%
{\\<le>}{\ensuremath{\le}}1%
{\\<delta>}{\ensuremath{\delta}}1%
{\\<lambda>}{\ensuremath{\lambda}}1%
{\\<bar>}{\ensuremath{\vert}}1%
{\<sigma>}{\ensuremath{\sigma}}1%
{\\<lparr>}{\ensuremath{\isasymlparr}}1%
{\\<rparr>}{\ensuremath{\isasymrparr}}1%
{\\<leftrightarrow>}{\ensuremath{\leftrightarrow}}1%
{\{*}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{*\}}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<open>}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{\\<close>}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<forall>}{\ensuremath{\forall}}1%
{\\<exists>}{\ensuremath{\exists}}1%
{\\<in>}{\ensuremath{\in}}1%
{\\<delta>}{\ensuremath{\delta}}1%
{\\<real>}{\ensuremath{\mathbb{R}}}1%
{\\<noteq>}{\ensuremath{\neq}}1%
{\\<Forall>}{\ensuremath{\bigwedge\,}}1%
{\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1%
{\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1%
{\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1%
{\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1%
}%
% % Defining "tags" (text-antiquotations) based on 1-keywords
,tag=**[s]{@\{}{\}}%
@ -97,18 +131,18 @@
% Defining 2-keywords
,keywordstyle=[2]{\color{Blue!60}\bfseries}%
,alsoletter={*,-}
,morekeywords=[2]{theory, begin, end, ML,section,subsection,paragraph,chapter,text}%
,morekeywords=[2]{case, then, show, theory, begin, end, ML,section,subsection,paragraph,chapter,text}%
%,moredelim=[s][\textit]{<}{>}
% Defining 3-keywords
,keywordstyle=[3]{\color{OliveGreen!60}\bfseries}%
,morekeywords=[3]{doc_class,declare_reference,update_instance*,
open_monitor*, close_monitor*, figure*, title*, subtitle*,declare_reference*,section*,text*}%
open_monitor*, close_monitor*, declare_reference*,section*,text*,title*,abstract*}%
% Defining 4-keywords
,keywordstyle=[4]{\color{black!60}\bfseries}%
,morekeywords=[4]{where, imports}%
,morekeywords=[4]{where, imports, keywords}%
% Defining 5-keywords
,keywordstyle=[5]{\color{BrickRed!70}\bfseries}%
,morekeywords=[5]{datatype, typedecl, consts, theorem}%
,morekeywords=[5]{datatype, by, fun, Definition*, definition, type_synonym, typedecl, consts, assumes, and, shows, proof, next, qed, lemma, theorem}%
% Defining 6-keywords
,keywordstyle=[6]{\itshape}%
,morekeywords=[6]{meta-args, ref, expr, class_id}%
@ -117,8 +151,7 @@
%%
\lstnewenvironment{isar}[1][]{\lstset{style=ISAR,
backgroundcolor=\color{black!2},
frame=lines,
mathescape=true,
frame=lines,mathescape,
basicstyle=\footnotesize\ttfamily,#1}}{}
%%%
\def\inlineisar{\lstinline[style=ISAR,breaklines=true,mathescape,breakatwhitespace=true]}

View File

@ -0,0 +1,12 @@
%% This is a placeholder for user-specific configuration and packages.
\usepackage{listings}
\usepackage{lstisadof}
\usepackage{wrapfig}
\usepackage{paralist}
\usepackage{numprint}
\newcommand{\fixIsarList}{\vspace{-\topsep}\vspace{-\baselineskip}\mbox{}\\[0pt]\noindent}
\newcommand{\eg}{e.\,g.}
\newcommand{\ie}{i.\,e.}
\author{}
\title{}

View File

@ -0,0 +1,884 @@
@STRING{pub-springer={Springer} }
@STRING{pub-springer:adr="" }
@STRING{s-lncs = "LNCS" }
@Manual{ wenzel:isabelle-isar:2017,
title = {The Isabelle/Isar Reference Manual},
author = {Makarius Wenzel},
year = 2017,
note = {Part of the Isabelle distribution.}
}
@Book{ adler:r:2010,
abstract = {Presents a guide to the R computer language, covering such
topics as the user interface, packages, syntax, objects,
functions, object-oriented programming, data sets, lattice
graphics, regression models, and bioconductor.},
added-at = {2013-01-10T22:39:38.000+0100},
address = {Sebastopol, CA},
author = {Adler, Joseph},
isbn = {9780596801700 059680170X},
keywords = {R},
publisher = {O'Reilly},
refid = 432987461,
title = {R in a nutshell},
year = 2010
}
@InCollection{ wenzel.ea:building:2007,
abstract = {We present the generic system framework of
Isabelle/Isarunderlying recent versions of Isabelle. Among
other things, Isar provides an infrastructure for Isabelle
plug-ins, comprising extensible state components and
extensible syntax that can be bound to tactical ML
programs. Thus the Isabelle/Isar architecture may be
understood as an extension and refinement of the
traditional LCF approach, with explicit infrastructure for
building derivative systems. To demonstrate the technical
potential of the framework, we apply it to a concrete
formalmethods tool: the HOL-Z 3.0 environment, which is
geared towards the analysis of Z specifications and formal
proof of forward-refinements.},
author = {Makarius Wenzel and Burkhart Wolff},
booktitle = {TPHOLs 2007},
editor = {Klaus Schneider and Jens Brandt},
language = {USenglish},
acknowledgement={none},
pages = {352--367},
publisher = pub-springer,
address = pub-springer:adr,
number = 4732,
series = s-lncs,
title = {Building Formal Method Tools in the {Isabelle}/{Isar}
Framework},
doi = {10.1007/978-3-540-74591-4_26},
year = 2007
}
@Misc{ w3c:ontologies:2015,
title = {Ontologies},
organisation = {W3C},
url = {https://www.w3.org/standards/semanticweb/ontology},
year = 2018
}
@Book{ boulanger:cenelec-50128:2015,
author = {Boulanger, Jean-Louis},
title = {{CENELEC} 50128 and {IEC} 62279 Standards},
publisher = {Wiley-ISTE},
year = 2015,
address = {Boston},
note = {The reference on the standard.}
}
@Booklet{ cc:cc-part3:2006,
bibkey = {cc:cc-part3:2006},
key = {Common Criteria},
institution = {Common Criteria},
language = {USenglish},
month = sep,
year = 2006,
public = {yes},
title = {Common Criteria for Information Technology Security
Evaluation (Version 3.1), {Part} 3: Security assurance
components},
note = {Available as document
\href{http://www.commoncriteriaportal.org/public/files/CCPART3V3.1R1.pdf}
{CCMB-2006-09-003}},
number = {CCMB-2006-09-003},
acknowledgement={brucker, 2007-04-24}
}
@Book{ nipkow.ea:isabelle:2002,
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
title = {Isabelle/HOL---A Proof Assistant for Higher-Order Logic},
publisher = pub-springer,
address = pub-springer:adr,
series = s-lncs,
volume = 2283,
doi = {10.1007/3-540-45949-9},
abstract = {This book is a self-contained introduction to interactive
proof in higher-order logic (\acs{hol}), using the proof
assistant Isabelle2002. It is a tutorial for potential
users rather than a monograph for researchers. The book has
three parts.
1. Elementary Techniques shows how to model functional
programs in higher-order logic. Early examples involve
lists and the natural numbers. Most proofs are two steps
long, consisting of induction on a chosen variable followed
by the auto tactic. But even this elementary part covers
such advanced topics as nested and mutual recursion. 2.
Logic and Sets presents a collection of lower-level tactics
that you can use to apply rules selectively. It also
describes Isabelle/\acs{hol}'s treatment of sets, functions
and relations and explains how to define sets inductively.
One of the examples concerns the theory of model checking,
and another is drawn from a classic textbook on formal
languages. 3. Advanced Material describes a variety of
other topics. Among these are the real numbers, records and
overloading. Advanced techniques are described involving
induction and recursion. A whole chapter is devoted to an
extended example: the verification of a security protocol. },
year = 2002,
acknowledgement={brucker, 2007-02-19},
bibkey = {nipkow.ea:isabelle:2002},
tags = {noTAG},
clearance = {unclassified},
timestap = {2008-05-26}
}
@InProceedings{ wenzel:asynchronous:2014,
author = {Makarius Wenzel},
title = {Asynchronous User Interaction and Tool Integration in
Isabelle/{PIDE}},
booktitle = {Interactive Theorem Proving (ITP)},
pages = {515--530},
year = 2014,
crossref = {klein.ea:interactive:2014},
doi = {10.1007/978-3-319-08970-6_33},
timestamp = {Sun, 21 May 2017 00:18:59 +0200},
abstract = { Historically, the LCF tradition of interactive theorem
proving was tied to the read-eval-print loop, with
sequential and synchronous evaluation of prover commands
given on the command-line. This user-interface technology
was adequate when R. Milner introduced his LCF proof
assistant in the 1970-ies, but it severely limits the
potential of current multicore hardware and advanced IDE
front-ends.
Isabelle/PIDE breaks this loop and retrofits the
read-eval-print phases into an asynchronous model of
document-oriented proof processing. Instead of feeding a
sequence of individual commands into the prover process,
the primary interface works via edits over a family of
document versions. Execution is implicit and managed by the
prover on its own account in a timeless and stateless
manner. Various aspects of interactive proof checking are
scheduled according to requirements determined by the
front-end perspective on the proof document, while making
adequate use of the CPU resources on multicore hardware on
the back-end.
Recent refinements of Isabelle/PIDE provide an explicit
concept of asynchronous print functions over existing proof
states. This allows to integrate long-running or
potentially non-terminating tools into the document-model.
Applications range from traditional proof state output
(which may consume substantial time in interactive
development) to automated provers and dis-provers that
report on existing proof document content (e.g.
Sledgehammer, Nitpick, Quickcheck in Isabelle/HOL).
Moreover, it is possible to integrate query operations via
additional GUI panels with separate input and output (e.g.
for Sledgehammer or find-theorems). Thus the Prover IDE
provides continuous proof processing, augmented by add-on
tools that help the user to continue writing proofs. }
}
@Proceedings{ klein.ea:interactive:2014,
editor = {Gerwin Klein and Ruben Gamboa},
title = {Interactive Theorem Proving - 5th International
Conference, {ITP} 2014, Held as Part of the Vienna Summer
of Logic, {VSL} 2014, Vienna, Austria, July 14-17, 2014.
Proceedings},
series = s-lncs,
volume = 8558,
publisher = pub-springer,
year = 2014,
doi = {10.1007/978-3-319-08970-6},
isbn = {978-3-319-08969-0}
}
@InProceedings{ bezzecchi.ea:making:2018,
title = {Making Agile Development Processes fit for V-style
Certification Procedures},
author = {Bezzecchi, S. and Crisafulli, P. and Pichot, C. and Wolff,
B.},
booktitle = {{ERTS'18}},
abstract = {We present a process for the development of safety and
security critical components in transportation systems
targeting a high-level certification (CENELEC 50126/50128,
DO 178, CC ISO/IEC 15408).
The process adheres to the objectives of an ``agile
development'' in terms of evolutionary flexibility and
continuous improvement. Yet, it enforces the overall
coherence of the development artifacts (ranging from proofs
over tests to code) by a particular environment (CVCE).
In particular, the validation process is built around a
formal development based on the interactive theorem proving
system Isabelle/HOL, by linking the business logic of the
application to the operating system model, down to code and
concrete hardware models thanks to a series of refinement
proofs.
We apply both the process and its support in CVCE to a
case-study that comprises a model of an odometric service
in a railway-system with its corresponding implementation
integrated in seL4 (a secure kernel for which a
comprehensive Isabelle development exists). Novel
techniques implemented in Isabelle enforce the coherence of
semi-formal and formal definitions within to specific
certification processes in order to improve their
cost-effectiveness. },
pdf = {https://www.lri.fr/~wolff/papers/conf/2018erts-agile-fm.pdf},
year = 2018,
series = {ERTS Conference Proceedings},
location = {Toulouse}
}
@Misc{ owl2012,
title = {OWL 2 Web Ontology Language},
note = {\url{https://www.w3.org/TR/owl2-overview/}, Document
Overview (Second Edition)},
author = {World Wide Web Consortium}
}
@Misc{ protege,
title = {Prot{\'e}g{\'e}},
note = {\url{https://protege.stanford.edu}},
year = 2018
}
@Misc{ cognitum,
title = {Fluent Editor},
note = {\url{http://www.cognitum.eu/Semantics/FluentEditor/}},
year = 2018
}
@Misc{ neon,
title = {The NeOn Toolkit},
note = {\url{http://neon-toolkit.org}},
year = 2018
}
@Misc{ owlgred,
title = {OWLGrEd},
note = {\url{http://owlgred.lumii.lv/}},
year = 2018
}
@Misc{ rontorium,
title = {R Language Package for FLuent Editor (rOntorion)},
note = {\url{http://www.cognitum.eu/semantics/FluentEditor/rOntorionFE.aspx}},
year = 2018
}
@InProceedings{ DBLP:conf/mkm/BlanchetteHMN15,
author = {Jasmin Christian Blanchette and Maximilian P. L. Haslbeck
and Daniel Matichuk and Tobias Nipkow},
title = {Mining the Archive of Formal Proofs},
booktitle = {Intelligent Computer Mathematics - International
Conference, {CICM} 2015, Washington, DC, USA, July 13-17,
2015, Proceedings},
pages = {3--17},
year = 2015,
url = {https://doi.org/10.1007/978-3-319-20615-8\_1},
doi = {10.1007/978-3-319-20615-8\_1},
timestamp = {Fri, 02 Nov 2018 09:40:47 +0100},
biburl = {https://dblp.org/rec/bib/conf/mkm/BlanchetteHMN15},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@InCollection{ brucker.ea:isabelle-ontologies:2018,
abstract = {While Isabelle is mostly known as part of Isabelle/HOL (an
interactive theorem prover), it actually provides a
framework for developing a wide spectrum of applications. A
particular strength of the Isabelle framework is the
combination of text editing, formal verification, and code
generation.\\\\Up to now, Isabelle's document preparation
system lacks a mechanism for ensuring the structure of
different document types (as, e.g., required in
certification processes) in general and, in particular,
mechanism for linking informal and formal parts of a
document.\\\\In this paper, we present Isabelle/DOF, a
novel Document Ontology Framework on top of Isabelle.
Isabelle/DOF allows for conventional typesetting \emph{as
well} as formal development. We show how to model document
ontologies inside Isabelle/DOF, how to use the resulting
meta-information for enforcing a certain document
structure, and discuss ontology-specific IDE support.},
address = {Heidelberg},
author = {Achim D. Brucker and Idir Ait-Sadoune and Paolo Crisafulli
and Burkhart Wolff},
booktitle = {Conference on Intelligent Computer Mathematics (CICM)},
doi = {10.1007/978-3-319-96812-4_3},
keywords = {Isabelle/Isar, HOL, Ontologies},
language = {USenglish},
location = {Hagenberg, Austria},
number = 11006,
pdf = {https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf},
publisher = {Springer-Verlag},
series = {Lecture Notes in Computer Science},
title = {Using the {Isabelle} Ontology Framework: Linking the
Formal with the Informal},
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018},
year = 2018
}
@InCollection{ brucker.wolff:isa_def-design-impl:2019,
abstract = {DOF is a novel framework for defining ontologies and enforcing them during document
development and evolution. A major goal of DOF is the integrated development of formal
certification documents (e.g., for Common Criteria or CENELEC 50128) that require
consistency across both formal and informal arguments. To support a consistent
development of formal and informal parts of a document, we implemented Isabelle/DOF,
an implementation of DOF on top of the formal methods framework Isabelle/HOL. A
particular emphasis is put on a deep integration into Isabelle's IDE, which allows
for smooth ontology development as well as immediate ontological feedback during
the editing of a document.
In this paper, we give an in-depth presentation of the design concepts of DOF's
Ontology Definition Language (ODL) and key aspects of the technology of its
implementation. Isabelle/DOF is the first ontology language supporting
machine-checked links between the formal and informal parts in an LCF-style
interactive theorem proving environment. Sufficiently annotated, large documents
can easily be developed collaboratively, while ensuring their consistency, and the
impact of changes (in the formal and the semi-formal content) is tracked automatically.},
address = {Heidelberg},
author = {Achim D. Brucker and Burkhart Wolff},
booktitle = {International Conference on Software Engineering and Formal Methods},
keywords = {Isabelle/Isar, HOL, Ontologies, Documentation},
language = {USenglish},
location = {Oslo, Norway},
number = "to appear",
publisher = {Springer-Verlag},
series = {Lecture Notes in Computer Science},
title = {{I}sabelle/{DOF}: {D}esign and {I}mplementation},
year = 2019
}
@InProceedings{ DBLP:conf/itp/Wenzel14,
author = {Makarius Wenzel},
title = {Asynchronous User Interaction and Tool Integration in Isabelle/PIDE},
booktitle = {Interactive Theorem Proving (ITP)},
pages = {515--530},
year = 2014,
doi = {10.1007/978-3-319-08970-6_33},
timestamp = {Sun, 21 May 2017 00:18:59 +0200},
biburl = {https://dblp.org/rec/bib/conf/itp/Wenzel14},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@InProceedings{ DBLP:journals/corr/Wenzel14,
author = {Makarius Wenzel},
title = {System description: Isabelle/jEdit in 2014},
booktitle = {Proceedings Eleventh Workshop on User Interfaces for
Theorem Provers, {UITP} 2014, Vienna, Austria, 17th July
2014.},
pages = {84--94},
year = 2014,
doi = {10.4204/EPTCS.167.10},
timestamp = {Wed, 03 May 2017 14:47:58 +0200},
biburl = {https://dblp.org/rec/bib/journals/corr/Wenzel14},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@InProceedings{ DBLP:conf/mkm/BarrasGHRTWW13,
author = {Bruno Barras and Lourdes Del Carmen
Gonz{\'{a}}lez{-}Huesca and Hugo Herbelin and Yann
R{\'{e}}gis{-}Gianas and Enrico Tassi and Makarius Wenzel
and Burkhart Wolff},
title = {Pervasive Parallelism in Highly-Trustable Interactive
Theorem Proving Systems},
booktitle = {Intelligent Computer Mathematics - MKM, Calculemus, DML,
and Systems and Projects},
pages = {359--363},
year = 2013,
doi = {10.1007/978-3-642-39320-4_29},
timestamp = {Sun, 04 Jun 2017 10:10:26 +0200},
biburl = {https://dblp.org/rec/bib/conf/mkm/BarrasGHRTWW13},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@TechReport{ bsi:50128:2014,
type = {Standard},
key = {BS EN 50128:2011},
month = apr,
year = 2014,
series = {British Standards Publication},
title = {BS EN 50128:2011: Railway applications -- Communication,
signalling and processing systems -- Software for railway
control and protecting systems},
institution = {British Standards Institution (BSI)},
keywords = {CENELEC},
abstract = {This European Standard is part of a group of related
standards. The others are EN 50126-1:1999 "Railway
applications -- The specification and demonstration of
Reliability, Availability, Maintainability and Safety
(RAMS) -- Part 1: Basic requirements and generic process --
and EN 50129:2003 "Railway applications -- Communication,
signalling and processing systems -- Safety related
electronic systems for signalling". EN 50126-1 addresses
system issues on the widest scale, while EN 50129 addresses
the approval process for individual systems which can exist
within the overall railway control and protection system.
This European Standard concentrates on the methods which
need to be used in order to provide software which meets
the demands for safety integrity which are placed upon it
by these wider considerations. This European Standard
provides a set of requirements with which the development,
deployment and maintenance of any safety-related software
intended for railway control and protection applications
shall comply. It defines requirements concerning
organisational structure, the relationship between
organisations and division of responsibility involved in
the development, deployment and maintenance activities.}
}
@TechReport{ ds:50126-1:2014,
type = {Standard},
key = {DS/EN 50126-1:1999},
month = oct,
year = 2014,
series = {Dansk standard},
title = {EN 50126-1:1999: Railway applications -- The specification
and demonstration of Reliability, Availability,
Maintainability and Safety (RAMS) -- Part 1: Basic
requirements and generic process},
institution = {Danish Standards Foundation},
keywords = {CENELEC},
abstract = {This European Standard provides Railway Authorities and
the railway support industry, throughout the European
Union, with a process which will enable the implementation
of a consistent approach to the management of reliablity,
availability, maintainability and safety, denoted by the
acronym RAMS. Processes for the specification and
demonstration of RAMS requirements are cornerstones of this
standard. This European Standard aims to promote a common
understanding and approach to the management of RAMS.
This European Standard can be applied systematically by a
railway authority and railway support industry,
throughout all phases of the lifecycle of a railway
application, to develop railway specific RAMS requirements
and to achieve compliance with these requirements. The
systems-level approach defined by this European Standard
facilitates assessment of the RAMS interactions between
elements of complex railway applications. This European
Standard promotes co-operation between railway authority
and railway support industry, within a variety of
procurement strategies, in the achievement of an optimal
combination of RAMS and cost for railway applications.
Adoption of this European Standard will support the
principles of the European Single Market and facilitate
European railway inter-operability. The process defined by
this European Standard assumes that railway authorities and
railway support industry have business-level policies
addressing Quality, Performance and Safety. The approach
defined in this standard is consistent with the application
of quality management requirements contained within the ISO
9000 series of International standards.}
}
@Book{ paulson:ml:1996,
author = {Lawrence C. Paulson},
title = {{ML} for the Working Programmer},
publisher = {Cambridge Press},
year = 1996,
url = {http://www.cl.cam.ac.uk/~lp15/MLbook/pub-details.html},
acknowledgement={none}
}
@Book{ pollak:beginning:2009,
title = {Beginning Scala},
author = {David Pollak},
publisher = {Apress},
year = 2009,
isbn = {978-1-4302-1989-7}
}
@Article{ klein:operating:2009,
author = {Gerwin Klein},
title = {Operating System Verification --- An Overview},
journal = {S\={a}dhan\={a}},
publisher = pub-springer,
year = 2009,
volume = 34,
number = 1,
month = feb,
pages = {27--69},
abstract = {This paper gives a high-level introduction to the topic of
formal, interactive, machine-checked software verification
in general, and the verification of operating systems code
in particular. We survey the state of the art, the
advantages and limitations of machine-checked code proofs,
and describe two specific ongoing larger-scale verification
projects in more detail.}
}
@InProceedings{ wenzel:system:2014,
author = {Makarius Wenzel},
title = {System description: Isabelle/jEdit in 2014},
booktitle = {Workshop on User Interfaces for Theorem Provers, {UITP}},
pages = {84--94},
year = 2014,
doi = {10.4204/EPTCS.167.10},
timestamp = {Wed, 12 Sep 2018 01:05:15 +0200},
editor = {Christoph Benzm{\"{u}}ller and Bruno {Woltzenlogel Paleo}},
volume = 167
}
@InProceedings{ feliachi.ea:circus:2013,
author = {Abderrahmane Feliachi and Marie{-}Claude Gaudel and
Makarius Wenzel and Burkhart Wolff},
title = {The Circus Testing Theory Revisited in Isabelle/HOL},
booktitle = {{ICFEM}},
series = {Lecture Notes in Computer Science},
volume = 8144,
pages = {131--147},
publisher = {Springer},
year = 2013
}
@Article{ Klein2014,
author = {Gerwin Klein and June Andronick and Kevin Elphinstone and
Toby C. Murray and Thomas Sewell and Rafal Kolanski and
Gernot Heiser},
title = {Comprehensive formal verification of an {OS} microkernel},
journal = {{ACM} Trans. Comput. Syst.},
year = 2014,
volume = 32,
number = 1,
pages = {2:1--2:70},
bibsource = {dblp computer science bibliography, https://dblp.org},
biburl = {https://dblp.org/rec/bib/journals/tocs/KleinAEMSKH14},
doi = {10.1145/2560537},
timestamp = {Tue, 03 Jan 2017 11:51:57 +0100},
url = {http://doi.acm.org/10.1145/2560537}
}
@InProceedings{ bicchierai.ea:using:2013,
author = {Bicchierai, Irene and Bucci, Giacomo and Nocentini, Carlo
and Vicario, Enrico},
editor = {Keller, Hubert B. and Pl{\"o}dereder, Erhard and Dencker,
Peter and Klenk, Herbert},
title = {Using Ontologies in the Integration of Structural,
Functional, and Process Perspectives in the Development of
Safety Critical Systems},
booktitle = {Reliable Software Technologies -- Ada-Europe 2013},
year = 2013,
publisher = {Springer Berlin Heidelberg},
address = {Berlin, Heidelberg},
pages = {95--108},
abstract = {We present a systematic approach for the efficient
management of the data involved in the development process
of safety critical systems, illustrating how the activities
performed during the life-cycle can be integrated in a
common framework. Information needed in these activities
reflects concepts that pertain to three different
perspectives: i) structural elements of design and
implementation; ii) functional requirements and quality
attributes; iii) organization of the overall process. The
integration of these concepts may considerably improve the
trade-off between reward and effort spent in verification
and quality-driven activities.},
isbn = {978-3-642-38601-5}
}
@Article{ zhao.ea:formal:2016,
author = {Yongwang Zhao and David San{\'{a}}n and Fuyuan Zhang and
Yang Liu},
title = {Formal Specification and Analysis of Partitioning
Operating Systems by Integrating Ontology and Refinement},
journal = {{IEEE} Trans. Industrial Informatics},
volume = 12,
number = 4,
pages = {1321--1331},
year = 2016,
abstract = {Partitioning operating systems (POSs) have been widely
applied in safety-critical domains from aerospace to
automotive. In order to improve the safety and the
certification process of POSs, the ARINC 653 standard has
been developed and complied with by the mainstream POSs.
Rigorous formalization of ARINC 653 can reveal hidden
errors in this standard and provide a necessary foundation
for formal verification of POSs and ARINC 653 applica-
tions. For the purpose of reusability and efficiency, a
novel methodology by integrating ontology and refinement is
proposed to formally specify and analyze POSs in this
paper. An ontology of POSs is developed as an intermediate
model between informal descriptions of ARINC 653 and the
formal specification in Event-B. A semiautomatic
translation from the ontology and ARINC 653 into Event-B is
implemented, which leads to a complete Event-B
specification for ARINC 653 compliant POSs. During the
formal analysis, six hidden errors in ARINC 653 have been
discovered and fixed in the Event-B specification. We also
validate the existence of these errors in two open-source
POSs, i.e., XtratuM and POK. By introducing the ontology,
the degree of automatic verification of the Event-B
specification reaches a higher level}
}
@InProceedings{ denney.ea:evidence:2013,
author = {E. {Denney} and G. {Pai}},
booktitle = {2013 IEEE International Symposium on Software Reliability
Engineering Workshops (ISSREW)},
title = {Evidence arguments for using formal methods in software
certification},
year = 2013,
pages = {375--380},
abstract = {We describe a generic approach for automatically
integrating the output generated from a formal method/tool
into a software safety assurance case, as an evidence
argument, by (a) encoding the underlying reasoning as a
safety case pattern, and (b) instantiating it using the
data produced from the method/tool. We believe this
approach not only improves the trustworthiness of the
evidence generated from a formal method/tool, by explicitly
presenting the reasoning and mechanisms underlying its
genesis, but also provides a way to gauge the suitability
of the evidence in the context of the wider assurance case.
We illustrate our work by application to a real example-an
unmanned aircraft system - where we invoke a formal code
analysis tool from its autopilot software safety case,
automatically transform the verification output into an
evidence argument, and then integrate it into the former.},
keywords = {aircraft;autonomous aerial vehicles;formal
verification;safety-critical software;evidence
arguments;formal methods;software certification;software
safety assurance case;safety case pattern;unmanned aircraft
system;formal code analysis;autopilot software safety
case;verification output;Safety;Software
safety;Cognition;Computer
architecture;Context;Encoding;Safety cases;Safety case
patterns;Formal methods;Argumentation;Software
certification},
doi = {10.1109/ISSREW.2013.6688924},
month = {Nov}
}
@InProceedings{ kaluvuri.ea:quantitative:2014,
author = {Kaluvuri, Samuel Paul and Bezzi, Michele and Roudier,
Yves},
editor = {Eckert, Claudia and Katsikas, Sokratis K. and Pernul,
G{\"u}nther},
title = {A Quantitative Analysis of Common Criteria Certification
Practice},
booktitle = {Trust, Privacy, and Security in Digital Business},
year = 2014,
publisher = {Springer International Publishing},
address = {Cham},
pages = {132--143},
abstract = {The Common Criteria (CC) certification framework defines a
widely recognized, multi-domain certification scheme that
aims to provide security assurances about IT products to
consumers. However, the CC scheme does not prescribe a
monitoring scheme for the CC practice, raising concerns
about the quality of the security assurance provided by the
certification and questions on its usefulness. In this
paper, we present a critical analysis of the CC practice
that concretely exposes the limitations of current
approaches. We also provide directions to improve the CC
practice.},
isbn = {978-3-319-09770-1}
}
@InProceedings{ ekelhart.ea:ontological:2007,
author = {Ekelhart, Andreas and Fenz, Stefan and Goluch, Gernot and
Weippl, Edgar},
editor = {Venter, Hein and Eloff, Mariki and Labuschagne, Les and
Eloff, Jan and von Solms, Rossouw},
title = {Ontological Mapping of Common Criteria's Security
Assurance Requirements},
booktitle = {New Approaches for Security, Privacy and Trust in Complex
Environments},
year = 2007,
publisher = {Springer US},
address = {Boston, MA},
pages = {85--95},
abstract = {The Common Criteria (CC) for Information Technology
Security Evaluation provides comprehensive guidelines for
the evaluation and certification of IT security regarding
data security and data privacy. Due to the very complex
and time-consuming certification process a lot of companies
abstain from a CC certification. We created the CC
Ontology tool, which is based on an ontological representation
of the CC catalog, to support the evaluator at the
certification process. Tasks such as the planning of an
evaluation process, the review of relevant documents or the
creating of reports are supported by the CC Ontology
tool. With the development of this tool we reduce the time
and costs needed to complete a certification.},
isbn = {978-0-387-72367-9}
}
@InProceedings{ fenz.ea:formalizing:2009,
author = {Fenz, Stefan and Ekelhart, Andreas},
title = {Formalizing Information Security Knowledge},
booktitle = {Proceedings of the 4th International Symposium on
Information, Computer, and Communications Security},
series = {ASIACCS '09},
year = 2009,
isbn = {978-1-60558-394-5},
location = {Sydney, Australia},
pages = {183--194},
numpages = 12,
url = {http://doi.acm.org/10.1145/1533057.1533084},
doi = {10.1145/1533057.1533084},
acmid = 1533084,
publisher = {ACM},
address = {New York, NY, USA},
keywords = {information security, risk management, security ontology},
abstract = {Unified and formal knowledge models of the information
security domain are fundamental requirements for supporting
and enhancing existing risk management approaches. This
paper describes a security ontology which provides an
ontological structure for information security domain
knowledge. Besides existing best-practice guidelines such
as the German IT Grundschutz Manual also concrete knowledge
of the considered organization is incorporated. An
evaluation conducted by an information security expert team
has shown that this knowledge model can be used to support
a broad range of information security risk management
approaches.}
}
@InProceedings{ gleirscher.ea:incremental:2007,
author = {M. {Gleirscher} and D. {Ratiu} and B. {Schatz}},
booktitle = {2007 International Conference on Systems Engineering and
Modeling},
title = {Incremental Integration of Heterogeneous Systems Views},
year = 2007,
pages = {50--59},
abstract = {To master systems complexity, their industrial development
requires specialized heterogeneous views and techniques and
- correspondingly - engineering tools. These views
generally cover only parts of the system under development,
and critical development defects often occur at the gaps
between them. To successfully achieve an integration that
bridges these gaps, we must tackle it both from the
methodical as well as from the tooling sides. The former
requires answers to questions like: What are the views
provided by the tools? How are they related and extended to
achieve consistency or to form new views? - while the
latter requires answers to: How are views extracted from
the tools? How are they composed and provided to the user?
Our approach, suitable for incremental integration, is
demonstrated in the tool integration framework ToolNet.},
keywords = {computer aided engineering;computer aided software
engineering;software tools;heterogeneous systems
views;systems complexity;tool integration
framework;ToolNet;engineering tools;Systems engineering and
theory;Certification;Integrated circuit
modeling;Bridges;Software tools;Computer aided software
engineering;Computer aided engineering;Costs;Natural
languages;Formal specifications},
doi = {10.1109/ICSEM.2007.373334},
month = {March}
}
@Booklet{ omg:sacm:2018,
bibkey = {omg:sacm:2018},
key = omg,
abstract = {This specification defines a metamodel for representing
structured assurance cases. An Assurance Case is a set of
auditable claims, arguments, and evidence created to
support the claim that a defined system/service will
satisfy the particular requirements. An Assurance Case is a
document that facilitates information exchange between
various system stakeholders such as suppliers and
acquirers, and between the operator and regulator, where
the knowledge related to the safety and security of the
system is communicated in a clear and defendable way. Each
assurance case should communicate the scope of the system,
the operational context, the claims, the safety and/or
security arguments, along with the corresponding
evidence.},
publisher = omg,
language = {USenglish},
month = mar,
keywords = {SACM},
topic = {formalism},
note = {Available as OMG document
\href{http://www.omg.org/cgi-bin/doc?formal/2018-02-02}
{formal/2018-02-02}},
public = {yes},
title = {Structured Assurance Case Metamodel (SACM)},
year = 2018
}
@InProceedings{ kelly.ea:goal:2004,
title = {The Goal Structuring Notation -- A Safety Argument
Notation},
booktitle = {Dependable Systems and Networks},
year = 2004,
month = jul,
author = {Tim Kelly and Rob Weaver}
}
@TechReport{ rushby:formal:1993,
author = {John Rushby},
title = {Formal Methods and the Certification of Critical Systems},
institution = {Computer Science Laboratory, SRI International},
year = 1993,
number = {SRI-CSL-93-7},
address = {Menlo Park, CA},
note = {Also issued under the title {\em Formal Methods and
Digital Systems Validation for Airborne Systems\/} as NASA
Contractor Report 4551, December 1993},
month = dec
}
@InProceedings{ greenaway.ea:bridging:2012,
author = {Greenaway, David and Andronick, June and Klein, Gerwin},
editor = {Beringer, Lennart and Felty, Amy},
title = {Bridging the Gap: Automatic Verified Abstraction of C},
booktitle = {Interactive Theorem Proving},
year = 2012,
publisher = {Springer Berlin Heidelberg},
address = {Berlin, Heidelberg},
pages = {99--115},
abstract = {Before low-level imperative code can be reasoned about in
an interactive theorem prover, it must first be converted
into a logical representation in that theorem prover.
Accurate translations of such code should be conservative,
choosing safe representations over representations
convenient to reason about. This paper bridges the gap
between conservative representation and convenient
reasoning. We present a tool that automatically abstracts
low-level C semantics into higher level specifications,
while generating proofs of refinement in Isabelle/HOL for
each translation step. The aim is to generate a verified,
human-readable specification, convenient for further
reasoning.},
isbn = {978-3-642-32347-8}
}
@inproceedings{BCPW2018,
title = {Making Agile Development Processes fit for V-style Certification
Procedures},
author = {Bezzecchi, S. and Crisafulli, P. and Pichot, C. and Wolff, B.},
booktitle = {{ERTS'18}},
abstract = {We present a process for the development of safety and security
critical components in transportation systems targeting a high-level
certification (CENELEC 50126/50128, DO 178, CC ISO/IEC 15408).
The process adheres to the objectives of an ``agile development'' in
terms of evolutionary flexibility and continuous improvement. Yet, it
enforces the overall coherence of the development artifacts (ranging from
proofs over tests to code) by a particular environment (CVCE).
In particular, the validation process is built around a formal development
based on the interactive theorem proving system Isabelle/HOL, by linking the
business logic of the application to the operating system model, down to
code and concrete hardware models thanks to a series of refinement proofs.
We apply both the process and its support in CVCE to a case-study that
comprises a model of an odometric service in a railway-system with its
corresponding implementation integrated in seL4 (a secure kernel for
which a comprehensive Isabelle development exists). Novel techniques
implemented in Isabelle enforce the coherence of semi-formal
and formal definitions within to specific certification processes
in order to improve their cost-effectiveness.
},
pdf = {https://www.lri.fr/~wolff/papers/conf/2018erts-agile-fm.pdf},
year = {2018},
series = {ERTS Conference Proceedings},
location = {Toulouse}
}

View File

@ -0,0 +1,5 @@
heading_prefix "{\\large\\textbf{"
heading_suffix "}\\hfil}\\nopagebreak\n"
headings_flag 1
symhead_positive "Symbols"

View File

@ -0,0 +1,560 @@
(*<*)
theory
mini_odo
imports
"Isabelle_DOF.CENELEC_50128"
"Isabelle_DOF.technical_report"
begin
declare[[strict_monitor_checking=true]]
(*>*)
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
subtitle*[subtitle::subtitle]\<open>Case Study: An Odometer-Subsystem\<close>
chapter*[casestudy::technical]\<open>A Case-Study: An Odometer-Subsystem\<close>
text\<open>
In our case study, we will follow the phases of analysis, design, and implementation of the
odometry function of a train. This software processes data from an odometer to compute the position,
speed, and acceleration of a train. This system provides the basis for many
safety critical decisions, \eg, the opening of the doors. Due to its relatively small size, it
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
physical model of the environment, the physical and architectural model of the odometer including
the problem of numerical sampling, and the boundaries of efficient computations. The interplay
between environment and measuring-device as well as the implementation problems on a platform
with limited resources makes the odometer a fairly typical safety critical embedded system.
Due to space reasons, we will focus on the analysis part of the integrated
document; the design and code parts will only be outlined in a final resume. The
\<^emph>\<open>ontological embedding\<close>, which represents a main contribution of this paper, will be presented
in the next two sections.
We start with the capture of a number of informal documents available at the beginning of the
development.
\<close>
section\<open>System Requirements Specification as an \<^emph>\<open>Integrated Source\<close>\<close>
text\<open>Accurate information of a train's location along a track is an important prerequisite
to safe railway operation. Position, speed and acceleration measurement usually relies on a
set of independent measurements based on different physical principles---as a way to enhance
precision and availability. One of them is an \<^emph>\<open>odometer\<close>, which allows estimating a relative
location while the train runs between positions established by other measurements. \<close>
subsection\<open>Capturing ``Basic Principles of Motion and Motion Measurement.''\<close>
text\<open>
A rotary encoder measures the motion of a train. To achieve this, the encoder's shaft is fixed to
the train's wheel axle. When the train moves, the encoder produces a signal pattern directly
related to the train's progress. By measuring the fractional rotation of the encoder's shaft and
considering the wheel's effective ratio, relative movement of the train can be calculated.
\begin{wrapfigure}[8]{l}{3.9cm}
\centering
\vspace{-.7cm}
\includegraphics[width=3.4cm]{figures/wheel-df}
\caption{Motion sensing via an odometer.}
\label{wheel-df}
\end{wrapfigure}
\autoref{wheel-df} shows that we model a train, seen from a pure kinematics standpoint, as physical
system characterized by a one-dimensional continuous distance function, which represents the
observable of the physical system. Concepts like speed and acceleration are derived concepts
defined as their (gradient) derivatives. We assume the use of the meter, kilogram, and second
(MKS) system.
This model is already based on several fundamental assumptions relevant for the correct
functioning of the system and for its integration into the system as a whole. In
particular, we need to make the following assumptions explicit:\vspace{-.3cm}
\<^item> that the wheel is perfectly circular with a given, constant radius,
\<^item> that the slip between the train's wheel and the track is negligible,
\<^item> the distance between all teeth of a wheel is the same and constant, and
\<^item> the sampling rate of positions is a given constant.
These assumptions have to be traced throughout the certification process as
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
also reflected by their tracing throughout the body of certification documents. This may result
in operational regulations, \eg, regular checks for tolerable wheel defects. As for the
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
slip can be neglected: the device can only produce reliable results under certain physical
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
architectural arrangements for situations where this assumption cannot be assured (as is the
case, for example, of emergency braking) together with error-detection and error-recovery.
\<close>
subsection\<open>Capturing ``System Architecture.''\<close>
text\<open>
\begin{figure}
\centering
\includegraphics[width=.70\textwidth]{figures/three-phase-odo}
\begin{picture}(0,0)
\put(-112,44){\includegraphics[width=.30\textwidth]{figures/odometer}}
\end{picture}
\caption{An odometer with three sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3}.}\label{three-phase}
\end{figure}
The requirements analysis also contains a sub-document \<^emph>\<open>system architecture description\<close>
(CENELEC notion) that contains a technical drawing of the odometer, a timing diagram
(see \autoref{three-phase}), and tables describing the encoding of the position
for the possible signal transitions of the sensors \inlineisar{C1}, \inlineisar{C2}, and $C3.$
\<close>
subsection\<open>Capturing ``System Interfaces.''\<close>
text\<open>
The requirements analysis also contains a sub-document \<^emph>\<open>functions and interfaces\<close>
(CENELEC notion) describing the technical format of the output of the odometry function.
This section, \eg, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
``Estimation of the speed (in mm/sec) evaluated over the latest $N_{\text{avg}}$ samples''
where the speed refers to the physical speed of the train and $N_{\text{avg}}$ a parameter of the
sub-system configuration. \<close>
(*<*)
declare_reference*["df-numerics-encshaft"::figure]
(*>*)
subsection\<open>Capturing ``Required Performances.''\<close>
text\<open>
The given analysis document is relatively implicit on the expected precision of the measurements;
however, certain interface parameters like \inlineisar*Odometric_Position_TimeStamp*
(a counter on the number of samplings) and \inlineisar*Relative_Position* are defined as
unsigned 32 bit integers. These definitions imply exported constraints concerning the acceptable
time of service as well as the maximum distance before a necessary reboot of the subsystem.
For our case-study, we assume a maximum deviation of the \inlineisar*Relative_Position* from the
theoretical distance.
The requirement analysis document describes the physical environment, the architecture
of the measuring device, and the required format and precision of the measurements of the odometry
function as represented (see @{figure (unchecked) "df-numerics-encshaft"}).\<close>
figure*["df-numerics-encshaft"::figure,relative_width="76",src="''figures/df-numerics-encshaft''"]
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
text\<open>
\enlargethispage{\baselineskip}
The design provides a function that manages an internal first-in-first-out buffer of
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
recovery, another one, fatal, signaling, \eg, a defect of the receiver hardware),
calculating the relative position, speed and acceleration.
\<close>
subsection\<open>Capturing the ``Software Implementation'' (Resume).\<close>
text\<open>
While the design is executable on a Linux system, it turns out that the generated code from an
Isabelle model is neither executable on the resource-constrained target platform, an ARM-based
Sabre-light card, nor certifiable, since the compilation chain via ML to C implies the
inclusion of a run-time system and quite complex libraries.
We adopted therefore a similar approach as used in the seL4 project~@{cite "Klein2014"}: we use a
hand-written implementation in C and verify it via
AutoCorres~@{cite "greenaway.ea:bridging:2012"} against
the design model. The hand-written C-source is integrated into the Isabelle/HOL technically by
registering it in the build-configuration and logically by a trusted C-to-HOL compiler included
in AutoCorres.
\<close>
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
text\<open>
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
documents as well as code into an integrated Isabelle document, we entered into the phase of
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
the definitions:
\begin{isar}
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
definition wheel_diameter::real ("w$_d$") where "w$_d$ \<equiv> SOME x. x > 0"
definition wheel_circumference::real ("w$_{\text{circ}}$") where "w$_{\text{circ}}$ \<equiv> pi * w$_d$"
definition \<delta>s$_{\text{res}}$::real where "\<delta>s$_{\text{res}}$ \<equiv> w$_{\text{circ}}$ / (2 * 3 * tpw)"
\end{isar}
Here, \inlineisar{real} refers to the real numbers as defined in the HOL-Analysis
library, which provides concepts such as Cauchy Sequences, limits,
differentiability, and a very substantial part of classical Calculus. \inlineisar{SOME} is the
Hilbert choice operator from HOL; the definitions of the model parameters admit all possible positive values as uninterpreted
constants. Our perfect-wheel assumption is translated into a calculation of the circumference of the
wheel, while \inlineisar{\<delta>s<bsub>res<esub>}, the resolution of the odometer, can be calculated
from these parameters. HOL-Analysis permits us to formalize the fundamental physical observables:
\begin{isar}
type_synonym distance_function = "real\<Rightarrow>real"
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real"
where "Accel f \<equiv> deriv (deriv f)"
\end{isar}
which permits us to constrain the central observable \inlineisar|distance_function| in a
way that it describes the space of ``normal behavior'' where we expect the odometer to produce
reliable measurements over a \inlineisar|distance_function df|.
The essence of the physics of the train is covered by the following definition:
\begin{isar}
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
where normally_behaved_distance_function df =
( \<forall> t. df(t) \<in> \<real>$_{\ge 0}$ \<and> (\<forall> t \<in> \<real>$_{\le 0}$. df(t) = 0)
\<and> df differentiable on$_{\text{R}}$ \<and> (Speed df)differentiable on$_{\text{R}}$
\<and> (Accel df)differentiable on$_{\ensuremath{R}}$
\<and> (\<forall> t. (Speed df) t \<in> {-Speed$_{\text{Max}}$ .. Speed$_{\text{Max}}$})
\<and> (\<forall> t. (Accel df) t \<in> {-\<bar>Accel$_{\text{Max}}$\<bar> .. \<bar>Accel$_{\text{Max}}$\<bar>}))
\end{isar}
which constrains the distance functions in the bounds described of the informal descriptions and
states them as three-fold differentiable function in certain bounds concerning speed and acceleration.
Note that violations, in particular of the constraints on speed and acceleration, \<^emph>\<open>do\<close> occur in practice.
In such cases, the global system adapts recovery strategies that are out of the scope of our model.
Concepts like \inlineisar+shaft_encoder_state+ (a triple with the sensor values
\inlineisar{C1}, \inlineisar{C2}, \inlineisar{C3}) were formalized as types, while tables were defined as recursive functions:
\enlargethispage{2\baselineskip}\begin{isar}
fun phase$_0$ :: "nat \<Rightarrow> shaft_encoder_state" where
"phase$_0$ (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|"phase$_0$ (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|"phase$_0$ (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|"phase$_0$ (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|"phase$_0$ (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|"phase$_0$ (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|"phase$_0$ x = phase$_0$(x - 6)"
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase$_0$(x-1)
\end{isar}
We now define shaft encoder sequences as
translations of distance functions:
\begin{isar}
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
where "encoding df init$_{\text{pos}}$ \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s$_{\text{res}}$\<rfloor> + init$_{\text{pos}}$)"
\end{isar}
where \inlineisar+init$_{\text{pos}}$+ is the initial position of the wheel.
\inlineisar+sampling+'s were constructed from encoding sequences over discretized time points:
\begin{isar}
definition $\!\!$sampling::"distance$\!$_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft$\!$_encoder$\!$_state"
where "sampling df init$_{\text{pos}}$ \<delta>t \<equiv> \<lambda>n::nat. encoding df init$_{\text{pos}}$ (n * \<delta>t)"
\end{isar}
The sampling interval \inlineisar+\<delta>t+ (the inverse of the sampling frequency) is a critical
parameter of the configuration of a system.
Finally, we can formally define the required performances. From the interface description
and the global model parameters such as wheel diameter, the number of teeth per wheel, the sampling
frequency etc., we can infer the maximal time of service as well as the maximum distance the
device can measure.
As an example configuration, choosing 1m for
\inlineisar+w$_d$+, 100 for \inlineisar+tpw+, 80km/h \inlineisar+Speed$_{\text{Max}}$+,
and 14400Hz for the sampling frequency, results in an odometer resolution of 2.3mm,
a maximum distance of 9878km, and a maximal system up-time of 123.4 hours.
The required precision of an odometer can be defined by a constant describing
the maximally allowed difference between \inlineisar+df(n*\<delta>t)+ and
\inlineisar+sampling df init$_{\text{pos}}$ \<delta>t n+ for all \inlineisar+init$_{\text{pos}}$ \<in>{0..5}+.
\<close>
(*<*)
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
val dist_max = 0.0023 * (real two_thirty2) / 1000.0;
val dist_h = dist_max / 80.0\<close>
(*>*)
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
text\<open>The original documents contained already various statements that motivate certain safety
properties of the device. For example, the \inlineisar+Phase+-table excludes situations in which
the sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3} are all ``off'' or situations in
which all sensors are ``on,'' reflecting a physical or electrical error in the odometer. It can be
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed from the
above definitions:
\begin{isar}
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
proof (cases x)
case 0 then show ?thesis by (simp add: Phase_def)
next
case (Suc n) then show ?thesis
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
qed
\end{isar}
for all positions \inlineisar+x+. Similarly, it is proved that the table is indeed
cyclic: \inlineisar+ phase$_0$ x = phase$_0$(x mod 6)+ and locally injective:
\inlineisar+\<forall>x<6. \<forall>y<6. phase$_0$ x = phase$_0$ y \<longrightarrow> x = y+.
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
that we would like to present in more detail.
\begin{isar}
theorem minimal_sampling :
assumes * : normally_behaved_distance_function df
and ** : \<delta>t * Speed$_{\text{Max}}$ < \<delta>s$_{\text{res}}$
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
sampling df init$_{\text{pos}}$ \<delta>X = (sampling df init$_{\text{pos}}$ \<delta>t) o f
\end{isar}
This theorem states for \inlineisar+normally_behaved_distance_function+s that there is
a minimal sampling frequency assuring the safety of the measurements; samplings on
some \inlineisar$df$ gained from this minimal sampling frequency can be ``pumped up''
to samplings of these higher sampling frequencies; they do not contain more information.
Of particular interest is the second assumption, labelled ``\inlineisar|**|,'' which
establishes a lower bound from \inlineisar+w$_{\text{circ}}$+, \inlineisar+tpw+,
\inlineisar+Speed$_{\text{Max}}$+ for the sampling frequency. Methodologically, this represents
an exported constraint that can not be represented \<^emph>\<open>inside\<close> the design model: it means that the
computations have to be fast enough on the computing platform in order to assure that the
calculations are valid. It was in particular this exported constraint that forced us to give up
the original plan to generate the code from the design model and to execute this directly on the
target platform.
For our example configuration (1m diameter, 100 teeth per wheel, 80km/h max), this theorem justifies
that 14.4 kHz is indeed enough to assure valid samplings. Such properties are called
``internal consistency of the software requirements specification'' in the CENELEC
standard~@{cite "bsi:50128:2014"}, 7.2.4.22 and are usually addressed in a dedicated report.
\<close>
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
text\<open>
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
like any other modeling activity, not a simple one-to-one translation of some
concepts to some formalism. Rather, implicit and self-understood principles
have to be made explicit, abstractions have to be made, and decisions about
the kind of desirable user-interaction may have an influence similarly to
design decisions influenced by strengths or weaknesses of a programming language.
\<close>
section*[lhf::text_section]
\<open>Tracking Concepts and Definitions\<close>
text\<open>
\isadof is designed to annotate text elements with structured meta-information and to reference
these text elements throughout the integrated source. A classical application of this capability
is the annotation of concepts and terms definitions---be them informal, semi-formal or formal---and
their consistent referencing. In the context of our CENELEC ontology, \eg, we can translate the
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
of the concept ``traceability'' in paragraph 3.1.46 (a notion referenced 31 times in the standard),
which we translated directly into:
\begin{isar}
Definition*[traceability::concept]<open> degree to which relationship
can be established between two or more products of a development
process, especially those having a predecessor/successor or
master/subordinate relationship to one another. <close>
\end{isar}
In the integrated source of the odometry study, we can reference in a text element to this
concept as follows:
\begin{isar}
text*[...]<open> ... to assure <@>{concept traceability} for
<@>{requirement bitwiseAND}, we prove ... <close>
\end{isar}
The presentation of this document element inside \isadof is immediately hyperlinked against the
\inlineisar+Definition*+ element shown above; this serves as documentation of
the standard for the development team working on the integrated source. The PDF presentation
of such links depends on the actual configurations for the document generation; We will explain
this later.
CENELEC foresees also a number of roles, phases, safety integration levels, etc., which were
directly translated into HOL enumeration types usable in ontological concepts of ODL.
\begin{isar}
datatype role =
PM (* Program Manager *) | RQM (* Requirements Manager *)
| DES (* Designer *) | IMP (* Implementer *) |
| VER (* Verifier *) | VAL (* Validator *) | ...
datatype phase =
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
| SR (* Software Requirement *) | SA (* Software Architecture *)
| SDES (* Software Design *) | ...
\end{isar}
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
a classification of \<^emph>\<open>verification and testing techniques\<close>:
\begin{isar}
datatype vnt_technique =
formal_proof "thm list" | stat_analysis
| dyn_analysis dyn_ana_kind | ...
\end{isar}
In contrast to the standard, we can parameterize \inlineisar+formal_proof+ with a list of
theorems, an entity known in the Isabelle kernel. Here, \isadof assures for text elements
annotated with theorem names, that they refer indeed to established theorems in the Isabelle
environment. Additional checks could be added to make sure that these theorems have a particular
form.
While we claim that this possibility to link to theorems (and test-results) is unique in the
world of systems attempting to assure traceability, referencing a particular (proven) theorem is
definitely not sufficient to satisfy the claimed requirement. Human evaluators will always have
to check that the provided theorem \<open>adequately\<close> represents the claim; we do not in the slightest
suggest that their work is superfluous. Our framework allows to statically check that tests or proofs
have been provided, at places where the ontology requires them to be, and both assessors and developers
can rely on this check and navigate through related information easily. It does not guarantee that
intended concepts for, \eg, safety or security have been adequately modeled.
\<close>
section*[moe::text_section]
\<open>Major Ontological Entities: Requirements and Evidence\<close>
text\<open>
We introduce central concept of a \<^emph>\<open>requirement\<close> as an ODL \inlineisar*doc_class*
based on some generic basic library \inlineisar*text_element* providing basic layout attributes.
\begin{isar}
doc_class requirement = text_element +
long_name :: "string option"
is_concerned :: "role set"
\end{isar}
where the \inlineisar*roles* are exactly the ones defined in the previous section and represent
the groups of stakeholders in the CENELEC process. Therefore, the \inlineisar+is_concerned+-attribute
allows expressing who ``owns'' this text-element. \isadof supports a role-based
presentation, \eg, different presentation styles of the
integrated source may decide to highlight, to omit, to defer into an annex, text entities
according to the role-set.
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
\begin{isar}
doc_class sub_requirement =
decomposes :: "requirement"
relates_to :: "requirement set"
\end{isar}\<close>
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
text\<open>An example for making explicit implicit principles,
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.:\vspace{-1.5mm}
\begin{quote}\small
The objective of software verification is to examine and arrive at a judgment based on
evidence that output items (process, documentation, software or application) of a specific
development phase fulfill the requirements and plans with respect to completeness, correctness
and consistency.
\end{quote}\vspace{-1.5mm}
The terms \<^emph>\<open>judgment\<close> and \<^emph>\<open>evidence\<close> are used as a kind of leitmotif throughout the CENELEC
standard, but they are neither explained nor even listed in the general glossary. However, the
standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that different stakeholders
should have in the process. We express this key concept of judgement, \eg, by
the following concept:
\begin{isar}
doc_class judgement =
refers_to :: requirement
evidence :: "vnt_technique list"
status :: status
is_concerned :: "role set" <= "{VER,ASR,VAL}"
\end{isar}
As one can see, the role set is by default set to the verification team, the assessors, and the
validation team.
There are different views possible here: an alternative would be to define \inlineisar+evidence+
as ontological concept with \inlineisar+vnt_technique+'s (rather than an attribute of judgement)
and consider the basis of judgements as a relation between requirements and evidence:
\begin{isar}
doc_class judgement =
based_on :: "(requirement \<times> evidence) set"
status :: status
is_concerned :: "role set" <= "{VER,ASR,VAL}"
\end{isar}
More experimentation will be needed to find out what kind of ontological modeling is most
adequate for developers in the context of \isadof.
\<close>
section*[ontocontrol::text_section]\<open>Ontological Compliance\<close>
text\<open>From the variety of different possibilities for adding CENELEC annotations to the
integrated source, we will, in the following, point out three scenarios.\<close>
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
text\<open>In our case, the SR-team early on detected a property necessary
for error-detection of the device (c.f. @{docitem verific}):
\enlargethispage{2\baselineskip}\begin{isar}
text*[encoder_props::requirement]<open> The requirement specification team ...
C1 & C2 & C3 = 0 (bitwise logical AND operation)
C1 | C2 | C3 = 1 (bitwise logical OR operation) <close>
\end{isar}
After the Isabelle proofs shown in @{docitem verific}, we can either register the theorems
directly in an evidence statement:
\begin{isar}
text*[J1::judgement, refers_to="<@>{docitem <open>encoder_props<close>}",
evidence="[formal_proof[<@>{thm <open>Encoder_Property_1<close>},
<@>{thm <open>Encoder_Property_2<close>}]]"]
<open>The required encoder properties are in fact verified to be consistent
with the formalization of <@>{term "phase$_0$"}.<close>
\end{isar}
The references \inlineisar|<@>{...}|, called antiquotations, allow us not only to reference
formal concepts; they are checked for consistency, and there are also antiquotations that
print the formally checked content (\eg, the statement of a theorem).
\<close>
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
text\<open>By definition, the main purpose of the requirement specification is the
identification of the safety requirements. As an example, we state the required precision of an
odometric function: for any normally behaved distance function \inlineisar+df+, and any representable
and valid sampling sequence that can be constructed for \inlineisar+df+, we require that the
difference between the physical distance and distance calculable from the
@{term Odometric_Position_Count} is bound by the minimal resolution of the odometer.
\begin{isar}
text*[R5::safety_requirement]<open>We can now state ... <close>
definition
Odometric_Position_Count_precise::(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool
where Odometric_Position_Count_precise odofunction \<equiv>
(\<forall> df. \<forall>S. normally_behaved_distance_function df
\<longrightarrow> representable S
\<longrightarrow> valid_sampling S df
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
in \<bar>df((length S - 1)*\<delta>t$_{\text{odo}}$) - (\<delta>s$_{\text{res}}$ * pos)\<bar> \<le> \<delta>s$_{\text{res}}$))
update_instance*[R5::safety_requirement,
formal_definition:="[<@>{thm <open>Odometric_Position_Count_precise_def<close>}]"]
\end{isar}
By \inlineisar+update_instance*+, we book the property \inlineisar+Position_Count_precise_def+ as
\inlineisar+safety_requirement+, a specific sub-class of \inlineisar+requirement+s
requesting a formal definition in Isabelle.\<close>
subsection\<open>Exporting Derived Requirements.\<close>
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
for a major theorem necessary for the safety requirements; this was in our development the case for
the condition labelled ``\inlineisar|**|'' in @{docitem verific}. The current CENELEC standard clearly separates
``requirement specifications'' from ``verification reports,'' which is probably motivated
by the overall concern of organizational separation and of document consistency. While this
document organization is possible in \isadof, it is in our experience often counter-productive
in practice: organizations tend to defend their documents because the impact of changes is more and more
difficult to assess. This effect results in a dramatic development slow-down and an increase of
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
invariants, but can not communicate them to the verification team because the precise formalization
is not known in time. Rather than advocating document separation, we tend to integrate these documents,
keep proof as close as possible to definitions, and plead for consequent version control of the
integrated source, together with the proposed methods to strengthen the links between the informal
and formal parts by anti-quotations and continuous ontological checking. Instead of separation
of the documents, we would rather emphasize the \<^emph>\<open>separation of the views\<close> of the different
document representations. Such views were systematically generated out of the integrated source in
different PDF versions and for each version, document specific consistency guarantees can be
automatically enforced.
In our case study, we define this condition as predicate, declare an explanation of it as
\inlineisar+SRAC+ (CENELEC for: safety-related application condition; ontologically, this is a
derived class from \inlineisar+requirement+.) and add the definition of the predicate into the
document instance as described in the previous section.\<close>
text\<open>\appendix\<close>
chapter\<open>Appendix\<close>
text\<open>
\<^item> \inlineisar|<@>{thm refl}|: @{thm refl}
\<^item> \inlineisar|<@>{thm [source] refl}|: @{thm [source] refl}
\<^item> \inlineisar|<@>{thm[mode=Rule] conjI}|: @{thm[mode=Rule] conjI}
\<^item> \inlineisar|<@>{file "mini_odo.thy"}|: @{file "mini_odo.thy"}
\<^item> \inlineisar|<@>{value "3+4::int"}|: @{value "3+4::int"}
\<^item> \inlineisar|<@>{const hd}|: @{const hd}
\<^item> \inlineisar|<@>{theory HOL.List}|: @{theory HOL.List}
\<^item> \inlineisar|<@>{term "3"}|: @{term "3"}
\<^item> \inlineisar|<@>{type bool}|: @{type bool}
\<^item> \inlineisar|<@>{term [show_types] "f x = a + x"}|: @{term [show_types] "f x = a + x"}
\<close>
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
text*[ass1::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>
text*[ass122::SRAC, long_name="Some ''ass122''"] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[ass123::SRAC] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[ass124::EC, long_name="Some ''ass124''"] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
that governs the external test-execution via, eg., a makefile or specific calls
to a test-environment or test-engine \<close>
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
meta-information and status. \<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>} \<close>
text \<open> the @{docref \<open>t10\<close>}
as well as the @{docref \<open>ass122\<close>}\<close>
text \<open> represent a justification of the safety related applicability
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
(*<*)
end
(*>*)

View File

@ -1,4 +1,4 @@
scholarly_paper
technical_report
math_exam
cenelec
CENELEC_50128

View File

@ -1,94 +0,0 @@
chapter\<open> Example : Forward and Standard (use-after-define) Referencing\<close>
theory Example
imports "../../ontologies/CENELEC_50128"
"../../ontologies/scholarly_paper"
begin
section\<open> Some examples of Isabelle's standard antiquotations. \<close>
(* some show-off of standard anti-quotations: *)
text \<open>THIS IS A TEXT\<close>
term "[]"
text\<open> @{thm refl} of name @{thm [source] refl}
@{thm[mode=Rule] conjI}
@{file "../../Isa_DOF.thy"}
@{value "3+4::int"}
@{const hd}
@{theory HOL.List}}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
section\<open> Core Examples for stating text-elements as doc-items.\<close>
text\<open>An "anonymous" text-item, automatically coerced into the top-class "text".\<close>
text*[tralala] \<open> Brexit means Brexit \<close>
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the
@{theory "Draft.CENELEC_50128"}
ontology:\<close>
text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>
text*[ass122::SRAC] \<open> The overall sampling frequence of the odometer
subsystem is therefore 14 khz, which includes sampling, computing and
result communication times... \<close>
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
that governs the external test-execution via, eg., a makefile or specific calls
to a test-environment or test-engine \<close>
section\<open> References to doc-items.\<close>
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
meta-information and status. \<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>} \<close>
text \<open> the @{docref \<open>t10\<close>}
as well as the @{docref \<open>ass122\<close>}\<close>
text \<open> represent a justification of the safety related applicability
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
section\<open> Some Tests for Ontology Framework and its CENELEC Instance \<close>
declare_reference* [lalala::requirement, alpha="main", beta=42]
declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1]
paragraph* [sdfk::introduction] \<open> just a paragraph - lexical variant \<close>
section*[h::example]\<open> Some global inspection commands for the status of docitem and doc-class tables ... \<close>
section*[i::example]\<open> Text Antiquotation Infrastructure ... \<close>
text\<open> @{docitem \<open>lalala\<close>} -- produces warning. \<close>
text\<open> @{docitem (unchecked) \<open>lalala\<close>} -- produces no warning. \<close>
text\<open> @{docitem \<open>ass122\<close>} -- global reference to a text-item in another file. \<close>
text\<open> @{EC \<open>ass122\<close>} -- global reference to a exported constraint in another file.
Note that the link is actually a srac, which, according to
the ontology, happens to be an "ec". \<close>
end

View File

@ -1,3 +0,0 @@
Template: scrartcl
Ontology: scholarly_paper
Ontology: cenelec_50128

View File

@ -1,3 +0,0 @@
%% This is a placeholder for user-specific configuration and packages.
\title{mini-odo}{}{}{}{}{}{}
\author{By brucker}{}{}{}{}{}

View File

@ -1,90 +0,0 @@
theory mini_odo
imports "Isabelle_DOF.CENELEC_50128"
"Isabelle_DOF.scholarly_paper"
begin
section\<open> Some examples of Isabelle's standard antiquotations. \<close>
(* some show-off of standard anti-quotations: *)
text \<open>THIS IS A TEXT\<close>
text\<open> @{thm refl} of name @{thm [source] refl}
@{thm[mode=Rule] conjI}
@{file "mini_odo.thy"}
@{value "3+4::int"}
@{const hd}
@{theory HOL.List}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
section\<open> Core Examples for stating text-elements as doc-items.\<close>
text\<open>An "anonymous" text-item, automatically coerced into the top-class "text".\<close>
text*[tralala] \<open> Brexit means Brexit \<close>
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>
text*[ass122::SRAC] \<open> The overall sampling frequence of the odometer
subsystem is therefore 14 khz, which includes sampling, computing and
result communication times... \<close>
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
that governs the external test-execution via, eg., a makefile or specific calls
to a test-environment or test-engine \<close>
section\<open> References to doc-items.\<close>
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
meta-information and status. \<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>} \<close>
text \<open> the @{docref \<open>t10\<close>}
as well as the @{docref \<open>ass122\<close>}\<close>
text \<open> represent a justification of the safety related applicability
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
section\<open> Some Tests for Ontology Framework and its CENELEC Instance \<close>
declare_reference* [lalala::requirement, alpha="main", beta=42]
declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1]
section*[h::example]\<open> Some global inspection commands for the status of docitem and
doc-class tables ... \<close>
section*[i::example]\<open> Text Antiquotation Infrastructure ... \<close>
(*<*)
text\<open> @{docitem \<open>lalala\<close>} -- produces warning. \<close>
text\<open> @{docitem (unchecked) \<open>lalala\<close>} -- produces no warning. \<close>
(*>*)
text\<open> @{docitem \<open>ass122\<close>} -- global reference to a text-item in another file. \<close>
text\<open> @{EC \<open>ass122\<close>} -- global reference to a exported constraint in another file.
Note that the link is actually a srac, which, according to
the ontology, happens to be an "ec". \<close>
end

View File

@ -1,117 +0,0 @@
theory BAC2017
imports "Isabelle_DOF.mathex_onto"
Deriv
Transcendental
begin
open_monitor*[exam::MathExam]
(* currently rethinking on "deep ontologies" necessary ... Achim
text*[idir::Author,affiliation="''LRI, CentraleSupelec''",
email="''idir.aitsadoune@centralesupelec.fr''"]
{*Idir AIT SADOUNE*}
text*[keller::Author,affiliation="''LRI, Univ. Paris-Sud''",
email="''Chantal.Keller@lri.fr''"]
{*Chantal KELLER*}
text{* This example is an excerpt from the french baccaleareat 2017.
The textual explanations were kept in french.
*}
*)
text*[header::Header,examSubject="[analysis,geometry]", date="''21-06-2017''",
timeAllowed="240::int"]{* BACCALAUREAT GENERAL MATHEMATIQUES *}
text{*
\begin{itemize}
\item Les calculatrices électroniques de poche sont autorisées, conformément à la réglementation
en vigueur.
\item Le sujet est composé de 4 exercices indépendants.
\item Le candidat doit traiter tous les exercices.
\item Le candidat est invité à faire figurer sur la copie toute trace de recherche, même incomplète
ou non fructueuse, quil aura développée.
\item Il est rappelé que la qualité de la rédaction, la clarté et la précision des raisonnements
entreront pour une part importante dans lappréciation des copies.
\end{itemize}
*}
text*[exo1 :: Exercise,
concerns= "{setter,student,checker,externalExaminer}"]
{* On considère la fonction h définie sur lintervalle [0..+\<infinity>] par :
@{term "h(x) = x * exponent (-x)"}
*}
definition h :: "real \<Rightarrow> real"
where "h x \<equiv> x * exp (- x)"
text*[q1::Task, concerns= "{setter,student}",
level="oneStar", mark="1::int", type="formal"]
{* Déterminer la limite de la fonction @{term h} en +\<infinity>. *}
text*[a1::Answer_Formal_Step] {* Fill in term and justification*}
lemma q1 : "(h \<longlongrightarrow> (0::real)) at_top" sorry
text*[v1::Validation, proofs="[@{thm ''HOL.refl''}::thm]"] {* See lemma @{thm q1}. *}
text*[q2::Task, concerns= "{setter,checker,student}",
level="oneStar", mark="1::int", type="formal"]
{* Étudier les variations de la fonction @{term h} sur l'intervalle [0..+\<infinity>] et
dresser son tableau de variation *}
text*[a2::Answer_Formal_Step]
{* Fill in term and justification*}
definition h' :: "real \<Rightarrow> real"
where "h' x \<equiv> (1 - x) * exp (- x)"
lemma q2_a : "DERIV h x :> h' x"
proof -
have * : "DERIV (exp \<circ> uminus) x :> - (exp (-x))"
sorry (* by (simp add: has_derivative_compose) *)
have ** : "DERIV id x :> 1"
by (metis DERIV_ident eq_id_iff)
have *** : "DERIV h x :> x * (- (exp (- x))) + 1 * (exp (- x))"
sorry (* by (simp add: * ** has_derivative_mult comp_def) *)
show ?thesis
sorry (* by (metis "***" left_diff_distrib mult_minus_right uminus_add_conv_diff) *)
qed
lemma q2_b : "0 \<le> x \<and> x \<le> y \<and> y \<le> 1 \<Longrightarrow> h x \<le> h y"
sorry
lemma q2_c : "1 \<le> x \<and> x \<le> y \<Longrightarrow> h x \<ge> h y"
sorry
text*[v2::Validation, proofs="[@{thm ''BAC2017.q2_b''}, @{thm ''BAC2017.q2_c''}]"]
{* See lemmas @{thm q2_b} and @{thm q2_c}. *}
text*[q3a::Task, concerns= "{setter,checker,student}",
level="oneStar", mark="1::int", type="formal"]
{* Vérifier que pour tout nombre réel x appartenant à l'intervalle [0..+\<infinity>], on a :
@{term "h x = (exp (- x)) - (h' x)"}. *}
text*[a3a::Answer_Formal_Step]
{* Fill in term and justification*}
lemma q3a : "h x = (exp (- x)) - (h' x)"
by (simp add : h_def h'_def left_diff_distrib)
subsubsection*[v3a::Validation, proofs="[@{thm ''BAC2017.q3a''}::thm]"]
{* See lemma @{thm q3a}. *}
subsection*[sol1 :: Solution,
content="[exo1::Exercise]",
valids = "[v1::Validation,v2,v3a]"]
{* See validations. *}
close_monitor*[exam]
end

View File

@ -1,11 +0,0 @@
session "BAC2017" = "Isabelle_DOF" +
options [document = pdf, document_output = "output",quick_and_dirty=true]
theories [document = false]
"Deriv"
"Transcendental"
theories
BAC2017
document_files
"isadof.cfg"
"preamble.tex"
"build"

View File

@ -1,2 +0,0 @@
Template: scrartcl
Ontology: mathex

View File

@ -1,47 +0,0 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
\title{<TITLE>}
\author{<AUTHOR>}
\newkeycommand\isaDofTextExercise[label=,type=,Exercise.content=,content=,concerns=,][1]{%
\begin{Exercise}
#1
\end{Exercise}
}
\newkeycommand\isaDofTextSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofSectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofSubsectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofSubsubsectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofTextExercise[Task.concerns=,concerns=,content=,][1]{%
#1
}
\newkeycommand\isaDofTextValidation[tests=,proofs=,][1]{%
#1
}

View File

@ -1,22 +0,0 @@
\documentclass{article}
\usepackage{hyperref}
\begin{document}
\begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}]
\begin{tabular}{l}
\TextField{From } \\\\
\TextField{have 1} \\\\
\TextField{have 2} \\\\
\TextField{have 3} \\\\
\TextField{finally show} \\\\
\CheckBox[width=1em]{Has the polynomial as many solutions as its degree ? } \\\\
\Submit{Submit}\\
\end{tabular}
\end{Form}
\end{document}

View File

@ -1,44 +1,34 @@
(*<*)
theory MathExam
imports "Isabelle_DOF.mathex_onto"
Real
imports "Isabelle_DOF.math_exam"
HOL.Real
begin
(*>*)
open_monitor*[exam::MathExam]
(* open_monitor*[exam::MathExam] *)
section*[header::Header,examSubject= "[algebra]",
date="''02-05-2018''", timeAllowed="90::int"] {* Exam number 1 *}
text{*
date="''02-05-2018''", timeAllowed="90::int"] \<open>Exam number 1\<close>
text\<open>
\begin{itemize}
\item Use black ink or black ball-point pen.
\item Draw diagrams in pencil.
\item Answer all questions in the spaces provided.
\end{itemize}
*}
\<close>
text*[idir::Author, affiliation="''CentraleSupelec''",
email="''idir.aitsadoune@centralesupelec.fr''"]
{*Idir AIT SADOUNE*}
\<open>Idir AIT SADOUNE\<close>
(* should be in DOF-core
* causes crash on the LaTeX side:
( FP-DIV )
*** ! Undefined control sequence.
*** <argument> ...ative_width}}{100} \includegraphics
*** [width=\scale \textwidth ]...
*** l.44 {A Polynome.}
*)
figure*[figure::figure, spawn_columns=False,
relative_width="80",
src="''figures/Polynomialdeg5.png''"]
src="''figures/Polynomialdeg5''"]
\<open>A Polynome.\<close>
subsubsection*[exo1 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 1\<close>
text{*
text\<open>
Here are the first four lines of a number pattern.
\begin{itemize}
\item Line 1 : @{term "1*6 + 2*4 = 2*7"}
@ -46,15 +36,15 @@ Here are the first four lines of a number pattern.
\item Line 3 : @{term "3*8 + 2*6 = 4*9"}
\item Line 4 : @{term "4*9 + 2*7 = 5*10"}
\end{itemize}
*}
\<close>
declare [[show_sorts=false]]
subsubsection*[exo2 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 2\<close>
text{* Find the roots of the polynome:
text\<open>Find the roots of the polynome:
@{term "(x^3) - 6 * x^2 + 5 * x + 12"}.
Note the intermediate steps in the following fields and submit the solution. *}
text{*
Note the intermediate steps in the following fields and submit the solution.\<close>
text\<open>
\begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}]
\begin{tabular}{l}
From @{term "(x^3) - 6 * x^2 + 5 * x + 12"} \\\\
@ -66,7 +56,7 @@ text{*
\Submit{Submit}\\
\end{tabular}
\end{Form}
*}
\<close>
(* a bit brutal, as long as lemma* does not yet work *)
(*<*)
@ -88,21 +78,21 @@ proof -
qed
(*>*)
text*[a1::Answer_Formal_Step]{* First Step: Fill in term and justification *}
text*[a2::Answer_Formal_Step]{* Next Step: Fill in term and justification *}
text*[a3::Answer_Formal_Step]{* Next Step: Fill in term and justification *}
text*[a4::Answer_Formal_Step]{* Next Step: Fill in term and justification *}
text*[a1::Answer_Formal_Step]\<open>First Step: Fill in term and justification\<close>
text*[a2::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[a3::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[a4::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[q1::Task, local_grade="oneStar", mark="1::int", type="formal"]
{* Complete Line 10 : @{term "10*x + 2*y = 11*16"} *}
\<open>Complete Line 10 : @{term "10*x + 2*y = 11*16"}\<close>
subsubsection*[exo3 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 3\<close>
text*[q2::Task, local_grade="threeStars", mark="3::int", type="formal"]
{* Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers
\<open>Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers
with a difference of 5.
*}
\<close>
(* this does not work on the level of the LaTeX output for known restrictions of the Toplevel. *)
close_monitor*[exam :: MathExam]
(* close_monitor*[exam :: MathExam] *)
end

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
@ -34,7 +34,7 @@ if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"

View File

@ -1,2 +1,2 @@
Template: scrartcl
Ontology: mathex
Ontology: math_exam

View File

@ -1,5 +1,5 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% 2018 The University of Paris-Saclay
%%
%% License:
%% This program can be redistributed and/or modified under the terms

View File

@ -1,8 +0,0 @@
theory Noodles
imports "../../ontologies/small_math"
"../../ontologies/technical_report"
begin
title*[t::title]\<open>On Noodles\<close>
end

View File

@ -1,32 +0,0 @@
theory "On_Noodles"
imports "../../ontologies/small_math"
"../../ontologies/technical_report"
begin
open_monitor*[this::article]
title*[t1::title]\<open>On Noodles\<close>
text*[simon::author]\<open>Simon Foster\<close>
text*[a::abstract, keywordlist = "[''topology'']"]
\<open>We present the first fundamental results on the goundbreaking theory of noodles...\<close>
section*[intro::introduction]\<open>Introduction\<close>
text\<open> Authorities say, that Noodles are unleavened dough which is stretched,
extruded, or rolled flat and cut into one or a variety of shapes which usually
include long, thin strips, or waves, helices, tubes, strings, or shells, or
folded over, or cut into other shapes. Noodles are usually cooked in boiling water,
sometimes with cooking oil or salt added. \<close>
section*[def_sec::technical]\<open>Basic definitions\<close>
text*[d1::"definition"]\<open>My first definition\<close>
definition noodle ::"bool" where "noodle = (THE x. True)"
(*
update_instance*[def1, formal_results:="[@{thm ''noodle_def''}]"]
*)
close_monitor*[this::article]
end

View File

@ -0,0 +1 @@
MathExam

View File

@ -4,9 +4,9 @@ theory IsaDofApplications
begin
open_monitor*[this::article]
declare[[strict_monitor_checking=false]]
(*>*)
declare[[strict_monitor_checking=false]]
title*[tit::title]\<open>Using the Isabelle Ontology Framework\<close>
subtitle*[stit::subtitle]\<open>Linking the Formal with the Informal\<close>
text*[adb:: author,
@ -21,7 +21,7 @@ text*[paolo::author,
affiliation= "''IRT-SystemX, Paris, France''"]\<open>Paolo Crisafulli\<close>
text*[bu::author,
email = "\<open>wolff@lri.fr\<close>",
affiliation = "\<open>Université Paris-Sud, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
affiliation = "\<open>Université Paris-Saclay, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
text*[abs::abstract,
@ -230,7 +230,7 @@ enforcing a sequence of text-elements that must belong to the corresponding clas
To start using \isadof, one creates an Isabelle project (with the name
\inlinebash{IsaDofApplications}):
\begin{bash}
isabelle DOF_mkroot -o scholarly_paper -t lncs -d IsaDofApplications
isabelle mkroot_DOF -o scholarly_paper -t lncs -d IsaDofApplications
\end{bash}
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
@ -389,7 +389,7 @@ text\<open> The document class \inlineisar+figure+ --- supported by the \isadof
such as @{docitem_ref \<open>fig_figures\<close>}.
\<close>
subsection*[mathex_onto::example]\<open> The Math-Exam Scenario \<close>
subsection*[math_exam::example]\<open> The Math-Exam Scenario \<close>
text\<open> The Math-Exam Scenario is an application with mixed formal and
semi-formal content. It addresses applications where the author of the exam is not present
during the exam and the preparation requires a very rigorous process, as the french
@ -729,7 +729,7 @@ text\<open> \isadof in its present form has a number of technical short-comings
paragraph\<open> Availability. \<close>
text\<open> The implementation of the framework, the discussed ontology definitions,
and examples are available at
\url{https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF/}.\<close>
\url{\dofurl}.\<close>
paragraph\<open> Acknowledgement. \<close>
text\<open> This work was partly supported by the framework of IRT SystemX, Paris-Saclay, France,
and therefore granted with public funds within the scope of the Program ``Investissements dAvenir''.\<close>

View File

@ -1,4 +1,4 @@
session "IsaDofApplications" = "Isabelle_DOF" +
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", quick_and_dirty = true]
theories
IsaDofApplications

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
@ -34,7 +34,7 @@ if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"

View File

@ -1,5 +1,5 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% 2018 The University of Paris-Saclay
%%
%% License:
%% This program can be redistributed and/or modified under the terms

View File

@ -1 +1 @@
2018_cicm
2018-cicm-isabelle_dof-applications

View File

@ -1,54 +0,0 @@
chapter\<open>Setting and modifying attributes of doc-items\<close>
theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
text\<open>@{theory \<open>Draft.Conceptual\<close>} provides a monitor @{typ M} enforcing a particular document
structure. Here, we say: From now on, this structural rules are respected wrt. all
\<^theory_text>\<open>doc_class\<close>es @{typ M} is enabled for.\<close>
open_monitor*[struct::M]
section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
text*[d::D, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus,
... @{C c1} @{thm "refl"}\<close>
update_instance*[d::D, a1 := X2]
text\<open> ... in ut tortor ... @{docitem \<open>a\<close>} ... @{A \<open>a\<close>}\<close>
text*[c2::C, x = "''delta''"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>
text*[f::F] \<open> Lectus accumsan velit ultrices, ... }\<close>
theorem some_proof : "P" sorry
text\<open>This is an example where we add a theorem into a kind of "result-list" of the doc-item f.\<close>
update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"]
text\<open> ..., mauris amet, id elit aliquam aptent id, ... @{docitem \<open>a\<close>} \<close>
text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...
The type annotations with @{typ A} and @{typ C} are optional and may help to get
additional information at the HOL level, the arguments of the inner-syntax antiquotation
are strings that can be denoted in two different syntactic variants; the former is
more robust that the traditional latter.\<close>
update_instance*[f::F,b:="{(@{docitem \<open>a\<close>}::A,@{docitem \<open>c1\<close>}::C),
(@{docitem ''a''}, @{docitem ''c2''})}"]
close_monitor*[struct]
text\<open>And the trace of the monitor is:\<close>
ML\<open>@{trace_attribute struct}\<close>
print_doc_classes
print_doc_items
check_doc_global
end

View File

@ -1,51 +0,0 @@
(*<*)
theory "00_Frontmatter"
(* imports "Isabelle_DOF.technical_report" *)
imports "../../../ontologies/technical_report"
begin
open_monitor*[this::report]
(*>*)
title*[tit::title]\<open>The Isabelle/DOF User and Implementation Manual\<close>
text*[adb:: author,
email="''a.brucker@sheffield.ac.uk''",
orcid="''0000-0002-6355-1200''",
affiliation="''The University of Sheffield, Sheffield, UK''"]\<open>Achim D. Brucker\<close>
text*[idir::author,
email = "''idir.aitsadoune@centralesupelec.fr''",
affiliation = "''CentraleSupelec, Paris, France''"]\<open>Idir Ait-Sadoune\<close>
text*[paolo::author,
email = "''paolo.crisafulli@irt-systemx.fr''",
affiliation= "''IRT-SystemX, Paris, France''"]\<open>Paolo Crisafulli\<close>
text*[bu::author,
email = "''wolff@lri.fr''",
affiliation = "''Universit\\'e Paris-Sud, Paris, France''"]\<open>Burkhart Wolff\<close>
text*[abs::abstract,
keywordlist="[''Ontology'',''Ontological Modeling'',''Isabelle/DOF'']"]\<open>
While Isabelle is mostly known as part of Isabelle/HOL (an interactive
theorem prover), it actually provides a framework for developing a wide
spectrum of applications. A particular strength
of the Isabelle framework is the combination of text editing, formal verification,
and code generation.
Up to now, Isabelle's document preparation system lacks a mechanism
for ensuring the structure of different document types (as, e.g.,
required in certification processes) in general and, in particular,
mechanism for linking informal and formal parts of a document.
In this paper, we present \isadof, a novel Document Ontology Framework
on top of Isabelle. \isadof allows for conventional typesetting
\<^emph>\<open>as well\<close> as formal development. We show how to model document
ontologies inside \isadof, how to use the resulting meta-information
for enforcing a certain document structure, and discuss ontology-specific IDE support.
\<close>
(*<*)
end
(*>*)

View File

@ -1,69 +0,0 @@
(*<*)
theory "01_Introduction"
imports "00_Frontmatter"
begin
(*>*)
chapter*[intro::introduction]\<open> Introduction \<close>
text*[introtext::introduction]\<open>
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the
most pervasive challenge in the digitization of knowledge and its
propagation. This challenge incites numerous research efforts
summarized under the labels ``semantic web'', ``data mining'', or any
form of advanced ``semantic'' text processing. A key role in
structuring this linking play \<^emph>\<open>document ontologies\<close> (also called
\<^emph>\<open>vocabulary\<close> in the semantic web community~@{cite "w3c:ontologies:2015"}),
\ie, a machine-readable form of the structure of documents as well as
the document discourse.
Such ontologies can be used for the scientific discourse within scholarly
articles, mathematical libraries, and in the engineering discourse
of standardized software certification
documents~@{cite "boulanger:cenelec-50128:2015" and "cc:cc-part3:2006"}.
Further applications are the domain-specific discourse in juridical texts or medical reports.
In general, an ontology is a formal explicit description of \<^emph>\<open>concepts\<close>
in a domain of discourse (called \<^emph>\<open>classes\<close>), properties of each concept
describing \<^emph>\<open>attributes\<close> of the concept, as well as \<^emph>\<open>links\<close> between
them. A particular link between concepts is the \<^emph>\<open>is-a\<close> relation declaring
the instances of a subclass to be instances of the super-class.
The main objective of this paper is to present \isadof, a novel
framework to \<^emph>\<open>model\<close> typed ontologies and to \<^emph>\<open>enforce\<close> them during
document evolution. Based on Isabelle infrastructures, ontologies may refer to
types, terms, proven theorems, code, or established assertions.
Based on a novel adaption of the Isabelle IDE, a document is checked to be
\<^emph>\<open>conform\<close> to a particular ontology---\isadof is designed to give fast user-feedback
\<^emph>\<open>during the capture of content\<close>. This is particularly valuable in case of document
changes, where the \<^emph>\<open>coherence\<close> between the formal and the informal parts of the
content can be mechanically checked.
To avoid any misunderstanding: \isadof is \<^emph>\<open>not a theory in HOL\<close>
on ontologies and operations to track and trace links in texts,
it is an \<^emph>\<open>environment to write structured text\<close> which \<^emph>\<open>may contain\<close>
Isabelle/HOL definitions and proofs like mathematical articles, tech-reports and
scientific papers---as the present one, which is written in \isadof
itself. \isadof is a plugin into the Isabelle/Isar
framework in the style of~@{cite "wenzel.ea:building:2007"}.
\<close>
(* declaring the forward references used in the subsequent section *)
(*<*)
declare_reference*[bgrnd::text_section]
declare_reference*[isadof::text_section]
declare_reference*[ontomod::text_section]
declare_reference*[ontopide::text_section]
declare_reference*[conclusion::text_section]
(*>*)
text*[plan::introduction]\<open> The plan of the paper is follows: we start by introducing the underlying
Isabelel sytem (@{docitem_ref (unchecked) \<open>bgrnd\<close>}) followed by presenting the
essentials of \isadof and its ontology language (@{docitem_ref (unchecked) \<open>isadof\<close>}).
It follows @{docitem_ref (unchecked) \<open>ontomod\<close>}, where we present three application
scenarios from the point of view of the ontology modeling. In @{docitem_ref (unchecked) \<open>ontopide\<close>}
we discuss the user-interaction generated from the ontological definitions. Finally, we draw
conclusions and discuss related work in @{docitem_ref (unchecked) \<open>conclusion\<close>}. \<close>
(*<*)
end
(*>*)

View File

@ -1,79 +0,0 @@
(*<*)
theory "02_Background"
imports "01_Introduction"
begin
(*>*)
chapter*[bgrnd::text_section,main_author="Some(@{docitem ''adb''}::author)"]
\<open> Background: The Isabelle System \<close>
text*[background::introduction]\<open>
While Isabelle is widely perceived as an interactive theorem prover
for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we
would like to emphasize the view that Isabelle is far more than that:
it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This refers to the
``\textsl{generic system framework of Isabelle/Isar underlying recent
versions of Isabelle. Among other things, Isar provides an
infrastructure for Isabelle plug-ins, comprising extensible state
components and extensible syntax that can be bound to ML
programs. Thus, the Isabelle/Isar architecture may be understood as
an extension and refinement of the traditional `LCF approach', with
explicit infrastructure for building derivative
\<^emph>\<open>systems\<close>.}''~@{cite "wenzel.ea:building:2007"}
The current system framework offers moreover the following features:
\<^item> a build management grouping components into to pre-compiled sessions,
\<^item> a prover IDE (PIDE) framework~@{cite "wenzel:asynchronous:2014"} with various front-ends
\<^item> documentation - and code generators,
\<^item> an extensible front-end language Isabelle/Isar, and,
\<^item> last but not least, an LCF style, generic theorem prover kernel as
the most prominent and deeply integrated system component.
\<close>
figure*[architecture::figure,relative_width="100",src="''figures/isabelle-architecture''"]\<open>
The system architecture of Isabelle (left-hand side) and the
asynchronous communication between the Isabelle system and
the IDE (right-hand side). \<close>
text*[blug::introduction]\<open> The Isabelle system architecture shown in @{docitem_ref \<open>architecture\<close>}
comes with many layers, with Standard ML (SML) at the bottom layer as implementation
language. The architecture actually foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which
resides in the SML structure \texttt{Context}. This structure provides a kind of container called
\<^emph>\<open>context\<close> providing an identity, an ancestor-list as well as typed, user-defined state
for components (plugins) such as \isadof. On top of the latter, the LCF-Kernel, tactics,
automated proof procedures as well as specific support for higher specification constructs
were built. \<close>
text\<open> We would like to detail the documentation generation of the architecture,
which is based on literate specification commands such as \inlineisar+section+ \ldots,
\inlineisar+subsection+ \ldots, \inlineisar+text+ \ldots, etc.
Thus, a user can add a simple text:
\begin{isar}
text\<Open>This is a description.\<Close>
\end{isar}
These text-commands can be arbitrarily mixed with other commands stating definitions, proofs, code, etc.,
and will result in the corresponding output in generated \LaTeX{} or HTML documents.
Now, \<^emph>\<open>inside\<close> the textual content, it is possible to embed a \<^emph>\<open>text-antiquotation\<close>:
\begin{isar}
text\<Open>According to the reflexivity axiom \at{thm refl}, we obtain in \<Gamma>
for \at{term "fac 5"} the result \at{value "fac 5"}.\<Close>
\end{isar}
which is represented in the generated output by:
\begin{out}
According to the reflexivity axiom $x = x$, we obtain in $\Gamma$ for $\operatorname{fac} 5$ the result $120$.
\end{out}
where \inlineisar+refl+ is actually the reference to the axiom of reflexivity in HOL.
For the antiquotation \inlineisar+\at{value "fac 5"}+ we assume the usual definition for
\inlineisar+fac+ in HOL.
\<close>
text*[anti]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
displayed and can be used for calculations before actually being typeset. When editing,
Isabelle's PIDE offers auto-completion and error-messages while typing the above
\<^emph>\<open>semi-formal\<close> content. \<close>
(*<*)
end
(*>*)

View File

@ -1,523 +0,0 @@
(*<*)
theory "03_IsaDof"
imports "02_Background"
begin
(*>*)
chapter*[isadof::technical,main_author="Some(@{docitem ''adb''}::author)"]\<open> \isadof \<close>
text\<open> An \isadof document consists of three components:
\<^item> the \<^emph>\<open>ontology definition\<close> which is an Isabelle theory file with definitions
for document-classes and all auxiliary datatypes.
\<^item> the \<^emph>\<open>core\<close> of the document itself which is an Isabelle theory
importing the ontology definition. \isadof provides an own family of text-element
commands such as \inlineisar+title*+, \inlineisar+chapter*+, \inlineisar+text*+, etc.,
which can be annotated with meta-information defined in the underlying ontology definition.
\<^item> the \<^emph>\<open>layout definition\<close> for the given ontology exploiting this meta-information.
\<close>
text\<open>\isadof is a novel Isabelle system component providing specific support for all these
three parts. Note that the document core \<^emph>\<open>may\<close>, but \<^emph>\<open>must\<close> not
use Isabelle definitions or proofs for checking the formal content---the
present paper is actually an example of a document not containing any proof.
The document generation process of \isadof is currently restricted to \LaTeX, which means
that the layout is defined by a set of \LaTeX{} style files. Several layout
definitions for one ontology are possible and pave the way for generating different \<^emph>\<open>views\<close> of
the same central document, addressing the needs of different purposes
and/or target readers.
While the ontology and the layout definition will have to be developed by an expert
with knowledge over Isabelle and \isadof and the back end technology depending on the layout
definition, the core is intended to require only minimal knowledge of these two. The situation
is similar to \LaTeX{}-users, who usually have minimal knowledge about the content in
style-files (\<^verbatim>\<open>.sty\<close>-files). In the document core authors \<^emph>\<open>can\<close> use \LaTeX{} commands in
their source, but this limits the possibility of using different representation technologies,
\eg, HTML, and increases the risk of arcane error-messages in generated \LaTeX{}.
The \isadof ontology specification language consists basically of a notation for
document classes, where the attributes are typed with HOL-types and can be instantiated
by HOL-terms, \ie, the actual parsers and type-checkers of the Isabelle system are reused.
This has the particular advantage that \isadof commands can be arbitrarily mixed with
Isabelle/HOL commands providing the machinery for type declarations and term specifications such
as enumerations. In particular, document class definitions provide:
\<^item> a HOL-type for each document class as well as inheritance,
\<^item> support for attributes with HOL-types and optional default values,
\<^item> support for overriding of attribute defaults but not overloading, and
\<^item> text-elements annotated with document classes; they are mutable
instances of document classes.
\<close>
text\<open>
Attributes referring to other ontological concepts are called \<^emph>\<open>links\<close>.
The HOL-types inside the document specification language support built-in types for Isabelle/HOL
\inlineisar+typ+'s, \inlineisar+term+'s, and \inlineisar+thm+'s reflecting internal Isabelle's
internal types for these entities; when denoted in HOL-terms to instantiate an attribute, for
example, there is a specific syntax (called \<^emph>\<open>inner syntax antiquotations\<close>) that is checked by
\isadof for consistency.
Document classes can have a \inlineisar+where+ clause containing a regular
expression over class names. Classes with such a \inlineisar+where+ were called \<^emph>\<open>monitor classes\<close>.
While document classes and their inheritance relation structure meta-data of text-elements
in an object-oriented manner, monitor classes enforce structural organization
of documents via the language specified by the regular expression
enforcing a sequence of text-elements that must belong to the corresponding classes.
To start using \isadof, one creates an Isabelle project (with the name
\inlinebash{IsaDofApplications}):
\begin{bash}
isabelle DOF_mkroot -o scholarly_paper -t lncs -d IsaDofApplications
\end{bash}
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
Computer Science series. The project can be formally checked, including the generation of the
article in PDF using the following command:
\begin{bash}
isabelle build -d . IsaDofApplications
\end{bash}
\<close>
chapter*[ontomod::text_section]\<open> Modeling Ontologies in \isadof \<close>
text\<open> In this section, we will use the \isadof document ontology language
for three different application scenarios: for scholarly papers, for mathematical
exam sheets as well as standardization documents where the concepts of the
standard are captured in the ontology. For space reasons, we will concentrate in all three
cases on selected aspects of the modeling.\<close>
section*[scholar_onto::example]\<open> The Scholar Paper Scenario: Eating One's Own Dog Food. \<close>
text\<open> The following ontology is a simple ontology modeling scientific papers. In this
\isadof application scenario, we deliberately refrain from integrating references to
(Isabelle) formal content in order to demonstrate that \isadof is not a framework by
Isabelle users for Isabelle users only.
Of course, such references can be added easily and represent a particular strength
of \isadof.
\begin{figure}
\begin{isar}
doc_class title =
short_title :: "string option" <= None
doc_class subtitle =
abbrev :: "string option" <= None
doc_class author =
affiliation :: "string"
doc_class abstract =
keyword_list :: "string list" <= "[]"
doc_class text_section =
main_author :: "author option" <= None
todo_list :: "string list" <= "[]"
\end{isar}
\caption{The core of the ontology definition for writing scholarly papers.}
\label{fig:paper-onto-core}
\end{figure}
The first part of the ontology \inlineisar+scholarly_paper+ (see \autoref{fig:paper-onto-core})
contains the document class definitions
with the usual text-elements of a scientific paper. The attributes \inlineisar+short_title+,
\inlineisar+abbrev+ etc are introduced with their types as well as their default values.
Our model prescribes an optional \inlineisar+main_author+ and a todo-list attached to an arbitrary
text section; since instances of this class are mutable (meta)-objects of text-elements, they
can be modified arbitrarily through subsequent text and of course globally during text evolution.
Since \inlineisar+author+ is a HOL-type internally generated by \isadof framework and can therefore
appear in the \inlineisar+main_author+ attribute of the \inlineisar+text_section+ class;
semantic links between concepts can be modeled this way.
The translation of its content to, \eg, Springer's \LaTeX{} setup for the Lecture Notes in Computer
Science Series, as required by many scientific conferences, is mostly straight-forward. \<close>
figure*[fig1::figure,spawn_columns=False,relative_width="95",src="''figures/Dogfood-Intro''"]
\<open> Ouroboros I: This paper from inside \ldots \<close>
text\<open> @{docitem \<open>fig1\<close>} shows the corresponding view in the Isabelle/PIDE of the present paper.
Note that the text uses \isadof's own text-commands containing the meta-information provided by
the underlying ontology.
We proceed by a definition of \inlineisar+introduction+'s, which we define as the extension of
\inlineisar+text_section+ which is intended to capture common infrastructure:
\begin{isar}
doc_class introduction = text_section +
comment :: string
\end{isar}
As a consequence of the definition as extension, the \inlineisar+introduction+ class
inherits the attributes \inlineisar+main_author+ and \inlineisar+todo_list+ together with
the corresponding default values.
As a variant of the introduction, we could add here an attribute that contains the formal
claims of the article --- either here, or, for example, in the keyword list of the abstract.
As type, one could use either the built-in type \inlineisar+term+ (for syntactically correct,
but not necessarily proven entity) or \inlineisar+thm+ (for formally proven entities). It suffices
to add the line:
\begin{isar}
claims :: "thm list"
\end{isar}
and to extend the \LaTeX-style accordingly to handle the additional field.
Note that \inlineisar+term+ and \inlineisar+thm+ are types reflecting the core-types of the
Isabelle kernel. In a corresponding conclusion section, one could model analogously an
achievement section; by programming a specific compliance check in SML, the implementation
of automated forms of validation check for specific categories of papers is envisageable.
Since this requires deeper knowledge in Isabelle programming, however, we consider this out
of the scope of this paper.
We proceed more or less conventionally by the subsequent sections (\autoref{fig:paper-onto-sections})
\begin{figure}
\begin{isar}
doc_class technical = text_section +
definition_list :: "string list" <= "[]"
doc_class example = text_section +
comment :: string
doc_class conclusion = text_section +
main_author :: "author option" <= None
doc_class related_work = conclusion +
main_author :: "author option" <= None
doc_class bibliography =
style :: "string option" <= "''LNCS''"
\end{isar}
\caption{Various types of sections of a scholarly papers.}
\label{fig:paper-onto-sections}
\end{figure}
and finish with a monitor class definition that enforces a textual ordering
in the document core by a regular expression (\autoref{fig:paper-onto-monitor}).
\begin{figure}
\begin{isar}
doc_class article =
trace :: "(title + subtitle + author+ abstract +
introduction + technical + example +
conclusion + bibliography) list"
where "(title ~~ \<lbrakk>subtitle\<rbrakk> ~~ \<lbrace>author\<rbrace>$^+$+ ~~ abstract ~~
introduction ~~ \<lbrace>technical || example\<rbrace>$^+$ ~~ conclusion ~~
bibliography)"
\end{isar}
\caption{A monitor for the scholarly paper ontology.}
\label{fig:paper-onto-monitor}
\end{figure}
\<close>
text\<open> We might wish to add a component into our ontology that models figures to be included into
the document. This boils down to the exercise of modeling structured data in the style of a
functional programming language in HOL and to reuse the implicit HOL-type inside a suitable document
class \inlineisar+figure+:
\begin{isar}
datatype placement = h | t | b | ht | hb
doc_class figure = text_section +
relative_width :: "int" (* percent of textwidth *)
src :: "string"
placement :: placement
spawn_columns :: bool <= True
\end{isar}
\<close>
text\<open> Alternatively, by including the HOL-libraries for rationals, it is possible to
use fractions or even mathematical reals. This must be counterbalanced by syntactic
and semantic convenience. Choosing the mathematical reals, \eg, would have the drawback that
attribute evaluation could be substantially more complicated.\<close>
figure*[fig_figures::figure,spawn_columns=False,relative_width="85",src="''figures/Dogfood-figures''"]
\<open> Ouroboros II: figures \ldots \<close>
text\<open> The document class \inlineisar+figure+ --- supported by the \isadof text command
\inlineisar+figure*+ --- makes it possible to express the pictures and diagrams in this paper
such as @{docitem_ref \<open>fig_figures\<close>}.
\<close>
section*[mathex_onto::example]\<open> The Math-Exam Scenario \<close>
text\<open> The Math-Exam Scenario is an application with mixed formal and
semi-formal content. It addresses applications where the author of the exam is not present
during the exam and the preparation requires a very rigorous process, as the French
\<^emph>\<open>baccalaureat\<close> and exams at The University of Sheffield.
We assume that the content has four different types of addressees, which have a different
\<^emph>\<open>view\<close> on the integrated document:
\<^item> the \<^emph>\<open>setter\<close>, \ie, the author of the exam,
\<^item> the \<^emph>\<open>checker\<close>, \ie, an internal person that checks
the exam for feasibility and non-ambiguity,
\<^item> the \<^emph>\<open>external examiner\<close>, \ie, an external person that checks
the exam for feasibility and non-ambiguity, and
\<^item> the \<^emph>\<open>student\<close>, \ie, the addressee of the exam.
\<close>
text\<open> The latter quality assurance mechanism is used in many universities,
where for organizational reasons the execution of an exam takes place in facilities
where the author of the exam is not expected to be physically present.
Furthermore, we assume a simple grade system (thus, some calculation is required).
\begin{figure}
\begin{isar}
doc_class Author = ...
datatype Subject = algebra | geometry | statistical
datatype Grade = A1 | A2 | A3
doc_class Header = examTitle :: string
examSubject :: Subject
date :: string
timeAllowed :: int -- minutes
datatype ContentClass = setter
| checker
| external_examiner
| student
doc_class Exam_item =
concerns :: "ContentClass set"
type_synonym SubQuestion = string
\end{isar}
\caption{The core of the ontology modeling math exams.}
\label{fig:onto-exam}
\end{figure}
The heart of this ontology (see \autoref{fig:onto-exam}) is an alternation of questions and answers,
where the answers can consist of simple yes-no answers (QCM style check-boxes) or lists of formulas.
Since we do not
assume familiarity of the students with Isabelle (\inlineisar+term+ would assume that this is a
parse-able and type-checkable entity), we basically model a derivation as a sequence of strings
(see \autoref{fig:onto-questions}).
\begin{figure}
\begin{isar}
doc_class Answer_Formal_Step = Exam_item +
justification :: string
"term" :: "string"
doc_class Answer_YesNo = Exam_item +
step_label :: string
yes_no :: bool -- \<open>for checkboxes\<close>
datatype Question_Type =
formal | informal | mixed
doc_class Task = Exam_item +
level :: Level
type :: Question_Type
subitems :: "(SubQuestion *
(Answer_Formal_Step list + Answer_YesNo) list) list"
concerns :: "ContentClass set" <= "UNIV"
mark :: int
doc_class Exercise = Exam_item +
type :: Question_Type
content :: "(Task) list"
concerns :: "ContentClass set" <= "UNIV"
mark :: int
\end{isar}
\caption{An exam can contain different types of questions.}
\label{fig:onto-questions}
\end{figure}
In many institutions, it makes sense to have a rigorous process of validation
for exam subjects: is the initial question correct? Is a proof in the sense of the
question possible? We model the possibility that the @{term examiner} validates a
question by a sample proof validated by Isabelle (see \autoref{fig:onto-exam-monitor}).
In our scenario these sample proofs are completely \<^emph>\<open>internal\<close>, \ie, not exposed to the
students but just additional material for the internal review process of the exam.
\begin{figure}
\begin{isar}
doc_class Validation =
tests :: "term list" <="[]"
proofs :: "thm list" <="[]"
doc_class Solution = Exam_item +
content :: "Exercise list"
valids :: "Validation list"
concerns :: "ContentClass set" <= "{setter,checker,external_examiner}"
doc_class MathExam=
content :: "(Header + Author + Exercise) list"
global_grade :: Grade
where "\<lbrace>Author\<rbrace>$^+$ ~~ Header ~~ \<lbrace>Exercise ~~ Solution\<rbrace>$^+$ "
\end{isar}
\caption{Validating exams.}
\label{fig:onto-exam-monitor}
\end{figure}
\<close>
declare_reference*["fig_qcm"::figure]
text\<open> Using the \LaTeX{} package hyperref, it is possible to conceive an interactive
exam-sheets with multiple-choice and/or free-response elements
(see @{docitem_ref (unchecked) \<open>fig_qcm\<close>}). With the
help of the latter, it is possible that students write in a browser a formal mathematical
derivation---as part of an algebra exercise, for example---which is submitted to the examiners
electronically. \<close>
figure*[fig_qcm::figure,spawn_columns=False,
relative_width="90",src="''figures/InteractiveMathSheet''"]
\<open> A Generated QCM Fragment \ldots \<close>
section*[cenelec_onto::example]\<open> The Certification Scenario following CENELEC \<close>
text\<open> Documents to be provided in formal certifications (such as CENELEC
50126/50128, the DO-178B/C, or Common Criteria) can much profit from the control of ontological consistency:
a lot of an evaluator's work consists in tracing down the links from requirements over
assumptions down to elements of evidence, be it in the models, the code, or the tests.
In a certification process, traceability becomes a major concern; and providing
mechanisms to ensure complete traceability already at the development of the
global document will clearly increase speed and reduce risk and cost of a
certification process. Making the link-structure machine-checkable, be it between requirements,
assumptions, their implementation and their discharge by evidence (be it tests, proofs, or
authoritative arguments), is therefore natural and has the potential to decrease the cost
of developments targeting certifications. Continuously checking the links between the formal
and the semi-formal parts of such documents is particularly valuable during the (usually
collaborative) development effort.
As in many other cases, formal certification documents come with an own terminology and
pragmatics of what has to be demonstrated and where, and how the trace-ability of requirements through
design-models over code to system environment assumptions has to be assured.
\<close>
text\<open> In the sequel, we present a simplified version of an ontological model used in a
case-study~ @{cite "bezzecchi.ea:making:2018"}. We start with an introduction of the concept of requirement
(see \autoref{fig:conceptual}).
\begin{figure}
\begin{isar}
doc_class requirement = long_name :: "string option"
doc_class requirement_analysis = no :: "nat"
where "requirement_item +"
doc_class hypothesis = requirement +
hyp_type :: hyp_type <= physical (* default *)
datatype ass_kind = informal | semiformal | formal
doc_class assumption = requirement +
assumption_kind :: ass_kind <= informal
\end{isar}
\caption{Modeling requirements.}
\label{fig:conceptual}
\end{figure}
Such ontologies can be enriched by larger explanations and examples, which may help
the team of engineers substantially when developing the central document for a certification,
like an explication what is precisely the difference between an \<^emph>\<open>hypothesis\<close> and an
\<^emph>\<open>assumption\<close> in the context of the evaluation standard. Since the PIDE makes for each
document class its definition available by a simple mouse-click, this kind on meta-knowledge
can be made far more accessible during the document evolution.
For example, the term of category \<^emph>\<open>assumption\<close> is used for domain-specific assumptions.
It has formal, semi-formal and informal sub-categories. They have to be
tracked and discharged by appropriate validation procedures within a
certification process, be it by test or proof. It is different from a hypothesis, which is
globally assumed and accepted.
In the sequel, the category \<^emph>\<open>exported constraint\<close> (or \<^emph>\<open>ec\<close> for short)
is used for formal assumptions, that arise during the analysis,
design or implementation and have to be tracked till the final
evaluation target, and discharged by appropriate validation procedures
within the certification process, be it by test or proof. A particular class of interest
is the category \<^emph>\<open>safety related application condition\<close> (or \<^emph>\<open>srac\<close>
for short) which is used for \<^emph>\<open>ec\<close>'s that establish safety properties
of the evaluation target. Their track-ability throughout the certification
is therefore particularly critical. This is naturally modeled as follows:
\begin{isar}
doc_class ec = assumption +
assumption_kind :: ass_kind <= (*default *) formal
doc_class srac = ec +
assumption_kind :: ass_kind <= (*default *) formal
\end{isar}
\<close>
chapter*[ontopide::technical]\<open> Ontology-based IDE support \<close>
text\<open> We present a selection of interaction scenarios @{example \<open>scholar_onto\<close>}
and @{example \<open>cenelec_onto\<close>} with Isabelle/PIDE instrumented by \isadof. \<close>
section*[scholar_pide::example]\<open> A Scholarly Paper \<close>
text\<open> In \autoref{fig-Dogfood-II-bgnd1} and \autoref{fig-bgnd-text_section} we show how
hovering over links permits to explore its meta-information.
Clicking on a document class identifier permits to hyperlink into the corresponding
class definition (\autoref{fig:Dogfood-IV-jumpInDocCLass}); hovering over an attribute-definition
(which is qualified in order to disambiguate; \autoref{fig:Dogfood-V-attribute}).
\<close>
open_monitor*["text-elements"::figure_group,
caption="''Exploring text elements.''"]
figure*["fig-Dogfood-II-bgnd1"::figure, spawn_columns=False,
relative_width="48",
src="''figures/Dogfood-II-bgnd1''"]
\<open>Exploring a Reference of a Text-Element.\<close>
figure*["fig-bgnd-text_section"::figure, spawn_columns=False,
relative_width="48",
src="''figures/Dogfood-III-bgnd-text_section''"]
\<open>Exploring the class of a text element.\<close>
close_monitor*["text-elements"]
side_by_side_figure*["hyperlinks"::side_by_side_figure,anchor="''fig:Dogfood-IV-jumpInDocCLass''",
caption="''Hyperlink to Class-Definition.''",relative_width="48",
src="''figures/Dogfood-IV-jumpInDocCLass''",anchor2="''fig:Dogfood-V-attribute''",
caption2="''Exploring an attribute.''",relative_width2="47",
src2="''figures/Dogfood-III-bgnd-text_section''"]\<open> Hyperlinks.\<close>
declare_reference*["figDogfoodVIlinkappl"::figure]
text\<open> An ontological reference application in \autoref{figDogfoodVIlinkappl}: the ontology-dependent
antiquotation \inlineisar|@ {example ...}| refers to the corresponding text-elements. Hovering allows
for inspection, clicking for jumping to the definition. If the link does not exist or has a
non-compatible type, the text is not validated. \<close>
figure*[figDogfoodVIlinkappl::figure,relative_width="80",src="''figures/Dogfood-V-attribute''"]
\<open> Exploring an attribute (hyperlinked to the class). \<close>
section*[cenelec_pide::example]\<open> CENELEC \<close>
declare_reference*[figfig3::figure]
text\<open> The corresponding view in @{docitem_ref (unchecked) \<open>figfig3\<close>} shows core part of a document,
coherent to the @{example \<open>cenelec_onto\<close>}. The first sample shows standard Isabelle antiquotations
@{cite "wenzel:isabelle-isar:2017"} into formal entities of a theory. This way, the informal parts
of a document get ``formal content'' and become more robust under change.\<close>
figure*[figfig3::figure,relative_width="80",src="''figures/antiquotations-PIDE''"]
\<open> Standard antiquotations referring to theory elements.\<close>
declare_reference*[figfig5::figure]
text\<open> The subsequent sample in @{docitem_ref (unchecked) \<open>figfig5\<close>} shows the definition of an
\<^emph>\<open>safety-related application condition\<close>, a side-condition of a theorem which
has the consequence that a certain calculation must be executed sufficiently fast on an embedded
device. This condition cannot be established inside the formal theory but has to be
checked by system integration tests.\<close>
figure*[figfig5::figure, relative_width="80", src="''figures/srac-definition''"]
\<open> Defining a SRAC reference \ldots \<close>
figure*[figfig7::figure, relative_width="80", src="''figures/srac-as-es-application''"]
\<open> Using a SRAC as EC document reference. \<close>
text\<open> Now we reference in @{docitem_ref (unchecked) \<open>figfig7\<close>} this safety-related condition;
however, this happens in a context where general \<^emph>\<open>exported constraints\<close> are listed.
\isadof's checks establish that this is legal in the given ontology.
This example shows that ontological modeling is indeed adequate for large technical,
collaboratively developed documentations, where modifications can lead easily to incoherence.
The current checks help to systematically avoid this type of incoherence between formal and
informal parts. \<close>
chapter*[onto_future::technical]\<open> Monitor Classes \<close>
text\<open> Besides sub-typing, there is another relation between
document classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
which is expressed by the occurrence of a \inlineisar+where+ clause
in the document class definition containing a regular
expression (see @{example \<open>scholar_onto\<close>}).
While class-extension refers to data-inheritance of attributes,
a monitor imposes structural constraints -- the order --
in which instances of monitored classes may occur. \<close>
text\<open>
The control of monitors is done by the commands:
\<^item> \inlineisar+open_monitor* + <doc-class>
\<^item> \inlineisar+close_monitor* + <doc-class>
\<close>
text\<open>
where the automaton of the monitor class is expected
to be in a final state. In the final state, user-defined SML code can be executed.
Monitors can be nested, so it is possible to "overlay" one or more monitoring
classes and impose different sets of structural constraints in a hierarchic manner.
Classes which are neither directly nor indirectly (via inheritance) mentioned in the
monitor are \<^emph>\<open>independent\<close> from a monitor; instances of independent test elements
may occur freely. \<close>
(*<*)
end
(*>*)

View File

@ -1,189 +0,0 @@
(*<*)
theory "04_IsaDofImpl"
imports "02_Background"
begin
(*>*)
chapter*[impl1::introduction,main_author="Some(@{docitem ''adb''}::author)"]
\<open>Isabelle Ontology Framework \isadof\<close>
text\<open>
In this section, we introduce our framework, called \isadof. \isadof
is based on several design-decisions:
\begin{compactitem}
\item the entire \isadof is conceived as ``pure add-on'', \ie, we
deliberately resign on the possibility to modify Isabelle itself
(thus accepting a minor loss in performance and some additional
complexity in the documentation generation process)
\item we decided to identify the ontology types with the Isabelle/HOL
type system, \ie, we reuse the same infrastructure for parsers and
type-checkers, but use the types on the meta-level of the document
structure
\item we decided to make the markup-generation by own means in order
to adapt it as good as possible to the needs of tracking the linking
in documents.
\end{compactitem}
\<close>
subsection*["sec:plugins"::technical]{*Writing \isadof as User-Defined Plugin in Isabelle/Isar*}
text\<open>
Writing an own plugin in Isabelle starts with defining the local data
and registering it in the framework. As mentioned before, contexts
are structures with independent cells/compartments having three
primitives \inlinesml+init+, \inlinesml+extend+ and
\inlinesml+merge+. Technically this is done by instantiating a functor
\inlinesml+Generic_Data+, and the following fairly typical
code-fragment is already drawn from \isadof:
\begin{sml}
structure Data = Generic_Data
( type T = docobj_tab * docclass_tab
val empty = (initial_docobj_tab, initial_docclass_tab)
val extend = I
fun merge((d1,c1),(d2,c2)) = (merge_docobj_tab (d1, d2),
merge_docclass_tab(c1,c2))
);
\end{sml}
with some tables \inlinesml+docobj_tab+ and \inlinesml+docclass_tab+
(not explained here) capturing the specific data of the application in
mind, \ie, a table of document classes and another one capturing the
document class instances.
\enlargethispage{2\baselineskip}
\<close>
text\<open>
All the text samples shown here have to be in the context of an SML
file or in an \inlineisar|ML{* ... *}| command inside a \verb|.thy|
file, which has the effect to type-check and compile these text
elements by the underlying SML compiler.
\<close>
text\<open>
Operations are organized in a model-view-controller scheme, where
Isabelle/Isar provides the controller part. A typical model operation
has the type:\<close>
text\<open>
\begin{sml}
val opn :: <args_type> -> Context.generic -> Context.generic
\end{sml}
\ie, it represents a transformation on contexts. For example, the
operation of declaring a local reference in the context is presented
as follows:
\begin{sml}
fun declare_object_local oid ctxt =
let fun decl {tab,maxano} = {tab=Symtab.update_new(oid,NONE) tab,
maxano=maxano}
in (Data.map(apfst decl)(ctxt)
handle Symtab.DUP _ =>
error("multiple declaration of document reference"))
end
\end{sml}
\<close>
text\<open>
where \inlineisar+Data.map+ is the update function resulting from the
above functor instantiation. This code fragment uses operations from
a library structure \inlinesml+Symtab+ that were used to update the
appropriate table for document objects in the plugin-local
state. Possible exceptions to the update operation were mapped to a
system-global error reporting function.
\<close>
text\<open>
Finally, the view-aspects where handled by an API for
parsing-combinators. The library structure \inlinesml+Scan+ provides,
for example, the operators:
\begin{sml}
op || : ('a -> 'b) * ('a -> 'b) -> 'a -> 'b
op -- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e
op |-- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> 'd * 'e
op --| : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> 'b * 'e
op >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c
op option : ('a -> 'b * 'a) -> 'a -> 'b option * 'a
op repeat : ('a -> 'b * 'a) -> 'a -> 'b list * 'a
\end{sml}
for alternative, sequence, sequence-ignore-left,
sequence-ignore-right, and piping, as well as combinators for option
and repeat. The parsing combinator technology meanwhile arrived even
in mainstream languages such as Java or Scala and is nowadays
implemented sufficiently efficiently to replace conventional Lex-Yacc
technology for most applications. It has the advantage of being
smoothly integrated into standard programs and allows for dynamic
grammar extensions. There is a more high-level structure
\inlinesml{Parse} providing specific combinators for the
command-language Isar:
\begin{sml}
val attribute = Parse.position Parse.name
-- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) "";
val reference = Parse.position Parse.name
-- Scan.option (Parse.$$$ "::" |-- Parse.!!!
(Parse.position Parse.name));
val attributes = (Parse.$$$ "[" |-- (reference
-- (Scan.optional(Parse.$$$ ","
|-- (Parse.enum "," attribute))) []))
--| Parse.$$$ "]"
\end{sml}
``Model'' and ``View'' parts were combined to ``parsers'' which were
registered in the interpreter toplevel of the Isabelle/Isar framework:
\begin{sml}
val _ = Outer_Syntax.command @ {command_keyword "declare_reference"}
"declare document reference"
(attributes >> (fn (((oid,pos),cid),doc_attrs) =>
(Toplevel.theory (DOF_core.declare_object_global oid))));
\end{sml}
\<close>
text\<open>
Altogether, this gives the \emph{extension} of the Isar syntax
allowing to parse and interpret the new \emph{command} in a subsequent
\verb+.thy+ file:
\begin{isar}
declare_reference [lal::requirement, alpha="main", beta=42]
\end{isar}
where we ignore the semantics at the moment. The construction also
generates implicitly some markup information; for example, when
hovering over the \inlineisar|declare_reference| command in a
front-end supporting PIDE, a popup window with the text: ``declare
document reference'' will appear.
\<close>
subsection*["sec:prog_anti"::technical]{*Programming Text Antiquotations*}
text\<open>
As mentioned in the introduction, Isabelle/Isar is configured with a
number of standard commands to annotate formal definitions and proofs
with text---the latter is used in particular to generate PDF and HTML
documents with internal hypertext-references. Inside these text
commands, a number of predefined antiquotations can be inserted which
were checked and decorated with markup during editing.
\<close>
text\<open>
Moreover, there is an API for user-defined antiquotations. It follows
the lines of the MVC style system extensions presented in the previous
section. An excerpt of the table defining some antiquotations can be
found in \verb+thy_output.ML+ of the Isabelle sources and give the
basic idea:
\begin{sml}
val _ = Theory.setup
(basic_entity @ {binding term} (stp -- Args.term) pretty_term_style #>
basic_entity @ {binding prop} (stp -- Args.prop) pretty_term_style #>
... )
\end{sml}
where \inlinesml+stp+ (=\inlinesml+Term_Style.parse+),
\inlinesml+Args.term+ and \inlinesml+Args.prop+ are parsing
combinators from higher Isar-API's (that actually do the type checking
in the surrounding HOL context) and \inlinesml+pretty_term_style+ an
operation pretty-printing the parsed term for one of the possible
targets HTML or \LaTeX{} (converted to \verb+.pdf+ in a
post-compilation process). The construct \inlinesml+@ {binding term}+
decorates the keyword ``term'' with positioning markup (allowing
navigating to this defining place in \verb+thy_output.ML+ by
hyperlinking) and \inlinesml+Theory.setup+ the operator that
registers the entire parser/checker into the Isar framework.
\<close>
text\<open>
Together, this establishes the syntax and semantics of, for example,
the antiquotation:
\begin{isar}
text{* @{term "fac 5"} *}
\end{isar}
inside the text command. A key contribution of this paper is that such
SML code is generated \emph{automatically} from an \isadof ontology
definition introduced in the subsequent section.
\<close>
end

View File

@ -1,421 +0,0 @@
(*<*)
theory "05_DesignImpl"
imports "04_isaDofImpl"
begin
(*>*)
chapter*[impl2::technical,main_author="Some(@{docitem ''bu''}::author)"]
{* \isadof: Design and Implementation*}
text\<open>
In this section, we present the design and implementation of \isadof.
\subsection{Document Ontology Modeling with \isadof}
First, we introduce an own language to define ontologies.
Conceptually, ontologies consist of:
\begin{compactitem}
\item a \emph{document class} that describes a concept, \ie, it
represents set of \emph{instances} of a document class,
i.e. references to document elements;
\item \emph{attributes} specific to document classes;
\item attributes should be typed (arbitrary HOL-types);
\item attributes can refer to other document classes,
thus, document classes must also be HOL-types
(Such attributes were called \emph{links});
\item a special link, the reference to a super-class,
establishes an \emph{is-a} relation between classes;
\item classes may refer to other classes via a regular expression in a
\emph{where} clause (classes with such an optional where clauses are
called \emph{monitor classes});
\item attributes may have default values in order to facilitate notation.
\end{compactitem}
\<close>
text\<open>
For ontology modeling, we chose a syntax roughly similar to
Isabelle/HOL's extensible records. We present the syntax implicitly
by a conceptual example, that serves to introduce the key-features of
the modeling language:
\begin{isar}
doc_class A =
x :: "string"
doc_class B =
y :: "string list" <= "[]"
doc_class C = B +
z :: "A option" <= "None"
datatype enum = "X1" | "X2" | "X3"
doc_class D = B +
a1 :: enum <= "X2"
a2 :: int <= "0"
doc_class F =
r :: "thm list"
b :: "(A \<times> B) set" <= "{}"
doc_class M =
trace :: "(A + C + D + F) list"
where "A . (C | D)* . [F]"
\end{isar}
Isabelle uses a two level syntax: the \emph{outer syntax} which is
defined and extended using the mechanisms described in
\autoref{sec:plugins} and the \emph{inner syntax}, is used to define
type and term expressions of the Isabelle framework. Since we reuse a
lot of infrastructure of HOL (with respect to basic type library
definitions), parsing and type-checking have been specialized to HOL
and extensions thereof. The ``switch'' between outer and inner syntax
happens with the quote symbols
\inlineisar+"..."+. % In exceptional cases, the latter can be
% omitted --- notably, if the type or term consists only of one type
% constructor symbol or constant symbol respectively.
%
\<close>
text\<open>
The above ontology specification contains the document classes
\inlineisar+A+, \inlineisar+B+, \inlineisar+C+, \inlineisar+D+,
\inlineisar+F+, and \inlineisar+M+ with the respective attributes
\inlineisar+x+, \inlineisar+y+, \inlineisar+z+, \inlineisar+a1+,
\inlineisar+a2+, \inlineisar+b+ and \inlineisar+trace+.
\inlineisar+C+ and \inlineisar+D+ are sub-classes of
\inlineisar+B+ as stated by the class extension \inlineisar*B + ... *.
\enlargethispage{2\baselineskip}
\<close>
text\<open>
Each attribute is typed within the given context; the general HOL
library provides the types \inlineisar+string+, \inlineisar+_ list+,
\inlineisar+_ option+, and \inlineisar+_ set+. On the fly, other
special purpose types can be defined. We reuse here the Isabelle/HOL
\inlineisar+datatype+-statement, which can be mixed arbitrarily in
between the ontology definitions (like any other Isabelle/HOL command)
to define an enumeration type. Document classes---similar to
conventional class-definitions as in object-oriented
programming---\emph{induce} an implicit HOL type; for this reason the
class \inlineisar+C+ can have an attribute that refers to the
\inlineisar+A+ attribute classes. Document classes that contain
attributes referring to induced class types are called
\emph{links}. Links can be complex: the class \inlineisar+F+, for
example, contains a set of pairs, \ie, a relation between
\inlineisar+A+ and \inlineisar+B+ document instances. Each attribute
may be assigned (via \inlineisar+<=+) to a default value represented
by a HOL expression, whose syntax is either defined by library
operations or constant declarations like the
\inlineisar+datatype+-statement.
\<close>
text\<open>
The document class \inlineisar+M+ is a \emph{monitor class}, \ie, a
class possessing a \inlineisar+where+ clause containing a regular
expression consisting of the class identifier \inlineisar+A+,
\inlineisar+B+, etc. Its use is discussed in \autoref{sec:monitor-class}.
\<close>
subsection*[editing::example]{*Editing a Document with Ontology-Conform Meta-Data*}
text\<open>
As already mentioned, Isabelle/Isar comes with a number of standard
\emph{text commands} such as \inlineisar+section{* ... *}+ or
\inlineisar+text{* ... *}+ that offer the usual text structuring
primitives for documents. From the user point-of-view, text commands
offer the facility of spell-checking and IDE support for text
antiquotations (as discussed before), from a system point of view,
they are particular since they are not conceived to have side effects
on the global (formal) context, which is exploited in Isabelle's
parallel execution engine.\<close>
text\<open>
\isadof introduces an own family of text-commands based on the
standard command API of the Isar engine, which allows having
side effects of the global context and thus to store and manage own
meta-information (the standard text-command interface turned out not
to be flexible enough, and a change of this API conflicts with our
goal of not changing Isabelle itself). \isadof, \eg, provides
\inlineisar+section*[<meta-args>]{* ... *}+,
\inlineisar+subsection*[<meta-args>]{* ... *}+,
or \inlineisar+text*[<meta-args>]{* ... *}+, where
\inlineisar+<meta-args>+ is a syntax to declaring instance, class and
attributes for this text element. The syntax for
\inlineisar+<meta-args>+ follows the scheme:
\begin{isar}
<ref> :: <class_id>, attr_1 = "<expr>", ..., attr_n = "<expr>"
\end{isar}
where the \inlineisar+<class_id>+ can be optionally omitted which represents
the implicit superclass \inlineisar+text+, where \inlineisar+attr_i+ must
be declared attributes in the class and where the \inlineisar+"<expr>"+
must have the corresponding type. Attributes from a class definition may
be left undefined; definitions of attribute values \emph{override} default
values or values of super-classes. Overloading of attributes, however, is not
permitted in \isadof. \<close>
text\<open>
We can annotate a text as follows. First, we have to place a
particular document into the context of our conceptual example
ontology shown above:
\begin{isar}
theory Concept_Example
imports "../../ontologies/Conceptual"
begin
\end{isar}
which is contained in a theory file
\verb+../../ontologies/Conceptual.thy+. Then we can continue to annotate
our text as follows:
\begin{isar}
section*[a::A, x = "''alpha''"] {* Lorem ipsum dolor sit amet, ... *}
text*[c1::C, x = "''beta''"]
{* ... suspendisse non arcu malesuada mollis, nibh morbi, ... *}
text*[d:D, a2="10"]{* Lorem ipsum dolor sit amet, consetetur ...*}
\end{isar}\<close>
text\<open>
Let's consider the last line:
this text is the instance \inlineisar+d+ which belongs to class
\inlineisar+D+, and the default of its attribute \inlineisar+a2+ is
overridden to the value \inlineisar+"10"+. Instances are mutable in
\isadof, the subsequent \isadof command:
\begin{isar}
update_instance*[d::D, a1 := X2, a2 := "20"]
\end{isar}
This changes the attribute values of \verb+d+. The typing
annotation \verb+D+ is optional here (if present, it is checked).\<close>
text\<open>
Document instances were used to reference textual content; in the
generated \LaTeX{} (PDF) and HTML documents they were supported by
hyperlinks. Since Isabelle/Isar has a top-down evaluation and
validation strategy for the global document, a kind of forward
declaration for references is sometimes necessary.
\begin{isar}
declare_reference* [<meta-args>]
\end{isar}
This declares the existence of a text-element and allows for
referencing it, although the actual text-element will occur later in
the document.\<close>
subsection*[ontolinks::technical]{*Ontology-Conform Logical Links: \isadof Antiquotations*}
text\<open>
Up to this point, the world of the formal and the informal document
parts are strictly separated. The main objective of \isadof are ways
to establish machine-checked links between these two universes by
instantiating automatically Isabelle/Isar's concept of
\emph{antiquotations}. The simplest form of link appears in the
following command:
\begin{isar}
text{* ... in ut tortor ... @ {docitem_ref {*a*}} ... @ {A {*a*}}*}
\end{isar}\<close>
text\<open>
This standard text-command contains two \isadof antiquotations; the
first represents just a link to the text-element \inlineisar$a$.
The second contains additionally the implicit constraint that the
reference to \inlineisar$a$ must also belong to the
\inlineisar$A$-class; the following input:
\begin{isar}
text{* ... ... ... @ {C (*a*}}*}
\end{isar}
results in the detection of an ontological inconsistency which will be
reported in PIDE at editing time. Of course, any modification of the
ontology or changes in the labeling of the meta-information will lead
to the usual re-checking of the Isabelle/Isar engine. A natural
representation of these semantic links inside \isadof documents would
be hyperlinks in generated PDF or HTML files.
\enlargethispage{2\baselineskip}\<close>
text\<open>
Besides text antiquotations from Isabelle/Isar, we introduced a novel
concept that we call \emph{inner syntax antiquotations}. It is a
crucial technical feature for establishing links between text-items as
well as document meta-data and formal entities of Isabelle such as
types, terms and theorems (reflecting the fundamental types
\inlineisar+typ+, \inlineisar+term+ and \inlineisar+thm+ of the
Isabelle kernel.) We start with a slightly simpler case: the
establishment of links between text-elements:
\begin{isar}
section*[f::F] {* Lectus accumsan velit ultrices, ... }*}
update_instance*[f,b:="{(@ {docitem ''a''}::A,@ {docitem ''c1''}::C),
(@ {docitem ''a''},@ {docitem ''c1''})}"]
\end{isar}\<close>
text\<open>
This example shows the construction of a relation between text
elements \emph{inside} HOL-expressions with the usual syntactic and
semantic machinery for sets, pairs, (thus: relations). Inside the
world of HOL-terms, we can refer to items of the ``meta-world'' by a
particular form of antiquotations called \emph{inner syntax
antiquotations}. Similarly, but conceptually different, it is
possible to refer in \isadof HOL-expressions to theorems of the
preceding context. Thus, it is possible to establish a theorem (or a
type or term), in the example below, by a proof ellipse in Isabelle:
\begin{isar}
theorem some_proof : "P" sorry
update_instance*[f,r:="[@ {thm ''some_proof''}]"]
\end{isar}\<close>
text\<open>
The resulting theorem is stored in a theorem list as part of the
meta-information of a section. Technically, theorems were introduced
in \isadof as abstract HOL types and some unspecified (Skolem)
HOL-constants with a particular infix-syntax. They are introduced for
example by:
\begin{isar}
typedecl "thm"
consts mk_thm :: "string \<Rightarrow> thm" ("@{thm _}")
\end{isar}
which introduces a new type \inlineisar+thm+ reflecting the internal
Isabelle type for established logical facts and the above notation to
the inner syntax parser. The \inlineisar+doc_class F+ in our schematic
example uses already this type. Whenever these expressions occur
inside an inner-syntax HOL-term, they are checked by the HOL parser
and type-checker as well as an \isadof checker that establishes that
\inlineisar+some_proof+ indeed refers to a known theorem of this name
in the current context.
% (this is, actually, the symmetry axiom of the equality in HOL).
To our knowledge, this is the first ontology-driven framework for
editing mathematical and technical documents that focuses particularly
on documents mixing formal and informal content---a type of documents
that is very common in technical certification processes. We see
mainly one area of related works: IDEs and text editors that support
editing and checking of documents based on an ontology. There is a
large group of ontology editors (\eg, Prot{\'e}g{\'e}~\cite{protege},
Fluent Editor~\cite{cognitum}, NeOn~\cite{neon}, or
OWLGrEd~\cite{owlgred}). With them, we share the support for defining
ontologies as well as auto-completion when editing documents based on
an ontology. While our ontology definitions are, currently, based on a
textual definition, widely used ontology editors (\eg,
OWLGrEd~\cite{owlgred}) also support graphical notations. This could
be added to \isadof in the future. A unique feature of \isadof is the
deep integration of formal and informal text parts. The only other
work in this area we are aware of is rOntorium~\cite{rontorium}, a plugin
for Prot{\'e}g{\'e} that integrates R~\cite{adler:r:2010} into an
ontology environment. Here, the main motivation behind this
integration is to allow for statistically analyzing ontological
documents. Thus, this is complementary to our work.\<close>
text\<open>
There is another form of antiquotations, so-called ML-antiquotations
in Isabelle, which we do not describe in detail in this paper. With
this specific antiquotations, it is possible to refer to the HOL-term
of all the attributes of the doc-item; by writing specific ML-code,
arbitrary user-defined criteria can be implemented establishing that
all meta-data of a document satisfies a particular validation. For
example, in the context of an ontology for scientific papers, we could
enforce that terms or theorems have a particular form or correspond to
``claims'' (contributions) listed in the introduction of the paper.
\<close>
subsection*["sec:monitor-class"::technical]{*Monitor Document Classes*}
text\<open>
\autoref{lst:example} shows our conceptual running example in all
details. While inheritance on document classes allows for structuring
meta-data in an object-oriented manner, monitor classes such as
\inlineisar+M+ impose a structural relation on a document. The
\inlineisar+where+ clause permits to write a regular expression on
class names; the class names mentioned in the where clause are called
the ``controlled'' ones. The expression specifies that all
text-elements that are instances of controlled classes to occur in the
sequential order specified by the \inlineisar+where+-clause. Start and
end were marked by the corresponding monitor commands. Note that
monitors may be nested.
\<close>
text\<open>
\begin{isar}[float, caption={Our running example},label={lst:example}]
theory Concept_Example
imports "../../ontologies/Conceptual"
begin
open_monitor*[struct::M]
section*[a::A, x = "''alpha''"] {* Lorem ipsum dolor sit amet, ... *}
text*[c1::C, x = "''beta''"]
{* ... suspendisse non arcu malesuada mollis, nibh morbi, ... *}
text*[d::D, a1 = "X3"]
{* ... phasellus amet id massa nunc, pede suscipit repellendus, ... *}
text*[c2::C, x = "''delta''"]
{* ... in ut tortor eleifend augue pretium consectetuer. *}
section*[f::F] {* Lectus accumsan velit ultrices, ... @ {docitem_ref {*a*} }*}
theorem some_proof : "P" sorry
update_instance*[f,r:="[@ {thm ''some_proof''}]"]
text{* ..., mauris amet, id elit aliquam aptent id, ... *}
update_instance*[f,b:="{(@ {docitem ''a''}::A,@ {docitem ''c1''}::C),
(@ {docitem ''a''}, @ {docitem ''c1''})}"]
close_monitor*[struct]
\end{isar}
\<close>
section{*Document Generation*}
text\<open>
Up to now, we discussed the definition of ontologies and their
representation in an interactive development environment, \ie,
JEdit/PIDE. In many application areas, it is desirable to also
generate a ``static'' document, \eg, for long-term archiving. Isabelle
supports the generation of both HTML and PDF documents. Due to its
standardization, the latter (in particular in the variant PDF/A) is
particularly suitable for ensuring long-term access. Hence, our
prototype focuses currently on the generation of consistent PDF
documents.\<close>
text\<open>
Technically, the PDF generation is based on \LaTeX{} (this is mostly
hidden from the end users) as standard text formatting such as
itemize-lists or italic and bold fonts can be written in JEdit
in a ``what-you-see-is-what-you-get''-style. We extended the \LaTeX{}
generation of Isabelle in such a way that each ontological concept
that is formally defined in \isadof is mapped to a dedicated
\LaTeX-command. This \LaTeX-command is responsible for the actual
typesetting of the concept as well as for generating the necessary
label and references. For each defined ontology, we need to define a
\LaTeX-style that defines these commands. For the standard commands
such as \inlineisar|section*[...]{* ... *}|, default implementations
are provided by \isadof. For example, the following is the \LaTeX{}
definition for processing \inlineisar|section*[...]{* ... *}|:
\begin{ltx}
\newkeycommand\isaDofSection[reference=,class_id=][1]{%
\isamarkupsection{#1}\label{\commandkey{reference}}%
}
\end{ltx}\<close>
text\<open>
This command gets all meta-arguments of the concepts as well as the
actual arguments. The layout is delegated to Isabelle's standard
sectioning commands
(\inlineltx|\isamarkupsection{#1}|). Additionally, a label for
linking to this section is generated.
\enlargethispage{2\baselineskip}
\<close>
text\<open>
Considering an ontology defining the concepts for writing scientific
papers, a potential definition for typesetting abstracts (where an
abstract includes a list of keywords) is:
\begin{ltx}
\newkeycommand\isaDofTextAbstract[reference=,class_id=,keywordlist=][1]{%
\begin{isamarkuptext}%
\begin{abstract}\label{\commandkey{reference}}%
#1
\ifthenelse{\equal{\commandkey{keywordlist}}{}}{}{%
\medskip\noindent{\textbf{Keywords:}} \commandkey{keywordlist}%
}
\end{abstract}%
\end{isamarkuptext}%
}
\end{ltx}
Our generated \LaTeX{} is conceptually very close to
SALT~\cite{DBLP:conf/esws/GrozaHMD07}--- but instead of writing
\LaTeX{} manually it is automatically generated and, additionally, can
also guarantee the consistency of the formal (mathematical/logical)
content.
\<close>
(*<*)
end
(*>*)

View File

@ -1,65 +0,0 @@
(*<*)
theory "06_Conclusion"
imports "03_IsaDof"
(* imports "05_DesignImpl *)
begin
(*>*)
chapter*[conclusion::conclusion]\<open> Conclusion and Related Work\<close>
text\<open> We have demonstrated the use of \isadof, a novel ontology modeling and enforcement
IDE deeply integrated into the Isabelle/Isar Framework. The two most distinguishing features are
\<^item> \isadof and its ontology language are a strongly typed language that allows
for referring (albeit not reasoning) to entities of Isabelle/HOL, most notably types, terms,
and (formally proven) theorems, and
\<^item> \isadof is supported by the Isabelle/PIDE framework; thus, the advantages of an IDE for
text-exploration (which is the type of this link? To which text element does this link refer?
Which are the syntactic alternatives here?) were available during editing
instead of a post-hoc validation process.
\<close>
text\<open> Of course, a conventional batch-process also exists which can be used
for the validation of large document bases in a conventional continuous build process.
This combination of formal and semi-informal elements, as well as a systematic enforcement
of the coherence to a document ontology of the latter, is, as we believe, novel and offers
a unique potential for the semantic treatment of scientific texts and technical documentations. \<close>
text\<open>
To our knowledge, this is the first ontology-driven framework for editing mathematical and technical
documents that focuses particularly on documents mixing formal and informal content---a type of
documents that is very common in technical certification processes. We see mainly one area of
related works: IDEs and text editors that support editing and checking of documents based on an
ontology. There is a large group of ontology editors (\eg, Prot{\'e}g{\'e}~@{cite "protege"},
Fluent Editor~@{cite "cognitum"}, NeOn~@{cite "neon"}, or OWLGrEd~@{cite "owlgred"}). With them,
we share the support for defining ontologies as well as auto-completion when editing documents
based on an ontology. While our ontology definitions are currently based on a textual definition,
widely used ontology editors (\eg, OWLGrEd~@{cite "owlgred"}) also support graphical notations.
This could be added to \isadof in the future. A unique feature of \isadof is the deep integration
of formal and informal text parts. The only other work in this area we are aware of is
rOntorium~@{cite "rontorium"}, a plugin for Prot{\'e}g{\'e} that integrates
R~@{cite "adler:r:2010"} into an ontology environment. Here, the main motivation behind this
integration is to allow for statistically analyzing ontological documents. Thus, this is
complementary to our work. \<close>
text\<open> \isadof in its present form has a number of technical short-comings as well
as potentials not yet explored. On the long list of the short-comings is the
fact that strings inside HOL-terms do not support, for example, Unicode.
For the moment, \isadof is conceived as an
add-on for Isabelle/HOL; a much deeper integration of \isadof into Isabelle
could increase both performance and uniformity. Finally, different target
presentation (such as HTML) would be highly desirable in particular for the
math exam scenarios. And last but not least, it would be desirable that PIDE
itself is ``ontology-aware'' and can, for example, use meta-information
to control read- and write accesses of \<^emph>\<open>parts\<close> of documents.
\<close>
paragraph\<open> Availability. \<close>
text\<open> The implementation of the framework, the discussed ontology definitions,
and examples are available at \url{https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF/}.\<close>
paragraph\<open> Acknowledgement. \<close>
text\<open> This work was partly supported by the framework of IRT SystemX, Paris-Saclay, France,
and therefore granted with public funds within the scope of the Program ``Investissements d'Avenir''.\<close>
(*<*)
end
(*>*)

View File

@ -1,46 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause

# Bootstrap the document build: copy the shared build_lib.sh from the
# locally installed Isabelle/DOF framework into the current directory
# and delegate the actual build to it. Aborts with installation
# instructions if Isabelle/DOF has not been installed.

set -e

# Single source of truth for the framework's build library; quoted
# everywhere below because $ISABELLE_HOME_USER may contain spaces.
BUILD_LIB="$ISABELLE_HOME_USER/DOF/document-template/build_lib.sh"

if [ ! -f "$BUILD_LIB" ]; then
  echo ""
  echo "Error: Isabelle/DOF not installed"
  echo "====="
  echo "This is a Isabelle/DOF project. The document preparation requires"
  echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
  echo "the Isabelle/DOF git repository, i.e.: "
  echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
  echo "You can install the framework as follows:"
  echo " cd Isabelle_DOF/document-generator"
  echo " ./install"
  echo ""
  exit 1
fi

# Copy (rather than source in place) so the document build runs with a
# local, self-contained copy of the library.
cp "$BUILD_LIB" .
source build_lib.sh

Binary file not shown.

Before

Width:  |  Height:  |  Size: 75 KiB

View File

@ -1,2 +0,0 @@
Template: scrreprt
Ontology: technical_report

View File

@ -1,59 +0,0 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
%% Optional packages: loaded only when present in the TeX installation,
%% so the document still compiles on minimal TeX setups.
\IfFileExists{beramono.sty}{\usepackage[scaled=0.88]{beramono}}{}%
\IfFileExists{upquote.sty}{\usepackage{upquote}}{}%
%% Core packages: symbols, colors, compact lists, source-code listings
%% (including the Isabelle/DOF listing styles), and spacing helpers.
\usepackage{textcomp}
\usepackage{xcolor}
\usepackage{paralist}
\usepackage{listings}
\usepackage{lstisadof}
\usepackage{xspace}
%% [draft] renders FiXme annotations in the output; use [final] for
%% release builds to suppress them.
\usepackage[draft]{fixme}
%% Syntax highlighting for bash snippets shown in the document.
\lstloadlanguages{bash}
%% Base listing style for bash: typewriter font, flexible columns, and
%% five keyword classes with distinct colors/weights.
\lstdefinestyle{bash}{language=bash,
,basicstyle=\ttfamily%
,showspaces=false%
,showlines=false%
,columns=flexible%
% ,keywordstyle=\bfseries%
% Defining 2-keywords
,keywordstyle=[1]{\color{BrickRed!60}\bfseries}%
% Defining 3-keywords
,keywordstyle=[2]{\color{OliveGreen!60}\bfseries}%
% Defining 4-keywords
,keywordstyle=[3]{\color{black!60}\bfseries}%
% Defining 5-keywords
,keywordstyle=[4]{\color{Blue!70}\bfseries}%
% Defining 6-keywords
,keywordstyle=[5]{\itshape}%
%
}
%% Display variant of the bash style: smaller font, light-gray
%% background, horizontal rules above and below the listing.
\lstdefinestyle{displaybash}{style=bash,
basicstyle=\ttfamily\footnotesize,
backgroundcolor=\color{black!2}, frame=lines}%
%% Environment for displayed bash listings and macro for inline bash.
\lstnewenvironment{bash}[1][]{\lstset{style=displaybash, #1}}{}
\def\inlinebash{\lstinline[style=bash, breaklines=true,columns=fullflexible]}
%% Sub-figures and caption layout.
\usepackage[caption]{subfig}
\usepackage[size=footnotesize]{caption}
%% Common abbreviations.
\newcommand{\ie}{i.e.}
\newcommand{\eg}{e.g.}
%% Placeholders substituted by the Isabelle/DOF document generator.
\title{<TITLE>}
\author{<AUTHOR>}

View File

@ -1,281 +0,0 @@
@STRING{pub-springer={Springer} }
@STRING{pub-springer:adr=""}
@STRING{s-lncs = "LNCS" }
@Manual{ wenzel:isabelle-isar:2017,
title = {The Isabelle/Isar Reference Manual},
author = {Makarius Wenzel},
OPTorganization = {},
OPTaddress = {},
OPTedition = {},
OPTmonth = {},
year = {2017},
note = {Part of the Isabelle distribution.},
OPTannote = {}
}
@Book{ adler:r:2010,
abstract = {Presents a guide to the R computer language, covering such
topics as the user interface, packages, syntax, objects,
functions, object-oriented programming, data sets, lattice
graphics, regression models, and bioconductor.},
added-at = {2013-01-10T22:39:38.000+0100},
address = {Sebastopol, CA},
author = {Adler, Joseph},
isbn = {9780596801700 059680170X},
keywords = {R},
publisher = {O'Reilly},
refid = 432987461,
title = {R in a nutshell},
year = 2010
}
@InCollection{ wenzel.ea:building:2007,
abstract = {We present the generic system framework of
Isabelle/Isarunderlying recent versions of Isabelle. Among
other things, Isar provides an infrastructure for Isabelle
plug-ins, comprising extensible state components and
extensible syntax that can be bound to tactical ML
programs. Thus the Isabelle/Isar architecture may be
understood as an extension and refinement of the
traditional LCF approach, with explicit infrastructure for
building derivative systems. To demonstrate the technical
potential of the framework, we apply it to a concrete
formal methods tool: the HOL-Z 3.0 environment, which is
geared towards the analysis of Z specifications and formal
proof of forward-refinements.},
author = {Makarius Wenzel and Burkhart Wolff},
booktitle = {TPHOLs 2007},
editor = {Klaus Schneider and Jens Brandt},
language = {USenglish},
acknowledgement={none},
pages = {352--367},
publisher = pub-springer,
address = pub-springer:adr,
number = 4732,
series = s-lncs,
title = {Building Formal Method Tools in the {Isabelle}/{Isar}
Framework},
doi = {10.1007/978-3-540-74591-4_26},
year = 2007
}
@Misc{w3c:ontologies:2015,
title={Ontologies},
organisation={W3c},
url={https://www.w3.org/standards/semanticweb/ontology},
year=2018
}
@BOOK{boulanger:cenelec-50128:2015,
AUTHOR = "Boulanger, Jean-Louis",
TITLE = "{CENELEC} 50128 and {IEC} 62279 Standards",
PUBLISHER = "Wiley-ISTE",
YEAR = "2015",
ADDRESS = "Boston",
NOTE = "The reference on the standard."
}
@Booklet{ cc:cc-part3:2006,
bibkey = {cc:cc-part3:2006},
key = {Common Criteria},
institution = {Common Criteria},
language = {USenglish},
month = sep,
year = 2006,
public = {yes},
title = {Common Criteria for Information Technology Security
Evaluation (Version 3.1), {Part} 3: Security assurance
components},
note = {Available as document
\href{http://www.commoncriteriaportal.org/public/files/CCPART3V3.1R1.pdf}
{CCMB-2006-09-003}},
number = {CCMB-2006-09-003},
acknowledgement={brucker, 2007-04-24}
}
@Book{ nipkow.ea:isabelle:2002,
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
title = {Isabelle/HOL---A Proof Assistant for Higher-Order
Logic},
publisher = pub-springer,
address = pub-springer:adr,
series = s-lncs,
volume = 2283,
doi = {10.1007/3-540-45949-9},
abstract = {This book is a self-contained introduction to interactive
proof in higher-order logic (\acs{hol}), using the proof
assistant Isabelle2002. It is a tutorial for potential
users rather than a monograph for researchers. The book has
three parts.
1. Elementary Techniques shows how to model functional
programs in higher-order logic. Early examples involve
lists and the natural numbers. Most proofs are two steps
long, consisting of induction on a chosen variable followed
by the auto tactic. But even this elementary part covers
such advanced topics as nested and mutual recursion. 2.
Logic and Sets presents a collection of lower-level tactics
that you can use to apply rules selectively. It also
describes Isabelle/\acs{hol}'s treatment of sets, functions
and relations and explains how to define sets inductively.
One of the examples concerns the theory of model checking,
and another is drawn from a classic textbook on formal
languages. 3. Advanced Material describes a variety of
other topics. Among these are the real numbers, records and
overloading. Advanced techniques are described involving
induction and recursion. A whole chapter is devoted to an
extended example: the verification of a security protocol. },
year = 2002,
acknowledgement={brucker, 2007-02-19},
bibkey = {nipkow.ea:isabelle:2002},
tags = {noTAG},
clearance = {unclassified},
timestap = {2008-05-26}
}
@InProceedings{ wenzel:asynchronous:2014,
author = {Makarius Wenzel},
title = {Asynchronous User Interaction and Tool Integration in
Isabelle/{PIDE}},
booktitle = {Interactive Theorem Proving (ITP)},
pages = {515--530},
year = 2014,
crossref = {klein.ea:interactive:2014},
doi = {10.1007/978-3-319-08970-6_33},
timestamp = {Sun, 21 May 2017 00:18:59 +0200},
abstract = { Historically, the LCF tradition of interactive theorem
proving was tied to the read-eval-print loop, with
sequential and synchronous evaluation of prover commands
given on the command-line. This user-interface technology
was adequate when R. Milner introduced his LCF proof
assistant in the 1970-ies, but it severely limits the
potential of current multicore hardware and advanced IDE
front-ends.
Isabelle/PIDE breaks this loop and retrofits the
read-eval-print phases into an asynchronous model of
document-oriented proof processing. Instead of feeding a
sequence of individual commands into the prover process,
the primary interface works via edits over a family of
document versions. Execution is implicit and managed by the
prover on its own account in a timeless and stateless
manner. Various aspects of interactive proof checking are
scheduled according to requirements determined by the
front-end perspective on the proof document, while making
adequate use of the CPU resources on multicore hardware on
the back-end.
Recent refinements of Isabelle/PIDE provide an explicit
concept of asynchronous print functions over existing proof
states. This allows to integrate long-running or
potentially non-terminating tools into the document-model.
Applications range from traditional proof state output
(which may consume substantial time in interactive
development) to automated provers and dis-provers that
report on existing proof document content (e.g.
Sledgehammer, Nitpick, Quickcheck in Isabelle/HOL).
Moreover, it is possible to integrate query operations via
additional GUI panels with separate input and output (e.g.
for Sledgehammer or find-theorems). Thus the Prover IDE
provides continuous proof processing, augmented by add-on
tools that help the user to continue writing proofs.
}
}
@Proceedings{ klein.ea:interactive:2014,
editor = {Gerwin Klein and Ruben Gamboa},
title = {Interactive Theorem Proving - 5th International
Conference, {ITP} 2014, Held as Part of the Vienna Summer
of Logic, {VSL} 2014, Vienna, Austria, July 14-17, 2014.
Proceedings},
series = s-lncs,
volume = 8558,
publisher = pub-springer,
year = 2014,
doi = {10.1007/978-3-319-08970-6},
isbn = {978-3-319-08969-0}
}
@InProceedings{ bezzecchi.ea:making:2018,
title = {Making Agile Development Processes fit for V-style
Certification Procedures},
author = {Bezzecchi, S. and Crisafulli, P. and Pichot, C. and Wolff,
B.},
booktitle = {{ERTS'18}},
abstract = {We present a process for the development of safety and
security critical components in transportation systems
targeting a high-level certification (CENELEC 50126/50128,
DO 178, CC ISO/IEC 15408).
The process adheres to the objectives of an ``agile
development'' in terms of evolutionary flexibility and
continuous improvement. Yet, it enforces the overall
coherence of the development artifacts (ranging from proofs
over tests to code) by a particular environment (CVCE).
In particular, the validation process is built around a
formal development based on the interactive theorem proving
system Isabelle/HOL, by linking the business logic of the
application to the operating system model, down to code and
concrete hardware models thanks to a series of refinement
proofs.
We apply both the process and its support in CVCE to a
case-study that comprises a model of an odometric service
in a railway-system with its corresponding implementation
integrated in seL4 (a secure kernel for which a
comprehensive Isabelle development exists). Novel
techniques implemented in Isabelle enforce the coherence of
semi-formal and formal definitions within to specific
certification processes in order to improve their
cost-effectiveness. },
pdf = {https://www.lri.fr/~wolff/papers/conf/2018erts-agile-fm.pdf},
year = 2018,
series = {ERTS Conference Proceedings},
location = {Toulouse}
}
@MISC{owl2012,
title = {OWL 2 Web Ontology Language},
note={\url{https://www.w3.org/TR/owl2-overview/}, Document Overview (Second Edition)},
author = {World Wide Web Consortium}
}
@MISC{ protege,
title = {Prot{\'e}g{\'e}},
note={\url{https://protege.stanford.edu}},
year = {2018}
}
@MISC{ cognitum,
title = {Fluent Editor},
note={\url{http://www.cognitum.eu/Semantics/FluentEditor/}},
year = {2018}
}
@MISC{ neon,
title = {The NeOn Toolkit},
note = {\url{http://neon-toolkit.org}},
year = {2018}
}
@MISC{ owlgred,
title = {OWLGrEd},
note={\url{http://owlgred.lumii.lv/}},
year = {2018}
}
@MISC{ rontorium,
title = {R Language Package for FLuent Editor (rOntorion)},
note={\url{http://www.cognitum.eu/semantics/FluentEditor/rOntorionFE.aspx}},
year = {2018}
}

View File

@ -0,0 +1,47 @@
(*<*)
theory "00_Frontmatter"
imports "Isabelle_DOF.technical_report"
begin
open_monitor*[this::report]
(*>*)
title*[title::title]\<open>Isabelle/DOF\<close>
subtitle*[subtitle::subtitle]\<open>User and Implementation Manual\<close>
text*[adb:: author,
email="\<open>a.brucker@exeter.ac.uk\<close>",
orcid="\<open>0000-0002-6355-1200\<close>",
http_site="\<open>https://www.brucker.ch/\<close>",
affiliation="\<open>University of Exeter, Exeter, UK\<close>"]\<open>Achim D. Brucker\<close>
text*[bu::author,
email = "\<open>wolff@lri.fr\<close>",
affiliation = "\<open>Université Paris-Saclay, LRI, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
text*[abs::abstract,
keywordlist="[''Ontology'', ''Ontological Modeling'', ''Document Management'',
''Formal Document Development'', ''Document Authoring'', ''Isabelle/DOF'']"]
\<open> \isadof provides an implementation of \dof on top of Isabelle/HOL.
\dof itself is a novel framework for \<^emph>\<open>defining\<close> ontologies
and \<^emph>\<open>enforcing\<close> them during document development and document
evolution. A major goal of \dof is the integrated development of
formal certification documents (\eg, for Common Criteria or CENELEC
50128) that require consistency across both formal and informal
arguments.
\isadof is integrated into Isabelle's IDE, which
allows for smooth ontology development as well as immediate
ontological feedback during the editing of a document.
In this paper, we give an in-depth presentation of the design
concepts of \dof's Ontology Definition Language (ODL) and key
aspects of the technology of its implementation. \isadof is the
first ontology language supporting machine-checked
links between the formal and informal parts in an LCF-style
interactive theorem proving environment.
\<close>
(*<*)
end
(*>*)

View File

@ -0,0 +1,130 @@
(*<*)
theory "01_Introduction"
imports "00_Frontmatter"
begin
(*>*)
chapter*[intro::introduction]\<open> Introduction \<close>
text*[introtext::introduction]\<open>
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the most pervasive challenge in the
digitization of knowledge and its propagation. This challenge incites numerous research efforts
summarized under the labels ``semantic web,'' ``data mining,'' or any form of advanced ``semantic''
text processing. A key role in structuring this linking is played by \<^emph>\<open>document ontologies\<close> (also called
\<^emph>\<open>vocabulary\<close> in the semantic web community~@{cite "w3c:ontologies:2015"}), \ie, a machine-readable
form of the structure of documents as well as the document discourse.
Such ontologies can be used for the scientific discourse within scholarly articles, mathematical
libraries, and in the engineering discourse of standardized software certification
documents~@{cite "boulanger:cenelec-50128:2015" and "cc:cc-part3:2006"}: certification documents
have to follow a structure. In practice, large groups of developers have to produce a substantial
set of documents where the consistency is notoriously difficult to maintain. In particular,
certifications are centered around the \<^emph>\<open>traceability\<close> of requirements throughout the entire
set of documents. While technical solutions for the traceability problem exist (most notably:
DOORS~\cite{doors}), they are weak in the treatment of formal entities (such as formulas and their
logical contexts).
Further applications are the domain-specific discourse in juridical texts or medical reports.
In general, an ontology is a formal explicit description of \<^emph>\<open>concepts\<close> in a domain of discourse
(called \<^emph>\<open>classes\<close>), properties of each concept describing \<^emph>\<open>attributes\<close> of the concept, as well
as \<^emph>\<open>links\<close> between them. A particular link between concepts is the \<^emph>\<open>is-a\<close> relation declaring
the instances of a subclass to be instances of the super-class.
To address this challenge, we present the Document Ontology Framework (\dof). \dof is
designed for building scalable and user-friendly tools on top of interactive theorem provers,
and an implementation of DOF called \isadof. \isadof is a novel framework, extending of
Isabelle/HOL, to \<^emph>\<open>model\<close> typed ontologies and to \<^emph>\<open>enforce\<close> them during document evolution. Based
on Isabelle infrastructures, ontologies may refer to types, terms, proven theorems, code, or
established assertions. Based on a novel adaption of the Isabelle IDE, a document is checked to be
\<^emph>\<open>conform\<close> to a particular ontology---\isadof is designed to give fast user-feedback \<^emph>\<open>during the
capture of content\<close>. This is particularly valuable in case of document evolution, where the
\<^emph>\<open>coherence\<close> between the formal and the informal parts of the content can be mechanically checked.
To avoid any misunderstanding: \isadof is \<^emph>\<open>not a theory in HOL\<close> on ontologies and operations to
track and trace links in texts, it is an \<^emph>\<open>environment to write structured text\<close> which
\<^emph>\<open>may contain\<close> Isabelle/HOL definitions and proofs like mathematical articles, tech-reports and
scientific papers---as the present one, which is written in \isadof itself. \isadof is a plugin
into the Isabelle/Isar framework in the style of~@{cite "wenzel.ea:building:2007"}.\<close>
subsubsection\<open>How to Read This Manual\<close>
(*<*)
declare_reference*[background::text_section]
declare_reference*[isadof_tour::text_section]
declare_reference*[isadof_ontologies::text_section]
declare_reference*[isadof_developers::text_section]
(*>*)
text\<open>
This manual can be read in different ways, depending on what you want to accomplish. We see three
different main user groups:
\<^enum> \<^emph>\<open>\isadof users\<close>, \ie, users that just want to edit a core document, be it for a paper or a
technical report, using a given ontology. These users should focus on
@{docitem_ref (unchecked) \<open>isadof_tour\<close>} and, depending on their knowledge of Isabelle/HOL, also
@{docitem_ref (unchecked) \<open>background\<close>}.
\<^enum> \<^emph>\<open>Ontology developers\<close>, \ie, users that want to develop new ontologies or modify existing
document ontologies. These users should, after having gained acquaintance as a user, focus
on @{docitem_ref (unchecked) \<open>isadof_ontologies\<close>}.
\<^enum> \<^emph>\<open>\isadof developers\<close>, \ie, users that want to extend or modify \isadof, \eg, by adding new
text-elements. These users should read @{docitem_ref (unchecked) \<open>isadof_developers\<close>}
\<close>
subsubsection\<open>Typographical Conventions\<close>
text\<open>
We acknowledge that understanding \isadof and its implementation in all details requires
separating multiple technological layers or languages. To help the reader with this, we
will type-set the different languages in different styles. In particular, we will use
\<^item> a light-blue background for input written in Isabelle's Isar language, \eg:
\begin{isar}
lemma refl: "x = x"
by simp
\end{isar}
\<^item> a green background for examples of generated document fragments (\ie, PDF output):
\begin{out}
The axiom refl
\end{out}
\<^item> a red background for (S)ML-code:
\begin{sml}
fun id x = x
\end{sml}
\<^item> a yellow background for \LaTeX-code:
\begin{ltx}
\newcommand{\refl}{$x = x$}
\end{ltx}
\<^item> a grey background for shell scripts and interactive shell sessions:
\begin{bash}
ë\prompt{}ë ls
CHANGELOG.md CITATION examples install LICENSE README.md ROOTS src
\end{bash}
\<close>
subsubsection\<open>How to Cite \isadof\<close>
text\<open>
If you use or extend \isadof in your publications, please use
\<^item> for the \isadof system~@{cite "brucker.ea:isabelle-ontologies:2018"}:
\begin{quote}\small
A.~D. Brucker, I.~Ait-Sadoune, P.~Crisafulli, and B.~Wolff. Using the {Isabelle} ontology
framework: Linking the formal with the informal. In \<^emph>\<open>Conference on Intelligent Computer
Mathematics (CICM)\<close>, number 11006 in Lecture Notes in Computer Science. Springer-Verlag,
Heidelberg, 2018. \href{https://doi.org/10.1007/978-3-319-96812-4\_3}
{10.1007/978-3-319-96812-4\_3}.
\end{quote}
A \BibTeX-entry is available at:
\url{https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018}.
\<^item> for the implementation of \isadof~@{cite "brucker.ea:isabelledof:2019"}:
\begin{quote}\small
A.~D. Brucker and B.~Wolff. \isadof: Design and implementation. In P.~{\"O}lveczky and
G.~Sala{\"u}n, editors, \<^emph>\<open>Software Engineering and Formal Methods (SEFM)\<close>, Lecture Notes
in Computer Science. Springer-Verlag, Heidelberg, 2019.
\end{quote}
A \BibTeX-entry is available at:
\url{https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelledof-2019}.
\<close>
subsubsection\<open>Availability\<close>
text\<open>
The implementation of the framework is available at
\url{\dofurl}. The website also provides links to the latest releases. \isadof is licensed
under a 2-clause BSD license (SPDX-License-Identifier: BSD-2-Clause).
\<close>
(*<*)
end
(*>*)

View File

@ -0,0 +1,161 @@
(*<*)
theory "02_Background"
imports "01_Introduction"
begin
(*>*)
chapter*[background::text_section]\<open> Background\<close>
section*[bgrnd1::introduction]\<open>The Isabelle System Architecture\<close>
figure*[architecture::figure,relative_width="95",src="''figures/isabelle-architecture''"]\<open>
The system architecture of Isabelle (left-hand side) and the
asynchronous communication between the Isabelle system and
the IDE (right-hand side). \<close>
text*[bg::introduction]\<open>
While Isabelle @{cite "nipkow.ea:isabelle:2002"} is widely perceived as an interactive theorem
prover for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we would like to emphasize
the view that Isabelle is far more than that: it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This
refers to the ``\<^emph>\<open>generic system framework of Isabelle/Isar underlying recent versions of Isabelle.
Among other things, Isar provides an infrastructure for Isabelle plug-ins, comprising extensible
state components and extensible syntax that can be bound to ML programs. Thus, the Isabelle/Isar
architecture may be understood as an extension and refinement of the traditional `LCF approach',
with explicit infrastructure for building derivative systems.\<close>''~@{cite "wenzel.ea:building:2007"}
The current system framework offers moreover the following features:
\<^item> a build management grouping components into to pre-compiled sessions,
\<^item> a prover IDE (PIDE) framework~@{cite "wenzel:asynchronous:2014"} with various front-ends
\<^item> documentation-generation,
\<^item> code generators for various target languages,
\<^item> an extensible front-end language Isabelle/Isar, and,
\<^item> last but not least, an LCF style, generic theorem prover kernel as
the most prominent and deeply integrated system component.
The Isabelle system architecture shown in @{docitem_ref \<open>architecture\<close>} comes with many layers,
with Standard ML (SML) at the bottom layer as implementation language. The architecture actually
foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which resides in the SML structure \inlinesml{Context}.
This structure provides a kind of container called \<^emph>\<open>context\<close> providing an identity, an
ancestor-list as well as typed, user-defined state for components (plugins) such as \isadof.
On top of the latter, the LCF-Kernel, tactics, automated proof procedures as well as specific
support for higher specification constructs were built.\<close>
section*[dof::introduction]\<open>The Document Model Required by \dof\<close>
text\<open>
In this section, we explain the assumed document model underlying our Document Ontology Framework
(\dof) in general. In particular we discuss the concepts \<^emph>\<open>integrated document\<close>, \<^emph>\<open>sub-document\<close>,
\<^emph>\<open>text-element\<close> and \<^emph>\<open>semantic macros\<close> occurring inside text-elements. Furthermore, we assume two
different levels of parsers (for \<^emph>\<open>outer\<close> and \<^emph>\<open>inner syntax\<close>) where the inner-syntax is basically
a typed \inlineisar|\<lambda>|-calculus and some Higher-order Logic (HOL).
\<close>
(*<*)
declare_reference*["fig:dependency"::text_section]
(*>*)
text\<open>
We assume a hierarchical document model\index{document model}, \ie, an \<^emph>\<open>integrated\<close> document
consists of a hierarchy of \<^emph>\<open>sub-documents\<close> (files) that can depend acyclically on each other.
Sub-documents can have different document types in order to capture documentations consisting of
documentation, models, proofs, code of various forms and other technical artifacts. We call the
main sub-document type, for historical reasons, \<^emph>\<open>theory\<close>-files. A theory file\bindex{theory!file}
consists of a \<^emph>\<open>header\<close>\bindex{header}, a \<^emph>\<open>context definition\<close>\index{context}, and a body
consisting of a sequence of \<^emph>\<open>command\<close>s (see @{figure (unchecked) "fig:dependency"}). Even the header consists
of a sequence of commands used for introductory text elements not depending on any context.
The context-definition contains an \inlineisar{import} and a
\inlineisar{keyword} section, for example:
\begin{isar}
" theory Example (* Name of the 'theory' *)
" imports (* Declaration of 'theory' dependencies *)
" Main (* Imports a library called 'Main' *)
" keywords (* Registration of keywords defined locally *)
" requirement (* A command for describing requirements *)
\end{isar}
where \inlineisar{Example} is the abstract name of the text-file,
\inlineisar{Main} refers to an imported theory (recall that the import
relation must be acyclic) and \inlineisar{keywords} are used to
separate commands from each other.
We distinguish fundamentally two different syntactic levels:
\<^item> the \emph{outer-syntax}\bindex{syntax!outer}\index{outer syntax|see {syntax, outer}} (\ie, the
syntax for commands) is processed by a lexer-library and parser combinators built on top, and
\<^item> the \emph{inner-syntax}\bindex{syntax!inner}\index{inner syntax|see {syntax, inner}} (\ie, the
syntax for \inlineisar|\<lambda>|-terms in HOL) with its own parametric polymorphism type
checking.
On the semantic level, we assume a validation process for an integrated document, where the
semantics of a command is a transformation \inlineisar+\<theta> \<rightarrow> \<theta>+ for some system state
\inlineisar+\<theta>+. This document model can be instantiated with outer-syntax commands for common
text elements, \eg, \inlineisar+section{*...*}+ or \inlineisar+text{*...*}+. Thus, users can add
informal text to a sub-document using a text command:
\begin{isar}
text\<Open>This is a description.\<Close>
\end{isar}
This will type-set the corresponding text in, for example, a PDF document. However, this
translation is not necessarily one-to-one: text elements can be enriched by formal, \ie,
machine-checked content via \emph{semantic macros}, called antiquotations\bindex{antiquotation}:
\begin{isar}
text\<Open>According to the reflexivity axiom <@>{thm refl}, we obtain in \<Gamma>
for <@>{term "fac 5"} the result <@>{value "fac 5"}.\<Close>
\end{isar}
which is represented in the final document (\eg, a PDF) by:
\begin{out}
According to the reflexivity axiom $\mathrm{x = x}$, we obtain in $\Gamma$ for $\operatorname{fac} \text{\textrm{5}}$ the result $\text{\textrm{120}}$.
\end{out}
Semantic macros are partial functions of type \inlineisar+\<theta> \<rightarrow> text+; since they can use the
system state, they can perform all sorts of specific checks or evaluations (type-checks,
executions of code-elements, references to text-elements or proven theorems such as
\inlineisar+refl+, which is the reference to the axiom of reflexivity).
Semantic macros establish \<^emph>\<open>formal content\<close> inside informal content; they can be
type-checked before being displayed and can be used for calculations before being
typeset. They represent the device for linking the formal with the informal.
\<close>
figure*["fig:dependency"::figure,relative_width="70",src="''figures/document-hierarchy''"]
\<open>A Theory-Graph in the Document Model. \<close>
section*[bgrnd21::introduction]\<open>Implementability of the Required Document Model.\<close>
text\<open>
Batch-mode checkers for \dof can be implemented in all systems of the LCF-style prover family,
\ie, systems with a type-checked \inlinesml{term}, and abstract \inlinesml{thm}-type for
theorems (protected by a kernel). This includes, \eg, ProofPower, HOL4, HOL-light, Isabelle, or
Coq and its derivatives. \dof is, however, designed for fast interaction in an IDE. If a user wants
to benefit from this experience, only Isabelle and Coq have the necessary infrastructure of
asynchronous proof-processing and support by a PIDE~@{cite "DBLP:conf/itp/Wenzel14" and
"DBLP:journals/corr/Wenzel14" and "DBLP:conf/mkm/BarrasGHRTWW13"
and "Faithfull:2018:COQ:3204179.3204223"} which in many features over-accomplishes the required
features of \dof. For example, current Isabelle versions offer cascade-syntaxes (different
syntaxes and even parser-technologies which can be nested along the
\inlineisar+\<Open> ... \<Close> + barriers), while \dof actually only requires a two-level
syntax model.
\<close>
figure*["fig:dof-ide"::figure,relative_width="95",src="''figures/cicm2018-combined''"]\<open>
The \isadof IDE (left) and the corresponding PDF (right), showing the first page
of~\cite{brucker.ea:isabelle-ontologies:2018}.\<close>
text\<open>
We call the present implementation of \dof on the Isabelle platform \isadof.
@{docitem "fig:dof-ide"} shows a screen-shot of an introductory paper on
\isadof~@{cite "brucker.ea:isabelle-ontologies:2018"}: the \isadof PIDE can be seen on the left,
while the generated presentation in PDF is shown on the right.
Isabelle provides, beyond the features required for \dof, a lot of additional benefits. For
example, it also allows the asynchronous evaluation and checking of the document
content~@{cite "DBLP:conf/itp/Wenzel14" and "DBLP:journals/corr/Wenzel14" and
"DBLP:conf/mkm/BarrasGHRTWW13"} and is dynamically extensible. Its PIDE provides a
\<^emph>\<open>continuous build, continuous check\<close> functionality, syntax highlighting, and auto-completion.
It also provides infrastructure for displaying meta-information (\eg, binding and type annotation)
as pop-ups, while hovering over sub-expressions. A fine-grained dependency analysis allows the
processing of individual parts of theory files asynchronously, allowing Isabelle to interactively
process large (hundreds of theory files) documents. Isabelle can group sub-documents into sessions,
\ie, sub-graphs of the document-structure that can be ``pre-compiled'' and loaded
instantaneously, \ie, without re-processing. \<close>
(*<*)
end
(*>*)

View File

@ -0,0 +1,719 @@
(*<*)
theory
"03_GuidedTour"
imports
"02_Background"
"Isabelle_DOF.CENELEC_50128"
begin
(*>*)
chapter*[isadof_tour::text_section]\<open>\isadof: A Guided Tour\<close>
text\<open>
In this chapter, we will give an introduction to using \isadof for users that want to create and
maintain documents following an existing document ontology.
\<close>
section*[getting_started::technical]\<open>Getting Started\<close>
subsection*[installation::technical]\<open>Installation\<close>
text\<open>
In this section, we will show how to install \isadof and its pre-requisites: Isabelle and
\LaTeX. We assume a basic familiarity with a Linux/Unix-like command line (i.e., a shell).
\<close>
subsubsection*[prerequisites::technical]\<open>Pre-requisites\<close>
text\<open>
\isadof has two major pre-requisites:
\<^item> \<^bold>\<open>Isabelle \isabelleversion\<close>\bindex{Isabelle}. \isadof will not work
with a different version of Isabelle. If you need \isadof for a different version of
Isabelle, please check the \isadof website if there is a version available supporting
the required version of Isabelle. \isadof uses a two-part version system (e.g., 1.0/2019),
where the first part is the version of \isadof (using semantic versioning) and the second
part is the supported version of Isabelle. Thus, the same version of \isadof might be
available for different versions of Isabelle.
\<^item> \<^bold>\<open>\TeXLive 2019\<close>\bindex{TexLive@\TeXLive} or any other modern
\LaTeX-distribution that ships a \pdftex-binary supporting the
\inlineltx|\expanded|-primitive
(for details, please see \url{https://www.texdev.net/2018/12/06/a-new-primitive-expanded}).
\<close>
paragraph\<open>Installing Isabelle\<close>
text\<open>
Please download and install the Isabelle \isabelleversion distribution for your operating system
from the \href{\isabelleurl}{Isabelle website} (\url{\isabelleurl}). After the successful
installation of Isabelle, you should be able to call the \inlinebash|isabelle| tool on the
command line:
\begin{bash}
ë\prompt{}ë isabelle version
ë\isabellefullversionë
\end{bash}
Depending on your operating system and depending if you put Isabelle's \inlinebash{bin} directory
in your \inlinebash|PATH|, you will need to invoke \inlinebash|isabelle| using its
full qualified path, \eg:
\begin{bash}
ë\prompt{}ë /usr/local/Isabelleë\isabelleversionë/bin/isabelle version
ë\isabellefullversionë
\end{bash}
\<close>
paragraph\<open>Installing \TeXLive\<close>
text\<open>
Modern Linux distribution will allow you to install \TeXLive using their respective package
managers. On a modern Debian system or a Debian derivative (\eg, Ubuntu), the following command
should install all required \LaTeX{} packages:
\begin{bash}
ë\prompt{}ë sudo aptitude install texlive-latex-extra texlive-fonts-extra
\end{bash}
Please check that this, indeed, installs a version of \pdftex{} that supports the
\inlineltx|\expanded|-primitive. To check your \pdfTeX-binary, execute
\begin{bash}
ë\prompt{}ë pdftex \\expanded{Success}\\end
This is pdfTeX, Version 3.14159265-2.6-1.40.20 (TeX Live 2019/Debian) (preloaded format=pdftex)
restricted \write18 enabled.
entering extended mode
[1{/var/lib/texmf/fonts/map/pdftex/updmap/pdftex.map}]</usr/share/texlive/texmf
-dist/fonts/type1/public/amsfonts/cm/cmr10.pfb>
Output written on texput.pdf (1 page, 8650 bytes).
Transcript written on texput.log.
\end{bash}
If this generates successfully a file \inlinebash|texput.pdf|, your \pdftex-binary supports
the \inlineltx|\expanded|-primitive. If your Linux distribution does not (yet) ship \TeXLive{}
2019 or your are running Windows or OS X, please follow the installation instructions from the
\href{https://www.tug.org/texlive/acquire-netinstall.html}{\TeXLive}{} website
(\url{https://www.tug.org/texlive/acquire-netinstall.html}).
\<close>
subsubsection*[isadof::technical]\<open>Installing \isadof\<close>
text\<open>
In the following, we assume that you already downloaded the \isadof distribution
(\href{\isadofarchiveurl}{\isadofarchiven}) from the \isadof web site. The main steps for
installing are extracting the \isadof distribution and calling its \inlinebash|install| script.
We start by extracting the \isadof archive:
\begin{bash}
ë\prompt{}ë tar xf ë\href{\isadofarchiveurl}{\isadofarchiven}ë
\end{bash}
This will create a directory \texttt{\isadofdirn} containing \isadof distribution.
Next, we need to invoke the \inlinebash|install| script. If necessary, the installation
automatically downloads additional dependencies from the AFP (\url{https://www.isa-afp.org}),
namely the AFP entries ``Functional Automata''~@{cite "Functional-Automata-AFP"} and ``Regular
Sets and Expressions''~@{cite "Regular-Sets-AFP"}. This might take a few minutes to complete.
Moreover, the installation script applies a patch to the Isabelle system, which requires
\<^emph>\<open>write permissions for the Isabelle system directory\<close> and registers \isadof as an Isabelle component.
If the \inlinebash|isabelle| tool is not in your \inlinebash|PATH|, you need to call the
\inlinebash|install| script with the \inlinebash|--isabelle| option, passing the full-qualified
path of the \inlinebash|isabelle| tool (\inlinebash|install --help| gives
you an overview of all available configuration options):
\begin{bash}
ë\prompt{}ë cd ë\isadofdirnë
ë\prompt{\isadofdirn}ë ./install --isabelle /usr/local/Isabelleë\isabelleversion/bin/isabelleë
Isabelle/DOF Installer
======================
* Checking Isabelle version:
Success: found supported Isabelle version ë(\isabellefullversion)ë
* Checking (La)TeX installation:
Success: pdftex supports \expanded{} primitive.
* Check availability of Isabelle/DOF patch:
Warning: Isabelle/DOF patch is not available or outdated.
Trying to patch system ....
Applied patch successfully, Isabelle/HOL will be rebuilt during
the next start of Isabelle.
* Checking availability of AFP entries:
Warning: could not find AFP entry Regular-Sets.
Warning: could not find AFP entry Functional-Automata.
Trying to install AFP (this might take a few *minutes*) ....
Registering Regular-Sets iëën
/home/achim/.isabelle/Isabelleë\isabelleversion/ROOTSë
Registering Functional-Automata iëën
/home/achim/.isabelle/Isabelleë\isabelleversion/ROOTSë
AFP installation successful.
* Searching fëëor existing installation:
No old installation found.
* Installing Isabelle/DOF
- Installing Tools iëën
/home/achim/.isabelle/Isabelleë\isabelleversion/DOF/Toolsë
- Installing document templates iëën
/home/achim/.isabelle/Isabelleë\isabelleversion/DOF/document-templateë
- Installing LaTeX styles iëën
/home/achim/.isabelle/Isabelleë\isabelleversion/DOF/latexë
- Registering Isabelle/DOF
* Registering tools iëën
/home/achim/.isabelle/Isabelleë\isabelleversion/etc/settingsë
* Installation successful. Enjoy Isabelle/DOF, you can build the session
Isabelle_DOF and all example documents by executing:
/usr/local/Isabelleë\isabelleversion/bin/isabelleë build -D .
\end{bash}
After the successful installation, you can now explore the examples (in the sub-directory
\inlinebash|examples|) or create your own project. On the first start, the session
\inlinebash|Isabelle_DOF| will be built automatically. If you want to pre-build this
session and all example documents, execute:
\begin{bash}
ë\prompt{\isadofdirn}ë isabelle build -D .
\end{bash}
\<close>
subsection*[first_project::technical]\<open>Creating an \isadof Project\<close>
text\<open>
\isadof provides its own variant of Isabelle's
\inlinebash|mkroot| tool, called \inlinebash|mkroot_DOF|:\index{mkroot\_DOF}
\begin{bash}
ë\prompt{}ë isabelle mkroot_DOF -h
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
Options are:
-h print this hëëelp text and exëëit
-n NAME alternative session name (default: DIR base name)
-o ONTOLOGY (default: scholarly_paper)
Available ontologies:
* CENELEC_50128
* math_exam
* scholarly_paper
* technical_report
-t TEMPLATE (default: scrartcl)
Available document templates:
* eptcs-UNSUPPORTED
* lipics-v2019-UNSUPPORTED
* lncs
* scrartcl
* scrreprt-modern
* scrreprt
Prepare session root DIR (default: current directory).
\end{bash}
Creating a new document setup requires two decisions:
\<^item> which ontologies (\eg, scholarly\_paper) are required and
\<^item> which document template (layout)\index{document template} should be used
(\eg, scrartcl\index{scrartcl}). Some templates (\eg, lncs) require that the user manually
obtains and adds the necessary \LaTeX class file (\eg, \inlinebash|llncs.cls|).
This is mostly due to licensing restrictions.
\<close>
text\<open>
If you are happy with the defaults, \ie, using the ontology for writing academic papers
(scholarly\_paper) using a report layout based on the article class (\inlineltx|scrartcl|) of
the KOMA-Script bundle~@{cite "kohm:koma-script:2019"}, you can create your first project
\inlinebash|myproject| as follows:
\begin{bash}
ë\prompt{}ë isabelle mkroot_DOF myproject
Preparing session "myproject" iëën "myproject"
creating "myproject/ROOT"
creating "myproject/document/root.tex"
Now use the following coëëmmand line to build the session:
isabelle build -D myproject
\end{bash}
This creates a directory \inlinebash|myproject| containing the \isadof-setup for your
new document. To check the document formally, including the generation of the document in PDF,
you only need to execute
\begin{bash}
ë\prompt{}ë isabelle build -d . myproject
\end{bash}
This will create the directory \inlinebash|myproject|:
\begin{center}
\begin{minipage}{.9\textwidth}
\dirtree{%
.1 .
.2 myproject.
.3 document.
.4 build\DTcomment{Build Script}.
.4 isadof.cfg\DTcomment{\isadof configuration}.
.4 preamble.tex\DTcomment{Manual \LaTeX-configuration}.
.3 ROOT\DTcomment{Isabelle build-configuration}.
}
\end{minipage}
\end{center}
The \isadof configuration (\inlinebash|isadof.cfg|) specifies the required
ontologies and the document template using a YAML syntax.\<^footnote>\<open>Isabelle power users will recognize that
\isadof's document setup does not make use of a file \inlinebash|root.tex|: this file is
replaced by built-in document templates.\<close> The main two configuration files for
users are:
\<^item> The file \inlinebash|ROOT|\index{ROOT}, which defines the Isabelle session. New theory files as well as new
files required by the document generation (\eg, images, bibliography database using \BibTeX, local
\LaTeX-styles) need to be registered in this file. For details of Isabelle's build system, please
consult the Isabelle System Manual~@{cite "wenzel:system-manual:2019"}.
\<^item> The file \inlinebash|preamble.tex|\index{preamble.tex}, which allows users to add additional
\LaTeX-packages or to add/modify \LaTeX-commands.
\<close>
section*[scholar_onto::example]\<open>Writing Academic Publications (scholarly\_paper)\<close>
subsection\<open>The Scholarly Paper Example\<close>
text\<open>
The ontology ``scholarly\_paper''\index{ontology!scholarly\_paper} is a small ontology modeling
academic/scientific papers. In this \isadof application scenario, we deliberately refrain from
integrating references to (Isabelle) formal content in order to demonstrate that \isadof is not a
framework from Isabelle users to Isabelle users only. Of course, such references can be added
easily and represent a particular strength of \isadof.
The \isadof distribution contains an example (actually, our CICM 2018
paper~@{cite "brucker.ea:isabelle-ontologies:2018"}) using the ontology ``scholarly\_paper'' in
the directory \nolinkurl{examples/scholarly_paper/2018-cicm-isabelle_dof-applications/}. You
can inspect/edit the example in Isabelle's IDE, by either
\<^item> starting Isabelle/jedit using your graphical user interface (\eg, by clicking on the
Isabelle-Icon provided by the Isabelle installation) and loading the file
\nolinkurl{examples/scholarly_paper/2018-cicm-isabelle_dof-applications/IsaDofApplications.thy}.
\<^item> starting Isabelle/jedit from the command line by calling:
\begin{bash}
ë\prompt{\isadofdirn}ë
isabelle jedit examples/scholarly_paper/2018-cicm-isabelle_dof-applications/\
IsaDofApplications.thy
\end{bash}
\<close>
text\<open>
You can build the PDF-document by calling:
\begin{bash}
ë\prompt{}ë isabelle build \
2018-cicm-isabelle_dof-applications
\end{bash}
\<close>
subsection\<open>Modeling Academic Publications\<close>
text\<open>
We start by modeling the usual text-elements of an academic paper: the title and author
information, abstract, and text section:
\begin{isar}
doc_class title =
short_title :: "string option" <= None
doc_class subtitle =
abbrev :: "string option" <= None
doc_class author =
affiliation :: "string"
doc_class abstract =
keyword_list :: "string list" <= "[]"
doc_class text_section =
main_author :: "author option" <= None
todo_list :: "string list" <= "[]"
\end{isar}
The attributes \inlineisar+short_title+, \inlineisar+abbrev+ etc are introduced with their types as
well as their default values. Our model prescribes an optional \inlineisar+main_author+ and a
todo-list attached to an arbitrary text section; since instances of this class are mutable
(meta)-objects of text-elements, they can be modified arbitrarily through subsequent text and of
course globally during text evolution. Since \inlineisar+author+ is a HOL-type internally generated
by the \isadof framework, it can appear in the \inlineisar+main_author+ attribute of the
\inlineisar+text_section+ class; semantic links between concepts can be modeled this way.
\<close>
figure*[fig1::figure,spawn_columns=False,relative_width="95",src="''figures/Dogfood-Intro''"]
\<open> Ouroboros I: This paper from inside \ldots \<close>
text\<open>
  @{docitem \<open>fig1\<close>} shows the corresponding view in Isabelle/jedit of the start of an academic
paper. The text uses \isadof's own text-commands containing the meta-information provided by the
underlying ontology. We proceed by a definition of \inlineisar+introduction+'s, which we define
as the extension of \inlineisar+text_section+ which is intended to capture common infrastructure:
\begin{isar}
doc_class introduction = text_section +
comment :: string
\end{isar}
As a consequence of the definition as extension, the \inlineisar+introduction+ class
inherits the attributes \inlineisar+main_author+ and \inlineisar+todo_list+ together with
the corresponding default values.
We proceed more or less conventionally by the subsequent sections:
\begin{isar}
doc_class technical = text_section +
definition_list :: "string list" <= "[]"
doc_class example = text_section +
comment :: string
doc_class conclusion = text_section +
main_author :: "author option" <= None
doc_class related_work = conclusion +
main_author :: "author option" <= None
\end{isar}
Moreover, we model a document class for including figures (actually, this document class is already
defined in the core ontology of \isadof):
\begin{isar}
datatype placement = h | t | b | ht | hb
doc_class figure = text_section +
relative_width :: "int" (* percent of textwidth *)
src :: "string"
placement :: placement
spawn_columns :: bool <= True
\end{isar}
\<close>
figure*[fig_figures::figure,spawn_columns=False,relative_width="85",src="''figures/Dogfood-figures''"]
\<open> Ouroboros II: figures \ldots \<close>
text\<open>
The document class \inlineisar+figure+ (supported by the \isadof command \inlineisar+figure*+)
makes it possible to express the pictures and diagrams such as @{docitem_ref \<open>fig_figures\<close>}.
Finally, we define a monitor class definition that enforces a textual ordering
in the document core by a regular expression:
\begin{isar}
doc_class article =
trace :: "(title + subtitle + author+ abstract +
introduction + technical + example +
conclusion + bibliography) list"
where "(title ~~ \<lbrakk>subtitle\<rbrakk> ~~ \<lbrace>author\<rbrace>$^+$+ ~~ abstract ~~
introduction ~~ \<lbrace>technical || example\<rbrace>$^+$ ~~ conclusion ~~
bibliography)"
\end{isar}
\<close>
subsection*[scholar_pide::example]\<open>Editing Support for Academic Papers\<close>
side_by_side_figure*[exploring::side_by_side_figure,anchor="''fig-Dogfood-II-bgnd1''",
caption="''Exploring a reference of a text-element.''",relative_width="48",
src="''figures/Dogfood-II-bgnd1''",anchor2="''fig-bgnd-text_section''",
caption2="''Exploring the class of a text element.''",relative_width2="47",
src2="''figures/Dogfood-III-bgnd-text_section''"]\<open>Exploring text elements.\<close>
side_by_side_figure*["hyperlinks"::side_by_side_figure,anchor="''fig:Dogfood-IV-jumpInDocCLass''",
caption="''Hyperlink to class-definition.''",relative_width="48",
src="''figures/Dogfood-IV-jumpInDocCLass''",anchor2="''fig:Dogfood-V-attribute''",
caption2="''Exploring an attribute.''",relative_width2="47",
src2="''figures/Dogfood-III-bgnd-text_section''"]\<open> Hyperlinks.\<close>
text\<open>
From these class definitions, \isadof also automatically generated editing
support for Isabelle/jedit. In \autoref{fig-Dogfood-II-bgnd1} and
\autoref{fig-bgnd-text_section} we show how hovering over links permits to explore its
meta-information. Clicking on a document class identifier permits to hyperlink into the
corresponding class definition (\autoref{fig:Dogfood-IV-jumpInDocCLass}); hovering over an
attribute-definition (which is qualified in order to disambiguate;
\autoref{fig:Dogfood-V-attribute}).
\<close>
figure*[figDogfoodVIlinkappl::figure,relative_width="80",src="''figures/Dogfood-V-attribute''"]
\<open> Exploring an attribute (hyperlinked to the class). \<close>
text\<open>
An ontological reference application in @{docitem_ref "figDogfoodVIlinkappl"}: the
  ontology-dependent antiquotation \inlineisar|@ {example ...}| refers to the corresponding
text-elements. Hovering allows for inspection, clicking for jumping to the definition. If the
link does not exist or has a non-compatible type, the text is not validated.
\<close>
section*[cenelec_onto::example]\<open>Writing Certification Documents (CENELEC\_50128)\<close>
subsection\<open>The CENELEC 50128 Example\<close>
text\<open>
The ontology ``CENELEC\_50128''\index{ontology!CENELEC\_50128} is a small ontology modeling
documents for a certification following CENELEC 50128~@{cite "boulanger:cenelec-50128:2015"}.
The \isadof distribution contains a small example using the ontology ``CENELEC\_50128'' in
the directory \nolinkurl{examples/CENELEC_50128/mini_odo/}. You can inspect/edit the example
in Isabelle's IDE, by either
\<^item> starting Isabelle/jedit using your graphical user interface (\eg, by clicking on the
Isabelle-Icon provided by the Isabelle installation) and loading the file
\nolinkurl{examples/CENELEC_50128/mini_odo/mini_odo.thy}.
\<^item> starting Isabelle/jedit from the command line by calling:
\begin{bash}
ë\prompt{\isadofdirn}ë
isabelle jedit examples/CENELEC_50128/mini_odo/mini_odo.thy
\end{bash}
\<close>
text\<open>
You can build the PDF-document by calling:
\begin{bash}
ë\prompt{}ë isabelle build mini_odo
\end{bash}
\<close>
subsection\<open>Modeling CENELEC 50128\<close>
text\<open>
Documents to be provided in formal certifications (such as CENELEC
50128~@{cite "boulanger:cenelec-50128:2015"} or Common Criteria~@{cite "cc:cc-part3:2006"}) can
  much profit from the control of ontological consistency: much of an evaluator's work consists in
tracing down the links from requirements over assumptions down to elements of evidence, be it in
the models, the code, or the tests. In a certification process, traceability becomes a major
concern; and providing mechanisms to ensure complete traceability already at the development of
the global document will clearly increase speed and reduce risk and cost of a certification
process. Making the link-structure machine-checkable, be it between requirements, assumptions,
their implementation and their discharge by evidence (be it tests, proofs, or authoritative
arguments), is therefore natural and has the potential to decrease the cost of developments
targeting certifications. Continuously checking the links between the formal and the semi-formal
parts of such documents is particularly valuable during the (usually collaborative) development
effort.
  As in many other cases, formal certification documents come with their own terminology and pragmatics
of what has to be demonstrated and where, and how the trace-ability of requirements through
design-models over code to system environment assumptions has to be assured.
In the sequel, we present a simplified version of an ontological model used in a
case-study~@{cite "bezzecchi.ea:making:2018"}. We start with an introduction of the concept of
requirement:
\begin{isar}
doc_class requirement = long_name :: "string option"
doc_class requirement_analysis = no :: "nat"
where "requirement_item +"
doc_class hypothesis = requirement +
hyp_type :: hyp_type <= physical (* default *)
datatype ass_kind = informal | semiformal | formal
doc_class assumption = requirement +
assumption_kind :: ass_kind <= informal
\end{isar}
Such ontologies can be enriched by larger explanations and examples, which may help
the team of engineers substantially when developing the central document for a certification,
  like an explication of what precisely the difference is between a \<^emph>\<open>hypothesis\<close> and an
  \<^emph>\<open>assumption\<close> in the context of the evaluation standard. Since the PIDE makes the definition
  of each document class available by a simple mouse-click, this kind of meta-knowledge
can be made far more accessible during the document evolution.
For example, the term of category \<^emph>\<open>assumption\<close> is used for domain-specific assumptions.
It has formal, semi-formal and informal sub-categories. They have to be
tracked and discharged by appropriate validation procedures within a
  certification process, be it by test or proof. It is different from a hypothesis, which is
globally assumed and accepted.
In the sequel, the category \<^emph>\<open>exported constraint\<close> (or \<^emph>\<open>ec\<close> for short)
is used for formal assumptions, that arise during the analysis,
design or implementation and have to be tracked till the final
evaluation target, and discharged by appropriate validation procedures
  within the certification process, be it by test or proof. A particular class of interest
is the category \<^emph>\<open>safety related application condition\<close> (or \<^emph>\<open>srac\<close>
for short) which is used for \<^emph>\<open>ec\<close>'s that establish safety properties
of the evaluation target. Their track-ability throughout the certification
is therefore particularly critical. This is naturally modeled as follows:
\begin{isar}
doc_class ec = assumption +
assumption_kind :: ass_kind <= (*default *) formal
doc_class srac = ec +
assumption_kind :: ass_kind <= (*default *) formal
\end{isar}
We now can, \eg, write
\begin{isar}
text*[ass123::SRAC]\<Open>
     The overall sampling frequency of the odometer subsystem is therefore
14 khz, which includes sampling, computing a$$nd result communication
times \ldots
\<Close>
\end{isar}
This will be shown in the PDF as follows:
\<close>
text*[ass123::SRAC] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 khz, which includes sampling, computing and
result communication times \ldots \<close>
subsection*[ontopide::technical]\<open>Editing Support for CENELEC 50128\<close>
figure*[figfig3::figure,relative_width="95",src="''figures/antiquotations-PIDE''"]
\<open> Standard antiquotations referring to theory elements.\<close>
text\<open> The corresponding view in @{docitem_ref \<open>figfig3\<close>} shows core part of a document
  conformimg to the CENELEC 50128 ontology. The first sample shows standard Isabelle antiquotations
@{cite "wenzel:isabelle-isar:2019"} into formal entities of a theory. This way, the informal parts
of a document get ``formal content'' and become more robust under change.\<close>
figure*[figfig5::figure, relative_width="95", src="''figures/srac-definition''"]
\<open> Defining a SRAC reference \ldots \<close>
figure*[figfig7::figure, relative_width="95", src="''figures/srac-as-es-application''"]
\<open> Using a SRAC as EC document reference. \<close>
text\<open> The subsequent sample in @{docitem_ref \<open>figfig5\<close>} shows the definition of an
\<^emph>\<open>safety-related application condition\<close>, a side-condition of a theorem which
has the consequence that a certain calculation must be executed sufficiently fast on an embedded
device. This condition can not be established inside the formal theory but has to be
checked by system integration tests. Now we reference in @{docitem_ref \<open>figfig7\<close>} this
safety-related condition; however, this happens in a context where general \<^emph>\<open>exported constraints\<close>
are listed. \isadof's checks establish that this is legal in the given ontology.
\<close>
section*[math_exam::example]\<open>Writing Exams (math\_exam)\<close>
subsection\<open>The Math Exam Example\<close>
text\<open>
The ontology ``math\_exam''\index{ontology!math\_exam} is an experimental ontology modeling
the process of writing exams at higher education institution in the United Kingdom, where exams
undergo both an internal and external review process. The \isadof distribution contains a tiny
example using the ontology ``math\_exam'' in the directory
\nolinkurl{examples/math_exam/MathExam/}. You can inspect/edit the example
in Isabelle's IDE, by either
\<^item> starting Isabelle/jedit using your graphical user interface (\eg, by clicking on the
Isabelle-Icon provided by the Isabelle installation) and loading the file
\nolinkurl{examples/math_exam/MathExam/MathExam.thy}.
\<^item> starting Isabelle/jedit from the command line by calling:
\begin{bash}
ë\prompt{\isadofdirn}ë
isabelle jedit examples/math_exam/MathExam/MathExam.thy
\end{bash}
\<close>
text\<open>
You can build the PDF-document by calling:
\begin{bash}
ë\prompt{}ë isabelle build MathExam
\end{bash}
\<close>
subsection\<open>Modeling Exams\<close>
text\<open>
The math-exam scenario is an application with mixed formal and semi-formal content. It addresses
applications where the author of the exam is not present during the exam and the preparation
requires a very rigorous process.
We assume that the content has four different types of addressees, which have a different
\<^emph>\<open>view\<close> on the integrated document:
\<^item> the \<^emph>\<open>setter\<close>, \ie, the author of the exam,
\<^item> the \<^emph>\<open>checker\<close>, \ie, an internal person that checks
the exam for feasibility and non-ambiguity,
\<^item> the \<^emph>\<open>external\<close>, \ie, an external person that checks
the exam for feasibility and non-ambiguity, and
\<^item> the \<^emph>\<open>student\<close>, \ie, the addressee of the exam.
\<close>
text\<open>
The latter quality assurance mechanism is used in many universities,
where for organizational reasons the execution of an exam takes place in facilities
where the author of the exam is not expected to be physically present.
Furthermore, we assume a simple grade system (thus, some calculation is required). We
can model this as follows:
\begin{isar}
doc_class Author = ...
datatype Subject = algebra | geometry | statistical
datatype Grade = A1 | A2 | A3
doc_class Header = examTitle :: string
examSubject :: Subject
date :: string
timeAllowed :: int -- minutes
datatype ContentClass = setter
| checker
| external_examiner
| student
doc_class Exam_item = concerns :: "ContentClass set"
type_synonym SubQuestion = string
\end{isar}
The heart of this ontology is an alternation of questions and answers, where the answers can
consist of simple yes-no answers or lists of formulas. Since we do not assume familiarity of
the students with Isabelle (\inlineisar+term+ would assume that this is a parse-able and
type-checkable entity), we basically model a derivation as a sequence of strings:
\begin{isar}
doc_class Answer_Formal_Step = Exam_item +
justification :: string
"term" :: "string"
doc_class Answer_YesNo = Exam_item +
step_label :: string
yes_no :: bool -- \<open>for checkboxes\<close>
datatype Question_Type =
formal | informal | mixed
doc_class Task = Exam_item +
level :: Level
type :: Question_Type
subitems :: "(SubQuestion *
(Answer_Formal_Step list + Answer_YesNo) list) list"
concerns :: "ContentClass set" <= "UNIV"
mark :: int
doc_class Exercise = Exam_item +
type :: Question_Type
content :: "(Task) list"
concerns :: "ContentClass set" <= "UNIV"
mark :: int
\end{isar}
In many institutions, having a rigorous process of validation for exam subjects makes sense: is
the initial question correct? Is a proof in the sense of the question possible? We model the
possibility that the @{term examiner} validates a question by a sample proof validated by Isabelle:
\begin{isar}
doc_class Validation =
tests :: "term list" <="[]"
proofs :: "thm list" <="[]"
doc_class Solution = Exam_item +
content :: "Exercise list"
valids :: "Validation list"
concerns :: "ContentClass set" <= "{setter,checker,external_examiner}"
doc_class MathExam=
content :: "(Header + Author + Exercise) list"
global_grade :: Grade
where "\<lbrace>Author\<rbrace>$^+$ ~~ Header ~~ \<lbrace>Exercise ~~ Solution\<rbrace>$^+$ "
\end{isar}
  In our scenario these sample proofs are completely \<^emph>\<open>internal\<close>, \ie, not exposed to the
students but just additional material for the internal review process of the exam.
\<close>
section\<open>Style Guide\<close>
text\<open>
The document generation process of \isadof is based on Isabelle's document generation framework,
using \LaTeX{} as the underlying back-end. As Isabelle's document generation framework, it is
possible to embed (nearly) arbitrary \LaTeX-commands in text-commands, \eg:
\begin{isar}
text\<Open> This is \emph{emphasized} a$$nd this is a
citation~\cite{brucker.ea:isabelle-ontologies:2018}\<Close>
\end{isar}
  In general, we advise against this practice and, whenever possible, use the \isadof (respectively
Isabelle) provided alternatives:
\begin{isar}
text\<Open> This is *\<Open>emphasized\<Close> a$$nd this is a
citation <@>{cite "brucker.ea:isabelle-ontologies:2018"}.\<Close>
\end{isar}
Clearly, this is not always possible and, in fact, often \isadof documents will contain
\LaTeX-commands, this should be restricted to layout improvements that otherwise are (currently)
not possible. As far as possible, the use of \LaTeX-commands should be restricted to the definition
of ontologies and document templates (see @{docitem_ref (unchecked) \<open>isadof_ontologies\<close>}).
Restricting the use of \LaTeX has two advantages: first, \LaTeX commands can circumvent the
consistency checks of \isadof and, hence, only if no \LaTeX commands are used, \isadof can
  ensure that a document that does not generate any error messages in Isabelle/jedit also generates
a PDF document. Second, future version of \isadof might support different targets for the
document generation (\eg, HTML) which, naturally, are only available to documents not using
native \LaTeX-commands.
Similarly, (unchecked) forward references should, if possible, be avoided, as they also might
  create dangling references during the document generation that break the document generation.
Finally, we recommend to use the @{command "check_doc_global"} command at the end of your
document to check the global reference structure.
\<close>
(*<*)
end
(*>*)

View File

@ -0,0 +1,906 @@
(*<*)
theory
"04_RefMan"
imports
"03_GuidedTour"
"Isabelle_DOF.Isa_COL"
begin
(*>*)
chapter*[isadof_ontologies::text_section]\<open>Developing Ontologies\<close>
text\<open>
In this chapter, we explain the concepts for modeling new ontologies, developing a document
representation for them, as well as developing new document templates.
\<close>
section*[infrastructure::text_section]\<open>Overview and Technical Infrastructure\<close>
text\<open>
\isadof is embedded in the underlying generic document model of Isabelle as described in
@{docitem "dof"}. Recall that the document language can be extended dynamically, \ie, new
  \<open>user-defined\<close> commands can be introduced at run-time. This is similar to the definition of new functions
  in an interpreter. \isadof as a system plugin is a number of new command definitions in
  Isabelle's document model.
  \isadof consists basically of four components:
\<^item> an own \<^emph>\<open>family of text-element\<close> such as @{command "title*"}, @{command "chapter*"}
@{command "text*"}, etc., which can be annotated with meta-information defined in the
underlying ontology definition and allow to build a \<^emph>\<open>core\<close> document,
\<^item> the \<^emph>\<open>ontology definition\<close> which is an Isabelle theory file with definitions
for document-classes and all auxiliary datatypes
(called Ontology Definition Language (ODL)),
\<^item> the ontology-specific \<^emph>\<open>layout definition\<close>, exploiting this meta-information, and
\<^item> the generic \<^emph>\<open>layout definition\<close> for documents following, \eg, the format guidelines of
publishers or standardization bodies.
\<close>
text\<open>
The list of fully supported (\ie, supporting both interactive ontological modeling and
document generation) ontologies and the list of supported document templates can be
obtained by calling \inlinebash|isabelle mkroot_DOF -h| (see @{docitem "first_project"}).
  Note that the postfix \inlinebash|-UNSUPPORTED| denotes experimental ontologies or templates
for which further manual setup steps might be required or that are not fully tested. Also note
that the \LaTeX-class files required by the templates need to be already installed on your
system. This is mostly a problem for publisher specific templates (\eg, Springer's
\path{llncs.cls}), which, due to copyright restrictions, cannot be distributed legally.
\<close>
subsection\<open>Ontologies\<close>
text\<open>
  The document core \<^emph>\<open>may\<close>, but \<^emph>\<open>need\<close> not, use Isabelle definitions or proofs for checking the
formal content---this manual is actually an example of a document not containing any proof.
Consequently, the document editing and checking facility provided by \isadof addresses the needs
of common users for an advanced text-editing environment, neither modeling nor proof knowledge is
inherently required.
We expect authors of ontologies to have experience in the use of \isadof, basic modeling (and,
potentially, some basic SML programming) experience, basic \LaTeX{} knowledge, and, last but not
least, domain knowledge of the ontology to be modeled. Users with experience in UML-like
  meta-modeling will feel familiar with most concepts; however, we expect no need for insight into
the Isabelle proof language, for example, or other more advanced concepts.
Technically, ontologies\index{ontology!directory structure} are stored in a directory
\inlinebash|src/ontologies| and consist of a Isabelle theory file and a \LaTeX-style file:
\begin{center}
\begin{minipage}{.9\textwidth}
\dirtree{%
.1 .
.2 src.
.3 ontologies\DTcomment{Ontologies}.
.4 ontologies.thy\DTcomment{Ontology Registration}.
.4 CENELEC\_50128\DTcomment{CENELEC\_50128}.
.5 CENELEC\_50128.thy.
.5 DOF-CENELEC\_50128.sty.
.4 scholarly\_paper\DTcomment{scholarly\_paper}.
.5 scholarly\_paper.thy.
.5 DOF-scholarly\_paper.sty.
.4 \ldots.
}
\end{minipage}
\end{center}
\<close>
text\<open>
Developing a new ontology ``\inlinebash|foo|'' requires, from a technical perspective, the
following steps:
\<^item> create a new sub-directory \inlinebash|foo| in the directory \inlinebash|src/ontologies|
\<^item> definition of the ontological concepts, using \isadof's Ontology Definition Language (ODL), in
a new theory file \path{src/ontologies/foo/foo.thy}.
\<^item> definition of the document representation for the ontological concepts in a \LaTeX-style
file \path{src/ontologies/foo/DOF-foo.sty}
\<^item> registration (as import) of the new ontology in the file.
\path{src/ontologies/ontologies.thy}.
\<^item> activation of the new document setup by executing the install script. You can skip the lengthy
checks for the AFP entries and the installation of the Isabelle patch by using the
\inlinebash|--skip-patch-and-afp| option:
\begin{bash}
ë\prompt{\isadofdirn}ë ./install --skip-patch-and-afp
\end{bash}
\<close>
subsection\<open>Document Templates\<close>
text\<open>
Document-templates\index{document template} define the overall layout (page size, margins, fonts,
  etc.) of the generated documents and are the main technical means for implementing layout
requirements that are, \eg, required by publishers or standardization bodies. Document-templates
are stored in a directory
\path{src/document-templates}:\index{document template!directory structure}
\begin{center}
\begin{minipage}{.9\textwidth}
\dirtree{%
.1 .
.2 src.
.3 document-templates\DTcomment{Document templates}.
.4 root-lncs.tex.
.4 root-scrartcl.tex.
.4 root-scrreprt-modern.tex.
.4 root-scrreprt.tex.
}
\end{minipage}
\end{center}
\<close>
text\<open>
Developing a new document template ``\inlinebash|bar|'' requires the following steps:
\<^item> develop a new \LaTeX-template \inlinebash|src/document-templates/root-bar.tex|
\<^item> activation of the new document template by executing the install script. You can skip the lengthy
checks for the AFP entries and the installation of the Isabelle patch by using the
\inlinebash|--skip-patch-and-afp| option:
\begin{bash}
ë\prompt{\isadofdirn}ë ./install --skip-patch-and-afp
\end{bash}
\<close>
text\<open>
As the document generation of \isadof is based
on \LaTeX, the \isadof document templates can (and should) make use of any \LaTeX-classes provided
by publishers or standardization bodies.
\<close>
section\<open>The Ontology Definition Language (ODL)\<close>
text\<open>
ODL shares some similarities with meta-modeling languages such as UML class
models: It builds upon concepts like class, inheritance, class-instances, attributes, references
to instances, and class-invariants. Some concepts like advanced type-checking, referencing to
formal entities of Isabelle, and monitors are due to its specific application in the
Isabelle context. Conceptually, ontologies specified in ODL consist of:
\<^item> \<^emph>\<open>document classes\<close> (\inlineisar{doc_class}) that describe concepts;
\<^item> an optional document base class expressing single inheritance
class extensions;
\<^item> \<^emph>\<open>attributes\<close> specific to document classes, where
\<^item> attributes are HOL-typed;
\<^item> attributes of instances of document elements are mutable;
\<^item> attributes can refer to other document classes, thus, document
classes must also be HOL-types (such attributes are called
\<^emph>\<open>links\<close>);
    \<^item> attribute values are denoted by HOL-terms;
\<^item> a special link, the reference to a super-class, establishes an
\<^emph>\<open>is-a\<close> relation between classes;
\<^item> classes may refer to other classes via a regular expression in a
\<^emph>\<open>where\<close> clause;
\<^item> attributes may have default values in order to facilitate notation.
\<close>
text\<open>
  The \isadof ontology specification language consists basically of a notation for document classes,
  where the attributes are typed with HOL-types and can be instantiated by HOL-terms, \ie,
  the actual parsers and type-checkers of the Isabelle system are reused. This has the particular
advantage that \isadof commands can be arbitrarily mixed with Isabelle/HOL commands providing the
machinery for type declarations and term specifications such
as enumerations. In particular, document class definitions provide:
\<^item> a HOL-type for each document class as well as inheritance,
\<^item> support for attributes with HOL-types and optional default values,
\<^item> support for overriding of attribute defaults but not overloading, and
\<^item> text-elements annotated with document classes; they are mutable
instances of document classes.
\<close>
text\<open>
Attributes referring to other ontological concepts are called \<^emph>\<open>links\<close>. The HOL-types inside the
document specification language support built-in types for Isabelle/HOL \inlineisar+typ+'s,
\inlineisar+term+'s, and \inlineisar+thm+'s reflecting internal Isabelle's internal types for
these entities; when denoted in HOL-terms to instantiate an attribute, for example, there is a
specific syntax (called \<^emph>\<open>inner syntax antiquotations\<close>) that is checked by
\isadof for consistency.
Document classes\bindex{document class}\index{class!document@see document class} support
\inlineisar+where+-clauses\index{where clause} containing a regular expression over class
  names. Classes with a \inlineisar+where+ are called
\<^emph>\<open>monitor classes\<close>.\bindex{monitor class}\index{class!monitor@see monitor class} While document
classes and their inheritance relation structure meta-data of text-elements in an object-oriented
manner, monitor classes enforce structural organization of documents via the language specified
by the regular expression enforcing a sequence of text-elements.
A major design decision of ODL is to denote attribute values by HOL-terms and HOL-types.
Consequently, ODL can refer to any predefined type defined in the HOL library, \eg,
\inlineisar+string+ or \inlineisar+int+ as well as parameterized types, \eg, \inlineisar+_ option+,
\inlineisar+_ list+, \inlineisar+_ set+, or products \inlineisar+_ \<times> _+. As a consequence of the
document model, ODL definitions may be arbitrarily intertwined with standard HOL type definitions.
  Finally, document class definitions themselves result in HOL-types in order to allow \<^emph>\<open>links\<close>
to and between ontological concepts.
\<close>
subsection*["odl-manual0"::technical]\<open>Some Isabelle/HOL Specification Constructs Revisited\<close>
text\<open>
As ODL is an extension of Isabelle/HOL, document class definitions can therefore be arbitrarily
mixed with standard HOL specification constructs. To make this manual self-contained, we present
syntax and semantics of the specification constructs that are most likely relevant for the
developer of ontologies (for more details, see~@{cite "wenzel:isabelle-isar:2019"}. Our
presentation is a simplification of the original sources following the needs of ontology developers
in \isadof:
\<^item> \<open>name\<close>:\index{name@\<open>name\<close>}
with the syntactic category of \<open>name\<close>'s we refer to alpha-numerical identifiers
(called \<open>short_id\<close>'s in @{cite "wenzel:isabelle-isar:2019"}) and identifiers
in \inlineisar+" ... "+ which might contain certain ``quasi-letters'' such
as \inlineisar+_+, \inlineisar+-+, \inlineisar+.+. See~@{cite "wenzel:isabelle-isar:2019"} for details.
\<^item> \<open>tyargs\<close>:\index{tyargs@\<open>tyargs\<close>}
\<^rail>\<open> typefree | ('(' (typefree * ',') ')')\<close>
    \<open>typefree\<close> denotes a fixed type variable (\<open>'a\<close>, \<open>'b\<close>, ...) (see~@{cite "wenzel:isabelle-isar:2019"})
  \<^item> \<open>dt_name\<close>:\index{dt\_name@\<open>dt_name\<close>}
\<^rail>\<open> (tyargs?) name (mixfix?)\<close>
The syntactic entity \<open>name\<close> denotes an identifier, \<open>mixfix\<close> denotes the usual
parenthesized mixfix notation (see @{cite "wenzel:isabelle-isar:2019"}).
The \<open>name\<close>'s referred here are type names such as \<^verbatim>\<open>int\<close>, \<^verbatim>\<open>string\<close>, \<^verbatim>\<open>list\<close>, \<^verbatim>\<open>set\<close>, etc.
\<^item> \<open>type_spec\<close>:\index{type_spec@\<open>type_spec\<close>}
\<^rail>\<open> (tyargs?) name\<close>
The \<open>name\<close>'s referred here are type names such as \<^verbatim>\<open>int\<close>, \<^verbatim>\<open>string\<close>, \<^verbatim>\<open>list\<close>, \<^verbatim>\<open>set\<close>, etc.
\<^item> \<open>type\<close>:\index{type@\<open>type\<close>}
\<^rail>\<open> (( '(' ( type * ',') ')' )? name) | typefree \<close>
\clearpage
\<^item> \<open>dt_ctor\<close>:\index{dt\_ctor@\<open>dt_ctor\<close>}
\<^rail>\<open> name (type*) (mixfix?)\<close>
\<^item> \<open>datatype_specification\<close>:\index{datatype\_specification@\<open>datatype_specification\<close>}
\<^rail>\<open> @@{command "datatype"} dt_name '=' (dt_ctor * '|' )\<close>
\<^item> \<open>type_synonym_specification\<close>:\index{type\_synonym\_specification@\<open>type_synonym_specification\<close>}
\<^rail>\<open> @@{command "type_synonym"} type_spec '=' type\<close>
\<^item> \<open>constant_definition\<close> :\index{constant\_definition@\<open>constant_definition\<close>}
\<^rail>\<open> @@{command "definition"} name '::' type 'where' '"' name '=' \<newline> expr '"'\<close>
\<^item> \<open>expr\<close>:\index{expr@\<open>expr\<close>}
the syntactic category \<open>expr\<close> here denotes the very rich ``inner-syntax'' language of
mathematical notations for $\lambda$-terms in Isabelle/HOL. Example expressions are:
\inlineisar|1+2| (arithmetics), \inlineisar|[1,2,3]| (lists), \inlineisar|''ab c''| (strings),
\inlineisar|{1,2,3}| (sets), \inlineisar|(1,2,3)| (tuples),
\inlineisar|\<forall> x. P(x) \<and> Q x = C| (formulas). For details, see~@{cite "nipkowMain19"}.
\<close>
text\<open>
Advanced ontologies can, \eg, use recursive function definitions with
pattern-matching~@{cite "functions19"}, extensible record
pecifications~@{cite "wenzel:isabelle-isar:2019"}, and abstract type declarations.
\<close>
subsection*["odl-manual1"::technical]\<open>Defining Document Classes\<close>
text\<open>
A document class\bindex{document class} can be defined using the @{command "doc_class"} keyword:
\<^item> \<open>class_id\<close>:\index{class\_id@\<open>class_id\<close>} a type-\<open>name\<close> that has been introduced
via a \<open>doc_class_specification\<close>.
\<^item> \<open>doc_class_specification\<close>:\index{doc\_class\_specification@\<open>doc_class_specification\<close>}
We call document classes with an \<open>accepts_clause\<close>
\<^emph>\<open>monitor classes\<close> or \<^emph>\<open>monitors\<close> for short.
\<^rail>\<open> @@{command "doc_class"} class_id '=' (class_id '+')? (attribute_decl+) \<newline>
(accepts_clause rejects_clause?)?\<close>
\<^item> \<open>attribute_decl\<close>:\index{attribute\_decl@\<open>attribute_decl\<close>}
\<^rail>\<open> name '::' '"' type '"' default_clause? \<close>
\<^item> \<open>accepts_clause\<close>:\index{accepts\_clause@\<open>accepts_clause\<close>}
\<^rail>\<open> 'accepts' '"' regexpr '"'\<close>
\<^item> \<open>rejects_clause\<close>:\index{rejects\_clause@\<open>rejects_clause\<close>}
\<^rail>\<open> 'rejects' (class_id * ',') \<close>
\<^item> \<open>default_clause\<close>:\index{default\_clause@\<open>default_clause\<close>}
\<^rail>\<open> '<=' '"' expr '"' \<close>
\<^item> \<open>regexpr\<close>:\index{regexpr@\<open>regexpr\<close>}
\<^rail>\<open> '\<lfloor>' class_id '\<rfloor>' | '(' regexpr ')' | (regexpr '||' regexpr) | (regexpr '~~' regexpr)
| ('\<lbrace>' regexpr '\<rbrace>') | ( '\<lbrace>' regexpr '\<rbrace>\<^sup>*') \<close>
Regular expressions describe sequences of \<open>class_id\<close>s (and indirect sequences of document
items corresponding to the \<open>class_id\<close>s). The constructors for alternative, sequence,
repetitions and non-empty sequence follow in the top-down order of the above diagram.
\<close>
text\<open>
\isadof provides a default document representation (\ie, content and layout of the generated
PDF) that only prints the main text, omitting all attributes. \isadof provides the
\inlineltx|\newisadof[]{}|\index{newisadof@\inlineltx{\newisadof}}\index{document class!PDF}
command for defining a dedicated layout for a document class in \LaTeX. Such a document
class-specific \LaTeX-definition can not only provide a specific layout (\eg, a specific
  highlighting, printing of certain attributes), it can also generate entries in the table of
contents or an index. Overall, the \inlineltx|\newisadof[]{}| command follows the structure
of the \inlineisar|doc_class|-command:
\begin{ltx}[escapechar=ë]
\newisadof{ë\<open>class_id\<close>ë}[label=,type=, ë\<open>attribute_decl\<close>ë][1]{%
% ë\LaTeXë-definition of the document class representation
\begin{isamarkuptext}%
#1%
\end{isamarkuptext}%
}
\end{ltx}
The \<open>class_id\<close> is the full-qualified name of the document class and the list of \<open>attribute_decl\<close>
needs to declare all attributes of the document class. Within the \LaTeX-definition of the document
class representation, the identifier \inlineltx|#1| refers to the content of the main text of the
document class (written in \inlineisar|\<Open> ... \<Close>|) and the attributes can be referenced
by their name using the \inlineltx|\commandkey{...}|-command (see the documentation of the
\LaTeX-package ``keycommand''~@{cite "chervet:keycommand:2010"} for details). Usually, the
representation's definition needs to be wrapped in a
\inlineltx|\begin{isamarkuptext}...\end{isamarkuptext}|-environment, to ensure the correct context
within Isabelle's \LaTeX-setup.
Moreover, \isadof also provides the following two variants of \inlineltx|\newisadof{}[]{}|:
\<^item> \inlineltx|\renewisadof{}[]{}|\index{renewisadof@\inlineltx{\renewisadof}} for re-defining
(over-writing) an already defined command, and
\<^item> \inlineltx|\provideisadof{}[]{}|\index{provideisadof@\inlineltx{\provideisadof}} for providing
a definition if it is not yet defined.
\<close>
text\<open>
While arbitrary \LaTeX-commands can be used within these commands,
special care is required for arguments containing special characters (\eg, the
underscore ``\_'') that do have a special meaning in \LaTeX.
Moreover, as usual, special care has to be taken for commands that write into aux-files
that are included in a following \LaTeX-run. For such complex examples, we refer the interested
reader, in general, to the style files provided in the \isadof distribution. In particular
the definitions of the concepts \inlineisar|title*| and \inlineisar|author*| in the file
\path{ontologies/scholarly_paper/DOF-scholarly_paper.sty} show examples of protecting special
characters in definitions that need to make use of entries in an aux-file.
\<close>
subsubsection\<open>Common Ontology Library (COL)\<close>
text\<open>
\isadof provides a Common Ontology Library (COL)\index{Common Ontology Library@see COL}\bindex{COL}
that introduces ontology concepts that are so generic that we expect them to be useful for
all types of documents. In particular it defines the super-class \inlineisar|text_element|: the
root of all text-elements,
\begin{isar}
doc_class text_element =
level :: "int option" <= "None"
referentiable :: bool <= "False"
variants :: "String.literal set" <= "{STR ''outline'', STR ''document''}"
\end{isar}
Here, \inlineisar|level| defines the section-level (\eg, using a \LaTeX-inspired hierarchy:
from \inlineisar|Some -1| (corresponding to \inlineltx|\part|) to
\inlineisar|Some 0| (corresponding to \inlineltx|\chapter|, respectively, \inlineisar|chapter*|)
to \inlineisar|Some 3| (corresponding to \inlineltx|\subsubsection|, respectively,
\inlineisar|subsubsection*|). Using an invariant, a derived ontology could, \eg, require that
any sequence of technical-elements must be introduced by a text-element with a higher level
(this would require that technical text sections are introduced by a section element).
\<close>
subsubsection\<open>Example\<close>
text\<open>
The category ``exported constraint (EC)'' is, in the file
\path{ontologies/CENELEC_50128/CENELEC_50128.thy} defined as follows:
\begin{isar}
doc_class requirement = text_element +
long_name :: "string option"
is_concerned :: "role set"
doc_class AC = requirement +
is_concerned :: "role set" <= "UNIV"
doc_class EC = AC +
assumption_kind :: ass_kind <= (*default *) formal
\end{isar}
We now define the document representations, in the file
\path{ontologies/CENELEC_50128/DOF-CENELEC_50128.sty}. Let us assume that we want to
register the definition of ECs in a dedicated table of contents (\inlineltx{tos})
and use an earlier defined environment \inlineltx|\begin{EC}...\end{EC}| for their graphical
representation. Note that the \inlineltx|\newisadof{}[]{}|-command requires the
full-qualified names, \eg, \inlineisar|t$$ext.CENELEC_50128.EC| for the document class and
\inlineisar|CENELEC_50128.requirement.long_name| for the attribute \inlineisar|long_name|,
inherited from the document class \inlineisar|requirement|. The document representation of ECs
can now be defined as follows:
\begin{ltx}
\newisadof{text.CENELEC_50128.EC}%
[label=,type=%
,Isa_COL.text_element.level=%
,Isa_COL.text_element.referentiable=%
,Isa_COL.text_element.variants=%
,CENELEC_50128.requirement.is_concerned=%
,CENELEC_50128.requirement.long_name=%
,CENELEC_50128.EC.assumption_kind=][1]{%
\begin{isamarkuptext}%
\ifthenelse{\equal{\commandkey{CENELEC_50128.requirement.long_name}}{}}{%
% If long_name is not defined, we only create an entry in the table tos
% using the auto-generated number of the EC
\begin{EC}%
\addxcontentsline{tos}{chapter}[]{\autoref{\commandkey{label}}}%
}{%
% If long_name is defined, we use the long_name as title in the
% layout of the EC, in the table "tos" and as index entry. .
\begin{EC}[\commandkey{CENELEC_50128.requirement.long_name}]%
\addxcontentsline{toe}{chapter}[]{\autoref{\commandkey{label}}: %
\commandkey{CENELEC_50128.requirement.long_name}}%
\DOFindex{EC}{\commandkey{CENELEC_50128.requirement.long_name}}%
}%
\label{\commandkey{label}}% we use the label attribute as anchor
#1% The main text of the EC
\end{EC}
\end{isamarkuptext}%
}
\end{ltx}
\<close>
subsection*["text-elements"::technical]\<open>Annotatable Top-level Text-Elements\<close>
text\<open>
While the default user interface for class definitions via the
\inlineisar|text*\<Open> ... \<Close>|-command allow to access all features of the document
class, \isadof provides short-hands for certain, widely-used, concepts such as
\inlineisar|title*\<Open> ... \<Close>| or \inlineisar|section*\<Open> ... \<Close>|, \eg:
\begin{isar}
title*[title::title]\<Open>Isabelle/DOF\<Close>
subtitle*[subtitle::subtitle]\<Open>User and Implementation Manual\<Close>
text*[adb:: author, email="\<Open>a.brucker@exeter.ac.uk\<Close>",
orcid="\<Open>0000-0002-6355-1200\<Close>",
http_site="\<Open>https://brucker.ch/\<Close>",
affiliation="\<Open>University of Exeter, Exeter, UK\<Close>"] \<Open>Achim D. Brucker\<Close>
text*[bu::author, email = "\<Open>wolff@lri.fr\<Close>",
affiliation = "\<Open>Université Paris-Saclay, LRI, Paris, France\<Close>"]\<Open>Burkhart Wolff\<Close>
\end{isar}
In general, all standard text-elements from the Isabelle document model such
as @{command "chapter"}, @{command "section"}, @{command "text"}, have in the \isadof
implementation their counterparts in the family of text-elements that are ontology-aware,
\ie, they dispose on a meta-argument list that allows to define that a text-element
that has an identity as a text-object labelled as \<open>obj_id\<close>, belongs to a document class
\<open>class_id\<close> that has been defined earlier, and has its class-attributes set with particular
values (which are denotable in Isabelle/HOL mathematical term syntax).
\<^item> \<open>annotated_text_element\<close> :
\<^rail>\<open>
( ( @@{command "title*"}
| @@{command "subtitle*"}
| @@{command "chapter*"}
| @@{command "section*"} | @@{command "subsection*"}
| @@{command "subsubsection*"} | @@{command "paragraph*"} | @@{command "subparagraph*"}
| @@{command "text*"} | @@{command "figure*"} | @@{command "side_by_side_figure*"}
| @@{command "open_monitor*"} | @@{command "close_monitor*"}
| @@{command "Definition*"} | @@{command "Lemma*"}
| @@{command "Theorem*"} | @@{command "Conjecture*"}
)
\<newline>
'[' meta_args ']' '\<open>' text '\<close>'
)
| change_status_command
| inspection_command
\<close>
\clearpage
\<^item> \<open>meta_args\<close> :
\<^rail>\<open>(obj_id ('::' class_id) ((attribute '=' term)) * ',')\<close>
\<^item> \<open>rich_meta_args\<close> :
\<^rail>\<open> (obj_id ('::' class_id) ((attribute (('=' | '+=') term)) * ','))\<close>
\<close>
subsubsection\<open>Experts: Defining New Top-Level Commands\<close>
text\<open>
Defining such new top-level commands requires some Isabelle knowledge as well as
extending the dispatcher of the \LaTeX-backend. For the details of defining top-level
commands, we refer the reader to the Isar manual~@{cite "wenzel:isabelle-isar:2019"}.
Here, we only give a brief example how the \inlineisar|section*|-command is defined; we
refer the reader to the source code of \isadof for details.
First, new top-level keywords need to be declared in the \inlineisar|keywords|-section of
the theory header defining new keywords:
\begin{isar}
theory
...
imports
...
keywords
"section*"
begin
...
end
\end{isar}
Second, given an implementation of the functionality of the new keyword (implemented in SML),
the new keyword needs to be registered, together with its parser, as outer syntax:
\begin{sml}
val _ =
Outer_Syntax.command ("section*", <@>{here}) "section heading"
(attributes -- Parse.opt_target -- Parse.document_source --| semi
>> (Toplevel.theory o (enriched_document_command (SOME(SOME 1))
{markdown = false} )));
\end{sml}
\<close>
text\<open>
Finally, for the document generation, a new dispatcher has to be defined in \LaTeX---this is
mandatory, otherwise the document generation will break. These dispatchers always follow the same
schema:
\begin{ltx}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% begin: section*-dispatcher
\NewEnviron{isamarkupsection*}[1][]{\isaDof[env={section},#1]{\BODY}}
% end: section*-dispatcher
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\end{ltx}
After the definition of the dispatcher, one can, optionally, define a custom representation
using the \inlineltx|newisadof|-command, as introduced in the previous section:
\begin{ltx}
\newisadof{section}[label=,type=][1]{%
\isamarkupfalse%
\isamarkupsection{#1}\label{\commandkey{label}}%
\isamarkuptrue%
}
\end{ltx}
\<close>
subsection*["inspections-commands"::technical]\<open>Status and Inspection Commands\<close>
text\<open>
\<^item> \isadof\<open>change_status_command\<close> :
\<^rail>\<open> (@@{command "update_instance*"} '[' rich_meta_args ']')
| (@@{command "declare_reference*"} (obj_id ('::' class_id)))\<close>
\<^item> \isadof\<open>inspection_command\<close> :
\<^rail>\<open> @@{command "print_doc_classes"}
| @@{command "print_doc_items"}
| @@{command "check_doc_global"}\<close>
\<close>
subsection*["sec:advanced"::technical]\<open>Advanced ODL Concepts\<close>
subsubsection\<open>Meta-types as Types\<close>
text\<open>
To express the dependencies between text elements to the formal
entities, \eg, \inlinesml+term+ ($\lambda$-term), \inlinesml+typ+, or
\inlinesml+thm+, we represent the types of the implementation language
\<^emph>\<open>inside\<close> the HOL type system. We do, however, not reflect the data of
these types. They are just declared abstract types,
``inhabited'' by special constant symbols carrying strings, for
example of the format \inlineisar+<@>{thm <string>}+. When HOL
expressions are used to denote values of \inlineisar+doc_class+
instance attributes, this requires additional checks after
conventional type-checking that this string represents actually a
defined entity in the context of the system state
\inlineisar+\<theta>+. For example, the \inlineisar+establish+
attribute in the previous section is the power of the ODL: here, we model
a relation between \<^emph>\<open>claims\<close> and \<^emph>\<open>results\<close> which may be a
formal, machine-check theorem of type \inlinesml+thm+ denoted by, for
example: \inlineisar+property = "[<@>{thm ''system_is_safe''}]"+ in a
system context \inlineisar+\<theta>+ where this theorem is
established. Similarly, attribute values like
\inlineisar+property = "<@>{term \<Open>A \<leftrightarrow> B\<Close>}"+
require that the HOL-string \inlineisar+A \<leftrightarrow> B+ is
again type-checked and represents indeed a formula in $\theta$. Another instance of
this process, which we call \<open>second-level type-checking\<close>, are term-constants
generated from the ontology such as
\inlineisar+<@>{definition <string>}+.
\<close>
subsubsection*["sec:monitors"::technical]\<open>ODL Monitors\<close>
text\<open>
We call a document class with an accept-clause a \<^emph>\<open>monitor\<close>.\bindex{monitor} Syntactically, an
accept-clause\index{accept-clause} contains a regular expression over class identifiers.
For example:
\begin{isar}
doc_class article = style_id :: string <= "''CENELEC_50128''"
accepts "(title ~~ \<lbrace>author\<rbrace>\<bsup>+\<esup> ~~ abstract ~~ \<lbrace>introduction\<rbrace>\<bsup>+\<esup> ~~
\<lbrace>technical || example\<rbrace>\<bsup>+\<esup> ~~ \<lbrace>conclusion\<rbrace>\<bsup>+\<esup>)"
\end{isar}
Semantically, monitors introduce a behavioral element into ODL:
\begin{isar}
open_monitor*[this::article] (* begin of scope of monitor "this" *)
...
close_monitor*[this] (* end of scope of monitor "this" *)
\end{isar}
Inside the scope of a monitor, all instances of classes mentioned in its accept-clause (the
\<^emph>\<open>accept-set\<close>) have to appear in the order specified by the regular expression; instances not
covered by an accept-set may freely occur. Monitors may additionally contain a reject-clause
with a list of class-ids (the reject-list). This allows specifying ranges of
admissible instances along the class hierarchy:
\<^item> a superclass in the reject-list and a subclass in the
accept-expression forbids instances superior to the subclass, and
\<^item> a subclass $S$ in the reject-list and a superclass $T$ in the
accept-list allows instances of superclasses of $T$ to occur freely,
instances of $T$ to occur in the specified order and forbids
instances of $S$.
\<close>
text\<open>
Monitored document sections can be nested and overlap; thus, it is
possible to combine the effect of different monitors. For example, it
would be possible to refine the \inlineisar+example+ section by its own
monitor and enforce a particular structure in the presentation of
examples.
Monitors manage an implicit attribute \inlineisar+trace+ containing
the list of ``observed'' text element instances belonging to the
accept-set. Together with the concept of ODL class invariants, it is
possible to specify properties of a sequence of instances occurring in
the document section. For example, it is possible to express that in
the sub-list of \inlineisar+introduction+-elements, the first has an
\inlineisar+introduction+ element with a \inlineisar+level+ strictly
smaller than the others. Thus, an introduction is forced to have a
header delimiting the borders of its representation. Class invariants
on monitors allow for specifying structural properties on document
sections.\<close>
subsubsection*["sec:class_inv"::technical]\<open>ODL Class Invariants\<close>
text\<open>
Ontological classes as described so far are too liberal in many situations. For example, one
would like to express that any instance of a \inlineisar+result+ class finally has a non-empty
property list, if its \inlineisar+kind+ is \inlineisar+p$$roof+, or that the \inlineisar+establish+
relation between \inlineisar+claim+ and \inlineisar+result+ is surjective.
In a high-level syntax, this type of constraints could be expressed, \eg, by:
\begin{isar}
(* 1 *) \<forall> x \<in> result. x@kind = pr$$oof \<leftrightarrow> x@property \<noteq> []
(* 2 *) \<forall> x \<in> conclusion. \<forall> y \<in> Domain(x@establish)
         \<rightarrow> \<exists> z \<in> Range(x@establish). (y,z) \<in> x@establish
(* 3 *) \<forall> x \<in> introduction. finite(x@authored_by)
\end{isar}
where \inlineisar+result+, \inlineisar+conclusion+, and \inlineisar+introduction+ are the set of
all possible instances of these document classes. All specified constraints are already checked
in the IDE of \dof while editing; it is however possible to delay a final error message till the
closing of a monitor (see next section). The third constraint enforces that the user sets the
\inlineisar+authored_by+ set, otherwise an error will be reported.
For the moment, there is no high-level syntax for the definition of class invariants. A
formulation, in SML, of the first class-invariant in \autoref{sec:class_inv} is straight-forward:
\begin{sml}
fun check_result_inv oid {is_monitor:bool} ctxt =
let val kind = compute_attr_access ctxt "kind" oid <@>{here} <@>{here}
val prop = compute_attr_access ctxt "property" oid <@>{here} <@>{here}
val tS = HOLogic.dest_list prop
  in case kind of
<@>{term "proof"} => if not(null tS) then true
else error("class result invariant violation")
| _ => false
end
val _ = Theory.setup (DOF_core.update_class_invariant
"tiny_cert.result" check_result_inv)
\end{sml}
The \inlinesml{setup}-command (last line) registers the \inlineisar+check_result_inv+ function
into the \isadof kernel, which activates any creation or modification of an instance of
\inlineisar+result+. We cannot replace \inlineisar+compute_attr_access+ by the corresponding
antiquotation \inlineisar+<@>{docitem_value kind::oid}+, since \inlineisar+oid+ is bound to a
variable here and can therefore not be statically expanded.
\<close>
section*["document-templates"::technical]\<open>Defining Document Templates\<close>
subsection\<open>The Core Template\<close>
text\<open>
Document-templates\bindex{document template} define the overall layout (page size, margins, fonts,
etc.) of the generated documents and are the the main technical means for implementing layout
requirements that are, \eg, required by publishers or standardization bodies.
The common structure of an \isadof document template looks as follows:
\begin{ltx}[escapechar=ë, numbers=left,numberstyle=\tiny,xleftmargin=5mm]
\documentclass{article} % The LaTeX-class of your template ë\label{lst:dc}ë
%% The following part is (mostly) required by Isabelle/DOF, do not modify
\usepackage[T1]{fontenc} % Font encoding
\usepackage[utf8]{inputenc} % UTF8 support
\usepackage{isabelle} % Required (by Isabelle)
\usepackage{xcolor}
\usepackage{isabellesym} % Required (by Isabelle)
\usepackage{amsmath} % Used by some ontologies
\usepackage{amssymb} % Strongly recommended (by Isabelle)
\bibliographystyle{abbrv}
\IfFileExists{DOF-core.sty}{}{ % Required by Isabelle/DOF
\PackageError{DOF-core}{Isabelle/DOF not installed.
This is a Isabelle_DOF project. The doëëcument preparation requires
the Isabelle_DOF framework. }{%
For further help, see
ë\url{\dofurl}ë
}
\input{ontologies} % This will include the document specific
% ontologies from isadof.cfg
\IfFileExists{preamble.tex} % Include preamble.tex, if it exists.
{\input{preamble.tex}}{}
\usepackage{graphicx} % Required for images.
\usepackage[caption]{subfig}
\usepackage[size=footnotesize]{caption}
\usepackage{hyperref} % Required by Isabelle/DOF
%% Begin of template specific configuration ë\label{lst:config-start}ë
\urlstyle{rm}
\isabellestyle{it} ë\label{lst:config-end}ë
%% Main document, do not modify
\begin{document}
\maketitle
\input{session}
\IfFileExists{root.bib}{\bibliography{root}}{}
\end{document}
\end{ltx}
If a new layout is already supported by a \LaTeX-class, then developing basic support for it
is straight forward: after reading the author guidelines of the new template, it is, in most
cases, sufficient to replace the document class in \autoref{lst:dc} of the template and add the
\LaTeX-packages that are (strictly) required by the used \LaTeX-setup. In general, we recommend
to only add \LaTeX-packages that are always necessary for this particular template, as loading
packages in the templates minimizes the freedom users have by adapting the \path{preamble.tex}.
Moreover, you might want to add/modify the template specific configuration
(\autoref{lst:config-start}-\ref{lst:config-end}). The new template should be stored in
\path{src/document-templates} and its file name should start with the prefix \path{root-}. After
adding a new template, call the \inlinebash{install} script (see @{docref "infrastructure"}).
\<close>
subsection\<open>Tips, Tricks, and Known Limitations\<close>
text\<open>
For readers with basic knowledge of \LaTeX{}, adapting existing templates (and ontologies) to
support new layouts should be rather straight forward. Still, there are several things to
consider that we discuss in this section.
\<close>
subsubsection\<open>Getting Started\<close>
text\<open>
In general, we recommend to create a test project (\eg, using \inlinebash|isabelle mkroot_DOF|)
to develop new document templates or ontology representations. The default setup of the \isadof
build system generated a \path{output/document} directory with a self-contained \LaTeX-setup. In
this directory, you can directly use \LaTeX on the main file, called \path{root.tex}:
\begin{bash}
ë\prompt{MyProject/output/document}ë pdflatex root.tex
\end{bash}
This allows you to develop and check your \LaTeX-setup without the overhead of running
\inlinebash|isabelle build| after each change of your template (or ontology-style). Note that
the content of the \path{output} directory is overwritten by executing
\inlinebash|isabelle build|.
\<close>
subsubsection\<open>Truncated Warning and Error Messages\<close>
text\<open>
By default, \LaTeX{} cuts off many warning or error messages after 79 characters. Due to the
use of full-qualified names in \isadof, this can often result in important information being
cut off. Thus, it can be very helpful to configure \LaTeX{} in such a way that it prints
long error or warning messages. This can easily be done on the command line for individual
\LaTeX{} invocations:
\begin{bash}
ë\prompt{MyProject/output/document}ë max_print_line=200 error_line=200 half_error_line=100 pdflatex root.tex
\end{bash}
\<close>
subsubsection\<open>Deferred Declaration of Information\<close>
text\<open>
During document generation, sometimes, information needs to be printed prior to its
declaration in a \isadof theory. This violation of the declaration-before-use principle
requires that information is written into an auxiliary file during the first run of \LaTeX{}
so that the information is available at further runs of \LaTeX{}. While, on the one hand,
this is a standard process (\eg, used for updating references), implementing it correctly
requires a solid understanding of \LaTeX's expansion mechanism. In this context, the recently
introduced \inlineltx|\expanded{}|-primitive
(see \url{https://www.texdev.net/2018/12/06/a-new-primitive-expanded}) is particularly useful.
Examples of its use can be found, \eg, in the ontology-styles
\path{ontologies/scholarly_paper/DOF-scholarly_paper.sty} or
\path{ontologies/CENELEC_50128/DOF-CENELEC_50128.sty}. For details about the expansion mechanism
in general, we refer the reader to the \LaTeX{} literature (\eg,~@{cite "knuth:texbook:1986"
and "mittelbach.ea:latex:1999" and "eijkhout:latex-cs:2012"}).
\<close>
subsubsection\<open>Authors and Affiliation Information\<close>
text\<open>
In the context of academic papers, defining the representations for the author and
affiliation information is particularly challenging as, firstly, they inherently break
the declare-before-use-principle and, secondly, each publisher uses a different \LaTeX-setup
for their declaration. Moreover, the mapping from the ontological modeling to the document
representation might also need to bridge the gap between different common modeling styles of
authors and their affiliations, namely: affiliations as attributes of authors vs. authors and
affiliations both as entities with a many-to-many relationship.
The ontology representation
\path{ontologies/scholarly_paper/DOF-scholarly_paper.sty} contains an example that, firstly,
shows how to write the author and affiliation information into the auxiliary file for re-use
in the next \LaTeX-run and, secondly, shows how to collect the author and affiliation
information into an \inlineltx|\author| and a \inlineltx|\institution| statement, each of
which containing the information for all authors. The collection of the author information
is provided by the following \LaTeX-code:
\begin{ltx}
\def\dof@author{}%
\newcommand{\DOFauthor}{\author{\dof@author}}
\AtBeginDocument{\DOFauthor}
\def\leftadd#1#2{\expandafter\leftaddaux\expandafter{#1}{#2}{#1}}
\def\leftaddaux#1#2#3{\gdef#3{#1#2}}
\newcounter{dof@cnt@author}
\newcommand{\addauthor}[1]{%
\ifthenelse{\equal{\dof@author}{}}{%
\gdef\dof@author{#1}%
}{%
\leftadd\dof@author{\protect\and #1}%
}
}
\end{ltx}
The new command \inlineltx|\addauthor| and a similarly defined command \inlineltx|\addaffiliation|
can now be used in the definition of the representation of the concept
\inlineisar|text.scholarly_paper.author|, which writes the collected information in the
job's aux-file. The intermediate step of writing this information into the job's aux-file is necessary,
as the author and affiliation information is required right at the begin of the document
(\ie, when \LaTeX's \inlineltx|\maketitle| is invoked) while \isadof allows to define authors at
any place within a document:
\begin{ltx}
\provideisadof{text.scholarly_paper.author}%
[label=,type=%
,scholarly_paper.author.email=%
,scholarly_paper.author.affiliation=%
,scholarly_paper.author.orcid=%
,scholarly_paper.author.http_site=%
][1]{%
\stepcounter{dof@cnt@author}
\def\dof@a{\commandkey{scholarly_paper.author.affiliation}}
\ifthenelse{\equal{\commandkey{scholarly_paper.author.orcid}}{}}{%
\immediate\write\@auxout%
{\noexpand\addauthor{#1\noexpand\inst{\thedof@cnt@author}}}%
}{%
\immediate\write\@auxout%
{\noexpand\addauthor{#1\noexpand%
\inst{\thedof@cnt@author}%
\orcidID{\commandkey{scholarly_paper.author.orcid}}}}%
}
\protected@write\@auxout{}{%
\string\addaffiliation{\dof@a\\\string\email{%
\commandkey{scholarly_paper.author.email}}}}%
}
\end{ltx}
Finally, the collected information is used in the \inlineltx|\author| command using the
\inlineltx|AtBeginDocument|-hook:
\begin{ltx}
\newcommand{\DOFauthor}{\author{\dof@author}}
\AtBeginDocument{%
\DOFauthor
}
\end{ltx}
\<close>
subsubsection\<open>Restricting the Use of Ontologies to Specific Templates\<close>
text\<open>
As ontology representations might rely on features only provided by certain templates
(\LaTeX-classes), authors of ontology representations might restrict their use to
specific classes. This can, \eg, be done using the \inlineltx|\@ifclassloaded{}| command:
\begin{ltx}
\@ifclassloaded{llncs}{}%
{% LLNCS class not loaded
\PackageError{DOF-scholarly_paper}
{Scholarly Paper only supports LNCS as document class.}{}\stop%
}
\end{ltx}
For a real-world example testing for multiple classes, see
\path{ontologies/scholarly_paper/DOF-scholarly_paper.sty}):
We encourage this clear and machine-checkable enforcement of restrictions while, at the same
time, we also encourage to provide a package option to overwrite them. The latter allows
inherited ontologies to overwrite these restrictions and, therefore, to provide also support
for additional document templates. For example, the ontology \inlineisar|technical_report|
extends the \inlineisar|scholarly_paper| ontology and its \LaTeX support provides support
for the \inlineltx|scrreprt|-class which is not supported by the \LaTeX support for
\inlineisar|scholarly_paper|.
\<close>
subsubsection\<open>Outdated Version of \path{comment.sty}\<close>
text\<open>
Isabelle's \LaTeX-setup relies on an ancient version of \path{comment.sty} that, moreover,
is used in plain\TeX-mode. This is known to cause issues with some modern \LaTeX-classes
such as LIPIcs. Such a conflict might require the help of an Isabelle wizard.
\<close>
(*<*)
end
(*>*)

View File

@ -0,0 +1,288 @@
(*<*)
theory "05_Implementation"
imports "04_RefMan"
begin
(*>*)
chapter*[isadof_developers::text_section]\<open>Extending \isadof\<close>
text\<open>
In this chapter, we describe the basic implementation aspects of \isadof, which is based on
the following design-decisions:
\<^item> the entire \isadof is a ``pure add-on,'' \ie, we deliberately resign on the possibility to
modify Isabelle itself.
\<^item> we made a small exception to this rule: the \isadof package modifies in its installation
about 10 lines in the \LaTeX-generator (\path{src/patches/thy_output.ML}).
\<^item> we decided to make the markup-generation by itself to adapt it as well as possible to the
needs of tracking the linking in documents.
\<^item> \isadof is deeply integrated into the Isabelle's IDE (PIDE) to give immediate feedback during
editing and other forms of document evolution.
\<close>
text\<open>
Semantic macros, as required by our document model, are called \<^emph>\<open>document antiquotations\<close>
in the Isabelle literature~@{cite "wenzel:isabelle-isar:2019"}. While Isabelle's code-antiquotations
are an old concept going back to Lisp and having found via SML and OCaml their ways into modern
proof systems, special annotation syntax inside documentation comments have their roots in
documentation generators such as Javadoc. Their use, however, as a mechanism to embed
machine-checked \<^emph>\<open>formal content\<close> is usually very limited and also lacks
IDE support.
\<close>
section\<open>\isadof: A User-Defined Plugin in Isabelle/Isar\<close>
text\<open>
A plugin in Isabelle starts with defining the local data and registering it in the framework. As
mentioned before, contexts are structures with independent cells/compartments having three
primitives \inlinesml+init+, \inlinesml+extend+ and \inlinesml+merge+. Technically this is done by
instantiating a functor \inlinesml+Generic_Data+, and the following fairly typical code-fragment
is drawn from \isadof:
\begin{sml}
structure Data = Generic_Data
( type T = docobj_tab * docclass_tab * ...
val empty = (initial_docobj_tab, initial_docclass_tab, ...)
val extend = I
fun merge((d1,c1,...),(d2,c2,...)) = (merge_docobj_tab (d1,d2,...),
merge_docclass_tab(c1,c2,...))
);
\end{sml}
where the table \inlinesml+docobj_tab+ manages document objects and \inlinesml+docclass_tab+ the
environment for class definitions (inducing the inheritance relation). Other tables capture, \eg,
the class invariants, inner-syntax antiquotations. Operations follow the MVC-pattern, where
Isabelle/Isar provides the controller part. A typical model operation has the type:
\begin{sml}
val opn :: <args_type> -> Context.generic -> Context.generic
\end{sml}
representing a transformation on system contexts. For example, the operation of declaring a local
reference in the context is presented as follows:
\begin{sml}
fun declare_object_local oid ctxt =
let fun decl {tab,maxano} = {tab=Symtab.update_new(oid,NONE) tab,
maxano=maxano}
in (Data.map(apfst decl)(ctxt)
handle Symtab.DUP _ =>
error("multiple declaration of document reference"))
end
\end{sml}
where \inlineisar+Data.map+ is the update function resulting from the instantiation of the
functor \inlinesml|Generic_Data|. This code fragment uses operations from a library structure
\inlinesml+Symtab+ that were used to update the appropriate table for document objects in
the plugin-local state. Possible exceptions to the update operation were mapped to a system-global
error reporting function.
Finally, the view-aspects were handled by an API for parsing-combinators. The library structure
\inlinesml+Scan+ provides the operators:
\begin{sml}
op || : ('a -> 'b) * ('a -> 'b) -> 'a -> 'b
op -- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e
op >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c
op option : ('a -> 'b * 'a) -> 'a -> 'b option * 'a
op repeat : ('a -> 'b * 'a) -> 'a -> 'b list * 'a
\end{sml}
for alternative, sequence, and piping, as well as combinators for option and repeat. Parsing
combinators have the advantage that they can be seamlessly integrated into standard programs,
and they enable the dynamic extension of the grammar. There is a more high-level structure
\inlinesml+Parse+ providing specific combinators for the command-language Isar:
\begin{sml}[mathescape=false]
val attribute = Parse.position Parse.name
-- Scan.optional(Parse.$$$ "=" |-- Parse.!!! Parse.name)"";
val reference = Parse.position Parse.name
-- Scan.option (Parse.$$$ "::" |-- Parse.!!!
(Parse.position Parse.name));
val attributes =(Parse.$$$ "[" |-- (reference
-- (Scan.optional(Parse.$$$ ","
|--(Parse.enum ","attribute)))[]))--| Parse.$$$ "]"
\end{sml}
The ``model'' \inlineisar+declare_reference_opn+ and ``view'' \inlineisar+attributes+ parts were
combined via the piping operator and registered in the Isar toplevel:
\begin{sml}
fun declare_reference_opn (((oid,_),_),_) =
(Toplevel.theory (DOF_core.declare_object_global oid))
val _ = Outer_Syntax.command <@>{command_keyword "declare_reference"}
"declare document reference"
(attributes >> declare_reference_opn);
\end{sml}
Altogether, this gives the extension of Isabelle/HOL with Isar syntax and semantics for the
new \emph{command}:
\begin{isar}
declare_reference [lal::requirement, alpha="main", beta=42]
\end{isar}
The construction also generates implicitly some markup information; for example, when hovering
over the \inlineisar|declare_reference| command in the IDE, a popup window with the text:
``declare document reference'' will appear.
\<close>
section\<open>Programming Antiquotations\<close>
text\<open>
The definition and registration of text antiquotations and ML-antiquotations is similar in
principle: based on a number of combinators, new user-defined antiquotation syntax and semantics
can be added to the system that works on the internal plugin-data freely. For example, in
\begin{sml}
val _ = Theory.setup(
Thy_Output.antiquotation <@>{binding docitem}
docitem_antiq_parser
(docitem_antiq_gen default_cid) #>
ML_Antiquotation.inline <@>{binding docitem_value}
ML_antiq_docitem_value)
\end{sml}
the text antiquotation \inlineisar+docitem+ is declared and bounded to a parser for the argument
syntax and the overall semantics. This code defines a generic antiquotation to be used in text
elements such as
\begin{isar}
text\<Open>as defined in <@>{docitem \<Open>d1\<Close>} ...\<Close>
\end{isar}
The subsequent registration \inlineisar+docitem_value+ binds code to a ML-antiquotation usable
in an ML context for user-defined extensions; it permits the access to the current ``value''
of document element, \ie; a term with the entire update history.
It is possible to generate antiquotations \emph{dynamically}, as a consequence of a class
definition in ODL. The processing of the ODL class \inlineisar+d$$efinition+ also \emph{generates}
a text antiquotation \inlineisar+<@>{definition \<Open>d1\<Close>}+, which works similar to
\inlineisar+<@>{docitem \<Open>d1\<Close>}+ except for an additional type-check that assures that
\inlineisar+d1+ is a reference to a definition. These type-checks support the subclass hierarchy.
\<close>
section\<open>Implementing Second-level Type-Checking\<close>
text\<open>
On expressions for attribute values, for which we chose to use HOL syntax to avoid that users
need to learn another syntax, we implemented an own pass over type-checked terms. Stored in the
late-binding table \inlineisar+ISA_transformer_tab+, we register for each inner-syntax-annotation
(ISA's), a function of type
\begin{sml}
theory -> term * typ * Position.T -> term option
\end{sml}
Executed in a second pass of term parsing, ISA's may just return \inlineisar+None+. This is
adequate for ISA's just performing some checking in the logical context \inlineisar+theory+;
ISA's of this kind report errors by exceptions. In contrast, \<^emph>\<open>transforming\<close> ISA's will
yield a term; this is adequate, for example, for replacing a string-reference by the term it
denotes. This late-binding table is also used to generate standard inner-syntax-antiquotations from
a \inlineisar+doc_class+.
\<close>
section\<open>Programming Class Invariants\<close>
text\<open>
For the moment, there is no high-level syntax for the definition of class invariants. A
formulation, in SML, of the first class-invariant in @{docref "sec:class_inv"} is straight-forward:
\begin{sml}
fun check_result_inv oid {is_monitor:bool} ctxt =
let val kind = compute_attr_access ctxt "kind" oid <@>{here} <@>{here}
val prop = compute_attr_access ctxt "property" oid <@>{here} <@>{here}
val tS = HOLogic.dest_list prop
in case kind of
<@>{term "proof"} => if not(null tS) then true
else error("class result invariant violation")
| _ => false
end
val _ = Theory.setup (DOF_core.update_class_invariant
"tiny_cert.result" check_result_inv)
\end{sml}
The \inlinesml{setup}-command (last line) registers the \inlineisar+check_result_inv+ function
into the \isadof kernel; it is invoked on any creation or modification of an instance of
\inlineisar+result+. We cannot replace \inlineisar+compute_attr_access+ by the corresponding
antiquotation \inlineisar+<@>{docitem_value kind::oid}+, since \inlineisar+oid+ is
bound to a variable here and can therefore not be statically expanded.
\<close>
section\<open>Implementing Monitors\<close>
text\<open>
Since monitor-clauses have a regular expression syntax, it is natural to implement them as
deterministic automata. These are stored in the \inlineisar+docobj_tab+ for monitor-objects
in the \isadof component. We implemented the functions:
\begin{sml}
val enabled : automaton -> env -> cid list
val next : automaton -> env -> cid -> automaton
\end{sml}
where \inlineisar+env+ is basically a map between internal automaton states and class-id's
(\inlineisar+cid+'s). An automaton is said to be \<^emph>\<open>enabled\<close> for a class-id,
iff it either occurs in its accept-set or its reject-set (see @{docref "sec:monitors"}). During
top-down document validation, whenever a text-element is encountered, it is checked if a monitor
is \emph{enabled} for this class; in this case, the \inlineisar+next+-operation is executed. The
transformed automaton recognizing the rest-language is stored in \inlineisar+docobj_tab+ if
possible; otherwise, if \inlineisar+next+ fails, an error is reported. The automata implementation
is, in large parts, generated from a formalization of functional automata~\cite{Functional-Automata-AFP}.
\<close>
section\<open>The \LaTeX-Core of \isadof\<close>
text\<open>
The \LaTeX-implementation of \isadof heavily relies on the
``keycommand''~@{cite "chervet:keycommand:2010"} package. In fact, the core \isadof \LaTeX-commands
are just wrappers for the corresponding commands from the keycommand package:
\begin{ltx}
\newcommand\newisadof[1]{%
\expandafter\newkeycommand\csname isaDof.#1\endcsname}%
\newcommand\renewisadof[1]{%
\expandafter\renewkeycommand\csname isaDof.#1\endcsname}%
\newcommand\provideisadof[1]{%
\expandafter\providekeycommand\csname isaDof.#1\endcsname}%
\end{ltx}
The \LaTeX-generator of \isadof maps each \inlineisar{doc_item} to an \LaTeX-environment (recall
@{docref "text-elements"}). As generic \inlineisar{doc_item} are derived from the text element,
the environment \inlineltx|{isamarkuptext*}| builds the core of \isadof's \LaTeX{} implementation.
For example, the @{docref "ass123"} from page \pageref{ass123} is mapped to
\begin{ltx}
\begin{isamarkuptext*}%
[label = {ass122},type = {CENELEC_50128.SRAC},
args={label = {ass122}, type = {CENELEC_50128.SRAC},
CENELEC_50128.EC.assumption_kind = {formal}}
] The overall sampling frequency of the odometer subsystem is therefore
14 khz, which includes sampling, computing and result communication
times ...
\end{isamarkuptext*}
\end{ltx}
This environment is mapped to a plain \LaTeX command via (again, recall @{docref "text-elements"}):
\begin{ltx}
\NewEnviron{isamarkuptext*}[1][]{\isaDof[env={text},#1]{\BODY}}
\end{ltx}
For the command-based setup, \isadof provides a dispatcher that selects the most specific
implementation for a given \inlineisar|doc_class|:
\begin{ltx}
%% The Isabelle/DOF dispatcher:
\newkeycommand+[\|]\isaDof[env={UNKNOWN},label=,type={dummyT},args={}][1]{%
\ifcsname isaDof.\commandkey{type}\endcsname%
\csname isaDof.\commandkey{type}\endcsname%
[label=\commandkey{label},\commandkey{args}]{#1}%
\else\relax\fi%
\ifcsname isaDof.\commandkey{env}.\commandkey{type}\endcsname%
\csname isaDof.\commandkey{env}.\commandkey{type}\endcsname%
[label=\commandkey{label},\commandkey{args}]{#1}%
\else%
\message{Isabelle/DOF: Using default LaTeX representation for concept %
"\commandkey{env}.\commandkey{type}".}%
\ifcsname isaDof.\commandkey{env}\endcsname%
\csname isaDof.\commandkey{env}\endcsname%
[label=\commandkey{label}]{#1}%
\else%
\errmessage{Isabelle/DOF: No LaTeX representation for concept %
"\commandkey{env}.\commandkey{type}" defined and no default %
definition for "\commandkey{env}" available either.}%
\fi%
\fi%
}
\end{ltx}
\<close>
(*<*)
end
(*>*)

View File

@ -1,18 +1,11 @@
(*<*)
theory IsaDofManual
imports "06_Conclusion"
theory "Isabelle_DOF-Manual"
imports "05_Implementation"
begin
(*<*)
text*[bib::bibliography]\<open>References\<close>
close_monitor*[this]
check_doc_global
text\<open>Resulting trace in doc\_item ''this'': \<close>
ML\<open>@{trace_attribute this}\<close>
(*<*)
end
(*>*)

View File

@ -1,23 +1,33 @@
session "IsaDof_Manual" = "Isabelle_DOF" +
session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", quick_and_dirty = true]
theories
IsaDofManual
"Isabelle_DOF-Manual"
document_files
"isadof.cfg"
"root.bib"
"root.mst"
"preamble.tex"
"build"
"lstisadof.sty"
"figures/isabelle-architecture.pdf"
"figures/Dogfood-Intro.png"
"figures/InteractiveMathSheet.png"
"lstisadof-manual.sty"
"figures/antiquotations-PIDE.png"
"figures/cicm2018-combined.png"
"figures/cicm2018-dof.png"
"figures/cicm2018-pdf.png"
"figures/document-hierarchy.pdf"
"figures/document-hierarchy.svg"
"figures/Dogfood-figures.png"
"figures/Dogfood-II-bgnd1.png"
"figures/Dogfood-III-bgnd-text_section.png"
"figures/Dogfood-Intro.png"
"figures/Dogfood-IV-jumpInDocCLass.png"
"figures/Dogfood-III-bgnd-text_section.png"
"figures/Dogfood-V-attribute.png"
"figures/antiquotations-PIDE.png"
"figures/srac-definition.png"
"figures/IsaArchGlobal.png"
"figures/IsaArchInteract.png"
"figures/IsaArch.odp"
"figures/isabelle-architecture.pdf"
"figures/isabelle-architecture.svg"
"figures/isadof.png"
"figures/PIDE-interaction.pdf"
"figures/srac-as-es-application.png"
"figures/Dogfood-figures.png"
"figures/srac-definition.png"
"figures/Isabelle_DOF-logo.pdf"

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
@ -34,7 +34,7 @@ if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 56 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 214 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 135 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 73 KiB

View File

@ -0,0 +1,660 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="247.05695mm"
height="113.70705mm"
viewBox="0 0 247.05695 113.70704"
version="1.1"
id="svg8"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"
sodipodi:docname="document-hierarchy.svg">
<defs
id="defs2">
<marker
inkscape:isstock="true"
style="overflow:visible"
id="marker1659"
refX="0"
refY="0"
orient="auto"
inkscape:stockid="Arrow1Lstart">
<path
transform="matrix(0.8,0,0,0.8,10,0)"
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
id="path1657"
inkscape:connector-curvature="0" />
</marker>
<marker
inkscape:stockid="Arrow1Lend"
orient="auto"
refY="0"
refX="0"
id="Arrow1Lend"
style="overflow:visible"
inkscape:isstock="true">
<path
id="path1224"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
transform="matrix(-0.8,0,0,-0.8,-10,0)"
inkscape:connector-curvature="0" />
</marker>
<marker
inkscape:stockid="Arrow1Lstart"
orient="auto"
refY="0"
refX="0"
id="marker1601-3-5"
style="overflow:visible"
inkscape:isstock="true"
inkscape:collect="always">
<path
id="path1599-6-3"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
transform="matrix(0.8,0,0,0.8,10,0)"
inkscape:connector-curvature="0" />
</marker>
<marker
inkscape:stockid="Arrow1Lstart"
orient="auto"
refY="0"
refX="0"
id="marker1601-3-5-2"
style="overflow:visible"
inkscape:isstock="true"
inkscape:collect="always">
<path
id="path1599-6-3-5"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
transform="matrix(0.8,0,0,0.8,10,0)"
inkscape:connector-curvature="0" />
</marker>
<marker
inkscape:stockid="Arrow1Lstart"
orient="auto"
refY="0"
refX="0"
id="marker1601-3-5-2-4"
style="overflow:visible"
inkscape:isstock="true"
inkscape:collect="always">
<path
id="path1599-6-3-5-3"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
transform="matrix(0.8,0,0,0.8,10,0)"
inkscape:connector-curvature="0" />
</marker>
<marker
inkscape:stockid="Arrow1Lstart"
orient="auto"
refY="0"
refX="0"
id="marker1601-3-5-2-4-8"
style="overflow:visible"
inkscape:isstock="true"
inkscape:collect="always">
<path
id="path1599-6-3-5-3-6"
d="M 0,0 5,-5 -12.5,0 5,5 Z"
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
transform="matrix(0.8,0,0,0.8,10,0)"
inkscape:connector-curvature="0" />
</marker>
</defs>
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="0.90414235"
inkscape:cx="569.22671"
inkscape:cy="-143.11208"
inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="true"
showguides="true"
inkscape:guide-bbox="true"
inkscape:window-width="1918"
inkscape:window-height="1573"
inkscape:window-x="0"
inkscape:window-y="25"
inkscape:window-maximized="0"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0">
<inkscape:grid
type="xygrid"
id="grid835"
originx="-161.16314"
originy="-192.38397" />
<sodipodi:guide
position="39.920202,188.61601"
orientation="0,1"
id="guide963"
inkscape:locked="false" />
</sodipodi:namedview>
<metadata
id="metadata5">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(-161.16312,9.0910371)">
<flowRoot
xml:space="preserve"
id="flowRoot965"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:26.66666603px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none"
transform="scale(0.26458333)"><flowRegion
id="flowRegion967"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold'"><rect
id="rect969"
width="840"
height="720"
x="280"
y="-257.48032"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold'" /></flowRegion><flowPara
id="flowPara971" /></flowRoot> <rect
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect981-6-2"
width="76.729164"
height="55.5625"
x="161.39581"
y="16.541656" />
<rect
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1"
width="63.421741"
height="7.859282"
x="172.0574"
y="29.849043" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="203.8177"
y="36.617695"
id="text833-6-9-9"><tspan
sodipodi:role="line"
x="203.8177"
y="36.617695"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7">context definition</tspan></text>
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="164.89302"
y="25.267418"
id="text961-5-6"><tspan
sodipodi:role="line"
id="tspan959-9-1"
x="164.89302"
y="25.267418"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">A</tspan></text>
<path
style="fill:none;stroke:#000000;stroke-width:0.25783753px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5)"
d="m 201.08331,72.104157 c 0.87864,20.534874 0,21.382403 0,21.382403 64.88872,-0.04303 42.33334,-0.215736 42.33334,-0.215736"
id="path1174-7-5"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccc" />
<rect
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3"
width="63.421741"
height="7.859282"
x="172.0574"
y="19.18749" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="203.8177"
y="25.956142"
id="text833-6-9-9-6"><tspan
sodipodi:role="line"
x="203.8177"
y="25.956142"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7">header</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5"
width="63.421741"
height="7.859282"
x="172.05748"
y="39.903194" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="203.81781"
y="46.671844"
id="text833-6-9-9-6-3"><tspan
sodipodi:role="line"
x="203.81781"
y="46.671844"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5">command</tspan></text>
<flowRoot
xml:space="preserve"
id="flowRoot173"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:26.66666603px;line-height:1.25;font-family:Raleway;-inkscape-font-specification:'Raleway Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none"
transform="matrix(0.26458333,0,0,0.26458333,169.50707,-38.446423)"><flowRegion
id="flowRegion175"><rect
id="rect177"
width="300"
height="190"
x="-10.656718"
y="327.82898" /></flowRegion><flowPara
id="flowPara179"></flowPara></flowRoot> <flowRoot
xml:space="preserve"
id="flowRoot181"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:26.66666603px;line-height:1.25;font-family:Raleway;-inkscape-font-specification:'Raleway Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none"
transform="matrix(0.26458333,0,0,0.26458333,169.50707,-38.446423)"><flowRegion
id="flowRegion183"><rect
id="rect185"
width="290"
height="120"
x="-310.65671"
y="327.82898" /></flowRegion><flowPara
id="flowPara187"></flowPara></flowRoot> <rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6"
width="63.421741"
height="7.859282"
x="172.05757"
y="50.486526" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="203.81786"
y="57.25518"
id="text833-6-9-9-6-3-2"><tspan
sodipodi:role="line"
x="203.81786"
y="57.25518"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-1"
width="63.421741"
height="7.859282"
x="172.05757"
y="61.069847" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="203.81783"
y="67.838509"
id="text833-6-9-9-6-3-2-2"><tspan
sodipodi:role="line"
x="203.81783"
y="67.838509"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-7">command</tspan></text>
<rect
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect981-6-2-0"
width="76.729164"
height="55.5625"
x="246.59166"
y="48.82082" />
<rect
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-9"
width="63.421741"
height="7.859282"
x="257.25323"
y="62.128208" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="289.01349"
y="68.896866"
id="text833-6-9-9-3"><tspan
sodipodi:role="line"
x="289.01349"
y="68.896866"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-6">context definition</tspan></text>
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="250.08887"
y="57.546581"
id="text961-5-6-0"><tspan
sodipodi:role="line"
id="tspan959-9-1-6"
x="250.08887"
y="57.546581"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">C</tspan></text>
<rect
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-2"
width="63.421741"
height="7.859282"
x="257.25323"
y="51.466656" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="289.01349"
y="58.23531"
id="text833-6-9-9-6-6"><tspan
sodipodi:role="line"
x="289.01349"
y="58.23531"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-1">header</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-8"
width="63.421741"
height="7.859282"
x="257.25333"
y="72.182365" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="289.01361"
y="78.951012"
id="text833-6-9-9-6-3-7"><tspan
sodipodi:role="line"
x="289.01361"
y="78.951012"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-92">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-0"
width="63.421741"
height="7.859282"
x="257.25342"
y="82.765694" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="289.01364"
y="89.534348"
id="text833-6-9-9-6-3-2-23"><tspan
sodipodi:role="line"
x="289.01364"
y="89.534348"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-75">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-1-9"
width="63.421741"
height="7.859282"
x="257.25342"
y="93.349014" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="289.01361"
y="100.11768"
id="text833-6-9-9-6-3-2-2-2"><tspan
sodipodi:role="line"
x="289.01361"
y="100.11768"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-7-2">command</tspan></text>
<rect
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect981-6-2-0-8"
width="76.729164"
height="55.5625"
x="246.06255"
y="-8.858346" />
<rect
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-9-9"
width="63.421741"
height="7.859282"
x="256.72409"
y="4.4490485" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="288.48425"
y="11.217703"
id="text833-6-9-9-3-7"><tspan
sodipodi:role="line"
x="288.48425"
y="11.217703"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-6-3">context definition</tspan></text>
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="249.55975"
y="-0.1325787"
id="text961-5-6-0-6"><tspan
sodipodi:role="line"
id="tspan959-9-1-6-1"
x="249.55975"
y="-0.1325787"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">B</tspan></text>
<rect
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-2-2"
width="63.421741"
height="7.859282"
x="256.72409"
y="-6.2125063" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="288.48425"
y="0.55614966"
id="text833-6-9-9-6-6-9"><tspan
sodipodi:role="line"
x="288.48425"
y="0.55614966"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-1-3">header</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-8-1"
width="63.421741"
height="7.859282"
x="256.72418"
y="14.503201" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="288.48438"
y="21.271849"
id="text833-6-9-9-6-3-7-9"><tspan
sodipodi:role="line"
x="288.48438"
y="21.271849"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-92-4">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-0-7"
width="63.421741"
height="7.859282"
x="256.72427"
y="25.086535" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="288.48441"
y="31.855186"
id="text833-6-9-9-6-3-2-23-8"><tspan
sodipodi:role="line"
x="288.48441"
y="31.855186"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-75-4">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-1-9-5"
width="63.421741"
height="7.859282"
x="256.72427"
y="35.669849" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="288.48438"
y="42.438511"
id="text833-6-9-9-6-3-2-2-2-0"><tspan
sodipodi:role="line"
x="288.48438"
y="42.438511"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-7-2-3">command</tspan></text>
<rect
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect981-6-2-6"
width="76.729164"
height="55.5625"
x="331.25821"
y="16.541651" />
<rect
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-1"
width="63.421741"
height="7.859282"
x="341.91977"
y="29.849037" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="373.67999"
y="36.617695"
id="text833-6-9-9-0"><tspan
sodipodi:role="line"
x="373.67999"
y="36.617695"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-63">context definition</tspan></text>
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="334.75543"
y="25.267416"
id="text961-5-6-2"><tspan
sodipodi:role="line"
id="tspan959-9-1-0"
x="334.75543"
y="25.267416"
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">D</tspan></text>
<rect
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-6"
width="63.421741"
height="7.859282"
x="341.91977"
y="19.187481" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="373.67999"
y="25.956137"
id="text833-6-9-9-6-1"><tspan
sodipodi:role="line"
x="373.67999"
y="25.956137"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-55">header</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-4"
width="63.421741"
height="7.859282"
x="341.91986"
y="39.903194" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="373.68008"
y="46.671856"
id="text833-6-9-9-6-3-76"><tspan
sodipodi:role="line"
x="373.68008"
y="46.671856"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-5">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-6"
width="63.421741"
height="7.859282"
x="341.91995"
y="50.48653" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="373.68015"
y="57.255199"
id="text833-6-9-9-6-3-2-9"><tspan
sodipodi:role="line"
x="373.68015"
y="57.255199"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-3">command</tspan></text>
<rect
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
id="rect837-3-0-1-3-5-6-1-7"
width="63.421741"
height="7.859282"
x="341.91995"
y="61.069859" />
<text
xml:space="preserve"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
x="373.68011"
y="67.838516"
id="text833-6-9-9-6-3-2-2-4"><tspan
sodipodi:role="line"
x="373.68011"
y="67.838516"
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
id="tspan860-6-7-7-5-9-7-5">command</tspan></text>
<path
style="fill:none;stroke:#000000;stroke-width:0.25783753px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5-2)"
d="m 199.496,15.328092 c 0.87864,-20.5348759 0,-21.3824044 0,-21.3824044 64.88872,0.04303 42.33334,0.215736 42.33334,0.215736"
id="path1174-7-5-4"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccc" />
<path
style="fill:none;stroke:#000000;stroke-width:0.26011065px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5-2-4)"
d="m 323.04732,-5.355514 c 45.95553,0.3995671 47.85221,0 47.85221,0 -0.0962,29.508532 -0.48279,19.251338 -0.48279,19.251338"
id="path1174-7-5-4-7"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccc" />
<path
style="fill:none;stroke:#000000;stroke-width:0.26011065px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5-2-4-8)"
d="m 323.57654,93.311737 c 45.95553,-0.399568 47.85221,0 47.85221,0 -0.0962,-29.508534 -0.48279,-19.251339 -0.48279,-19.251339"
id="path1174-7-5-4-7-8"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccc" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 36 KiB

Some files were not shown because too many files have changed in this diff Show More