Compare commits


13 Commits

141 changed files with 9254 additions and 25940 deletions

.ci/Jenkinsfile vendored Normal file

@ -0,0 +1,27 @@
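// Overview (editorial annotation, not part of the original file): this pipeline
// builds a Docker image containing Isabelle 2018 plus the patched thy_output.ML,
// rebuilds that image from scratch whenever the patch changes, and finally runs
// the Isabelle/DOF installation and session build inside the resulting container.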
pipeline {
    agent any
    stages {
        stage('Build Docker') {
            steps {
                sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/'
                sh 'docker build -t logicalhacking:isabelle4dof-2018 .ci/isabelle4isadof'
                sh 'rm -f .ci/isabelle4isadof/thy_output.ML'
            }
        }
        stage('Check Docker') {
            when { changeset "src/patches/*" }
            steps {
                sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/'
                sh 'docker build --no-cache -t logicalhacking:isabelle4dof-2018 .ci/isabelle4isadof'
                sh 'rm -f .ci/isabelle4isadof/thy_output.ML'
            }
        }
        stage('Build Isabelle/DOF') {
            steps {
                sh 'find -type d -name "output" -exec rm -rf {} \\; || true'
                sh 'docker run -v $PWD:/DOF logicalhacking:isabelle4dof-2018 sh -c "cd /DOF && ./install && isabelle build -D ."'
            }
        }
    }
}


@ -0,0 +1,32 @@
# Copyright (c) 2019 Achim D. Brucker
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
FROM logicalhacking:isabelle2018
WORKDIR /home/isabelle
COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy
RUN Isabelle/bin/isabelle build -b Functional-Automata

.ci/mk_release Executable file

@ -0,0 +1,177 @@
#!/usr/bin/env bash
# Copyright (c) 2019 The University of Exeter.
# 2019 The University of Paris-Saclay.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
#set -e
shopt -s nocasematch
print_help()
{
echo "Usage: mk_release [OPTION] "
echo ""
echo " A tool for building $ISADOF_TAR.tar.xz"
echo ""
echo "Run ..."
echo ""
echo " --help, -h display this help message"
echo " --sign -s sign release archive"
echo " (default: $SIGN)"
echo " --isabelle, -i isabelle isabelle command used for installation"
echo " (default: $ISABELLE)"
echo " --tag -t tag use tag for release archive"
echo " --p --publish publish generated artefact"
echo " (use master: $PUBLISH)"
}
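# Example invocations (editorial sketch only; the Isabelle path and the tag
# name below are placeholders, not values defined by this script):
#   ./.ci/mk_release                                           # archive from master
#   ./.ci/mk_release --sign                                    # also create a GPG signature
#   ./.ci/mk_release --isabelle /opt/Isabelle2018/bin/isabelle --tag <some-tag>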
read_config() {
if [ ! -f .config ]; then
echo "Error: .config not found (not started in the main directory?)!"
exit 1
else
source .config
fi
}
check_isabelle_version() {
ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
echo "* Checking Isabelle version:"
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
echo "* Expecting $ISABELLE_VERSION, found $ACTUAL_ISABELLE_VERSION: ERROR"
exit 1
else
echo "* Expecting $ISABELLE_VERSION, found $ACTUAL_ISABELLE_VERSION: success"
fi
}
clone_repo()
{
echo "* Cloning into $ISADOF_DIR"
git clone . $ISADOF_DIR
if [ "$USE_TAG" = "true" ]; then
echo " * Switching to tag $DOF_VERSION/$ISABELLE_SHORT_VERSION"
(cd $ISADOF_DIR && git checkout $DOF_VERSION/$ISABELLE_SHORT_VERSION)
else
echo " * Not tag specified, using master branch"
fi
}
build_and_install_manual()
{
echo "* Building manual"
ROOTS=$ISABELLE_HOME_USER/ROOTS
if [ -f $ROOTS ]; then
mv $ROOTS $ROOTS.backup
fi
(cd $ISADOF_DIR && ./install)
(cd $ISADOF_DIR && $ISABELLE build -c Isabelle_DOF-Manual)
mkdir -p $ISADOF_DIR/doc
cp $ISADOF_DIR/examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
$ISADOF_DIR/doc/Isabelle_DOF-Manual.pdf
find $ISADOF_DIR -type d -name "output" -exec rm -rf {} \; || true
rm -rf $ISADOF_DIR/.git* $ISADOF_DIR/.ci $ISADOF_DIR/.afp
if [ -f $ROOTS.backup ]; then
mv $ROOTS.backup $ROOTS
fi
}
create_archive()
{
echo "* Creating archive"
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
rm -rf $BUILD_DIR
}
sign_archive()
{
echo "* Publish archive not yet implemented"
gpg --armor --output $ISADOF_TAR.tar.xz.asc --detach-sig $ISADOF_TAR.tar.xz
}
publish_archive()
{
echo "* Publish archive"
ssh 0x5f.org mkdir -p www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR
scp $ISADOF_TAR.tar.xz* 0x5f.org:www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR/
ssh 0x5f.org chmod go+u-w -R www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR
}
read_config
ISABELLE=`which isabelle`
USE_TAG="false"
SIGN="false"
PUBLISH="false"
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
TAG="$DOF_VERSION/$ISABELLE_SHORT_VERSION"
BUILD_DIR=`mktemp -d`
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
while [ $# -gt 0 ]
do
case "$1" in
--isabelle|-i)
ISABELLE="$2";
shift;;
--tag|-t)
TAG="$2";
USE_TAG="true"
shift;;
--sign|-s)
SIGN="true";;
--publish|-p)
PUBLISH="true";;
--help|-h)
print_help
exit 0;;
*) print_help
exit 1;;
esac
shift
done
check_isabelle_version
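# Note (editorial annotation): `isabelle getenv ISABELLE_HOME_USER` prints a line
# of the form ISABELLE_HOME_USER=<value>; exporting it makes that setting
# available to the functions called below.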
VARS=`$ISABELLE getenv ISABELLE_HOME_USER`
for i in $VARS; do
export "$i"
done
clone_repo
build_and_install_manual
create_archive
if [ "$SIGN" = "true" ]; then
sign_archive
fi
if [ "$PUBLISH" = "true" ]; then
publish_archive
fi
exit 0

.config Normal file

@ -0,0 +1,8 @@
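# Editorial annotation (not part of the original file): this release
# configuration is sourced by .ci/mk_release; ISABELLE_VERSION is checked by
# check_isabelle_version, DOF_VERSION names the release archive, and the
# DOF_ARTIFACT_* values determine where publish_archive uploads the artifacts.
# The AFP_* entries are presumably consumed by the AFP installation step, which
# is not shown in this diff.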
DOF_VERSION="Unreleased"
ISABELLE_VERSION="Isabelle2018: August 2018"
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
DOF_URL="https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
DOF_ARTIFACT_DIR="releases/Isabelle_DOF/Isabelle_DOF"
DOF_ARTIFACT_HOST="artifacts.logicalhacking.com"
AFP_DATE="afp-2019-06-04"
AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/"$AFP_DATE".tar.gz"

.gitattributes vendored

@ -1,2 +1,3 @@
core.autocrlf false
core.eol lf
install -crlf
document-generator/Tools/DOF_mkroot -crlf
document-generator/document-template/build -crlf

.gitignore vendored Executable file → Normal file

@ -1,4 +1,4 @@
output
*.template.sty
.afp
*~
*#


@ -1,18 +0,0 @@
# Continuous Build and Release Setup
[![status-badge](https://ci.logicalhacking.com/api/badges/Isabelle_DOF/Isabelle_DOF/status.svg)](https://ci.logicalhacking.com/Isabelle_DOF/Isabelle_DOF)
This directory contains the CI configuration for the [Woodpecker CI](https://woodpecker-ci.org/).
It may also contain additional tools and scripts that are useful for preparing a release.
## Generated Artifacts
### Latest Build
* lualatex
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/lualatex/browser_info/Unsorted/)
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/lualatex/)
* pdflatex
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/)
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)
* [Isabelle_DOF-Unreleased_Isabelle2022.tar.xz](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/Isabelle_DOF-Unreleased_Isabelle2022.tar.xz)


@ -1,55 +0,0 @@
pipeline:
  build:
    image: docker.io/logicalhacking/isabelle2022
    commands:
      - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
      - mkdir -p $ARTIFACT_DIR
      - export `isabelle getenv ISABELLE_HOME_USER`
      - mkdir -p $ISABELLE_HOME_USER/etc
      - echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
      - isabelle build -D . -o browser_info
      - isabelle components -u .
      - isabelle build -D . -o browser_info
      - isabelle dof_mkroot -q DOF_test
      - isabelle build -D DOF_test
      - cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
      - cd $ARTIFACT_DIR
      - cd ../..
      - ln -s * latest
  archive:
    image: docker.io/logicalhacking/isabelle2022
    commands:
      - export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
      - mkdir -p $ARTIFACT_DIR
      - export ISABELLE_VERSION=`isabelle version`
      - ./.woodpecker/mk_release -d
      - cp Isabelle_DOF-Unreleased_$ISABELLE_VERSION.tar.xz $ARTIFACT_DIR/../
    when:
      matrix:
        LATEX: lualatex
  deploy:
    image: docker.io/drillster/drone-rsync
    settings:
      hosts: [ "ci.logicalhacking.com" ]
      port: 22
      source: .artifacts/$CI_REPO_OWNER/*
      target: $CI_REPO_OWNER
      include: [ "**.*" ]
      key:
        from_secret: artifacts_ssh
      user: artifacts
  notify:
    image: drillster/drone-email
    settings:
      host: smtp.0x5f.org
      username: woodpecker
      password:
        from_secret: email
      from: ci@logicalhacking.com
    when:
      status: [ failure ]

matrix:
  LATEX:
    - lualatex
    - pdflatex


@ -1,224 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2019-2022 University of Exeter.
# 2019 University of Paris-Saclay.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
shopt -s nocasematch
print_help()
{
echo "Usage: mk_release [OPTION] "
echo ""
echo " A tool for building an Isabelle/DOF release archive."
echo ""
echo "Run ..."
echo ""
echo " --help, -h display this help message"
echo " --sign, -s sign release archive"
echo " (default: $SIGN)"
echo " --isabelle, -i isabelle isabelle command used for installation"
echo " (default: $ISABELLE)"
echo " --tag tag, -t tag use tag for release archive"
echo " (default: use master branch)"
echo " --publish, -p publish generated artefact"
echo " (default: $PUBLISH)"
echo " --quick-and-dirty, -d only build required artifacts, no complete test"
echo " (default: $DIRTY)"
}
check_isabelle_version() {
ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
echo "* Checking Isabelle version:"
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
echo "* Expecting $ISABELLE_VERSION, found $ACTUAL_ISABELLE_VERSION: ERROR"
exit 1
else
echo "* Expecting $ISABELLE_VERSION, found $ACTUAL_ISABELLE_VERSION: success"
fi
}
clone_repo()
{
echo "* Cloning into $ISADOF_WORK_DIR"
git clone . $ISADOF_WORK_DIR
if [ "$USE_TAG" = "true" ]; then
echo " * Switching to tag $TAG"
(cd $ISADOF_WORK_DIR && git checkout $TAG)
else
echo " * No tag specified, using master branch"
fi
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/etc/settings
}
build_and_install_manuals()
{
echo "* Building manual"
if [ "$DIRTY" = "true" ]; then
if [ -z ${ARTIFACT_DIR+x} ]; then
echo " * Quick and Dirty Mode (local build)"
$ISABELLE build -d . Isabelle_DOF-Manual 2018-cicm-isabelle_dof-applications
mkdir -p $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
cp examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/document.pdf \
$ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
mkdir -p $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/
cp examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
$ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/;
else
echo " * Quick and Dirty Mode (running on CI)"
mkdir -p $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
cp $ARTIFACT_DIR/browser_info/Unsorted/2018-cicm-isabelle_dof-applications/document.pdf \
$ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
mkdir -p $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/
cp $ARTIFACT_DIR/browser_info/Unsorted/Isabelle_DOF-Manual/document.pdf \
$ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/;
fi
else
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
fi
mkdir -p $ISADOF_WORK_DIR/doc
echo "Isabelle/DOF Manuals!" > $ISADOF_WORK_DIR/doc/Contents
cp $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
$ISADOF_WORK_DIR/doc/Isabelle_DOF-Manual.pdf
echo " Isabelle_DOF-Manual User and Implementation Manual for Isabelle/DOF" >> $ISADOF_WORK_DIR/doc/Contents
cp $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/document.pdf \
$ISADOF_WORK_DIR/doc/2018-cicm-isabelle_dof-applications.pdf
echo " 2018-cicm-isabelle_dof-applications Example academic paper" >> $ISADOF_WORK_DIR/doc/Contents
find $ISADOF_WORK_DIR -type d -name "output" -exec rm -rf {} \; &> /dev/null || true
rm -rf $ISADOF_WORK_DIR/.git* $ISADOF_WORK_DIR/.woodpecker $ISADOF_WORK_DIR/.afp
}
create_archive()
{
echo "* Creating archive"
cp $ISADOF_WORK_DIR/doc/Isabelle_DOF-Manual.pdf $ISADOF_TAR.pdf
(mv $ISADOF_WORK_DIR $ISADOF_DIR)
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
}
sign_archive()
{
echo "* Signing archive"
gpg --armor --output $ISADOF_TAR.tar.xz.asc --detach-sig $ISADOF_TAR.tar.xz
}
publish_archive()
{
echo "* Publish archive"
ssh 0x5f.org mkdir -p www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR
scp $ISADOF_TAR.* 0x5f.org:www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR/
ssh 0x5f.org chmod go+u-w -R www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR
}
ISABELLE=`which isabelle`
USE_TAG="false"
SIGN="false"
PUBLISH="false"
DIRTY="false"
BUILD_DIR=`mktemp -d`
ISADOF_WORK_DIR="$BUILD_DIR/Isabelle_DOF"
while [ $# -gt 0 ]
do
case "$1" in
--isabelle|-i)
ISABELLE="$2";
shift;;
--tag|-t)
TAG="$2";
USE_TAG="true"
shift;;
--sign|-s)
SIGN="true";;
--publish|-p)
PUBLISH="true";;
--quick-and-dirty|-d)
DIRTY="true";;
--help|-h)
print_help
exit 0;;
*) print_help
exit 1;;
esac
shift
done
clone_repo
ISADOF_MAIN_DIR=`pwd`
if [ "$DIRTY" = "true" ]; then
echo "Running in Quick and Dirty mode!"
$ISABELLE components -u $ISADOF_MAIN_DIR
else
$ISABELLE components -x $ISADOF_MAIN_DIR
$ISABELLE components -u $ISADOF_WORK_DIR
fi
VARS=`$ISABELLE getenv ISABELLE_TOOL`
for i in $VARS; do
export "$i"
done
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
DOF_VERSION="$($ISABELLE_TOOL dof_param -b dof_version)"
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
check_isabelle_version
build_and_install_manuals
if [ "$DIRTY" != "true" ]; then
$ISABELLE components -x $ISADOF_WORK_DIR
$ISABELLE components -u $ISADOF_MAIN_DIR
fi
create_archive
if [ "$SIGN" = "true" ]; then
sign_archive
fi
if [ "$PUBLISH" = "true" ]; then
publish_archive
fi
rm -rf $BUILD_DIR
exit 0


@ -7,45 +7,3 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
### Changed
- Updated Isabelle version to Isabelle 2022
## [1.3.0] - 2022-07-08
### Changed
- The project-specific configuration is now part of the `ROOT` file; the formerly
used `isadof.cfg` is obsolete and no longer supported.
- Removed explicit use of `document/build` script. Requires removing the `build` script
entry from ROOT files.
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
`isabelle components` command. The installation script is now only a convenient way
of installing the required AFP entries.
- `mkroot_DOF` has been renamed to `dof_mkroot` (and reimplemented in Scala).
## [1.2.0] - 2022-03-26
## [1.1.0] - 2021-03-20
### Added
- New antiquotations, consistency checks
### Changed
- Updated manual
- Restructured setup for ontologies (Isabelle theories and LaTeX styles)
## 1.0.0 - 2018-08-18
### Added
- First public release
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.3.0/Isabelle2021...HEAD
[1.3.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...v1.3.0/Isabelle2021-1
[1.2.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.1.0/Isabelle2021...v1.2.0/Isabelle2021
[1.1.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.0.0/Isabelle2019...v1.1.0/Isabelle2021


@ -1,4 +1,5 @@
To cite Isabelle/DOF in publications, please use
Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart
Wolff. Using The Isabelle Ontology Framework: Linking the Formal


@ -1,6 +1,6 @@
Copyright (C) 2018-2019 The University of Sheffield
2019-2022 The University of Exeter
2018-2022 The University of Paris-Saclay
2019-2019 The University of Exeter
2018-2019 The University of Paris-Saclay
All rights reserved.
Redistribution and use in source and binary forms, with or without

README.md

@ -1,67 +1,67 @@
# [Isabelle/DOF](https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF): Document Preparation Setup
Isabelle/DOF is a novel Document Ontology Framework on top of Isabelle.
Isabelle/DOF allows for both conventional typesetting and formal development.
The manual for [Isabelle/DOF 1.3.0/Isabelle2021-1 is available
online.](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.pdf)
Isabelle/DOF allows for both conventional typesetting as well as formal
development.
## Pre-requisites
Isabelle/DOF has three major prerequisites:
Isabelle/DOF has two major pre-requisites:
* **Isabelle:** Isabelle/DOF requires [Isabelle 2018](http://isabelle.in.tum.de/website-Isabelle2018/).
Please download the Isabelle 2018 distribution for your operating
system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2018/).
* **LaTeX:** Isabelle/DOF requires a modern pdfTeX-engine supporting the \expanded{}-primitive. This
is, for example, included in the [TeXLive 2019](https://www.tug.org/texlive/) (or later)
distribution. Please follow the [TeXLive installation instructions](https://www.tug.org/texlive/acquire-netinstall.html)
for installing TeXLive.
* **Isabelle:** Isabelle/DOF requires [Isabelle
2022](https://isabelle.in.tum.de/website-Isabelle2022/). Please download the
Isabelle 2022 distribution for your operating system from the [Isabelle
website](https://isabelle.in.tum.de/website-Isabelle2022/).
* **AFP:** Isabelle/DOF requires several entries from the [Archive of Formal Proofs
(AFP)](https://www.isa-afp.org/). Please install the AFP following the
instructions given at <https://www.isa-afp.org/using.html>.
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
[TeX Live 2022](https://www.tug.org/texlive/) with all available updates
applied.
## Installation
Isabelle/DOF is provided as an Isabelle component. After installing the
prerequisites, change into the directory containing Isabelle/DOF (this should be
the directory containing this `README.md` file) and execute (if you executed
this command already during the installation of the prerequisites, you can skip
it now):
In most cases, the DOF-plugin can be installed as follows:
```console
foo@bar:~$ isabelle components -u .
foo@bar:~$ ./install
```
The final step for the installation is:
If a specific Isabelle version should be used (i.e., not the default
one), the full path to the ``isabelle`` command needs to be passed
using the ``--isabelle`` command line argument of the ``install`` script:
```console
foo@bar:~$ isabelle build -D .
foo@bar:~$ ./install --isabelle /usr/local/Isabelle2018/bin/isabelle
```
This will compile Isabelle/DOF and run the example suite.
For further command line options of the installer, please use the
built-in help:
```console
foo@bar:~$ ./install --help
```
## Usage
### Opening an Example
If you want to work with or extend one of the examples, e.g., you can open it
similar to any standard Isabelle theory:
If you want to work with or extend one of the examples, e.g., you can
open it similar to any standard Isabelle theory:
```console
isabelle jedit -d . -l Isabelle_DOF examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
```
This will open an example of a scientific paper using the pre-compiled session
``Isabelle_DOF``, i.e., you will not be able to edit the ontology definitions.
If you want to edit the ontology definition, just open the theory file with the
default HOL session:
This will open an example of a scientific paper using the pre-compiled
session ``Isabelle_DOF``, i.e., you will not be able to edit the
ontology definitions. If you want to edit the ontology definition,
just open the theory file with the default HOL session:
```console
isabelle jedit -d . -l HOL examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
```
While this gives you more flexibility, it might "clutter" your editing
experience, as a lot of internal theories are loaded into Isabelle's editor.
experience, as a lot of internal theories are loaded into Isabelle's
editor.
### Creating a New Project
@ -69,72 +69,50 @@ The DOF-plugin provides an alternative to Isabelle's ``mkroot`` command.
Isabelle projects that use DOF need to be created using
```console
foo@bar:~$ isabelle dof_mkroot
foo@bar:~$ isabelle mkroot_DOF
```
The ``dof_mkroot`` command takes the same parameter as the standard ``mkroot``
command of Isabelle. Thereafter, the normal Isabelle command for building
documents can be used.
The ``mkroot_DOF`` command takes the same parameter as the standard
``mkroot`` command of Isabelle. Thereafter, the normal Isabelle
command for building documents can be used.
Using the ``-o`` option, different ontology setups can be selected and using the
``-t`` option, different LaTeX setups can be selected. For example,
Using the ``-o`` option, different ontology setups can be
selected and using the ``-t`` option, different LaTeX setups
can be selected. For example,
```console
foo@bar:~$ isabelle dof_mkroot -o scholarly_paper -t scrartcl
foo@bar:~$ isabelle mkroot_DOF -o scholarly_paper -t scrartcl
```
creates a setup using the scholarly_paper ontology and the article class from
the KOMA-Script bundle.
creates a setup using the scholarly_paper ontology and the article
class from the KOMA-Script bundle.
The help (option ``-h``) shows a list of all supported ontologies and document
templates:
The help (option ``-h``) shows a list of all supported ontologies and
document templates:
```console
foo@bar:~$ isabelle dof_mkroot -h
foo@bar:~$ isabelle mkroot_DOF -h
Usage: isabelle dof_mkroot [OPTIONS] [DIRECTORY]
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
Options are:
-I init Mercurial repository and add generated files
-h print help
-n NAME alternative session name (default: directory base name)
-o NAMES list of ontologies, separated by blanks
(default: "technical_report scholarly_paper")
-q quiet mode: less verbosity
-t NAME template (default: "scrreprt-modern")
-h print this help text and exit
-n NAME alternative session name (default: DIR base name)
-o ONTOLOGY (default: scholarly_paper)
Available ontologies:
* cenelec_50128
* mathex
* scholarly_paper
-t TEMPLATE (default: scrartcl)
Available document templates:
* lncs
* scrartcl
* scrreprt
* scrreprt-modern
Create session root directory for Isabelle/DOF (default: current directory).
Prepare session root DIR (default: current directory).
```
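For orientation, a complete session for a fresh project could look as follows.
This is a sketch only: `my_paper` is a placeholder directory name, and the
command is spelled `dof_mkroot` or `mkroot_DOF` depending on the Isabelle/DOF
version in use.
```console
foo@bar:~$ isabelle dof_mkroot -o scholarly_paper -t scrartcl my_paper
foo@bar:~$ isabelle build -D my_paper
```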
## Releases
For releases, signed archives including a PDF version of the Isabelle/DOF manual
are available:
* Isabelle/DOF 1.3.0/Isabelle2021-1
* [Isabelle_DOF-1.3.0_Isabelle2021-1.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.pdf)
* [Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz)
* [Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz.asc)
### Older Releases
* Isabelle/DOF 1.2.0/Isabelle2021
* [Isabelle_DOF-1.2.0_Isabelle2021.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.pdf)
* [Isabelle_DOF-1.2.0_Isabelle2021.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.tar.xz)
* [Isabelle_DOF-1.2.0_Isabelle2021.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.tar.xz.asc)
* Isabelle/DOF 1.1.0/Isabelle2021
* [Isabelle_DOF-1.1.0_Isabelle2021.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2021.pdf)
* [Isabelle_DOF-1.1.0_Isabelle2021.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2021.tar.xz)
* [Isabelle_DOF-1.1.0_Isabelle2021.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2021.tar.xz.asc)
* Isabelle/DOF 1.1.0/Isabelle2020
* [Isabelle_DOF-1.1.0_Isabelle2020.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2020.pdf)
* [Isabelle_DOF-1.1.0_Isabelle2020.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2020.tar.xz)
* [Isabelle_DOF-1.1.0_Isabelle2020.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.1.0_Isabelle2020.tar.xz.asc)
* Isabelle/DOF 1.0.0/Isabelle2019
* [Isabelle_DOF-1.0.0_Isabelle2019.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.0.0_Isabelle2019.pdf)
* [Isabelle_DOF-1.0.0_Isabelle2019.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.0.0_Isabelle2019.tar.xz)
* [Isabelle_DOF-1.0.0_Isabelle2019.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.0.0_Isabelle2019.tar.xz.asc)
## Team
Main contacts:
@ -147,7 +125,6 @@ Main contacts:
* Idir Ait-Sadoune
* Paolo Crisafulli
* Chantal Keller
* Nicolas Méric
## License
@ -157,32 +134,18 @@ SPDX-License-Identifier: BSD-2-Clause
## Publications
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart Wolff.
[Using The Isabelle Ontology Framework: Linking the Formal with the
Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
In Conference on Intelligent Computer Mathematics (CICM). Lecture Notes in
Computer Science (11006), Springer-Verlag, 2018.
[doi:10.1007/978-3-319-96812-4_3](https://doi.org/10.1007/978-3-319-96812-4_3).
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart
Wolff. [Using The Isabelle Ontology Framework: Linking the Formal
with the Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
In Conference on Intelligent Computer Mathematics (CICM). Lecture
Notes in Computer Science (11006), Springer-Verlag, 2018.
* Achim D. Brucker and Burkhart Wolff. [Isabelle/DOF: Design and
Implementation](https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf).
In Software Engineering and Formal Methods (SEFM). Lecture Notes in Computer
Science (11724), Springer-Verlag, 2019.
[doi:10.1007/978-3-030-30446-1_15](https://doi.org/10.1007/978-3-030-30446-1_15).
In Software Engineering and Formal Methods (SEFM). Lecture Notes in
Computer Science, Springer-Verlag, 2019.
* Achim D. Brucker, Burkhart Wolff. [Using Ontologies in Formal Developments
Targeting
Certification](https://www.brucker.ch/bibliography/download/2019/brucker.ea-ontologies-certification-2019.pdf).
In Integrated Formal Methods (IFM). Lecture Notes in Computer Science (11918).
Springer-Verlag 2019.
[doi:10.1007/978-3-030-34968-4_4](http://dx.doi.org/10.1007/978-3-030-34968-4_4)
## Master Repository
* Sergio Bezzecchi, Paolo Crisafulli, Charlotte Pichot, and Burkhart Wolff.
[Making Agile Development Processes fit for V-style Certification
Procedures.](https://hal.archives-ouvertes.fr/hal-01702815/document). In ERTS
2018. <https://hal.archives-ouvertes.fr/hal-01702815>
## Upstream Repository
The upstream git repository, i.e., the single source of truth, for this project
is hosted at <https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.
The master git repository for this project is hosted at
<https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.


@ -1,13 +0,0 @@
title = Isabelle/DOF
module = $ISABELLE_HOME_USER/DOF/isabelle_dof.jar
no_build = false
requirements = \
env:ISABELLE_SCALA_JAR
sources = \
src/scala/dof.scala \
src/scala/dof_document_build.scala \
src/scala/dof_mkroot.scala \
src/scala/dof_tools.scala
services = \
isabelle.dof.DOF_Tools \
isabelle.dof.DOF_Document_Build$Engine


@ -1,4 +0,0 @@
# -*- shell-script -*- :mode=shellscript:
ISABELLE_DOF_HOME="$COMPONENT"
ISABELLE_DOCS="$ISABELLE_DOF_HOME/doc:$ISABELLE_DOCS"


@ -1,90 +0,0 @@
theory PikeOS_ST (*Security Target *)
imports "../../../src/ontologies/CC_v3.1_R5/CC_v3_1_R5"
(* Isabelle_DOF.CC_v3_1_R5 in the future. *)
begin
section \<open>ST PikeOS\<close>
open_monitor*[stpkos::ST_MNT]
section*[pkosstintrosec::st_ref_cls]\<open> ST Introduction \<close>
open_monitor*[PikosIntro::ST_INTRO_MNT]
subsection*[pkosstrefsubsec::st_ref_cls]\<open> ST Reference \<close>
text*[pkosstref::st_ref_cls, title="''PikeOS Security Target''", st_version ="(0,4,5)",
authors= "[]", st_date= "''29072020''"]
\<open>This document is the @{docitem st} for the Common Criteria evaluation of PikeOS.
It complies with the Common Criteria for Information Technology Security Evaluation
Version 3.1 Revision 4.\<close>
subsection*[pkossttoerefsubsec::st_ref_cls]\<open>TOE Reference\<close>
text*[pkostoeref::toe_ref_cls, dev_name="''''", toe_name="''PikeOS''",
toe_version= "(0,3,4)", prod_name="Some ''S3725''"]
\<open>The @{docitem toe_def} is the operating system PikeOS version 3.4
running on the microprocessor family x86 hosting different applications.
The @{docitem toe_def} is referenced as PikeOS 3.4 base
product build S3725 for Linux and Windows development host with PikeOS 3.4
Certification Kit build S4250 and PikeOS 3.4 Common Criteria Kit build S4388.\<close>
subsection*[pkossttoeovrvwsubsec::st_ref_cls]\<open> TOE Overview \<close>
text*[pkosovrw1::toe_ovrw_cls]\<open>The @{definition \<open>toe\<close> } is a special kind of operating
system that allows one to effectively separate
different applications running on the same platform from each other. The TOE can host
user applications that can also be operating systems. User applications can also be
malicious, and even in that case the TOE ensures that malicious user applications are
harming neither the TOE nor other applications in other partitions. The TOE will be
installed and run on a hardware platform (e.g. embedded systems).
The TOE is intended to be used as a component (the separation kernel) in MILS systems.
MILS (Multiple Independent Levels of Security) systems are explained in .
The TOE controls usage of memory, devices, processors, and communication channels
to ensure complete separation of user applications and to prevent unexpected
interference between user applications. The TOE enforces restrictions on the
communication between the separated user applications as specified by the configuration
data.
The major security services provided by the TOE are:
\<^item> Separation in space of applications hosted in different partitions from each other
and from the PikeOS operating system according to the configuration data by
using the underlying hardware,
\<^item> Separation in time of applications hosted in different partitions from each other
and from the PikeOS operating system according to the configuration data,
\<^item> Provision and management of communication objects,
\<^item> Management of and access to the TOE and TOE data,
\<^item> PikeOS operating system self-protection and accuracy of security functionality,
\<^item> Generation and treatment of audit data according to the configuration data.\<close>
text*[pkosovrw2::toe_ovrw_cls, toe_type="''OS separation kernel''"]
\<open>The TOE is a special kind of operating system providing a separation kernel with real-time
support.
The typical life cycle phases for this TOE type are development (source code
development), manufacturing (compilation to binary), system integration (by the system
integrator), installation (by the system operator), and finally, operational use (by the
system operator). Operational use of the TOE is explicitly in the focus of this ST. A
security evaluation/certification according to the assurance package chosen in this ST
(see Section 2.3 “Package Claim” below) involves all these life cycle phases.\<close>
text*[pkosdesc::toe_desc_cls]\<open>\<close>
close_monitor*[PikosIntro]
open_monitor*[PikosCCLM::CONF_CLAIMS_MNT]
close_monitor*[PikosCCLM]
open_monitor*[PikosSPD::SEC_PROB_DEF_MNT]
close_monitor*[PikosSPD]
open_monitor*[PikosSO::SEC_OBJ_MNT]
close_monitor*[PikosSO]
open_monitor*[PikosSR::SEC_REQ_MNT]
close_monitor*[PikosSR]
close_monitor*[stpkos]
end


@ -1,11 +1,11 @@
session "mini_odo" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", document_build = dof]
sessions
"Physical_Quantities"
options [document = pdf, document_output = "output"]
theories
"mini_odo"
document_files
"isadof.cfg"
"preamble.tex"
"build"
"root.bib"
"root.mst"
"lstisadof.sty"
@ -13,4 +13,3 @@ session "mini_odo" = "Isabelle_DOF" +
"figures/odometer.jpeg"
"figures/three-phase-odo.pdf"
"figures/wheel-df.png"


@ -0,0 +1,46 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
exit 1
fi
cp $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh .
source build_lib.sh


@ -0,0 +1,3 @@
Template: scrreprt-modern
Ontology: technical_report
Ontology: cenelec_50128


@ -1,16 +1,3 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Saclay
%% 2019 The University of Exeter
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
\definecolor{OliveGreen} {cmyk}{0.64,0,0.95,0.40}
\definecolor{BrickRed} {cmyk}{0,0.89,0.94,0.28}


@ -1,17 +1,3 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Saclay
%% 2019 The University of Exeter
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
\usepackage{listings}
\usepackage{lstisadof}
@ -19,6 +5,8 @@
\usepackage{paralist}
\usepackage{numprint}
\newcommand{\fixIsarList}{\vspace{-\topsep}\vspace{-\baselineskip}\mbox{}\\[0pt]\noindent}
\newcommand{\eg}{e.\,g.}
\newcommand{\ie}{i.\,e.}
\author{}
\title{}


@ -1,65 +1,27 @@
(*************************************************************************
* Copyright (C)
* 2019 The University of Exeter
* 2018-2019 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory
mini_odo
imports
imports
"Isabelle_DOF.CENELEC_50128"
"Isabelle_DOF.technical_report"
"Physical_Quantities.SI" "Physical_Quantities.SI_Pretty"
begin
use_template "scrreprt-modern"
use_ontology technical_report and CENELEC_50128
declare[[strict_monitor_checking=true]]
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
(*>*)
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
subtitle*[subtitle::subtitle]\<open>Case Study: An Odometer-Subsystem\<close>
chapter*[casestudy::technical]\<open>An Odometer-Subsystem\<close>
chapter*[casestudy::technical]\<open>A Case-Study: An Odometer-Subsystem\<close>
text\<open>
In our case study, we will follow the phases of analysis, design, and implementation of the
odometry function of a train. This \<^cenelec_term>\<open>SF\<close> processes data from an odometer to compute
the position, speed, and acceleration of a train. This system provides the basis for many
safety critical decisions, \<^eg>, the opening of the doors. Due to its relatively small size, it
odometry function of a train. This software processes data from an odometer to compute the position,
speed, and acceleration of a train. This system provides the basis for many
safety critical decisions, \eg, the opening of the doors. Due to its relatively small size, it
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
physical model of the environment, the physical and architectural model of the odometer,
but also the \<^cenelec_term>\<open>SFRS\<close> aspects including the problem of numerical sampling and the
boundaries of efficient computations. The interplay between environment and measuring-device as
well as the implementation problems on a platform with limited resources makes the odometer a
fairly typical \<^cenelec_term>\<open>safety\<close> critical \<^cenelec_term>\<open>component\<close> of an embedded system.
The case-study is presented in form of an \<^emph>\<open>integrated source\<close> in \<^isadof> containing all four
reports from the phases:
\<^item> \<^term>\<open>software_requirements\<close> with deliverable \<^doc_class>\<open>SWRS\<close>
(or long:\<^typ>\<open>software_requirements_specification\<close>(-report))
\<^item> \<^term>\<open>software_architecture_and_design\<close> with deliverable \<^doc_class>\<open>SWDS\<close>
(or long: \<^typ>\<open>software_design_specification\<close>(-report))
\<^item> \<^term>\<open>software_component_design\<close> with deliverable \<^doc_class>\<open>SWCDVR\<close>
(or long: \<^typ>\<open>software_component_design_verification\<close>(-report).)
\<^item> \<^term>\<open>component_implementation_and_testing\<close> with deliverable \<^doc_class>\<open>SWADVR\<close>
(or long: \<^typ>\<open>software_architecture_and_design_verification\<close>(-report))
The objective of this case study is to demonstrate deep-semantical ontologies in
software developments targeting certifications, and in particular, how \<^isadof>'s
integrated source concept permits to assure \<^cenelec_term>\<open>traceability\<close>.
\<^bold>\<open>NOTE\<close> that this case study has aspects that were actually covered by CENELEC 50126 -
the 'systems'-counterpart covering hardware aspects. Recall that the CENELEC 50128 covers
software.
physical model of the environment, the physical and architectural model of the odometer including
the problem of numerical sampling, and the boundaries of efficient computations. The interplay
between environment and measuring-device as well as the implementation problems on a platform
with limited resources makes the odometer a fairly typical safety critical embedded system.
Due to space reasons, we will focus on the analysis part of the integrated
document; the design and code parts will only be outlined in a final resume. The
@ -70,8 +32,7 @@ text\<open>
development.
\<close>
section\<open>A CENELEC-conform development as an \<^emph>\<open>Integrated Source\<close>\<close>
section\<open>System Requirements Specification as an \<^emph>\<open>Integrated Source\<close>\<close>
text\<open>Accurate information of a train's location along a track is an important prerequisite
to safe railway operation. Position, speed and acceleration measurement usually relies on a
set of independent measurements based on different physical principles---as a way to enhance
@ -85,9 +46,9 @@ text\<open>
related to the train's progress. By measuring the fractional rotation of the encoder's shaft and
considering the wheel's effective ratio, the relative movement of the train can be calculated.
\begin{wrapfigure}[8]{l}{4.6cm}
\begin{wrapfigure}[8]{l}{3.9cm}
\centering
\vspace{-.5cm}
\vspace{-.7cm}
\includegraphics[width=3.4cm]{figures/wheel-df}
\caption{Motion sensing via an odometer.}
\label{wheel-df}
@ -100,23 +61,17 @@ text\<open>
This model is already based on several fundamental assumptions relevant for the correct
functioning of the system and for its integration into the system as a whole. In
particular, we need to make the following assumptions explicit: \<^vs>\<open>-0.3cm\<close>\<close>
particular, we need to make the following assumptions explicit:\vspace{-.3cm}
\<^item> that the wheel is perfectly circular with a given, constant radius,
\<^item> that the slip between the train's wheel and the track is negligible,
\<^item> the distance between all teeth of a wheel is the same and constant, and
\<^item> the sampling rate of positions is a given constant.
text*["perfect-wheel"::assumption]
\<open>\<^item> the wheel is perfectly circular with a given, constant radius. \<^vs>\<open>-0.3cm\<close>\<close>
text*["no-slip"::assumption]
\<open>\<^item> the slip between the train's wheel and the track is negligible. \<^vs>\<open>-0.3cm\<close>\<close>
text*["constant-teeth-dist"::assumption]
\<open>\<^item> the distance between all teeth of a wheel is the same and constant, and \<^vs>\<open>-0.3cm\<close>\<close>
text*["constant-sampling-rate"::assumption]
\<open>\<^item> the sampling rate of positions is a given constant.\<close>
text\<open>
These assumptions have to be traced throughout the certification process as
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
also reflected by their tracing throughout the body of certification documents. This may result
in operational regulations, \<^eg>, regular checks for tolerable wheel defects. As for the
in operational regulations, \eg, regular checks for tolerable wheel defects. As for the
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
slip can be neglected: the device can only produce reliable results under certain physical
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
@ -125,53 +80,57 @@ text\<open>
\<close>
subsection\<open>Capturing ``System Architecture.''\<close>
figure*["three-phase"::figure,relative_width="70",src="''figures/three-phase-odo''"]
\<open>An odometer with three sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.\<close>
text\<open>
The requirements analysis also contains a document \<^doc_class>\<open>SYSAD\<close>
(\<^typ>\<open>system_architecture_description\<close>) that contains a technical drawing of the odometer,
a timing diagram (see \<^figure>\<open>three-phase\<close>), and tables describing the encoding of the position
for the possible signal transitions of the sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.
\begin{figure}
\centering
\includegraphics[width=.70\textwidth]{figures/three-phase-odo}
\begin{picture}(0,0)
\put(-112,44){\includegraphics[width=.30\textwidth]{figures/odometer}}
\end{picture}
\caption{An odometer with three sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3}.}\label{three-phase}
\end{figure}
The requirements analysis also contains a sub-document \<^emph>\<open>system architecture description\<close>
(CENELEC notion) that contains a technical drawing of the odometer, a timing diagram
(see \autoref{three-phase}), and tables describing the encoding of the position
for the possible signal transitions of the sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3}.
\<close>
subsection\<open>Capturing ``System Interfaces.''\<close>
text\<open>
The requirements analysis also contains a sub-document \<^doc_class>\<open>FnI\<close> (\<^typ>\<open>functions_and_interfaces\<close>)
describing the technical format of the output of the odometry function.
This section, \<^eg>, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
``Estimation of the speed (in mm/sec) evaluated over the latest \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> samples''
where the speed refers to the physical speed of the train and \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> a parameter of the
The requirements analysis also contains a sub-document \<^emph>\<open>functions and interfaces\<close>
(CENELEC notion) describing the technical format of the output of the odometry function.
This section, \eg, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
``Estimation of the speed (in mm/sec) evaluated over the latest $N_{\text{avg}}$ samples''
where the speed refers to the physical speed of the train and $N_{\text{avg}}$ a parameter of the
sub-system configuration. \<close>
(*<*)
declare_reference*["df-numerics-encshaft"::figure]
declare_reference*["df-numerics-encshaft"::figure]
(*>*)
subsection\<open>Capturing ``Required Performances.''\<close>
text\<open>
The given analysis document is relatively implicit on the expected precision of the measurements;
however, certain interface parameters like \<open>Odometric_Position_TimeStamp\<close>
(a counter on the number of samplings) and \<open>Relative_Position\<close> are defined as
however, certain interface parameters like \inlineisar*Odometric_Position_TimeStamp*
(a counter on the number of samplings) and \inlineisar*Relative_Position* are defined as
unsigned 32 bit integers. These definitions imply exported constraints concerning the acceptable
time of service as well as the maximum distance before a necessary reboot of the subsystem.
For our case-study, we assume a maximum deviation of the \<open>Relative_Position\<close> from the
For our case-study, we assume a maximum deviation of the \inlineisar*Relative_Position* from the
theoretical distance.
The requirement analysis document describes the physical environment, the architecture
of the measuring device, and the required format and precision of the measurements of the odometry
function as represented (see @{figure (unchecked) "df-numerics-encshaft"}).\<close>
figure*["df-numerics-encshaft"::figure,relative_width="76",src="''figures/df-numerics-encshaft''"]
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
text\<open>
\enlargethispage{\baselineskip}
The design provides a function that manages an internal first-in-first-out buffer of
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
recovery, another one, fatal, signaling, \<^eg>, a defect of the receiver hardware),
recovery, another one, fatal, signaling, \eg, a defect of the receiver hardware),
calculating the relative position, speed and acceleration.
\<close>
@ -189,123 +148,88 @@ text\<open>
in AutoCorres.
\<close>
(*<*)
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
definition wheel_diameter ::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s ::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
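(* Editorial annotation, not part of the original theory: \<delta>s\<^sub>r\<^sub>e\<^sub>s equals
   w\<^sub>0 / (2 * 3 * tpw), i.e. one wheel turn of circumference w\<^sub>0 is resolved into
   2 * 3 * tpw encoder phases (three sensors, presumably two signal edges per tooth).
   With illustrative values w\<^sub>d = 1 m and tpw = 100, this gives
   \<delta>s\<^sub>r\<^sub>e\<^sub>s = \<pi>/600 m \<approx> 5.2 mm. *)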
(*>*)
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
text\<open>
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
documents as well as code into an integrated Isabelle document, we entered into the phase of
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
the definitions:
@{theory_text [display]\<open>
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
definition wheel_diameter::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
\<close>}
Here, \<open>real\<close> refers to the real numbers as defined in the HOL-Analysis library, which provides
concepts such as Cauchy Sequences, limits, differentiability, and a very substantial part of
classical Calculus. \<open>SOME\<close> is the Hilbert choice operator from HOL; the definitions of the
model parameters admit all possible positive values as uninterpreted constants. Our
\<^assumption>\<open>perfect-wheel\<close> is translated into a calculation of the circumference of the
wheel, while \<open>\<delta>s\<^sub>r\<^sub>e\<^sub>s\<close>, the resolution of the odometer, can be calculated
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
the definitions:
\begin{isar}
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
definition wheel_diameter::real ("w$_d$") where "w$_d$ \<equiv> SOME x. x > 0"
definition wheel_circumference::real ("w$_{\text{circ}}$") where "w$_{\text{circ}}$ \<equiv> pi * w$_d$"
definition \<delta>s$_{\text{res}}$::real where "\<delta>s$_{\text{res}}$ \<equiv> w$_{\text{circ}}$ / (2 * 3 * tpw)"
\end{isar}
Here, \inlineisar{real} refers to the real numbers as defined in the HOL-Analysis
library, which provides concepts such as Cauchy Sequences, limits,
differentiability, and a very substantial part of classical Calculus. \inlineisar{SOME} is the
Hilbert choice operator from HOL; the definitions of the model parameters admit all possible positive values as uninterpreted
constants. Our perfect-wheel assumption is translated into a calculation of the circumference of the
wheel, while \inlineisar{\<delta>s<bsub>res<esub>}, the resolution of the odometer, can be calculated
from these parameters. HOL-Analysis permits to formalize the fundamental physical observables:
\<close>
(*<*)
type_synonym distance_function = "real[s] \<Rightarrow> real[m]"
consts Speed::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>1]"
consts Accel::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>2]"
consts Speed\<^sub>M\<^sub>a\<^sub>x::"real[m\<cdot>s\<^sup>-\<^sup>1]"
(* Non-SI-conform common abbreviations *)
definition "kmh \<equiv> kilo *\<^sub>Q metre \<^bold>/ hour :: 'a::{field,ring_char_0}[m\<cdot>s\<^sup>-\<^sup>1]"
definition "kHz \<equiv> kilo *\<^sub>Q hertz :: 'a::{field,ring_char_0}[s\<^sup>-\<^sup>1]"
(*>*)
text\<open>
@{theory_text [display]\<open>
type_synonym distance_function = "real[s]\<Rightarrow>real[m]"
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Accel f \<equiv> deriv (deriv f)"
\<close>}
which permits to constrain the central observable \<open>distance_function\<close> in a
\begin{isar}
type_synonym distance_function = "real\<Rightarrow>real"
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real"
where "Accel f \<equiv> deriv (deriv f)"
\end{isar}
which permits to constrain the central observable \inlineisar|distance_function| in a
way that they describe the space of ``normal behavior'' where we expect the odometer to produce
reliable measurements over a \<open>distance_function df\<close> .
reliable measurements over a \inlineisar|distance_function df|.
The essence of the physics of the train is covered by the following definition:
@{theory_text [display]\<open>
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
where normally_behaved_distance_function df =
( \<forall> t. df(t) \<in> \<real>\<^sub>\<ge>\<^sub>0 \<and> (\<forall> t \<in> \<real>\<^sub>\<le>\<^sub>0. df(t) = 0)
\<and> df differentiable on \<real>\<^sub>\<ge>\<^sub>0 \<and> (Speed df) differentiable on \<real>\<^sub>\<ge>\<^sub>0
\<and> (Accel df) differentiable on \<real>\<^sub>\<ge>\<^sub>0
\<and> (\<forall> t. (Speed df) t \<in> {Speed\<^sub>M\<^sub>i\<^sub>n .. Speed\<^sub>M\<^sub>a\<^sub>x})
\<and> (\<forall> t. (Accel df) t \<in> {Accel\<^sub>M\<^sub>i\<^sub>n .. Accel\<^sub>M\<^sub>a\<^sub>x}))
\<close>}
\begin{isar}
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
where normally_behaved_distance_function df =
( \<forall> t. df(t) \<in> \<real>$_{\ge 0}$ \<and> (\<forall> t \<in> \<real>$_{\le 0}$. df(t) = 0)
\<and> df differentiable on$_{\text{R}}$ \<and> (Speed df)differentiable on$_{\text{R}}$
\<and> (Accel df)differentiable on$_{\ensuremath{R}}$
\<and> (\<forall> t. (Speed df) t \<in> {-Speed$_{\text{Max}}$ .. Speed$_{\text{Max}}$})
\<and> (\<forall> t. (Accel df) t \<in> {-\<bar>Accel$_{\text{Max}}$\<bar> .. \<bar>Accel$_{\text{Max}}$\<bar>}))
\end{isar}
which constrains the distance functions to the bounds given in the informal descriptions and
states them as three-fold differentiable functions within certain bounds on speed and
acceleration. Note that violations, in particular of the constraints on speed and acceleration,
\<^emph>\<open>do\<close> occur in practice. In such cases, the global system adapts recovery strategies that are out
of the scope of our model. Concepts like \<open>shaft_encoder_state\<close> (a triple with the sensor values
\<open>C1\<close>, \<open>C2\<close>, \<open>C3\<close>) were formalized as types, while tables were
defined as recursive functions:
@{theory_text [display]\<open>
fun phase\<^sub>0 :: "nat \<Rightarrow> shaft_encoder_state" where
"phase\<^sub>0 (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|"phase\<^sub>0 (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|"phase\<^sub>0 (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|"phase\<^sub>0 (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|"phase\<^sub>0 (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|"phase\<^sub>0 (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|"phase\<^sub>0 x = phase\<^sub>0(x - 6)"
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase\<^sub>0(x-1)
\<close>}
We now define shaft encoder sequences as translations of distance functions:
@{theory_text [display]\<open>
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
where "encoding df init\<^sub>p\<^sub>o\<^sub>s \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s\<^sub>r\<^sub>e\<^sub>s\<rfloor> + init\<^sub>p\<^sub>o\<^sub>s)"
\<close>}
where \<open>init\<^sub>p\<^sub>o\<^sub>s\<close> is the initial position of the wheel.
\<open>sampling\<close>'s were constructed from encoding sequences over discretized time points:
@{theory_text [display]\<open>
definition sampling::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft_encoder_state"
where "sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t \<equiv> \<lambda>n::nat. encoding df initinit\<^sub>p\<^sub>o\<^sub>s (n * \<delta>t)"
\<close>}
states them as three-fold differentiable functions within certain bounds on speed and acceleration.
Note that violations, in particular of the constraints on speed and acceleration, \<^emph>\<open>do\<close> occur in practice.
In such cases, the global system adapts recovery strategies that are out of the scope of our model.
Concepts like \inlineisar+shaft_encoder_state+ (a triple with the sensor values
\inlineisar{C1}, \inlineisar{C2}, \inlineisar{C3}) were formalized as types, while tables were defined as recursive functions:
\enlargethispage{2\baselineskip}\begin{isar}
fun phase$_0$ :: "nat \<Rightarrow> shaft_encoder_state" where
"phase$_0$ (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|"phase$_0$ (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|"phase$_0$ (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|"phase$_0$ (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|"phase$_0$ (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|"phase$_0$ (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|"phase$_0$ x = phase$_0$(x - 6)"
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase$_0$(x-1)
\end{isar}
We now define shaft encoder sequences as
translations of distance functions:
\begin{isar}
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
where "encoding df init$_{\text{pos}}$ \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s$_{\text{res}}$\<rfloor> + init$_{\text{pos}}$)"
\end{isar}
where \inlineisar+init$_{\text{pos}}$+ is the initial position of the wheel.
\inlineisar+sampling+'s were constructed from encoding sequences over discretized time points:
\begin{isar}
definition $\!\!$sampling::"distance$\!$_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft$\!$_encoder$\!$_state"
where "sampling df init$_{\text{pos}}$ \<delta>t \<equiv> \<lambda>n::nat. encoding df init$_{\text{pos}}$ (n * \<delta>t)"
\end{isar}
The sampling interval \inlineisar+\<delta>t+ (the inverse of the sampling frequency) is a critical
parameter of the configuration of a system.
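To make the interplay of \<open>Phase\<close>, \<open>encoding\<close>, and \<open>sampling\<close> concrete, the following small Python sketch (illustration only, outside the formal Isabelle development) mirrors the three definitions above for a hypothetical linear distance function; the resolution value, the sampling interval, the train speed, and all names in the sketch are invented for illustration.

from math import floor

# Transcription of the phase_0 table; each entry is (C1, C2, C3).
PHASE0 = [(False, False, True), (True, False, True), (True, False, False),
          (True, True, False), (False, True, False), (False, True, True)]

def phase(x: int):
    # mirrors Phase(x) = phase_0(x - 1), with truncated natural subtraction
    return PHASE0[max(x - 1, 0) % 6]

DS_RES = 0.005  # odometer resolution in metres -- hypothetical value

def encoding(df, init_pos: int):
    # mirrors: encoding df init_pos = (lambda x. Phase(nat floor(df x / ds_res) + init_pos))
    return lambda x: phase(floor(df(x) / DS_RES) + init_pos)

def sampling(df, init_pos: int, dt: float):
    # mirrors: sampling df init_pos dt = (lambda n. encoding df init_pos (n * dt))
    return lambda n: encoding(df, init_pos)(n * dt)

df = lambda t: 2.0 * t                    # hypothetical train moving at a constant 2 m/s
s = sampling(df, init_pos=1, dt=0.001)    # sample the shaft encoder every millisecond
print([s(n) for n in range(8)])           # the encoder state advances every 2.5 ms

Running the sketch prints eight consecutive encoder states; the state advances whenever the train has covered another resolution step, which here happens every 2.5 ms.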
Finally, we can formally define the required performances. From the interface description
and the global model parameters such as wheel diameter, the number of teeth per wheel, the
sampling frequency etc., we can infer the maximal time of service as well as the maximum distance
the device can measure. As an example configuration, choosing:
\<^item> \<^term>\<open>(1 *\<^sub>Q metre):: real[m]\<close> for \<^term>\<open>w\<^sub>d\<close> (wheel-diameter),
\<^item> \<^term>\<open>100 :: real\<close> for \<^term>\<open>tpw\<close> (teeth per wheel),
\<^item> \<^term>\<open>80 *\<^sub>Q kmh :: real[m\<cdot>s\<^sup>-\<^sup>1]\<close> for \<^term>\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close>,
\<^item> \<^term>\<open>14.4 *\<^sub>Q kHz :: real[s\<^sup>-\<^sup>1]\<close> for the sampling frequency,
results in an odometer resolution of \<^term>\<open>2.3 *\<^sub>Q milli *\<^sub>Q metre\<close>, a maximum distance of
\<^term>\<open>9878 *\<^sub>Q kilo *\<^sub>Q metre\<close>, and a maximal system up-time of \<^term>\<open>123.4 *\<^sub>Q hour\<close>s.
Finally, we can formally define the required performances. From the interface description
and the global model parameters such as wheel diameter, the number of teeth per wheel, the sampling
frequency etc., we can infer the maximal time of service as well as the maximum distance the
device can measure.
As an example configuration, choosing 1m for
\inlineisar+w$_d$+, 100 for \inlineisar+tpw+, 80km/h \inlineisar+Speed$_{\text{Max}}$+,
and 14400Hz for the sampling frequency, results in an odometer resolution of 2.3mm,
a maximum distance of 9878km, and a maximal system up-time of 123.4 hours.
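As a back-of-the-envelope cross-check of these figures, the following Python sketch (ours, not part of the formal development) reproduces them; it assumes that the odometric position count is held in a 32-bit word and takes the 2.3 mm resolution quoted above as given.

DS_RES   = 2.3e-3        # odometer resolution [m], as quoted above
V_MAX    = 80 / 3.6      # 80 km/h in [m/s]
F_SAMPLE = 14.4e3        # chosen sampling frequency [Hz]

max_distance = 2**32 * DS_RES          # one position count per resolution step
up_time      = max_distance / V_MAX    # worst case: driving at V_MAX throughout

print(f"maximum distance : {max_distance / 1e3:8.0f} km")   # ~ 9878 km
print(f"maximum up-time  : {up_time / 3600:8.1f} h")        # ~ 123.5 h
# lower bound on the sampling frequency implied by dt * Speed_Max < ds_res:
print(f"minimal frequency: {V_MAX / DS_RES:8.0f} Hz")       # ~ 9662 Hz < 14.4 kHz

The last line anticipates the lower bound on the sampling frequency imposed by the side-condition ``**'' discussed in the verification section below.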
The required precision of an odometer can be defined by a constant describing
the maximally allowed difference between \<open>df(n*\<delta>t)\<close> and
\<open>sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t n\<close> for all \<open>init\<^sub>p\<^sub>o\<^sub>s \<in>{0..5}\<close>.
the maximally allowed difference between \inlineisar+df(n*\<delta>t)+ and
\inlineisar+sampling df init$_{\text{pos}}$ \<delta>t n+ for all \inlineisar+init$_{\text{pos}}$ \<in>{0..5}+.
\<close>
(*<*)
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
@ -315,50 +239,41 @@ ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
text\<open>The original documents already contained various statements that motivate certain safety
properties of the device. For example, the \<open>Phase\<close>-table excludes situations in which
all sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close> are ``off'' or situations in
properties of the device. For example, the \inlineisar+Phase+-table excludes situations in which
all sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3} are ``off'' or situations in
which all of them are ``on,'' reflecting a physical or electrical error in the odometer. It can be
shown by a very small Isabelle case-distinction proof that this safety requirement indeed follows
from the above definitions:
@{theory_text [display]\<open>
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
proof (cases x)
case 0 then show ?thesis by (simp add: Phase_def)
next
case (Suc n) then show ?thesis
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
qed
\<close>}
for all positions \<open>x\<close>. Similarly, it is proved that the table is indeed cyclic:
\<open>phase\<^sub>0 x = phase\<^sub>0(x mod 6)\<close>
and locally injective:
\<open>\<forall>x<6. \<forall>y<6. phase\<^sub>0 x = phase\<^sub>0 y \<longrightarrow> x = y\<close>
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
shown by a very small Isabelle case-distinction proof that this safety requirement indeed follows from the
above definitions:
\begin{isar}
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
proof (cases x)
case 0 then show ?thesis by (simp add: Phase_def)
next
case (Suc n) then show ?thesis
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
qed
\end{isar}
for all positions \inlineisar+x+. Similarly, it is proved that the table is indeed
cyclic: \inlineisar+ phase$_0$ x = phase$_0$(x mod 6)+ and locally injective:
\inlineisar+\<forall>x<6. \<forall>y<6. phase$_0$ x = phase$_0$ y \<longrightarrow> x = y+.
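These table properties are finite and can be cross-checked exhaustively outside the prover; the following Python sketch (illustration only, all names ours) transcribes the table and tests the stated properties over a sample of positions.

# Transcription of the phase_0 table; each entry is (C1, C2, C3).
PHASE0 = [(False, False, True), (True, False, True), (True, False, False),
          (True, True, False), (False, True, False), (False, True, True)]

def phase0(x: int):
    return PHASE0[x % 6]            # mirrors the recursive clause phase_0 x = phase_0 (x - 6)

def Phase(x: int):
    return phase0(max(x - 1, 0))    # mirrors Phase(x) = phase_0(x - 1) on natural numbers

# Encoder_Property_1: the three sensors are never all "on".
assert not any(c1 and c2 and c3 for (c1, c2, c3) in map(Phase, range(1000)))
# The dual property (cf. the bitwise-OR requirement below): never all "off".
assert all(c1 or c2 or c3 for (c1, c2, c3) in map(Phase, range(1000)))
# Cyclicity: phase_0 x = phase_0 (x mod 6).
assert all(phase0(x) == phase0(x % 6) for x in range(1000))
# Local injectivity on one period.
assert all(x == y for x in range(6) for y in range(6) if phase0(x) == phase0(y))
print("all encoder-table checks passed")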
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
that we would like to present in more detail.
\begin{isar}
theorem minimal_sampling :
assumes * : normally_behaved_distance_function df
and ** : \<delta>t * Speed$_{\text{Max}}$ < \<delta>s$_{\text{res}}$
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
sampling df init$_{\text{pos}}$ \<delta>X = (sampling df init$_{\text{pos}}$ \<delta>t) o f
@{theory_text [display]\<open>
theorem minimal_sampling :
assumes * : normally_behaved_distance_function df
and ** : \<delta>t * Speed\<^sub>M\<^sub>a\<^sub>x < \<delta>s\<^sub>r\<^sub>e\<^sub>s
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>X = (sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t) o f
\<close>}
This theorem states for \<open>normally_behaved_distance_function\<close>s that there is
\end{isar}
This theorem states for \inlineisar+normally_behaved_distance_function+s that there is
a minimal sampling frequency assuring the safety of the measurements; samplings on
some \<open>df\<close> gained from this minimal sampling frequency can be ``pumped up''
some \inlineisar$df$ gained from this minimal sampling frequency can be ``pumped up''
to samplings at higher sampling frequencies, which do not contain more information.
Of particular interest is the second assumption, labelled ``\<open>**\<close>,'' which
establishes, from \<open>w\<^sub>0\<close>, \<open>tpw\<close>, and
\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close>, a lower bound for the sampling frequency. Methodologically, this represents
Of particular interest is the second assumption, labelled ``\inlineisar|**|,'' which
establishes, from \inlineisar+w$_{\text{circ}}$+, \inlineisar+tpw+, and
\inlineisar+Speed$_{\text{Max}}$+, a lower bound for the sampling frequency. Methodologically, this represents
an exported constraint that cannot be represented \<^emph>\<open>inside\<close> the design model: it means that the
computations have to be fast enough on the computing platform in order to assure that the
calculations are valid. It was in particular this exported constraint that forced us to give up
@ -372,7 +287,6 @@ standard~@{cite "bsi:50128:2014"}, 7.2.4.22 and are usually addressed in an own
\<close>
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
text\<open>
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
like any other modeling activity, not a simple one-to-one translation of some
@ -384,144 +298,125 @@ text\<open>
section*[lhf::text_section]
\<open>Tracking Concepts and Definitions\<close>
text\<open>
\<^isadof> is designed to annotate text elements with structured meta-information and to reference
\isadof is designed to annotate text elements with structured meta-information and to reference
these text elements throughout the integrated source. A classical application of this capability
is the annotation of concept and term definitions---be they informal, semi-formal or formal---and
their consistent referencing. In the context of our CENELEC ontology, \<^eg>, we can translate the
their consistent referencing. In the context of our CENELEC ontology, \eg, we can translate the
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
of the concept \<^cenelec_term>\<open>traceability\<close> in paragraph 3.1.46 (a notion referenced 31 times in
the standard), which we translated directly into:
@{theory_text [display]\<open>
Definition*[traceability, short_name="''traceability''"]
\<open>degree to which relationship can be established between two or more products of a
development process, especially those having a predecessor/successor or
master/subordinate relationship to one another.\<close>
\<close>}
In the integrated source of the odometry study, we can refer, within a text element, to this
of the concept ``traceability'' in paragraph 3.1.46 (a notion referenced 31 times in the standard),
which we translated directly into:
\begin{isar}
Definition*[traceability::concept]<open> degree to which relationship
can be established between two or more products of a development
process, especially those having a predecessor/successor or
master/subordinate relationship to one another. <close>
\end{isar}
In the integrated source of the odometry study, we can refer, within a text element, to this
concept as follows:
@{theory_text [display]\<open>
text*[...]\<open> ... to assure <@>{cenelec_term traceability} for
<@>{requirement bitwiseAND}, we prove ... \<close>
\<close>}
\<^isadof> also uses the underlying ontology to generate the navigation markup inside the IDE, \<^ie>
the presentation of this document element inside \<^isadof> is immediately hyperlinked against the
@{theory_text \<open> Definition* \<close>}-element shown above; this serves as documentation of
\begin{isar}
text*[...]<open> ... to assure <@>{concept traceability} for
<@>{requirement bitwiseAND}, we prove ... <close>
\end{isar}
The presentation of this document element inside \isadof is immediately hyperlinked against the
\inlineisar+Definition*+ element shown above; this serves as documentation of
the standard for the development team working on the integrated source. The PDF presentation
of such links depends on the actual configuration of the document generation; we will explain
this later.
CENELEC also foresees a number of roles, phases, safety integrity levels, etc., which were
directly translated into HOL enumeration types usable in ontological concepts of ODL.
@{theory_text [display]\<open>
datatype role =
PM (* Program Manager *) | RQM (* Requirements Manager *)
| DES (* Designer *) | IMP (* Implementer *)
| VER (* Verifier *) | VAL (* Validator *) | ...
datatype phase =
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
| SR (* Software Requirement *) | SA (* Software Architecture *)
| SDES (* Software Design *) | ...
\<close>}
Similarly, we can formalize Table A.5 (Verification and Testing) of @{cite "bsi:50128:2014"},
a classification of \<^emph>\<open>verification and testing techniques\<close>:
@{theory_text [display]\<open>
datatype vnt_technique =
\begin{isar}
datatype role =
PM (* Program Manager *) | RQM (* Requirements Manager *)
| DES (* Designer *) | IMP (* Implementer *)
| VER (* Verifier *) | VAL (* Validator *) | ...
datatype phase =
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
| SR (* Software Requirement *) | SA (* Software Architecture *)
| SDES (* Software Design *) | ...
\end{isar}
Similarly, we can formalize Table A.5 (Verification and Testing) of @{cite "bsi:50128:2014"},
a classification of \<^emph>\<open>verification and testing techniques\<close>:
\begin{isar}
datatype vnt_technique =
formal_proof "thm list" | stat_analysis
| dyn_analysis dyn_ana_kind | ...
\<close>}
In contrast to the standard, we can parameterize \<open>formal_proof\<close> with a list of
theorems, an entity known in the Isabelle kernel. Here, \<^isadof> assures, for text elements
\end{isar}
In contrast to the standard, we can parameterize \inlineisar+formal_proof+ with a list of
theorems, an entity known in the Isabelle kernel. Here, \isadof assures, for text elements
annotated with theorem names, that they indeed refer to established theorems in the Isabelle
environment. Additional checks could be added to make sure that these theorems have a particular
form.
While we claim that this possibility to link to theorems (and test-results) is unique in the
world of systems attempting to assure \<^cenelec_term>\<open>traceability\<close>, referencing a particular
(proven) theorem is definitively not sufficient to satisfy the claimed requirement. Human
evaluators will always have to check that the provided theorem \<open>adequately\<close> represents the claim;
we do not in the slightest suggest that their work is superfluous. Our framework makes it possible
to statically check that tests or proofs have been provided at places where the ontology requires
them to be, and both assessors and developers can rely on this check and navigate through
related information easily. It does not guarantee that intended concepts for, \<^eg>, safety
or security have been adequately modeled.
world of systems attempting to assure traceability, referencing a particular (proven) theorem is
definitively not sufficient to satisfy the claimed requirement. Human evaluators will always have
to check that the provided theorem \<open>adequately\<close> represents the claim; we do not in the slightest
suggest that their work is superfluous. Our framework makes it possible to statically check that tests or proofs
have been provided, at places where the ontology requires them to be, and both assessors and developers
can rely on this check and navigate through related information easily. It does not guarantee that
intended concepts for, \eg, safety or security have been adequately modeled.
\<close>
section*[moe::text_section]
\<open>Major Ontological Entities: Requirements and Evidence\<close>
text\<open>
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \<^theory_text>\<open>doc_class\<close>
based on the generic basic library \<^doc_class>\<open>text_element\<close> providing basic layout attributes.
@{theory_text [display]\<open>
doc_class requirement = text_element +
long_name :: "string option"
is_concerned :: "role set"
\<close>}
where the \<open>role\<close>s are exactly the ones defined in the previous section and represent
the groups of stakeholders in the CENELEC process. Therefore, the \<open>is_concerned\<close>-attribute
allows expressing who ``owns'' this text-element. \<^isadof> supports a role-based
presentation, \<^eg>, different presentation styles of the integrated source may decide to highlight,
to omit, or to defer into an annex text entities according to the role-set.
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \inlineisar*doc_class*
based on some generic basic library \inlineisar*text_element* providing basic layout attributes.
\begin{isar}
doc_class requirement = text_element +
long_name :: "string option"
is_concerned :: "role set"
\end{isar}
where the \inlineisar*roles* are exactly the ones defined in the previous section and represent
the groups of stakeholders in the CENELEC process. Therefore, the \inlineisar+is_concerned+-attribute
allows expressing who ``owns'' this text-element. \isadof supports a role-based
presentation, \eg, different presentation styles of the
integrated source may decide to highlight, to omit, or to defer into an annex text entities
according to the role-set.
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
@{theory_text [display]\<open>
doc_class sub_requirement =
decomposes :: "requirement"
relates_to :: "requirement set"
\<close>}
\<close>
\begin{isar}
doc_class sub_requirement =
decomposes :: "requirement"
relates_to :: "requirement set"
\end{isar}\<close>
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
text\<open>As an example of making implicit principles explicit,
consider the following statement from @{cite "bsi:50128:2014"}, p. 25: \<^vs>\<open>-0.15cm\<close>
consider the following statement from @{cite "bsi:50128:2014"}, p. 25:\vspace{-1.5mm}
\begin{quote}\small
The objective of software verification is to examine and arrive at a judgment based on
evidence that output items (process, documentation, software or application) of a specific
development phase fulfill the requirements and plans with respect to completeness, correctness
and consistency.
\end{quote} \<^vs>\<open>-0.15cm\<close>
The terms \<^onto_class>\<open>judgement\<close> based on \<^term>\<open>evidence\<close> are used as a kind of leitmotif throughout
the CENELEC standard, but they are neither explained nor even listed in the general glossary.
However, the standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that
different stakeholders should have in the process. We can express this key concept of
\<^onto_class>\<open>judgement\<close>, \<^eg>, by the following concept:
@{theory_text [display]\<open>
doc_class judgement =
refers_to :: requirement
evidence :: "vnt_technique list"
status :: status
is_concerned :: "role set" <= "{VER,ASR,VAL}"
\<close>}
\end{quote}\vspace{-1.5mm}
The terms \<^emph>\<open>judgment\<close> and \<^emph>\<open>evidence\<close> are used as a kind of leitmotif throughout the CENELEC
standard, but they are neither explained nor even listed in the general glossary. However, the
standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that different stakeholders
should have in the process. We can express this key concept of judgment, \eg, by
the following concept:
\begin{isar}
doc_class judgement =
refers_to :: requirement
evidence :: "vnt_technique list"
status :: status
is_concerned :: "role set" <= "{VER,ASR,VAL}"
\end{isar}
As one can see, the role set is per default set to the verification team, the assessors and the
validation team.
There are different views possible here: an alternative would be to define \<^term>\<open>evidence\<close>
as an ontological concept with \<^typ>\<open>vnt_technique\<close>'s (rather than an attribute of judgement)
and consider the basis of a summary containing the relation between requirements and evidence:
@{theory_text [display]\<open>
doc_class summary =
based_on :: "(requirement \<times> evidence) set"
status :: status
is_concerned :: "role set" <= "{VER,ASR,VAL}"
\<close>}
There are different views possible here: an alternative would be to define \inlineisar+evidence+
as an ontological concept with \inlineisar+vnt_technique+'s (rather than an attribute of judgement)
and consider the basis of judgments as a relation between requirements and evidence:
\begin{isar}
doc_class judgement =
based_on :: "(requirement \<times> evidence) set"
status :: status
is_concerned :: "role set" <= "{VER,ASR,VAL}"
\end{isar}
More experimentation will be needed to find out what kind of ontological modeling is most
adequate for developers in the context of \isadof.
@ -533,66 +428,60 @@ text\<open>From the variety of different possibilities for adding CENELEC annota
integrated source, we will, in the following, point out three scenarios.\<close>
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
text\<open>In our case, the \<^term>\<open>SR\<close>-team early on detected a property necessary
for error-detection of the device (cf. @{technical verific}):
@{theory_text [display]\<open>
text*[encoder_props::requirement]\<open> The requirement specification team identifies the property:
C1 & C2 & C3 = 0 (bitwise logical AND operation)
C1 | C2 | C3 = 1 (bitwise logical OR operation) \<close>
\<close>}
After the Isabelle proofs shown in @{technical verific}, we can either register the theorems
text\<open>In our case, the SR-team early on detected a property necessary
for error-detection of the device (cf. @{docitem verific}):
\enlargethispage{2\baselineskip}\begin{isar}
text*[encoder_props::requirement]<open> The requirement specification team ...
C1 & C2 & C3 = 0 (bitwise logical AND operation)
C1 | C2 | C3 = 1 (bitwise logical OR operation) <close>
\end{isar}
After the Isabelle proofs shown in @{docitem verific}, we can either register the theorems
directly in an evidence statement:
@{theory_text [display]\<open>
text*[J1::judgement, refers_to="@{docitem <open>encoder_props<close>}",
evidence="[formal_proof[@{thm <open>Encoder_Property_1<close>},
@{thm <open>Encoder_Property_2<close>}]]"]
\<open>The required encoder properties are in fact verified to be consistent
with the formalization of @{term "phase\<^sub>0"}.\<close>
\<close>}
The references \<open>@{...}\<close>, called antiquotations, allow us not only to refer to
\begin{isar}
text*[J1::judgement, refers_to="<@>{docitem <open>encoder_props<close>}",
evidence="[formal_proof[<@>{thm <open>Encoder_Property_1<close>},
<@>{thm <open>Encoder_Property_2<close>}]]"]
<open>The required encoder properties are in fact verified to be consistent
with the formalization of <@>{term "phase$_0$"}.<close>
\end{isar}
The references \inlineisar|<@>{...}|, called antiquotations, allow us not only to refer to
formal concepts; they are also checked for consistency, and there are antiquotations that
print the formally checked content (\<^eg>, the statement of a theorem).
print the formally checked content (\eg, the statement of a theorem).
\<close>
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
text\<open>By definition, the main purpose of the requirement specification is the
identification of the safety requirements. As an example, we state the required precision of an
odometric function: for any normally behaved distance function \inlineisar+df+, and any representable
and valid sampling sequence that can be constructed for \inlineisar+df+, we require that the
difference between the physical distance and the distance calculable from the
@{term Odometric_Position_Count} is bounded by the minimal resolution of the odometer.
\begin{isar}
text*[R5::safety_requirement]<open>We can now state ... <close>
definition
Odometric_Position_Count_precise::(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool
where Odometric_Position_Count_precise odofunction \<equiv>
(\<forall> df. \<forall>S. normally_behaved_distance_function df
\<longrightarrow> representable S
\<longrightarrow> valid_sampling S df
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
in \<bar>df((length S - 1)*\<delta>t$_{\text{odo}}$) - (\<delta>s$_{\text{res}}$ * pos)\<bar> \<le> \<delta>s$_{\text{res}}$))
text\<open>By definition, the main purpose of the requirement specification is the identification of
the safety requirements. As an example, we state the required precision of an odometric function:
for any normally behaved distance function \<open>df\<close>, and any representable and valid
sampling sequence that can be constructed for \<open>df\<close>, we require that the difference
between the physical distance and the distance calculable from the @{term Odometric_Position_Count}
is bounded by the minimal resolution of the odometer.
@{theory_text [display]\<open>
text*[R5::safety_requirement]\<open>We can now state ... \<close>
definition Odometric_Position_Count_precise :: "(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool"
where "Odometric_Position_Count_precise odofunction \<equiv>
(\<forall> df. \<forall>S. normally_behaved_distance_function df
\<longrightarrow> representable S
\<longrightarrow> valid_sampling S df
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
in \<bar>df((length S - 1)*\<delta>t\<^sub>o\<^sub>d\<^sub>o) - (\<delta>s\<^sub>r\<^sub>e\<^sub>s * pos)\<bar> \<le> \<delta>s\<^sub>r\<^sub>e\<^sub>s))"
update_instance*[R5::safety_requirement,
formal_definition:="[@{thm \<open>Odometric_Position_Count_precise_def\<close>}]"]
\<close>}
By \<^theory_text>\<open>update_instance*\<close>, we book the property \<open>Position_Count_precise_def\<close> as
\<^onto_class>\<open>safety_requirement\<close>, a specific sub-class of \<^onto_class>\<open>requirement\<close>s
update_instance*[R5::safety_requirement,
formal_definition:="[<@>{thm <open>Odometric_Position_Count_precise_def<close>}]"]
\end{isar}
By \inlineisar+update_instance*+, we book the property \inlineisar+Position_Count_precise_def+ as
\inlineisar+safety_requirement+, a specific sub-class of \inlineisar+requirement+s
requesting a formal definition in Isabelle.\<close>
subsection\<open>Exporting Derived Requirements.\<close>
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
for a major theorem necessary for the safety requirements; in our development, this was the case for
the condition labelled ``\<open>**\<close>'' in @{docitem verific}. The current CENELEC standard clearly separates
the condition labelled ``\inlineisar|**|'' in @{docitem verific}. The current CENELEC standard clearly separates
``requirement specifications'' from ``verification reports,'' which is probably motivated
by the overall concern of organizational separation and of document consistency. While this
document organization is possible in \<^isadof>, it is in our experience often counter-productive
document organization is possible in \isadof, it is in our experience often counter-productive
in practice: organizations tend to defend their documents because the impact of changes becomes more and more
difficult to assess. This effect results in a dramatic development slow-down and an increase in
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
@ -607,71 +496,65 @@ different PDF versions and for each version, document specific consistency guara
automatically enforced.
In our case study, we define this condition as a predicate, declare an explanation of it as
\<^onto_class>\<open>SRAC\<close> (CENELEC for: safety-related application condition; ontologically, this is a
derived class from \<^onto_class>\<open>requirement\<close>.) and add the definition of the predicate into the
\inlineisar+SRAC+ (CENELEC for: safety-related application condition; ontologically, this is a
derived class from \inlineisar+requirement+.) and add the definition of the predicate into the
document instance as described in the previous section.\<close>
text\<open>\appendix\<close>
chapter\<open>Appendix\<close>
text\<open>
\<^item> \<open>@{thm refl}\<close> : @{thm refl}
\<^item> \<open>@{thm [source] refl}\<close> : @{thm [source] refl}
\<^item> \<open>@{thm[mode=Rule] conjI}\<close> : @{thm[mode=Rule] conjI}
\<^item> \<open>@{file "mini_odo.thy"}\<close> : @{file "mini_odo.thy"}
\<^item> \<open>@{value "3+4::int"}\<close> : @{value "3+4::int"}
\<^item> \<open>@{const hd}\<close> : @{const hd}
\<^item> \<open>@{theory HOL.List}\<close> : @{theory HOL.List}
\<^item> \<open>@{term "3"}\<close> : @{term "3"}
\<^item> \<open>@{type bool}\<close> : @{type bool}
\<^item> \<open>@{term [show_types] "f x = a + x"}\<close> : @{term [show_types] "f x = a + x"}
\<^item> \inlineisar|<@>{thm refl}|: @{thm refl}
\<^item> \inlineisar|<@>{thm [source] refl}|: @{thm [source] refl}
\<^item> \inlineisar|<@>{thm[mode=Rule] conjI}|: @{thm[mode=Rule] conjI}
\<^item> \inlineisar|<@>{file "mini_odo.thy"}|: @{file "mini_odo.thy"}
\<^item> \inlineisar|<@>{value "3+4::int"}|: @{value "3+4::int"}
\<^item> \inlineisar|<@>{const hd}|: @{const hd}
\<^item> \inlineisar|<@>{theory HOL.List}|: @{theory HOL.List}
\<^item> \inlineisar|<@>{term "3"}|: @{term "3"}
\<^item> \inlineisar|<@>{type bool}|: @{type bool}
\<^item> \inlineisar|<@>{term [show_types] "f x = a + x"}|: @{term [show_types] "f x = a + x"}
\<close>
text\<open>Examples for declaration of typed doc-classes "assumption" (sic!) and "hypothesis" (sic!!),
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
text*[ass2::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> \<open>P \<noteq> NP\<close> \<close>
text*[ass1::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>
A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass2\<close>} we establish the following: ... \<close>
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>
text*[ass122::SRAC, long_name="Some ''ass122''"]
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
which includes sampling, computing and result communication times... \<close>
text*[ass122::SRAC, long_name="Some ''ass122''"] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[ass123::SRAC]
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
which includes sampling, computing and result communication times... \<close>
text*[ass123::SRAC] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[ass124::EC, long_name="Some ''ass124''"]
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
which includes sampling, computing and result communication times... \<close>
text*[ass124::EC, long_name="Some ''ass124''"] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[t10::test_result]
\<open> This is a meta-test. This could be an ML-command that governs the external
test-execution via, \<^eg>, a makefile or specific calls to a test-environment or test-engine. \<close>
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
that governs the external test-execution via, e.g., a makefile or specific calls
to a test-environment or test-engine. \<close>
text \<open> Finally some examples of references to doc-items, i.e. text-elements
with declared meta-information and status. \<close>
text \<open> As established by @{test_result (unchecked) \<open>t10\<close>},
@{test_result (define) \<open>t10\<close>} \<close>
text \<open> the @{test_result \<open>t10\<close>}
as well as the @{SRAC \<open>ass122\<close>}\<close>
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
meta-information and status. \<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>} \<close>
text \<open> the @{docref \<open>t10\<close>}
as well as the @{docref \<open>ass122\<close>}\<close>
text \<open> represent a justification of the safety related applicability
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
text \<open> Due to notational conventions for antiquotations, one may even write:
"represent a justification of the safety related applicability
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>."\<close>
(*<*)
end
(*>*)

View File

@ -1,3 +1,4 @@
scholarly_paper
technical_report
math_exam
CENELEC_50128

View File

@ -1,90 +0,0 @@
theory Cytology
imports "Isabelle_DOF.scholarly_paper"
begin
text\<open>A small example ontology for demonstration purposes.
The presentation follows closely: \<^url>\<open>https://www.youtube.com/watch?v=URUJD5NEXC8\<close>.\<close>
datatype protein = filaments | motor_proteins | rna | dna | nucleolus
type_synonym desc = "string"
onto_class organelles = description :: desc
find_theorems (60) name:"organelles"
term "Cytology.organelles.make"
onto_class ribosomes = organelles + description :: desc
onto_class mytochondria = organelles + description :: desc
onto_class golgi_apparatus = organelles + description :: desc
onto_class lysosome = organelles + description :: desc
text\<open>the control center of the cell:\<close>
onto_class nucleus = organelles +
description :: desc
components :: "protein list" <= "[nucleolus]"
(* Not so nice construction to mimic inheritance on types used in attribute positions. *)
datatype organelles' = upcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s (get_ribosomes:ribosomes)
| upcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a (get_mytochondria:mytochondria)
| upcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s (get_golgi_apparatus: golgi_apparatus)
| upcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e (get_lysosome : lysosome)
| upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (get_nucleus : nucleus)
fun is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s where "is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s X) = True" | "is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s ( _) = False"
(* ... *)
fun downcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s
where "downcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s (upcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s X) = X" | "downcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s _ = undefined"
fun downcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a
where "downcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a (upcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a X) = X" | "downcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a _ = undefined"
fun downcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s
where "downcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s (upcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s X) = X" | "downcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s _ = undefined"
fun downcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e
where "downcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e (upcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e X) = X" | "downcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e _ = undefined"
fun downcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s
where "downcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s X) = X" | "downcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s _ = undefined"
onto_class cell =
name :: string
membrane :: desc <= "\<open>The outer boundary of the cell\<close>"
cytoplasm :: desc <= "\<open>The liquid in the cell\<close>"
cytoskeleton :: desc <= "\<open>includes the thread-like microfilaments\<close>"
genetic_material :: "protein list" <= "[rna, dna]"
text\<open>Cells are divided into two categories: \<^emph>\<open>procaryotic\<close> cells (unicellular organisms such as some
bacteria) without a substructuring in organelles and \<^emph>\<open>eucaryotic\<close> cells, as occurring in
pluricellular organisms\<close>
onto_class procaryotic_cells = cell +
name :: string
onto_class eucaryotic_cells = cell +
organelles :: "organelles' list"
invariant has_nucleus :: "\<lambda>\<sigma>::eucaryotic_cells. \<exists> org \<in> set (organelles \<sigma>). is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s org"
\<comment> \<open>Cells must have at least one nucleus. However, this should be executable.\<close>
find_theorems (70)name:"eucaryotic_cells"
find_theorems name:has_nucleus
value "is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (mk\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s X)"
term \<open>eucaryotic_cells.organelles\<close>
value \<open>(eucaryotic_cells.organelles(eucaryotic_cells.make X Y Z Z Z [] 3 []))\<close>
value \<open>has_nucleus_inv(eucaryotic_cells.make X Y Z Z Z [] 3 [])\<close>
value \<open>has_nucleus_inv(eucaryotic_cells.make X Y Z Z Z [] 3
[upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (nucleus.make a b c d [])])\<close>
end

View File

@ -0,0 +1,98 @@
(*<*)
theory MathExam
imports "Isabelle_DOF.math_exam"
HOL.Real
begin
(*>*)
(* open_monitor*[exam::MathExam] *)
section*[header::Header,examSubject= "[algebra]",
date="''02-05-2018''", timeAllowed="90::int"] \<open>Exam number 1\<close>
text\<open>
\begin{itemize}
\item Use black ink or black ball-point pen.
\item Draw diagrams in pencil.
\item Answer all questions in the spaces provided.
\end{itemize}
\<close>
text*[idir::Author, affiliation="''CentraleSupelec''",
email="''idir.aitsadoune@centralesupelec.fr''"]
\<open>Idir AIT SADOUNE\<close>
figure*[figure::figure, spawn_columns=False,
relative_width="80",
src="''figures/Polynomialdeg5''"]
\<open>A Polynomial.\<close>
subsubsection*[exo1 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 1\<close>
text\<open>
Here are the first four lines of a number pattern.
\begin{itemize}
\item Line 1 : @{term "1*6 + 2*4 = 2*7"}
\item Line 2 : @{term "2*7 + 2*5 = 3*8"}
\item Line 3 : @{term "3*8 + 2*6 = 4*9"}
\item Line 4 : @{term "4*9 + 2*7 = 5*10"}
\end{itemize}
\<close>
declare [[show_sorts=false]]
subsubsection*[exo2 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 2\<close>
text\<open>Find the roots of the polynomial:
@{term "(x^3) - 6 * x^2 + 5 * x + 12"}.
Note the intermediate steps in the following fields and submit the solution.\<close>
text\<open>
\begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}]
\begin{tabular}{l}
From @{term "(x^3) - 6 * x^2 + 5 * x + 12"} \\\\
\TextField{have 1} \\\\
\TextField{have 2} \\\\
\TextField{have 3} \\\\
\TextField{finally show} \\\\
\CheckBox[width=1em]{Has the polynomial as many solutions as its degree ? } \\\\
\Submit{Submit}\\
\end{tabular}
\end{Form}
\<close>
(* a bit brutal, as long as lemma* does not yet work *)
(*<*)
lemma check_polynome :
fixes x::real
shows "(x^3) - 6 * x^2 + 5 * x + 12 = (x-4) * (x+1) * (x - 3)"
proof -
have * : "(x-4) * (x+1) * (x - 3) = (x-4) * ((x+1) * (x-3))"
by simp
have ** : "... = (x-4) * (x^2 - 2*x - 3)"
apply(auto simp: right_diff_distrib add.commute semiring_normalization_rules(1)[symmetric])
by (simp add: semiring_normalization_rules(29))
have *** : "... = x^3 - 6 * x^2 + 5 * x + 12"
apply(auto simp: right_diff_distrib left_diff_distrib add.commute semiring_normalization_rules(1)[symmetric])
by (simp add: numeral_3_eq_3 semiring_normalization_rules(29))
show ?thesis
by(simp only: * ** ***)
qed
(*>*)
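As a quick numeric cross-check of the factorization proved in the hidden lemma above, the following Python sketch (ours, outside Isabelle) compares both sides on a range of sample points; since two polynomials of degree three that agree on four or more points coincide, this already pins down the roots.

def p(x):
    return x**3 - 6 * x**2 + 5 * x + 12

def q(x):
    return (x - 4) * (x + 1) * (x - 3)

assert all(p(x) == q(x) for x in range(-10, 11))   # the two cubics coincide
assert p(4) == p(-1) == p(3) == 0                  # hence the roots are 4, -1 and 3
print("roots of the polynomial: 4, -1, 3")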
text*[a1::Answer_Formal_Step]\<open>First Step: Fill in term and justification\<close>
text*[a2::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[a3::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[a4::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[q1::Task, local_grade="oneStar", mark="1::int", type="formal"]
\<open>Complete Line 10 : @{term "10*x + 2*y = 11*16"}\<close>
subsubsection*[exo3 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 3\<close>
text*[q2::Task, local_grade="threeStars", mark="3::int", type="formal"]
\<open>Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers
with a difference of 5.
\<close>
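For completeness, a small numeric sketch (ours, outside the exam material) relating this task to the number pattern of Exercise 1: both rest on the identity n*(n+5) + 2*(n+3) = (n+1)*(n+6), since each side expands to n^2 + 7n + 6, and the two factors n+1 and n+6 differ by 5.

# Check the identity on a range of values; the factors n+1 and n+6 differ by 5.
for n in range(1, 11):
    assert n * (n + 5) + 2 * (n + 3) == (n + 1) * (n + 6)
    assert (n + 6) - (n + 1) == 5

# Line 10 of the number pattern in Exercise 1:
assert 10 * 15 + 2 * 13 == 11 * 16
print("pattern verified up to line 10")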
(* this does not work on the level of the LaTeX output for known restrictions of the Toplevel. *)
(* close_monitor*[exam :: MathExam] *)
end

View File

@ -0,0 +1,10 @@
session "MathExam" = "Isabelle_DOF" +
options [document = pdf, document_output = "output"]
theories
MathExam
document_files
"preamble.tex"
"isadof.cfg"
"preamble.tex"
"build"
"figures/Polynomialdeg5.png"

View File

@ -0,0 +1,46 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
exit 1
fi
cp $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh .
source build_lib.sh

Binary file not shown.


View File

@ -0,0 +1,2 @@
Template: scrartcl
Ontology: math_exam

View File

@ -0,0 +1,18 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Saclay
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
\title{<TITLE>}
\author{<AUTHOR>}

1
examples/math_exam/ROOTS Normal file
View File

@ -0,0 +1 @@
MathExam

View File

@ -1,80 +1,51 @@
(*************************************************************************
* Copyright (C)
* 2019 The University of Exeter
* 2018-2019 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory IsaDofApplications
imports "Isabelle_DOF.scholarly_paper"
begin
use_template "lncs"
use_ontology "scholarly_paper"
open_monitor*[this::article]
declare[[strict_monitor_checking=false]]
define_shortcut* isadof \<rightleftharpoons> \<open>\isadof\<close>
LaTeX \<rightleftharpoons> \<open>\LaTeX{}\<close>
dots \<rightleftharpoons> \<open>\ldots\<close>
isabelle \<rightleftharpoons> \<open>Isabelle/HOL\<close>
Protege \<rightleftharpoons> \<open>Prot{\'e}g{\'e}\<close>
(* slanted text in contrast to italics *)
define_macro* slanted_text \<rightleftharpoons> \<open>\textsl{\<close> _ \<open>}\<close>
(*>*)
title*[tit::title] \<open>Using the Isabelle Ontology Framework\<close>
title*[tit::title]\<open>Using the Isabelle Ontology Framework\<close>
subtitle*[stit::subtitle]\<open>Linking the Formal with the Informal\<close>
author*[adb,
email ="''a.brucker@sheffield.ac.uk''",
orcid ="''0000-0002-6355-1200''",
affiliation ="''The University of Sheffield, Sheffield, UK''"]\<open>Achim D. Brucker\<close>
author*[idir,
email = "''idir.aitsadoune@centralesupelec.fr''",
affiliation = "''CentraleSupelec, Paris, France''"]\<open>Idir Ait-Sadoune\<close>
author*[paolo,
email = "''paolo.crisafulli@irt-systemx.fr''",
affiliation = "''IRT-SystemX, Paris, France''"]\<open>Paolo Crisafulli\<close>
author*[bu,
email = "\<open>wolff@lri.fr\<close>",
affiliation = "\<open>Université Paris-Saclay, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
text*[adb:: author,
email="''a.brucker@sheffield.ac.uk''",
orcid="''0000-0002-6355-1200''",
affiliation="''The University of Sheffield, Sheffield, UK''"]\<open>Achim D. Brucker\<close>
text*[idir::author,
email = "''idir.aitsadoune@centralesupelec.fr''",
affiliation = "''CentraleSupelec, Paris, France''"]\<open>Idir Ait-Sadoune\<close>
text*[paolo::author,
email = "''paolo.crisafulli@irt-systemx.fr''",
affiliation= "''IRT-SystemX, Paris, France''"]\<open>Paolo Crisafulli\<close>
text*[bu::author,
email = "\<open>wolff@lri.fr\<close>",
affiliation = "\<open>Université Paris-Saclay, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
abstract*[abs::abstract, keywordlist="[''Ontology'',''Ontological Modeling'',''Isabelle/DOF'']"]\<open>
While Isabelle is mostly known as part of \<^isabelle> (an interactive
theorem prover), it actually provides a framework for developing a wide
spectrum of applications. A particular strength
of the Isabelle framework is the combination of text editing, formal verification,
and code generation.
Up to now, Isabelle's document preparation system lacks a mechanism
for ensuring the structure of different document types (as, e.g.,
required in certification processes) in general and, in particular,
a mechanism for linking informal and formal parts of a document.
In this paper, we present \<^isadof>, a novel Document Ontology Framework
on top of Isabelle. \<^isadof> allows for conventional typesetting
\<^emph>\<open>as well\<close> as formal development. We show how to model document
ontologies inside \<^isadof>, how to use the resulting meta-information
for enforcing a certain document structure, and discuss ontology-specific
IDE support.
text*[abs::abstract,
keywordlist="[''Ontology'',''Ontological Modeling'',''Isabelle/DOF'']"]\<open>
While Isabelle is mostly known as part of Isabelle/HOL (an interactive
theorem prover), it actually provides a framework for developing a wide
spectrum of applications. A particular strength
of the Isabelle framework is the combination of text editing, formal verification,
and code generation.
%% If you consider citing this paper, please refer to
%% @{cite "brucker.ea:isabelle-ontologies:2018"}.
Up to now, Isabelle's document preparation system lacks a mechanism
for ensuring the structure of different document types (as, e.g.,
required in certification processes) in general and, in particular,
a mechanism for linking informal and formal parts of a document.
In this paper, we present \isadof, a novel Document Ontology Framework
on top of Isabelle. \isadof allows for conventional typesetting
\<^emph>\<open>as well\<close> as formal development. We show how to model document
ontologies inside \isadof, how to use the resulting meta-information
for enforcing a certain document structure, and discuss ontology-specific IDE support.
\<close>
section*[intro::introduction]\<open> Introduction \<close>
text*[introtext::introduction, level = "Some 1"]\<open>
text*[introtext::introduction]\<open>
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the
most pervasive challenge in the digitization of knowledge and its
propagation. This challenge incites numerous research efforts
@ -82,7 +53,7 @@ summarized under the labels ``semantic web'', ``data mining'', or any
form of advanced ``semantic'' text processing. A key role in
structuring this linking play \<^emph>\<open>document ontologies\<close> (also called
\<^emph>\<open>vocabulary\<close> in the semantic web community~@{cite "w3c:ontologies:2015"}),
\<^ie>, a machine-readable form of the structure of documents as well as
\ie, a machine-readable form of the structure of documents as well as
the document discourse.
Such ontologies can be used for the scientific discourse within scholarly
@ -96,22 +67,22 @@ describing \<^emph>\<open>attributes\<close> of the concept, as well as \<^emph>
them. A particular link between concepts is the \<^emph>\<open>is-a\<close> relation declaring
the instances of a subclass to be instances of the super-class.
The main objective of this paper is to present \<^isadof>, a novel
The main objective of this paper is to present \isadof, a novel
framework to \<^emph>\<open>model\<close> typed ontologies and to \<^emph>\<open>enforce\<close> them during
document evolution. Based on Isabelle infrastructures, ontologies may refer to
types, terms, proven theorems, code, or established assertions.
Based on a novel adaption of the Isabelle IDE, a document is checked to be
\<^emph>\<open>conform\<close> to a particular ontology---\<^isadof> is designed to give fast user-feedback
\<^emph>\<open>conform\<close> to a particular ontology---\isadof is designed to give fast user-feedback
\<^emph>\<open>during the capture of content\<close>. This is particularly valuable in case of document
changes, where the \<^emph>\<open>coherence\<close> between the formal and the informal parts of the
content can be mechanically checked.
To avoid any misunderstanding: \<^isadof> is \<^emph>\<open>not a theory in HOL\<close>
To avoid any misunderstanding: \isadof is \<^emph>\<open>not a theory in HOL\<close>
on ontologies and operations to track and trace links in texts,
it is an \<^emph>\<open>environment to write structured text\<close> which \<^emph>\<open>may contain\<close>
\<^isabelle> definitions and proofs like mathematical articles, tech-reports and
scientific papers---as the present one, which is written in \<^isadof>
itself. \<^isadof> is a plugin into the Isabelle/Isar
Isabelle/HOL definitions and proofs like mathematical articles, tech-reports and
scientific papers---as the present one, which is written in \isadof
itself. \isadof is a plugin into the Isabelle/Isar
framework in the style of~@{cite "wenzel.ea:building:2007"}.
\<close>
@ -123,29 +94,29 @@ declare_reference*[ontomod::text_section]
declare_reference*[ontopide::text_section]
declare_reference*[conclusion::text_section]
(*>*)
text*[plan::introduction, level="Some 1"]\<open> The plan of the paper is as follows: we start by introducing the underlying
Isabelle system (@{text_section (unchecked) \<open>bgrnd\<close>}) followed by presenting the
essentials of \<^isadof> and its ontology language (@{text_section (unchecked) \<open>isadof\<close>}).
It follows @{text_section (unchecked) \<open>ontomod\<close>}, where we present three application
scenarios from the point of view of the ontology modeling. In @{text_section (unchecked) \<open>ontopide\<close>}
text*[plan::introduction]\<open> The plan of the paper is as follows: we start by introducing the underlying
Isabelle system (@{docitem (unchecked) \<open>bgrnd\<close>}) followed by presenting the
essentials of \isadof and its ontology language (@{docitem (unchecked) \<open>isadof\<close>}).
It follows @{docitem (unchecked) \<open>ontomod\<close>}, where we present three application
scenarios from the point of view of the ontology modeling. In @{docitem_ref (unchecked) \<open>ontopide\<close>}
we discuss the user-interaction generated from the ontological definitions. Finally, we draw
conclusions and discuss related work in @{text_section (unchecked) \<open>conclusion\<close>}. \<close>
conclusions and discuss related work in @{docitem_ref (unchecked) \<open>conclusion\<close>}. \<close>
section*[bgrnd::text_section,main_author="Some(@{docitem ''bu''}::author)"]
section*[bgrnd::text_section,main_author="Some(@{docitem ''adb''}::author)"]
\<open> Background: The Isabelle System \<close>
text*[background::introduction, level="Some 1"]\<open>
text*[background::introduction]\<open>
While Isabelle is widely perceived as an interactive theorem prover
for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we
would like to emphasize the view that Isabelle is far more than that:
it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This refers to the
``\<^slanted_text>\<open>generic system framework of Isabelle/Isar underlying recent
``\textsl{generic system framework of Isabelle/Isar underlying recent
versions of Isabelle. Among other things, Isar provides an
infrastructure for Isabelle plug-ins, comprising extensible state
components and extensible syntax that can be bound to ML
programs. Thus, the Isabelle/Isar architecture may be understood as
an extension and refinement of the traditional `LCF approach', with
explicit infrastructure for building derivative
\<^emph>\<open>systems\<close>.\<close>''~@{cite "wenzel.ea:building:2007"}
\<^emph>\<open>systems\<close>.}''~@{cite "wenzel.ea:building:2007"}
The current system framework offers moreover the following features:
@ -162,24 +133,24 @@ figure*[architecture::figure,relative_width="100",src="''figures/isabelle-archit
asynchronous communication between the Isabelle system and
the IDE (right-hand side). \<close>
text*[blug::introduction, level="Some 1"]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
text*[blug::introduction]\<open> The Isabelle system architecture shown in @{docitem_ref \<open>architecture\<close>}
comes with many layers, with Standard ML (SML) at the bottom layer as implementation
language. The architecture actually foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which
resides in the SML structure \<^ML_structure>\<open>Context\<close>. This structure provides a kind of container called
resides in the SML structure \texttt{Context}. This structure provides a kind of container called
\<^emph>\<open>context\<close> providing an identity, an ancestor-list as well as typed, user-defined state
for components (plugins) such as \<^isadof>. On top of the latter, the LCF-Kernel, tactics,
for components (plugins) such as \isadof. On top of the latter, the LCF-Kernel, tactics,
automated proof procedures as well as specific support for higher specification constructs
were built. \<close>
text\<open> We would like to detail the documentation generation of the architecture,
which is based on literate specification commands such as \inlineisar+section+ \<^dots>,
\inlineisar+subsection+ \<^dots>, \inlineisar+text+ \<^dots>, etc.
which is based on literate specification commands such as \inlineisar+section+ \ldots,
\inlineisar+subsection+ \ldots, \inlineisar+text+ \ldots, etc.
Thus, a user can add a simple text:
\begin{isar}
text\<Open>This is a description.\<Close>
\end{isar}
These text-commands can be arbitrarily mixed with other commands stating definitions, proofs, code, etc.,
and will result in the corresponding output in generated \<^LaTeX> or HTML documents.
and will result in the corresponding output in generated \LaTeX{} or HTML documents.
Now, \<^emph>\<open>inside\<close> the textual content, it is possible to embed a \<^emph>\<open>text-antiquotation\<close>:
\begin{isar}
text\<Open>According to the reflexivity axiom \at{thm refl}, we obtain in \<Gamma>
@ -194,45 +165,45 @@ For the antiquotation \inlineisar+\at{value "fac 5"}+ we assume the usual definition of
\inlineisar+fac+ in HOL.
\<close>
text*[anti::introduction, level = "Some 1"]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
text*[anti]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
displayed and can be used for calculations before actually being typeset. When editing,
Isabelle's PIDE offers auto-completion and error-messages while typing the above
\<^emph>\<open>semi-formal\<close> content. \<close>
section*[isadof::technical,main_author="Some(@{docitem ''adb''}::author)"]\<open> \<^isadof> \<close>
section*[isadof::technical,main_author="Some(@{docitem ''adb''}::author)"]\<open> \isadof \<close>
text\<open> An \<^isadof> document consists of three components:
text\<open> An \isadof document consists of three components:
\<^item> the \<^emph>\<open>ontology definition\<close> which is an Isabelle theory file with definitions
for document-classes and all auxiliary datatypes.
\<^item> the \<^emph>\<open>core\<close> of the document itself which is an Isabelle theory
importing the ontology definition. \<^isadof> provides its own family of text-element
importing the ontology definition. \isadof provides its own family of text-element
commands such as \inlineisar+title*+, \inlineisar+section*+, \inlineisar+text*+, etc.,
which can be annotated with meta-information defined in the underlying ontology definition.
\<^item> the \<^emph>\<open>layout definition\<close> for the given ontology exploiting this meta-information.
\<close>
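text\<open>
To fix intuitions, a minimal \<^emph>\<open>core\<close> theory might look as follows (a sketch; the theory
name and the session-qualified name of the imported ontology are illustrative assumptions):
\begin{isar}
theory My_Article
  imports "Isabelle_DOF.scholarly_paper"  (* the ontology definition *)
begin

title*[tit::title]\<Open>An Example Article\<Close>
text*[intro::introduction]\<Open>The introduction text ...\<Close>

end
\end{isar}
\<close>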
text\<open>\<^isadof> is a novel Isabelle system component providing specific support for all these
text\<open>\isadof is a novel Isabelle system component providing specific support for all these
three parts. Note that the document core \<^emph>\<open>may\<close>, but \<^emph>\<open>need\<close> not,
use Isabelle definitions or proofs for checking the formal content---the
present paper is actually an example of a document not containing any proof.
The document generation process of \<^isadof> is currently restricted to \LaTeX, which means
that the layout is defined by a set of \<^LaTeX> style files. Several layout
The document generation process of \isadof is currently restricted to \LaTeX, which means
that the layout is defined by a set of \LaTeX{} style files. Several layout
definitions for one ontology are possible and pave the way for generating different \<^emph>\<open>views\<close> of
the same central document, addressing the needs of different purposes
and/or target readers.
While the ontology and the layout definition will have to be developed by an expert
with knowledge of Isabelle and \<^isadof> and the back-end technology depending on the layout
with knowledge of Isabelle and \isadof and the back-end technology depending on the layout
definition, the core is intended to require only minimal knowledge of these two. The situation
is similar to \<^LaTeX>-users, who usually have minimal knowledge about the content in
style-files (\<^verbatim>\<open>.sty\<close>-files). In the document core authors \<^emph>\<open>can\<close> use \<^LaTeX> commands in
is similar to \LaTeX{}-users, who usually have minimal knowledge about the content in
style-files (\<^verbatim>\<open>.sty\<close>-files). In the document core authors \<^emph>\<open>can\<close> use \LaTeX{} commands in
their source, but this limits the possibility of using different representation technologies,
\<^eg>, HTML, and increases the risk of arcane error-messages in generated \<^LaTeX>.
\eg, HTML, and increases the risk of arcane error-messages in generated \LaTeX{}.
The \<^isadof> ontology specification language consists basically of a notation for
The \isadof ontology specification language consists basically of a notation for
document classes, where the attributes are typed with HOL-types and can be instantiated
by HOL-terms, \<^ie>, the actual parsers and type-checkers of the Isabelle system are reused.
This has the particular advantage that \<^isadof> commands can be arbitrarily mixed with
by HOL-terms, \ie, the actual parsers and type-checkers of the Isabelle system are reused.
This has the particular advantage that \isadof commands can be arbitrarily mixed with
Isabelle/HOL commands providing the machinery for type declarations and term specifications such
as enumerations. In particular, document class definitions provide:
\<^item> a HOL-type for each document class as well as inheritance,
@ -247,7 +218,7 @@ The HOL-types inside the document specification language support built-in types
\inlineisar+typ+'s, \inlineisar+term+'s, and \inlineisar+thm+'s reflecting Isabelle's
internal types for these entities; when denoted in HOL-terms to instantiate an attribute, for
example, there is a specific syntax (called \<^emph>\<open>inner syntax antiquotations\<close>) that is checked by
\<^isadof> for consistency.
\isadof for consistency.
Document classes can have a \inlineisar+where+ clause containing a regular
expression over class names. Classes with such a \inlineisar+where+ clause are called \<^emph>\<open>monitor classes\<close>.
@ -256,10 +227,10 @@ in an object-oriented manner, monitor classes enforce structural organization
of documents via the language specified by the regular expression
enforcing a sequence of text-elements that must belong to the corresponding classes.
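For illustration, a small hypothetical ontology fragment in this notation could read as follows
(class and attribute names are invented for this sketch; \inlineisar+~~+ denotes sequential
composition in the regular expression of the monitor class):
\begin{isar}
doc_class concept =
   tag  :: "string"
   thms :: "thm list"      <=  "[]"

doc_class claim = concept +
   status :: "string"      <=  "''open''"

doc_class note_monitor =
   trace :: "string list"  <=  "[]"
   where "(concept ~~ claim)"
\end{isar}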
To start using \<^isadof>, one creates an Isabelle project (with the name
To start using \isadof, one creates an Isabelle project (with the name
\inlinebash{IsaDofApplications}):
\begin{bash}
isabelle dof_mkroot -o scholarly_paper -t lncs IsaDofApplications
isabelle mkroot_DOF -o scholarly_paper -t lncs -d IsaDofApplications
\end{bash}
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
@ -270,8 +241,8 @@ article in PDF using the following command:
\end{bash}
\<close>
section*[ontomod::text_section]\<open> Modeling Ontologies in \<^isadof> \<close>
text\<open> In this section, we will use the \<^isadof> document ontology language
section*[ontomod::text_section]\<open> Modeling Ontologies in \isadof \<close>
text\<open> In this section, we will use the \isadof document ontology language
for three different application scenarios: for scholarly papers, for mathematical
exam sheets as well as standardization documents where the concepts of the
standard are captured in the ontology. For space reasons, we will concentrate in all three
@ -279,11 +250,11 @@ cases on aspects of the modeling due to space limitations.\<close>
subsection*[scholar_onto::example]\<open> The Scholar Paper Scenario: Eating One's Own Dog Food. \<close>
text\<open> The following is a simple ontology modeling scientific papers. In this
\<^isadof> application scenario, we deliberately refrain from integrating references to
(Isabelle) formal content in order to demonstrate that \<^isadof> is not a framework from
\isadof application scenario, we deliberately refrain from integrating references to
(Isabelle) formal content in order to demonstrate that \isadof is not a framework from
Isabelle users for Isabelle users only.
Of course, such references can be added easily and represent a particular strength
of \<^isadof>.
of \isadof.
\begin{figure}
@ -314,18 +285,18 @@ with the usual text-elements of a scientific paper. The attributes \inlineisar+s
Our model prescribes an optional \inlineisar+main_author+ and a todo-list attached to an arbitrary
text section; since instances of this class are mutable (meta)-objects of text-elements, they
can be modified arbitrarily through subsequent text and of course globally during text evolution.
Since \inlineisar+author+ is a HOL-type internally generated by the \<^isadof> framework, it can
Since \inlineisar+author+ is a HOL-type internally generated by the \isadof framework, it can
appear in the \inlineisar+main_author+ attribute of the \inlineisar+text_section+ class;
semantic links between concepts can be modeled this way.
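In condensed form (defaults here are illustrative), the two classes just discussed read
roughly as follows:
\begin{isar}
doc_class author =
   affiliation :: "string"

doc_class text_section =
   main_author :: "author option"  <=  "None"
   todo_list   :: "string list"    <=  "[]"
\end{isar}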
The translation of its content to, \<^eg>, Springer's \<^LaTeX> setup for the Lecture Notes in Computer
The translation of its content to, \eg, Springer's \LaTeX{} setup for the Lecture Notes in Computer
Science Series, as required by many scientific conferences, is mostly straight-forward. \<close>
figure*[fig1::figure,spawn_columns=False,relative_width="95",src="''figures/Dogfood-Intro''"]
\<open> Ouroboros I: This paper from inside \<^dots> \<close>
\<open> Ouroboros I: This paper from inside \ldots \<close>
text\<open> @{figure \<open>fig1\<close>} shows the corresponding view in the Isabelle/PIDE of the present paper.
Note that the text uses \<^isadof>'s own text-commands containing the meta-information provided by
text\<open> @{docitem \<open>fig1\<close>} shows the corresponding view in the Isabelle/PIDE of the present paper.
Note that the text uses \isadof's own text-commands containing the meta-information provided by
the underlying ontology.
We proceed by a definition of \inlineisar+introduction+'s, which we define as the extension of
\inlineisar+text_section+ which is intended to capture common infrastructure:
@ -407,15 +378,15 @@ doc_class figure = text_section +
text\<open> Alternatively, by including the HOL-libraries for rationals, it is possible to
use fractions or even mathematical reals. This must be counterbalanced by syntactic
and semantic convenience. Choosing the mathematical reals, \<^eg>, would have the drawback that
and semantic convenience. Choosing the mathematical reals, \eg, would have the drawback that
attribute evaluation could be substantially more complicated.\<close>
figure*[fig_figures::figure,spawn_columns=False,relative_width="85",src="''figures/Dogfood-figures''"]
\<open> Ouroboros II: figures \<^dots> \<close>
\<open> Ouroboros II: figures \ldots \<close>
text\<open> The document class \inlineisar+figure+ --- supported by the \<^isadof> text command
text\<open> The document class \inlineisar+figure+ --- supported by the \isadof text command
\inlineisar+figure*+ --- makes it possible to express the pictures and diagrams in this paper
such as @{figure \<open>fig_figures\<close>}.
such as @{docitem_ref \<open>fig_figures\<close>}.
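A condensed sketch of the \inlineisar+introduction+ and \inlineisar+figure+ extensions discussed
above (attribute names as used by the \inlineisar+figure*+ instances in this paper; the
\inlineisar+comment+ attribute and the defaults are merely illustrative) is:
\begin{isar}
doc_class introduction = text_section +
   comment :: string

doc_class figure = text_section +
   relative_width :: "int"    (* percentage of textwidth *)
   src            :: "string"
   spawn_columns  :: "bool"   <=  "True"
\end{isar}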
\<close>
subsection*[math_exam::example]\<open> The Math-Exam Scenario \<close>
@ -427,12 +398,12 @@ during the exam and the preparation requires a very rigorous process, as the fre
We assume that the content has four different types of addressees, which have a different
\<^emph>\<open>view\<close> on the integrated document:
\<^item> the \<^emph>\<open>setter\<close>, \<^ie>, the author of the exam,
\<^item> the \<^emph>\<open>checker\<close>, \<^ie>, an internal person that checks
\<^item> the \<^emph>\<open>setter\<close>, \ie, the author of the exam,
\<^item> the \<^emph>\<open>checker\<close>, \ie, an internal person that checks
the exam for feasibility and non-ambiguity,
\<^item> the \<^emph>\<open>external examiner\<close>, \<^ie>, an external person that checks
\<^item> the \<^emph>\<open>external examiner\<close>, \ie, an external person that checks
the exam for feasibility and non-ambiguity, and
\<^item> the \<^emph>\<open>student\<close>, \<^ie>, the addressee of the exam.
\<^item> the \<^emph>\<open>student\<close>, \ie, the addressee of the exam.
\<close>
text\<open> The latter quality assurance mechanism is used in many universities,
where for organizational reasons the execution of an exam takes place in facilities
@ -506,7 +477,7 @@ In many institutions, it makes sense to have a rigorous process of validation
for exam subjects: is the initial question correct? Is a proof in the sense of the
question possible? We model the possibility that the @{term examiner} validates a
question by a sample proof validated by Isabelle (see \autoref{fig:onto-exam-monitor}).
In our scenario these sample proofs are completely \<^emph>\<open>internal\<close>, \<^ie>, not exposed to the
In our scenario these sample proofs are completely \<^emph>\<open>internal\<close>, \ie, not exposed to the
students but just additional material for the internal review process of the exam.
\begin{figure}
\begin{isar}
@ -532,15 +503,15 @@ doc_class MathExam=
declare_reference*["fig_qcm"::figure]
text\<open> Using the \<^LaTeX> package hyperref, it is possible to conceive an interactive
text\<open> Using the \LaTeX{} package hyperref, it is possible to conceive an interactive
exam-sheet with multiple-choice and/or free-response elements
(see @{figure (unchecked) \<open>fig_qcm\<close>}). With the
(see @{docitem_ref (unchecked) \<open>fig_qcm\<close>}). With the
help of the latter, students can write in a browser a formal mathematical
derivation---as part of an algebra exercise, for example---which is submitted to the examiners
electronically. \<close>
figure*[fig_qcm::figure,spawn_columns=False,
relative_width="90",src="''figures/InteractiveMathSheet''"]
\<open> A Generated QCM Fragment \<^dots> \<close>
\<open> A Generated QCM Fragment \ldots \<close>
subsection*[cenelec_onto::example]\<open> The Certification Scenario following CENELEC \<close>
text\<open> Documents to be provided in formal certifications (such as CENELEC
@ -615,7 +586,7 @@ doc_class srac = ec +
section*[ontopide::technical]\<open> Ontology-based IDE support \<close>
text\<open> We present a selection of interaction scenarios from @{example \<open>scholar_onto\<close>}
and @{example \<open>cenelec_onto\<close>} with Isabelle/PIDE instrumented by \<^isadof>. \<close>
and @{example \<open>cenelec_onto\<close>} with Isabelle/PIDE instrumented by \isadof. \<close>
subsection*[scholar_pide::example]\<open> A Scholarly Paper \<close>
text\<open> In \autoref{fig-Dogfood-II-bgnd1} and \autoref{fig-bgnd-text_section} we show how
@ -648,7 +619,7 @@ figure*[figDogfoodVIlinkappl::figure,relative_width="80",src="''figures/Dogfood-
\<open> Exploring an attribute (hyperlinked to the class). \<close>
subsection*[cenelec_pide::example]\<open> CENELEC \<close>
declare_reference*[figfig3::figure]
text\<open> The corresponding view in @{docitem (unchecked) \<open>figfig3\<close>} shows the core part of a document,
text\<open> The corresponding view in @{docitem_ref (unchecked) \<open>figfig3\<close>} shows the core part of a document,
conforming to the @{example \<open>cenelec_onto\<close>}. The first sample shows standard Isabelle antiquotations
@{cite "wenzel:isabelle-isar:2017"} referring to formal entities of a theory. This way, the informal parts
of a document get ``formal content'' and become more robust under change.\<close>
@ -657,20 +628,20 @@ figure*[figfig3::figure,relative_width="80",src="''figures/antiquotations-PIDE''
\<open> Standard antiquotations referring to theory elements.\<close>
declare_reference*[figfig5::figure]
text\<open> The subsequent sample in @{figure (unchecked) \<open>figfig5\<close>} shows the definition of a
text\<open> The subsequent sample in @{docitem_ref (unchecked) \<open>figfig5\<close>} shows the definition of a
\<^emph>\<open>safety-related application condition\<close>, a side-condition of a theorem which
has the consequence that a certain calculation must be executed sufficiently fast on an embedded
device. This condition cannot be established inside the formal theory but has to be
checked by system integration tests.\<close>
figure*[figfig5::figure, relative_width="80", src="''figures/srac-definition''"]
\<open> Defining a SRAC reference \<^dots> \<close>
\<open> Defining a SRAC reference \ldots \<close>
figure*[figfig7::figure, relative_width="80", src="''figures/srac-as-es-application''"]
\<open> Using a SRAC as EC document reference. \<close>
text\<open> Now we reference in @{figure (unchecked) \<open>figfig7\<close>} this safety-related condition;
text\<open> Now we reference in @{docitem_ref (unchecked) \<open>figfig7\<close>} this safety-related condition;
however, this happens in a context where general \<^emph>\<open>exported constraints\<close> are listed.
\<^isadof>'s checks establish that this is legal in the given ontology.
\isadof's checks establish that this is legal in the given ontology.
This example shows that ontological modeling is indeed adequate for large technical,
collaboratively developed documentation, where modifications can easily lead to incoherence.
@ -693,20 +664,22 @@ The control of monitors is done by the commands:
\<^item> \inlineisar+close_monitor* + <doc-class>
\<close>
text\<open>
where the automaton of the monitor class is expected to be in a final state. Monitors can be
nested, so it is possible to "overlay" one or more monitoring classes and impose different
sets of structural constraints. Classes which are neither directly nor indirectly (via
inheritance) mentioned in the monitor are \<^emph>\<open>independent\<close> of a monitor;
instances of independent text elements may occur freely. \<close>
where the automaton of the monitor class is expected
to be in a final state. Monitors can be nested,
so it is possible to "overlay" one or more monitoring
classes and impose different sets of structural constraints.
Classes which are neither directly nor indirectly (via inheritance) mentioned in the
monitor are \<^emph>\<open>independent\<close> of a monitor; instances of independent text elements
may occur freely. \<close>
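text\<open>
For illustration, a monitored document core might be bracketed as follows (a sketch only; we
assume a monitor class \inlineisar+article+ in the style of the scholarly paper ontology, and
the exact argument syntax of \inlineisar+close_monitor*+ may differ):
\begin{isar}
open_monitor*[this::article]
title*[tit::title]\<Open>Using the Isabelle Ontology Framework\<Close>
section*[intro::introduction]\<Open>Introduction\<Close>
(* further text elements, checked against the regular expression of article *)
close_monitor*[this]
\end{isar}
\<close>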
section*[conclusion::conclusion]\<open> Conclusion and Related Work\<close>
text\<open> We have demonstrated the use of \<^isadof>, a novel ontology modeling and enforcement
text\<open> We have demonstrated the use of \isadof, a novel ontology modeling and enforcement
IDE deeply integrated into the Isabelle/Isar Framework. The two most distinguishing features are
\<^item> \<^isadof> and its ontology language provide a strongly typed language that allows
for referring (albeit not reasoning) to entities of \<^isabelle>, most notably types, terms,
\<^item> \isadof and its ontology language provide a strongly typed language that allows
for referring (albeit not reasoning) to entities of Isabelle/HOL, most notably types, terms,
and (formally proven) theorems, and
\<^item> \<^isadof> is supported by the Isabelle/PIDE framework; thus, the advantages of an IDE for
\<^item> \isadof is supported by the Isabelle/PIDE framework; thus, the advantages of an IDE for
text-exploration (what is the type of this link? To which text element does this link refer?
What are the syntactic alternatives here?) are available during editing
instead of a post-hoc validation process.
@ -725,27 +698,27 @@ on documents mixing formal and informal content---a type of documents
that is very common in technical certification processes. We see
mainly one area of related work: IDEs and text editors that support
editing and checking of documents based on an ontology. There is a
large group of ontology editors (\<^eg>, \<^Protege>~@{cite "protege"},
large group of ontology editors (\eg, Prot{\'e}g{\'e}~@{cite "protege"},
Fluent Editor~@{cite "cognitum"}, NeOn~@{cite "neon"}, or
OWLGrEd~@{cite "owlgred"}). With them, we share the support for defining
ontologies as well as auto-completion when editing documents based on
an ontology. While our ontology definitions are currently based on a
textual definition, widely used ontology editors (\<^eg>,
textual definition, widely used ontology editors (\eg,
OWLGrEd~@{cite "owlgred"}) also support graphical notations. This could
be added to \<^isadof> in the future. A unique feature of \<^isadof> is the
be added to \isadof in the future. A unique feature of \isadof is the
deep integration of formal and informal text parts. The only other
work in this area we are aware of is rOntorium~@{cite "rontorium"}, a plugin
for \<^Protege> that integrates R~@{cite "adler:r:2010"} into an
for Prot{\'e}g{\'e} that integrates R~@{cite "adler:r:2010"} into an
ontology environment. Here, the main motivation behind this
integration is to allow for statistically analyzing ontological
documents. Thus, this is complementary to our work.
\<close>
text\<open> \<^isadof> in its present form has a number of technical shortcomings as well
text\<open> \isadof in its present form has a number of technical shortcomings as well
as potential not yet explored. On the long list of the shortcomings is the
fact that strings inside HOL-terms do not support, for example, Unicode.
For the moment, \<^isadof> is conceived as an
add-on for \<^isabelle>; a much deeper integration of \<^isadof> into Isabelle
For the moment, \isadof is conceived as an
add-on for Isabelle/HOL; a much deeper integration of \isadof into Isabelle
could increase both performance and uniformity. Finally, different target
presentations (such as HTML) would be highly desirable, in particular for the
math exam scenarios. And last but not least, it would be desirable that PIDE

View File

@ -1,13 +1,13 @@
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
options [document = pdf, document_output = "output", quick_and_dirty = true]
theories
IsaDofApplications
document_files
"isadof.cfg"
"root.bib"
"authorarchive.sty"
"preamble.tex"
"build"
"lstisadof.sty"
"vector_iD_icon.pdf"
"figures/isabelle-architecture.pdf"
"figures/Dogfood-Intro.png"
"figures/InteractiveMathSheet.png"

View File

@ -1,339 +0,0 @@
%% Copyright (C) 2008-2019 Achim D. Brucker, https://www.brucker.ch
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
\NeedsTeXFormat{LaTeX2e}\relax
\ProvidesPackage{authorarchive}
[0000/00/00 Unreleased v1.1.1+%
Self-archiving information for scientific publications.]
%
\PassOptionsToPackage{hyphens}{url}
%
\RequirePackage{ifthen}
\RequirePackage[inline]{enumitem}
\RequirePackage{graphicx}
\RequirePackage{eso-pic}
\RequirePackage{intopdf}
\RequirePackage{kvoptions}
\RequirePackage{hyperref}
\RequirePackage{calc}
\RequirePackage{qrcode}
\RequirePackage{hvlogos}
%
%Better url breaking
\g@addto@macro{\UrlBreaks}{\UrlOrds}
%
% Option declarations
% -------------------
\SetupKeyvalOptions{
family=AA,
prefix=AA@
}
%
\DeclareStringOption[.]{bibtexdir}
\DeclareStringOption[https://duckduckgo.com/?q=]{baseurl}
\DeclareStringOption[.pdf]{suffix}
\DeclareStringOption[UNKNOWN PUBLISHER]{publisher}[]
\DeclareStringOption[UNKNOWN YEAR]{year}[]
\DeclareStringOption[]{key}[]
\DeclareStringOption[]{doi}[]
\DeclareStringOption[]{doiText}[]
\DeclareStringOption[]{publisherurl}[]
\DeclareStringOption[UNKNOWN START PAGE]{startpage}[]
\DeclareStringOption[UNKNOWN PUBLICATION]{publication}[]
\DeclareBoolOption{ACM}
\DeclareBoolOption{acmart}
\DeclareBoolOption{ENTCS}
\DeclareBoolOption{IEEE}
\DeclareBoolOption{LNCS}
\DeclareBoolOption{LNI}
\DeclareBoolOption{nocopyright}
\DeclareBoolOption{nourl}
\DeclareBoolOption{nobib}
\DeclareBoolOption{orcidicon}
%\ProcessOptions\relax
% Default option rule
\DeclareDefaultOption{%
\ifx\CurrentOptionValue\relax
\PackageWarningNoLine{\@currname}{%
Unknown option `\CurrentOption'\MessageBreak
is passed to package `authorarchive'%
}%
% Pass the option to package color.
% Again it is better to expand \CurrentOption.
\expandafter\PassOptionsToPackage\expandafter{\CurrentOption}{color}%
\else
% Package color does not take options with values.
% We provide the standard LaTeX error.
\@unknownoptionerror
\fi
}
\ProcessKeyvalOptions*
% Provide command for dynamic configuration setup
\def\authorsetup{\kvsetkeys{AA}}
% Load local configuration
\InputIfFileExists{authorarchive.config}{}{}
\newlength\AA@x
\newlength\AA@y
\newlength\AA@width
\def\AA@bibBibTeX{\AA@bibtexdir/\AA@key.bib}
\def\AA@bibBibTeXLong{\AA@bibtexdir/\AA@key.bibtex}
\def\AA@bibWord{\AA@bibtexdir/\AA@key.word.xml}
\def\AA@bibEndnote{\AA@bibtexdir/\AA@key.enw}
\def\AA@bibRIS{\AA@bibtexdir/\AA@key.ris}
\newboolean{AA@bibExists}
\setboolean{AA@bibExists}{false}
\IfFileExists{\AA@bibBibTeX}{\setboolean{AA@bibExists}{true}}{}
\IfFileExists{\AA@bibBibTeXLong}{\setboolean{AA@bibExists}{true}}{}
\IfFileExists{\AA@bibWord}{\setboolean{AA@bibExists}{true}}{}
\IfFileExists{\AA@bibEndnote}{\setboolean{AA@bibExists}{true}}{}
\IfFileExists{\AA@bibRIS}{\setboolean{AA@bibExists}{true}}{}
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
\newcommand{\authorcrfont}{\footnotesize}
\newcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{.2cm-\paperheight}){#1}}}
\newcommand{\authorwidth}[1]{\setlength{\AA@width}{#1}}
\setlength{\AA@width}{\textwidth}
\def\AA@pageinfo{}
\ifthenelse{\equal{\AA@startpage}{UNKNOWN START PAGE}}{%
}{%
\setcounter{page}{\AA@startpage}%
\def\AA@pageinfo{pp. \thepage--\pageref{\aa@lastpage}, }
}
%%%% sig-alternate.cls
\ifAA@ACM%
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
\setkeys{AA}{publisher=ACM}
}{}
\global\boilerplate={}
\global\copyrightetc={}
\renewcommand{\conferenceinfo}[2]{}
\renewcommand{\authorcrfont}{\scriptsize}
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
\setlength\AA@y{-\textheight+\topmargin+\headheight-\footskip} % -\voffset-\topmargin-\headheight-\footskip}
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{\AA@y}){#1}}
\setlength{\AA@width}{\columnwidth}
\fi
%
%%%% acmart.cls
\ifAA@acmart%
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
\setkeys{AA}{publisher=ACM}
}{}
\renewcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{0.2cm-\paperheight}){#1}}}
\setlength{\AA@width}{\textwidth}
\fi
%
%%%% LNCS
\ifAA@LNCS%
\ifAA@orcidicon%
\renewcommand{\orcidID}[1]{\href{https://orcid.org/#1}{%
\textsuperscript{\,\includegraphics[height=2\fontcharht\font`A]{vector_iD_icon}}}}
\else\relax\fi%
%
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
\setkeys{AA}{publisher=Springer-Verlag}
}{}
\renewcommand{\authorcrfont}{\scriptsize}
\@ifclasswith{llncs}{a4paper}{%
\ExplSyntaxOn
\@ifundefined{pdfmanagement_add:nnn}{%
\pdfpagesattr{/CropBox [92 114 523 780]}%
}{%
\pdfmanagement_add:nnn {Pages}{CropBox}{[92~114~523~780]}
}%
\ExplSyntaxOff
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},40){#1}}%
}{%
\ExplSyntaxOn
\@ifundefined{pdfmanagement_add:nnn}{%
\pdfpagesattr{/CropBox [92 65 523 731]}% LNCS page: 152x235 mm
}{%
\pdfmanagement_add:nnn {Pages}{CropBox}{[92~62~523~731]}
}%
\ExplSyntaxOff
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},23){#1}}
}
\setlength{\AA@width}{\textwidth}
\setcounter{tocdepth}{2}
\fi
%
%%%% LNI
\ifAA@LNI%
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
\setkeys{AA}{publisher=GI}
}{}
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},35){#1}}
\renewcommand{\authorcrfont}{\scriptsize}
\pdfpagesattr{/CropBox [70 65 526.378 748.15]} % TODO
\setlength{\AA@width}{\textwidth}
\setcounter{tocdepth}{2}
\fi
%
%%%% ENTCS
\ifAA@ENTCS%
\addtolength{\voffset}{1cm}
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
\setkeys{AA}{publisher=Elsevier Science B.~V.}
}{}
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{-.5cm-\the\ht\AA@authoratBox}){#1}}
\renewcommand{\authorcrfont}{\scriptsize}
\setlength{\AA@width}{\textwidth}
\fi
%
%%%% IEEE
\ifAA@IEEE%
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
\setkeys{AA}{publisher=IEEE}
}{}
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},6){#1}}
\renewcommand{\authorcrfont}{\scriptsize}
\setlength{\AA@width}{\textwidth}
\setcounter{tocdepth}{2}
\fi
%
\hypersetup{%
draft = false,
bookmarksopen = true,
bookmarksnumbered= true,
pdfauthor = {\@author},
pdftitle = {\@title},
}
\@ifpackageloaded{totpages}{%
\def\aa@lastpage{TotPages}
}{%
\RequirePackage{lastpage}
\def\aa@lastpage{LastPage}
}
\newsavebox{\AA@authoratBox}
\AddToShipoutPicture*{%
\setlength{\unitlength}{1mm}%
\savebox{\AA@authoratBox}{%
\parbox{1.4cm}{%
\bgroup%
\normallineskiplimit=0pt%
\ifAA@nourl%
\ifx\AA@doi\@empty\relax%
\else%
\qrcode[hyperlink,height=1.17cm,padding]{https://doi.org/\AA@doi}%
\fi%
\else%
\qrcode[hyperlink,height=1.17cm,padding]{\AA@baseurl/\AA@key\AA@suffix}%
\fi%
\egroup%
}%
\ifAA@nourl\ifx\AA@doi\@empty\addtolength{\AA@width}{1.4cm}\fi\fi
\parbox{\AA@width-1.4cm}{\authorcrfont%
\ifAA@LNCS%
\AA@publication, \AA@pageinfo \AA@year. %
\ifAA@nocopyright\else
\textcopyright~\AA@year~\AA@publisher.
\fi
This is the author's
version of the work. It is posted
\ifAA@nourl\relax\else%
at \url{\AA@baseurl/\AA@key\AA@suffix} %
\fi
\ifAA@nocopyright\relax\else
by permission of \AA@publisher{}
\fi
for your personal use.
\ifx\AA@doi\@empty%
\relax
\else
The final publication is available at Springer via
\ifx\AA@doiText\@empty%
\url{https://doi.org/\AA@doi}.
\else
\href{https://doi.org/\AA@doi}{\AA@doiText}.
\fi
\fi
\else
\ifAA@nocopyright\relax\else
\textcopyright~\AA@year~\AA@publisher. %
\fi%
This is the author's
version of the work. It is posted
\ifAA@nourl\relax\else%
at \url{\AA@baseurl/\AA@key\AA@suffix} %
\fi
\ifAA@nocopyright\relax\else
by permission of \AA@publisher{} %
\fi
for your personal use. Not for redistribution. The definitive
version was published in \emph{\AA@publication}, \AA@pageinfo \AA@year%
\ifx\AA@doi\@empty%
\ifx\AA@publisherurl\@empty%
.%
\else
\url{\AA@publisherurl}.%
\fi
\else
\ifx\AA@doiText\@empty%
, doi: \href{https://doi.org/\AA@doi}{\AA@doi}.%
\else
, doi: \href{https://doi.org/\AA@doi}{\AA@doiText}.%
\fi
\fi
\fi
\ifAA@nobib\relax\else%
\ifthenelse{\boolean{AA@bibExists}}{%
\hfill
\begin{itemize*}[label={}, itemjoin={,}]
\IfFileExists{\AA@bibBibTeX}{%
\item \attachandlink{\AA@bibBibTeX}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}%
}{%
\IfFileExists{\AA@bibBibTeXLong}{%
\item \attachandlink[\AA@key.bib]{\AA@bibBibTeXLong}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}%
}{%
\typeout{No file \AA@bibBibTeX{} (and no \AA@bibBibTeXLong) found. No embedded reference in BibTeX format.}%
}%
}%
\IfFileExists{\AA@bibWord}{%
\item \attachandlink{\AA@bibWord}[application/xml]{XML entry of this paper (e.g., for Word 2007 and later)}{Word}%
}{%
\typeout{No file \AA@bibWord{} found. No embedded reference for Word 2007 and later.}%
}%
\IfFileExists{\AA@bibEndnote}{%
\item \attachandlink{\AA@bibEndnote}[application/x-endnote-refer]{Endnote entry of this paper}{EndNote}%
}{%
\typeout{No file \AA@bibEndnote{} found. No embedded reference in Endnote format.}%
}%
\IfFileExists{\AA@bibRIS}{%
\item \attachandlink{\AA@bibRIS}[application/x-research-info-systems]{RIS entry of this paper}{RIS}%
}{%
\typeout{No file \AA@bibRIS{} found. No embedded reference in RIS format.}%
}%
\end{itemize*}\\
}{%
\PackageError{authorarchive}{No bibliographic files found. Specify option 'nobib' if this is intended.}
}
\fi
}
}
\authorat{\raisebox{\the\ht\AA@authoratBox}{\usebox{\AA@authoratBox}}}
}

View File

@ -0,0 +1,46 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
exit 1
fi
cp $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh .
source build_lib.sh

View File

@ -0,0 +1,2 @@
Template: scrartcl
Ontology: scholarly_paper

View File

@ -1,16 +1,3 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Saclay
%% 2019 The University of Exeter
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
\definecolor{OliveGreen} {cmyk}{0.64,0,0.95,0.40}
\definecolor{BrickRed} {cmyk}{0,0.89,0.94,0.28}

View File

@ -22,6 +22,7 @@
\usepackage{listings}
\usepackage{lstisadof}
\usepackage{xspace}
\usepackage[draft]{fixme}
\lstloadlanguages{bash}
\lstdefinestyle{bash}{language=bash,
@ -52,19 +53,27 @@
\usepackage[caption]{subfig}
\usepackage[size=footnotesize]{caption}
\newcommand{\ie}{i.e.}
\newcommand{\eg}{e.g.}
\usepackage[LNCS,
orcidicon,
key=brucker.ea-isabelle-ontologies-2018,
year=2018,
publication={F. Rabe et al. (Eds.): CICM 2018, LNAI 11006},
nobib,
startpage={1},
doi={10.1007/978-3-319-96812-4_3},
doiText={10.1007/978-3-319-96812-4\_3},
]{authorarchive}
\authorrunning{A. D. Brucker et al.}
\pagestyle{headings}
\subject{Example of an Academic Paper\footnote{%
This document is an example setup for writing an academic paper. While
it is optimized for Springer's LNCS class, it uses a Koma Script
LaTeX class to avoid the need for distributing \texttt{llncs.cls},
which would violate Springer's copyright. This example has been
published at CICM 2018:
\protect\begin{quote}
Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and
Burkhart Wolff. Using The Isabelle Ontology Framework: Linking
the Formal with the Informal. In Conference on Intelligent
Computer Mathematics (CICM). Lecture Notes in Computer Science
(11006), Springer-Verlag, 2018.
\protect\end{quote}
Note that the content of this example is not updated and, hence,
might not be correct with respect to the latest version of
\isadof{}.
}}
\title{<TITLE>}

View File

@ -108,7 +108,7 @@
volume = 2283,
doi = {10.1007/3-540-45949-9},
abstract = {This book is a self-contained introduction to interactive
proof in higher-order logic HOL, using the proof
proof in higher-order logic (\acs{hol}), using the proof
assistant Isabelle2002. It is a tutorial for potential
users rather than a monograph for researchers. The book has
three parts.
@ -121,7 +121,7 @@
such advanced topics as nested and mutual recursion. 2.
Logic and Sets presents a collection of lower-level tactics
that you can use to apply rules selectively. It also
describes Isabelle/HOL's treatment of sets, functions
describes Isabelle/\acs{hol}'s treatment of sets, functions
and relations and explains how to define sets inductively.
One of the examples concerns the theory of model checking,
and another is drawn from a classic textbook on formal
@ -279,21 +279,3 @@
year = {2018}
}
@incollection{brucker.ea:isabelle-ontologies:2018,
abstract = {While Isabelle is mostly known as part of Isabelle/HOL (an interactive theorem prover), it actually provides a framework for developing a wide spectrum of applications. A particular strength of the Isabelle framework is the combination of text editing, formal verification, and code generation.\\\\Up to now, Isabelle's document preparation system lacks a mechanism for ensuring the structure of different document types (as, e.g., required in certification processes) in general and, in particular, mechanism for linking informal and formal parts of a document.\\\\In this paper, we present Isabelle/DOF, a novel Document Ontology Framework on top of Isabelle. Isabelle/DOF allows for conventional typesetting \emph{as well} as formal development. We show how to model document ontologies inside Isabelle/DOF, how to use the resulting meta-information for enforcing a certain document structure, and discuss ontology-specific IDE support.},
address = {Heidelberg},
author = {Achim D. Brucker and Idir Ait-Sadoune and Paolo Crisafulli and Burkhart Wolff},
booktitle = {Conference on Intelligent Computer Mathematics (CICM)},
doi = {10.1007/978-3-319-96812-4_3},
keywords = {Isabelle/Isar, HOL, Ontologies},
language = {USenglish},
location = {Hagenberg, Austria},
number = {11006},
pdf = {https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf},
publisher = {Springer-Verlag},
series = {Lecture Notes in Computer Science},
title = {Using The Isabelle Ontology Framework: Linking the Formal with the Informal},
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018},
year = {2018}
}

View File

@ -1,7 +0,0 @@
session "2020-iFM-csp" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", document_build = dof]
theories
"paper"
document_files
"root.bib"
"preamble.tex"

View File

@ -1,8 +0,0 @@
%% This is a placeholder for user-specific configuration and packages.
\usepackage{stmaryrd}
\title{<TITLE>}
\author{<AUTHOR>}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,2 +1 @@
2018-cicm-isabelle_dof-applications
2020-iFM-CSP

View File

@ -1,143 +1,44 @@
(*************************************************************************
* Copyright (C)
* 2019-2022 The University of Exeter
* 2018-2022 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory "00_Frontmatter"
imports "Isabelle_DOF.technical_report"
begin
section\<open>Local Document Setup.\<close>
text\<open>Introducing document specific abbreviations and macros:\<close>
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
define_shortcut* TeXLive \<rightleftharpoons> \<open>\TeXLive\<close>
BibTeX \<rightleftharpoons> \<open>\BibTeX{}\<close>
LaTeX \<rightleftharpoons> \<open>\LaTeX{}\<close>
TeX \<rightleftharpoons> \<open>\TeX{}\<close>
pdf \<rightleftharpoons> \<open>PDF\<close>
text\<open>Note that these setups assume that the associated \<^LaTeX> macros
are defined, \<^eg>, in the document prelude. \<close>
define_macro* index \<rightleftharpoons> \<open>\index{\<close> _ \<open>}\<close>
define_macro* bindex \<rightleftharpoons> \<open>\bindex{\<close> _ \<open>}\<close>
ML\<open>
fun boxed_text_antiquotation name (* redefined in these more abstract terms *) =
DOF_lib.gen_text_antiquotation name DOF_lib.report_text
(fn ctxt => DOF_lib.string_2_text_antiquotation ctxt
#> DOF_lib.enclose_env false ctxt "isarbox")
val neant = K(Latex.text("",\<^here>))
fun boxed_theory_text_antiquotation name (* redefined in these more abstract terms *) =
DOF_lib.gen_text_antiquotation name DOF_lib.report_theory_text
(fn ctxt => DOF_lib.string_2_theory_text_antiquotation ctxt
#> DOF_lib.enclose_env false ctxt "isarbox"
(* #> neant *)) (*debugging *)
fun boxed_sml_text_antiquotation name =
DOF_lib.gen_text_antiquotation name (K(K()))
(fn ctxt => Input.source_content
#> Latex.text
#> DOF_lib.enclose_env true ctxt "sml")
(* the simplest conversion possible *)
fun boxed_pdf_antiquotation name =
DOF_lib.gen_text_antiquotation name (K(K()))
(fn ctxt => Input.source_content
#> Latex.text
#> DOF_lib.enclose_env true ctxt "out")
(* the simplest conversion possible *)
fun boxed_latex_antiquotation name =
DOF_lib.gen_text_antiquotation name (K(K()))
(fn ctxt => Input.source_content
#> Latex.text
#> DOF_lib.enclose_env true ctxt "ltx")
(* the simplest conversion possible *)
fun boxed_bash_antiquotation name =
DOF_lib.gen_text_antiquotation name (K(K()))
(fn ctxt => Input.source_content
#> Latex.text
#> DOF_lib.enclose_env true ctxt "bash")
(* the simplest conversion possible *)
\<close>
setup\<open>(* std_text_antiquotation \<^binding>\<open>my_text\<close> #> *)
boxed_text_antiquotation \<^binding>\<open>boxed_text\<close> #>
(* std_text_antiquotation \<^binding>\<open>my_cartouche\<close> #> *)
boxed_text_antiquotation \<^binding>\<open>boxed_cartouche\<close> #>
(* std_theory_text_antiquotation \<^binding>\<open>my_theory_text\<close>#> *)
boxed_theory_text_antiquotation \<^binding>\<open>boxed_theory_text\<close> #>
boxed_sml_text_antiquotation \<^binding>\<open>boxed_sml\<close> #>
boxed_pdf_antiquotation \<^binding>\<open>boxed_pdf\<close> #>
boxed_latex_antiquotation \<^binding>\<open>boxed_latex\<close>#>
boxed_bash_antiquotation \<^binding>\<open>boxed_bash\<close>
\<close>
open_monitor*[this::report]
(*>*)
title*[title::title] \<open>Isabelle/DOF\<close>
title*[title::title]\<open>Isabelle/DOF\<close>
subtitle*[subtitle::subtitle]\<open>User and Implementation Manual\<close>
author*[ adb,
email ="\<open>a.brucker@exeter.ac.uk\<close>",
orcid ="\<open>0000-0002-6355-1200\<close>",
http_site ="\<open>https://www.brucker.ch/\<close>",
affiliation ="\<open>University of Exeter, Exeter, UK\<close>"]\<open>Achim D. Brucker\<close>
author*[ bu,
email = "\<open>wolff@lri.fr\<close>",
affiliation = "\<open>Université Paris-Saclay, LRI, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
text*[adb:: author,
email="\<open>a.brucker@exeter.ac.uk\<close>",
orcid="\<open>0000-0002-6355-1200\<close>",
http_site="\<open>https://www.brucker.ch/\<close>",
affiliation="\<open>University of Exeter, Exeter, UK\<close>"]\<open>Achim D. Brucker\<close>
text*[bu::author,
email = "\<open>wolff@lri.fr\<close>",
affiliation = "\<open>Université Paris-Saclay, LRI, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
abstract*[abs, keywordlist="[\<open>Ontology\<close>, \<open>Ontological Modeling\<close>, \<open>Document Management\<close>,
\<open>Formal Document Development\<close>,\<open>Document Authoring\<close>,\<open>Isabelle/DOF\<close>]"]
\<open> \<^isadof> provides an implementation of \<^dof> on top of Isabelle/HOL.
\<^dof> itself is a novel framework for \<^emph>\<open>defining\<close> ontologies
and \<^emph>\<open>enforcing\<close> them during document development and document
evolution. \<^isadof> targets use-cases such as mathematical texts referring
to a theory development or technical reports requiring a particular structure.
A major application of \<^dof> is the integrated development of
formal certification documents (\<^eg>, for Common Criteria or CENELEC
50128) that require consistency across both formal and informal
arguments.
text*[abs::abstract,
keywordlist="[''Ontology'', ''Ontological Modeling'', ''Document Management'',
''Formal Document Development'', ''Document Authoring'', ''Isabelle/DOF'']"]
\<open> \isadof provides an implementation of \dof on top of Isabelle/HOL.
\dof itself is a novel framework for \<^emph>\<open>defining\<close> ontologies
and \<^emph>\<open>enforcing\<close> them during document development and document
evolution. A major goal of \dof is the integrated development of
formal certification documents (\eg, for Common Criteria or CENELEC
50128) that require consistency across both formal and informal
arguments.
\<^isadof> is integrated into Isabelle's IDE, which
allows for smooth ontology development as well as immediate
ontological feedback during the editing of a document.
Its checking facilities leverage the collaborative
development of documents required to be consistent with an
underlying ontological structure.
In this user-manual, we give an in-depth presentation of the design
concepts of \<^dof>'s Ontology Definition Language (ODL) and describe
comprehensively its major commands. Many examples show typical best-practice
applications of the system.
\isadof is integrated into Isabelle's IDE, which
allows for smooth ontology development as well as immediate
ontological feedback during the editing of a document.
It is a unique feature of \<^isadof> that ontologies may be used to control
the link between formal and informal content in documents in a machine
checked way. These links can connect both text elements and formal
modeling elements such as terms, definitions, code and logical formulas,
altogether \<^emph>\<open>integrated\<close> in a state-of-the-art interactive theorem prover.
In this paper, we give an in-depth presentation of the design
concepts of \dof's Ontology Definition Language (ODL) and key
aspects of the technology of its implementation. \isadof is the
first ontology language supporting machine-checked
links between the formal and informal parts in an LCF-style
interactive theorem proving environment.
\<close>
(*<*)

View File

@ -1,16 +1,3 @@
(*************************************************************************
* Copyright (C)
* 2019-2021 The University of Exeter
* 2018-2021 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory "01_Introduction"
imports "00_Frontmatter"
@ -23,7 +10,7 @@ The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\
digitization of knowledge and its propagation. This challenge incites numerous research efforts
summarized under the labels ``semantic web,'' ``data mining,'' or any form of advanced ``semantic''
text processing. A key role in structuring this linking is played by \<^emph>\<open>document ontologies\<close> (also called
\<^emph>\<open>vocabulary\<close> in the semantic web community~@{cite "w3c:ontologies:2015"}), \<^ie>, a machine-readable
\<^emph>\<open>vocabulary\<close> in the semantic web community~@{cite "w3c:ontologies:2015"}), \ie, a machine-readable
form of the structure of documents as well as the document discourse.
Such ontologies can be used for the scientific discourse within scholarly articles, mathematical
@ -33,8 +20,8 @@ have to follow a structure. In practice, large groups of developers have to pro
set of documents where the consistency is notoriously difficult to maintain. In particular,
certifications are centered around the \<^emph>\<open>traceability\<close> of requirements throughout the entire
set of documents. While technical solutions for the traceability problem exists (most notably:
DOORS~@{cite "ibm:doors:2019"}), they are weak in the treatment of formal entities (such as formulas
and their logical contexts).
DOORS~\cite{doors}), they are weak in the treatment of formal entities (such as formulas and their
logical contexts).
Further applications are the domain-specific discourse in juridical texts or medical reports.
In general, an ontology is a formal explicit description of \<^emph>\<open>concepts\<close> in a domain of discourse
@ -42,21 +29,20 @@ In general, an ontology is a formal explicit description of \<^emph>\<open>conce
as \<^emph>\<open>links\<close> between them. A particular link between concepts is the \<^emph>\<open>is-a\<close> relation declaring
the instances of a subclass to be instances of the super-class.
To address this challenge, we present the Document Ontology Framework (\<^dof>) and an
implementation of \<^dof> called \<^isadof>. \<^dof> is designed for building scalable and user-friendly
tools on top of interactive theorem provers. \<^isadof> is an instance of this novel framework,
implemented as extension of Isabelle/HOL, to \<^emph>\<open>model\<close> typed ontologies and to \<^emph>\<open>enforce\<close> them
during document evolution. Based on Isabelle's infrastructures, ontologies may refer to types,
terms, proven theorems, code, or established assertions. Based on a novel adaption of the Isabelle
IDE (called PIDE, @{cite "wenzel:asynchronous:2014"}), a document is checked to be
\<^emph>\<open>conform\<close> to a particular ontology---\<^isadof> is designed to give fast user-feedback \<^emph>\<open>during the
To address this challenge, we developed the Document Ontology Framework (\dof) together with an
implementation of \dof called \isadof. \dof is designed for building scalable and user-friendly
tools on top of interactive theorem provers. \isadof is a novel framework, extending
Isabelle/HOL, to \<^emph>\<open>model\<close> typed ontologies and to \<^emph>\<open>enforce\<close> them during document evolution. Based
on Isabelle infrastructures, ontologies may refer to types, terms, proven theorems, code, or
established assertions. Based on a novel adaption of the Isabelle IDE, a document is checked to be
\<^emph>\<open>conform\<close> to a particular ontology---\isadof is designed to give fast user-feedback \<^emph>\<open>during the
capture of content\<close>. This is particularly valuable in case of document evolution, where the
\<^emph>\<open>coherence\<close> between the formal and the informal parts of the content can be mechanically checked.
To avoid any misunderstanding: \<^isadof> is \<^emph>\<open>not a theory in HOL\<close> on ontologies and operations to
To avoid any misunderstanding: \isadof is \<^emph>\<open>not a theory in HOL\<close> on ontologies and operations to
track and trace links in texts, it is an \<^emph>\<open>environment to write structured text\<close> which
\<^emph>\<open>may contain\<close> Isabelle/HOL definitions and proofs like mathematical articles, tech-reports and
scientific papers---as the present one, which is written in \<^isadof> itself. \<^isadof> is a plugin
scientific papers---as the present one, which is written in \isadof itself. \isadof is a plugin
into the Isabelle/Isar framework in the style of~@{cite "wenzel.ea:building:2007"}.\<close>
subsubsection\<open>How to Read This Manual\<close>
@ -69,41 +55,50 @@ declare_reference*[isadof_developers::text_section]
text\<open>
This manual can be read in different ways, depending on what you want to accomplish. We see three
different main user groups:
\<^enum> \<^emph>\<open>\<^isadof> users\<close>, \<^ie>, users that just want to edit a core document, be it for a paper or a
\<^enum> \<^emph>\<open>\isadof users\<close>, \ie, users that just want to edit a core document, be it for a paper or a
technical report, using a given ontology. These users should focus on
@{docitem (unchecked) \<open>isadof_tour\<close>} and, depending on their knowledge of Isabelle/HOL, also
@{docitem (unchecked) \<open>background\<close>}.
\<^enum> \<^emph>\<open>Ontology developers\<close>, \<^ie>, users that want to develop new ontologies or modify existing
@{docitem_ref (unchecked) \<open>isadof_tour\<close>} and, depending on their knowledge of Isabelle/HOL, also
@{docitem_ref (unchecked) \<open>background\<close>}.
\<^enum> \<^emph>\<open>Ontology developers\<close>, \ie, users that want to develop new ontologies or modify existing
document ontologies. These users should, after having gained acquaintance as a user, focus
on @{docitem (unchecked) \<open>isadof_ontologies\<close>}.
\<^enum> \<^emph>\<open>\<^isadof> developers\<close>, \<^ie>, users that want to extend or modify \<^isadof>, \<^eg>, by adding new
text-elements. These users should read @{docitem (unchecked) \<open>isadof_developers\<close>}
on @{docitem_ref (unchecked) \<open>isadof_ontologies\<close>}.
\<^enum> \<^emph>\<open>\isadof developers\<close>, \ie, users that want to extend or modify \isadof, \eg, by adding new
text-elements. These users should read @{docitem_ref (unchecked) \<open>isadof_developers\<close>}
\<close>
subsubsection\<open>Typographical Conventions\<close>
text\<open>
We acknowledge that understanding \<^isadof> and its implementation in all details requires
We acknowledge that understanding \isadof and its implementation in all details requires
separating multiple technological layers or languages. To help the reader with this, we
will type-set the different languages in different styles. In particular, we will use
\<^item> a light-blue background for input written in Isabelle's Isar language, \<^eg>:
@{boxed_theory_text [display]
\<open>lemma refl: "x = x"
by simp\<close>}
\<^item> a green background for examples of generated document fragments (\<^ie>, PDF output):
@{boxed_pdf [display] \<open>The axiom refl\<close>}
\<^item> a red background for (S)ML-code:
@{boxed_sml [display] \<open>fun id x = x\<close>}
\<^item> a light-blue background for input written in Isabelle's Isar language, \eg:
\begin{isar}
lemma refl: "x = x"
by simp
\end{isar}
\<^item> a green background for examples of generated document fragments (\ie, PDF output):
\begin{out}
The axiom refl
\end{out}
\<^item> a red background for (S)ML-code:
\begin{sml}
fun id x = x
\end{sml}
\<^item> a yellow background for \LaTeX-code:
@{boxed_latex [display] \<open>\newcommand{\refl}{$x = x$}\<close>}
\begin{ltx}
\newcommand{\refl}{$x = x$}
\end{ltx}
\<^item> a grey background for shell scripts and interactive shell sessions:
@{boxed_bash [display]\<open>ë\prompt{}ë ls
CHANGELOG.md CITATION examples install LICENSE README.md ROOTS src\<close>}
\begin{bash}
ë\prompt{}ë ls
CHANGELOG.md CITATION examples install LICENSE README.md ROOTS src
\end{bash}
\<close>
subsubsection\<open>How to Cite \<^isadof>\<close>
subsubsection\<open>How to Cite \isadof\<close>
text\<open>
If you use or extend \<^isadof> in your publications, please use
\<^item> for the \<^isadof> system~@{cite "brucker.ea:isabelle-ontologies:2018"}:
If you use or extend \isadof in your publications, please use
\<^item> for the \isadof system~@{cite "brucker.ea:isabelle-ontologies:2018"}:
\begin{quote}\small
A.~D. Brucker, I.~Ait-Sadoune, P.~Crisafulli, and B.~Wolff. Using the {Isabelle} ontology
framework: Linking the formal with the informal. In \<^emph>\<open>Conference on Intelligent Computer
@ -111,30 +106,21 @@ text\<open>
Heidelberg, 2018. \href{https://doi.org/10.1007/978-3-319-96812-4\_3}
{10.1007/978-3-319-96812-4\_3}.
\end{quote}
A \<^BibTeX>-entry is available at:
\<^url>\<open>https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018\<close>.
\<^item> for the implementation of \<^isadof>~@{cite "brucker.ea:isabelledof:2019"}:
A \BibTeX-entry is available at:
\url{https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018}.
\<^item> for the implementation of \isadof~@{cite "brucker.ea:isabelledof:2019"}:
\begin{quote}\small
A.~D. Brucker and B.~Wolff. \<^isadof>: Design and implementation. In P.C.~{\"O}lveczky and
G.~Sala{\"u}n, editors, \<^emph>\<open>Software Engineering and Formal Methods (SEFM)\<close>, number 11724 in
Lecture Notes in Computer Science. Springer-Verlag, Heidelberg, 2019.
\href{https://doi.org/10.1007/978-3-030-30446-1_15}{10.1007/978-3-030-30446-1\_15}.
\end{quote}
A \<^BibTeX>-entry is available at:
\<^url>\<open>https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelledof-2019\<close>.
\<^item> for an application of \<^isadof> in the context of certifications:
\begin{quote}\small
A.~D. Brucker and B.~Wolff.
Using Ontologies in Formal Developments Targeting Certification.
  In W. Ahrendt and S. Tarifa, editors, \<^emph>\<open>Integrated Formal Methods (IFM)\<close>, number 11918 in
  Lecture Notes in Computer Science. Springer-Verlag, Heidelberg, 2019.
  \href{https://doi.org/10.1007/978-3-030-34968-4\_4}{10.1007/978-3-030-34968-4\_4}.
A.~D. Brucker and B.~Wolff. \isadof: Design and implementation. In P.~{\"O}lveczky and
G.~Sala{\"u}n, editors, \<^emph>\<open>Software Engineering and Formal Methods (SEFM)\<close>, Lecture Notes
in Computer Science. Springer-Verlag, Heidelberg, 2019.
\end{quote}
A \BibTeX-entry is available at:
\url{https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelledof-2019}.
\<close>
subsubsection\<open>Availability\<close>
text\<open>
The implementation of the framework is available at
\url{\dofurl}. The website also provides links to the latest releases. \<^isadof> is licensed
\url{\dofurl}. The website also provides links to the latest releases. \isadof is licensed
under a 2-clause BSD license (SPDX-License-Identifier: BSD-2-Clause).
\<close>

View File

@ -1,16 +1,3 @@
(*************************************************************************
* Copyright (C)
* 2019-2022 The University of Exeter
* 2018-2022 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory "02_Background"
imports "01_Introduction"
@ -26,7 +13,7 @@ figure*[architecture::figure,relative_width="95",src="''figures/isabelle-archite
the IDE (right-hand side). \<close>
text*[bg::introduction]\<open>
While Isabelle is widely perceived as an interactive theorem
While Isabelle @{cite "nipkow.ea:isabelle:2002"} is widely perceived as an interactive theorem
prover for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we would like to emphasize
the view that Isabelle is far more than that: it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This
refers to the ``\<^emph>\<open>generic system framework of Isabelle/Isar underlying recent versions of Isabelle.
@ -37,31 +24,29 @@ with explicit infrastructure for building derivative systems.\<close>''~@{cite "
The current system framework offers moreover the following features:
\<^item> a build management grouping components into pre-compiled sessions,
\<^item> a prover IDE (PIDE) framework~@{cite "wenzel:asynchronous:2014"} with various front-ends,
\<^item> a prover IDE (PIDE) framework~@{cite "wenzel:asynchronous:2014"} with various front-ends
\<^item> documentation-generation,
\<^item> code generators for various target languages,
\<^item> an extensible front-end language Isabelle/Isar, and,
\<^item> last but not least, an LCF style, generic theorem prover kernel as
the most prominent and deeply integrated system component.
\<close>
text\<open>
The Isabelle system architecture shown in @{docitem \<open>architecture\<close>} comes with many layers,
The Isabelle system architecture shown in @{docitem_ref \<open>architecture\<close>} comes with many layers,
with Standard ML (SML) at the bottom layer as implementation language. The architecture actually
foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which resides in the SML structure \<^boxed_sml>\<open>Context\<close>.
foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which resides in the SML structure \inlinesml{Context}.
This structure provides a kind of container called \<^emph>\<open>context\<close> providing an identity, an
ancestor-list as well as typed, user-defined state for components (plugins) such as \<^isadof>.
On top of the latter, the LCF-Kernel, tactics, automated proof procedures as well as specific
ancestor-list as well as typed, user-defined state for components (plugins) such as \isadof.
On top of the latter, the LCF-Kernel, tactics, automated proof procedures as well as specific
support for higher specification constructs were built.\<close>
section*[dof::introduction]\<open>The Document Model Required by \<^dof>\<close>
section*[dof::introduction]\<open>The Document Model Required by \dof\<close>
text\<open>
In this section, we explain the assumed document model underlying our Document Ontology Framework
(\<^dof>) in general. In particular we discuss the concepts
\<^emph>\<open>integrated document\<close>\<^bindex>\<open>integrated document\<close>, \<^emph>\<open>sub-document\<close>\<^bindex>\<open>sub-document\<close>,
\<^emph>\<open>text-element\<close>\<^bindex>\<open>text-element\<close>, and \<^emph>\<open>semantic macros\<close>\<^bindex>\<open>semantic macros\<close> occurring
inside text-elements. Furthermore, we assume two different levels of parsers
(for \<^emph>\<open>outer\<close> and \<^emph>\<open>inner syntax\<close>) where the inner-syntax is basically a typed \<open>\<lambda>\<close>-calculus
and some Higher-order Logic (HOL)\<^bindex>\<open>HOL\<close>.
(\dof) in general. In particular we discuss the concepts \<^emph>\<open>integrated document\<close>, \<^emph>\<open>sub-document\<close>,
\<^emph>\<open>text-element\<close> and \<^emph>\<open>semantic macros\<close> occurring inside text-elements. Furthermore, we assume two
different levels of parsers (for \<^emph>\<open>outer\<close> and \<^emph>\<open>inner syntax\<close>) where the inner-syntax is basically
a typed \inlineisar|\<lambda>|-calculus and some Higher-order Logic (HOL).
\<close>
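text\<open>
As a minimal illustration of these two levels (assuming only the standard \inlineisar{Main}
library), the keywords \inlineisar{lemma} and \inlineisar{by} below belong to the outer syntax,
while the quoted proposition is parsed and type-checked by the inner syntax:
\begin{isar}
lemma "rev (rev xs) = xs"
  by simp
\end{isar}
\<close>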
(*<*)
@ -70,136 +55,105 @@ declare_reference*["fig:dependency"::text_section]
text\<open>
The Isabelle Framework is based on a \<^emph>\<open>document-centric view\<close>\<^bindex>\<open>document-centric view\<close> of
a document, treating the input in its entirety as a set of (user-programmable) \<^emph>\<open>document elements\<close>
that may mutually depend on and link to each other; a \<^emph>\<open>document\<close> in our sense is what is configured in a set of
\<^verbatim>\<open>ROOT\<close>- and \<^verbatim>\<open>ROOTS\<close>-files.
Isabelle assumes a hierarchical document model\<^index>\<open>document model\<close>, \<^ie>, an \<^emph>\<open>integrated\<close> document
consists of a hierarchy of \<^emph>\<open>sub-documents\<close> (files); dependencies are restricted to be
acyclic at this level.
We assume a hierarchical document model\index{document model}, \ie, an \<^emph>\<open>integrated\<close> document
consists of a hierarchy of \<^emph>\<open>sub-documents\<close> (files) that can depend acyclically on each other.
Sub-documents can have different document types in order to capture documents consisting of
documentation, models, proofs, code of various forms, and other technical artifacts. We call the
main sub-document type, for historical reasons, \<^emph>\<open>theory\<close>-files. A theory file\<^bindex>\<open>theory!file\<close>
consists of a \<^emph>\<open>header\<close>\<^bindex>\<open>header\<close>, a \<^emph>\<open>context definition\<close>\<^index>\<open>context\<close>, and a body
consisting of a sequence of document elements called
\<^emph>\<open>command\<close>s (see @{figure (unchecked) "fig:dependency"}). Even
the header consists of a sequence of commands used for introductory text elements not depending on
any context. The context-definition contains an \<^boxed_theory_text>\<open>import\<close> and a
\<^boxed_theory_text>\<open>keyword\<close> section, for example:
@{boxed_theory_text [display]\<open>
theory Example \<comment>\<open>Name of the 'theory'\<close>
imports \<comment>\<open>Declaration of 'theory' dependencies\<close>
Main \<comment>\<open>Imports a library called 'Main'\<close>
keywords \<comment>\<open>Registration of keywords defined locally\<close>
requirement \<comment>\<open>A command for describing requirements\<close> \<close>}
where \<^boxed_theory_text>\<open>Example\<close> is the abstract name of the text-file, \<^boxed_theory_text>\<open>Main\<close>
refers to an imported theory (recall that the import relation must be acyclic) and
\<^boxed_theory_text>\<open>keywords\<close> are used to separate commands from each other.
\<close>
main sub-document type, for historical reasons, \<^emph>\<open>theory\<close>-files. A theory file\bindex{theory!file}
consists of a \<^emph>\<open>header\<close>\bindex{header}, a \<^emph>\<open>context definition\<close>\index{context}, and a body
consisting of a sequence of \<^emph>\<open>command\<close>s (see @{figure (unchecked) "fig:dependency"}). Even the header consists
of a sequence of commands used for introductory text elements not depending on any context.
The context-definition contains an \inlineisar{import} and a
\inlineisar{keyword} section, for example:
\begin{isar}
" theory Example (* Name of the 'theory' *)
" imports (* Declaration of 'theory' dependencies *)
" Main (* Imports a library called 'Main' *)
" keywords (* Registration of keywords defined locally *)
" requirement (* A command for describing requirements *)
\end{isar}
where \inlineisar{Example} is the abstract name of the text-file,
\inlineisar{Main} refers to an imported theory (recall that the import
relation must be acyclic) and \inlineisar{keywords} are used to
separate commands from each other.
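For instance, a complete minimal header of a sub-document that only depends on the HOL library
(and registers no additional keywords) reads as follows; the first command of the body may then
follow directly after \inlineisar{begin}:
\begin{isar}
theory Minimal_Example
  imports Main
begin
section\<Open>Overview\<Close>
end
\end{isar}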
text\<open> A text-element \<^index>\<open>text-element\<close> may look like this:
We distinguish fundamentally two different syntactic levels:
\<^item> the \emph{outer-syntax}\bindex{syntax!outer}\index{outer syntax|see {syntax, outer}} (\ie, the
syntax for commands) is processed by a lexer-library and parser combinators built on top, and
\<^item> the \emph{inner-syntax}\bindex{syntax!inner}\index{inner syntax|see {syntax, inner}} (\ie, the
syntax for \inlineisar|\<lambda>|-terms in HOL) with its own parametric polymorphism type
checking.
@{boxed_theory_text [display]\<open>
text\<open> According to the \<^emph>\<open>reflexivity\<close> axiom @{thm refl},
we obtain in \<Gamma> for @{term "fac 5"} the result @{value "fac 5"}.\<close>\<close>}
... so it is a command \<^theory_text>\<open>text\<close> followed by an argument (here in \<open>\<open> ... \<close>\<close> parentheses) which
contains characters and a special notation for semantic macros \<^bindex>\<open>semantic macros\<close>
(here \<^theory_text>\<open>@{term "fac 5"}\<close>).
\<close>
text\<open>While we concentrate in this manual on \<^theory_text>\<open>text\<close>-document elements --- this is the main
use of \<^dof> in its current stage --- it is important to note that there are actually three
families of ``ontology aware'' document elements with analogous
syntax to standard ones. The difference is a bracket with meta-data of the form:
@{theory_text [display,indent=5, margin=70]
\<open>
text*[label::classid, attr\<^sub>1=E\<^sub>1, ... attr\<^sub>n=E\<^sub>n]\<open> some semi-formal text \<close>
ML*[label::classid, attr\<^sub>1=E\<^sub>1, ... attr\<^sub>n=E\<^sub>n]\<open> some SML code \<close>
value*[label::classid, attr\<^sub>1=E\<^sub>1, ... attr\<^sub>n=E\<^sub>n]\<open> some annotated \<lambda>-term \<close>
\<close>}
Depending on the family, we will speak about \<^emph>\<open>(formal) text-contexts\<close>,\<^index>\<open>formal text-contexts\<close>
\<^emph>\<open>(ML) code-contexts\<close>\<^index>\<open>code-contexts\<close> and \<^emph>\<open>term-contexts\<close>\<^index>\<open>term-contexts\<close> if we refer
to sub-elements inside the \<open>\<open>...\<close>\<close> cartouches of these command families. Note that the Isabelle
framework allows for nesting cartouches, which permits switching into a different
context. In general, this also has the effect that the evaluation of antiquotations changes.
\<^footnote>\<open>In the literature, this concept has been referred to as \<open>Cascade-Syntax\<close>; it was used in the
Centaur-system and exists in some limited form in some Emacs implementations these days. \<close>
\<close>
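text\<open>
As a purely illustrative sketch (the class \inlineisar+requirement+ and its attribute
\inlineisar+status+ are hypothetical and would have to be declared in an ontology first),
an instance of such an ontology-aware text element might read:
\begin{isar}
text*[req1::requirement, status="draft"]
  \<Open>The odometry subsystem shall sample its sensors at 14 kHz.\<Close>
\end{isar}
\<close>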
text\<open>
On the semantic level, we assume a validation process for an integrated document, where the
semantics of a command is a transformation \<open>\<theta> \<rightarrow> \<theta>\<close> for some system state \<open>\<theta>\<close>.
This document model can be instantiated depending on the text-, code-, or term-contexts.
For common text elements, \<^eg>, \<^theory_text>\<open>section\<open>...\<close>\<close> or \<^theory_text>\<open>text\<open>...\<close>\<close>,
users can add informal text to a sub-document using a text command:
@{boxed_theory_text [display] \<open>text\<open>This is a description.\<close>\<close> }
semantics of a command is a transformation \inlineisar+\<theta> \<rightarrow> \<theta>+ for some system state
\inlineisar+\<theta>+. This document model can be instantiated with outer-syntax commands for common
text elements, \eg, \inlineisar+section{*...*}+ or \inlineisar+text{*...*}+. Thus, users can add
informal text to a sub-document using a text command:
\begin{isar}
text\<Open>This is a description.\<Close>
\end{isar}
This will type-set the corresponding text in, for example, a PDF document. However, this
translation is not necessarily one-to-one: text elements can be enriched by formal, \<^ie>,
machine-checked content via \<^emph>\<open>semantic macros\<close>, called antiquotations\<^bindex>\<open>antiquotation\<close>:
@{boxed_theory_text [display]
\<open>text\<open> According to the \<^emph>\<open>reflexivity\<close> axiom @{thm "refl"}, we obtain in \<Gamma>
for @{term \<open>fac 5\<close>} the result @{value \<open>fac 5\<close>}.\<close>\<close>
}
which is represented in the final document (\<^eg>, a PDF) by:
@{boxed_pdf [display]
\<open>According to the $\emph{reflexivity}$ axiom $\mathrm{x = x}$, we obtain in $\Gamma$
for $\operatorname{fac} \text{\textrm{5}}$ the result $\text{\textrm{120}}$.\<close>
}
Semantic macros are partial functions of type \<open>\<theta> \<rightarrow> text\<close>; since they can use the
translation is not necessarily one-to-one: text elements can be enriched by formal, \ie,
machine-checked content via \emph{semantic macros}, called antiquotations\bindex{antiquotation}:
\begin{isar}
text\<Open>According to the reflexivity axiom <@>{thm refl}, we obtain in \<Gamma>
for <@>{term "fac 5"} the result <@>{value "fac 5"}.\<Close>
\end{isar}
which is represented in the final document (\eg, a PDF) by:
\begin{out}
According to the reflexivity axiom $\mathrm{x = x}$, we obtain in $\Gamma$ for $\operatorname{fac} \text{\textrm{5}}$ the result $\text{\textrm{120}}$.
\end{out}
Semantic macros are partial functions of type \inlineisar+\<theta> \<rightarrow> text+; since they can use the
system state, they can perform all sorts of specific checks or evaluations (type-checks,
executions of code-elements, references to text-elements or proven theorems such as
\<open>refl\<close>, which is the reference to the axiom of reflexivity).
\inlineisar+refl+, which is the reference to the axiom of reflexivity).
Semantic macros establish \<^emph>\<open>formal content\<close> inside informal content; they can be
type-checked before being displayed and can be used for calculations before being
typeset. They represent the device for linking the formal with the informal.
\<close>
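text\<open>
For the sake of completeness, here is a minimal definition that makes the running example above
check and evaluate; the antiquotations only succeed because \inlineisar+fac+ is defined in the
logical context:
\begin{isar}
fun fac :: "nat \<Rightarrow> nat"
  where "fac 0 = 1" | "fac (Suc n) = Suc n * fac n"
text\<Open>... for <@>{term "fac 5"} the result <@>{value "fac 5"}.\<Close>
\end{isar}
\<close>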
figure*["fig:dependency"::figure,relative_width="70",src="''figures/document-hierarchy''"]
\<open>A Theory-Graph in the Document Model. \<close>
section*[bgrnd21::introduction]\<open>Implementability of the Document Model in other ITP's\<close>
section*[bgrnd21::introduction]\<open>Implementability of the Required Document Model.\<close>
text\<open>
Batch-mode checkers for \<^dof> can be implemented in all systems of the LCF-style prover family,
\<^ie>, systems with a type-checked \<open>term\<close>, and abstract \<open>thm\<close>-type for theorems
(protected by a kernel). This includes, \<^eg>, ProofPower, HOL4, HOL-light, Isabelle, or Coq
and its derivatives. \<^dof> is, however, designed for fast interaction in an IDE. If a user wants
Batch-mode checkers for \dof can be implemented in all systems of the LCF-style prover family,
\ie, systems with a type-checked \inlinesml{term}, and abstract \inlinesml{thm}-type for
theorems (protected by a kernel). This includes, \eg, ProofPower, HOL4, HOL-light, Isabelle, or
Coq and its derivatives. \dof is, however, designed for fast interaction in an IDE. If a user wants
to benefit from this experience, only Isabelle and Coq have the necessary infrastructure of
asynchronous proof-processing and support by a PIDE~@{cite "wenzel:asynchronous:2014" and
"wenzel:system:2014" and "barras.ea:pervasive:2013" and "faithfull.ea:coqoon:2018"} which
in many respects exceeds the features required by \<^dof>. For example, current Isabelle
versions offer cascade-syntaxes (different syntaxes and even parser-technologies which can be
nested along the \<open>\<open>...\<close>\<close> barriers), while \<^dof> actually only requires a two-level syntax model.
asynchronous proof-processing and support by a PIDE~@{cite "DBLP:conf/itp/Wenzel14" and
"DBLP:journals/corr/Wenzel14" and "DBLP:conf/mkm/BarrasGHRTWW13"
and "Faithfull:2018:COQ:3204179.3204223"} which in many features over-accomplishes the required
features of \dof. For example, current Isabelle versions offer cascade-syntaxes (different
syntaxes and even parser-technologies which can be nested along the
\inlineisar+\<Open> ... \<Close> + barriers), while \dof actually only requires a two-level
syntax model.
\<close>
figure*["fig:dof-ide"::figure,relative_width="95",src="''figures/cicm2018-combined''"]\<open>
The \<^isadof> IDE (left) and the corresponding PDF (right), showing the first page
of~@{cite "brucker.ea:isabelle-ontologies:2018"}.\<close>
The \isadof IDE (left) and the corresponding PDF (right), showing the first page
of~\cite{brucker.ea:isabelle-ontologies:2018}.\<close>
text\<open>
We call the present implementation of \<^dof> on the Isabelle platform \<^isadof>.
We call the present implementation of \dof on the Isabelle platform \isadof.
@{docitem "fig:dof-ide"} shows a screen-shot of an introductory paper on
\<^isadof>~@{cite "brucker.ea:isabelle-ontologies:2018"}: the \<^isadof> PIDE can be seen on the left,
\isadof~@{cite "brucker.ea:isabelle-ontologies:2018"}: the \isadof PIDE can be seen on the left,
while the generated presentation in PDF is shown on the right.
Isabelle provides, beyond the features required for \<^dof>, a lot of additional benefits.
Besides UTF8-support for characters used in text-elements, Isabelle already offers a built-in
mechanism for user-programmable antiquotations \<^index>\<open>antiquotations\<close> which we use to implement
semantic macros \<^index>\<open>semantic macros\<close> in \<^isadof> (we will actually use these two terms
as synonyms in the context of \<^isadof>). Moreover, \<^isadof> allows for the asynchronous
evaluation and checking of the document content~@{cite "wenzel:asynchronous:2014" and
"wenzel:system:2014" and "barras.ea:pervasive:2013"} and is dynamically extensible. Its PIDE
provides a \<^emph>\<open>continuous build, continuous check\<close> functionality, syntax highlighting, and
auto-completion. It also provides infrastructure for displaying meta-information (\<^eg>, binding
and type annotation) as pop-ups, while hovering over sub-expressions. A fine-grained dependency
analysis allows the processing of individual parts of theory files asynchronously, allowing
Isabelle to interactively process large (hundreds of theory files) documents. Isabelle can group
sub-documents into sessions, \<^ie>, sub-graphs of the document-structure that can be ``pre-compiled''
and loaded instantaneously, \<^ie>, without re-processing, which is an important means to scale up. \<close>
Isabelle provides, beyond the features required for \dof, a lot of additional benefits. For
example, it also allows the asynchronous evaluation and checking of the document
content~@{cite "DBLP:conf/itp/Wenzel14" and "DBLP:journals/corr/Wenzel14" and
"DBLP:conf/mkm/BarrasGHRTWW13"} and is dynamically extensible. Its PIDE provides a
\<^emph>\<open>continuous build, continuous check\<close> functionality, syntax highlighting, and auto-completion.
It also provides infrastructure for displaying meta-information (\eg, binding and type annotation)
as pop-ups, while hovering over sub-expressions. A fine-grained dependency analysis allows the
processing of individual parts of theory files asynchronously, allowing Isabelle to interactively
process large (hundreds of theory files) documents. Isabelle can group sub-documents into sessions,
\ie, sub-graphs of the document-structure that can be ``pre-compiled'' and loaded
instantaneously, \ie, without re-processing. \<close>
(*<*)
end

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,16 +1,3 @@
(*************************************************************************
* Copyright (C)
* 2019-2022 University of Exeter
* 2018-2022 University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory "05_Implementation"
imports "04_RefMan"
@ -18,22 +5,22 @@ begin
(*>*)
chapter*[isadof_developers::text_section]\<open>Extending \<^isadof>\<close>
chapter*[isadof_developers::text_section]\<open>Extending \isadof\<close>
text\<open>
In this chapter, we describe the basic implementation aspects of \<^isadof>, which is based on
In this chapter, we describe the basic implementation aspects of \isadof, which is based on
the following design-decisions:
\<^item> the entire \<^isadof> is a ``pure add-on,'' \<^ie>, we deliberately forgo the possibility to
modify Isabelle itself,
\<^item> we made a small exception to this rule: the \<^isadof> package modifies in its installation
about 10 lines in the \LaTeX-generator (\path{src/patches/thy_output.ML}),
\<^item> the entire \isadof is a ``pure add-on,'' \ie, we deliberately forgo the possibility to
modify Isabelle itself.
\<^item> we made a small exception to this rule: the \isadof package modifies in its installation
about 10 lines in the \LaTeX-generator (\path{src/patches/thy_output.ML}).
\<^item> we decided to implement the markup-generation ourselves, to adapt it as well as possible to the
needs of tracking the linking in documents,
\<^item> \<^isadof> is deeply integrated into the Isabelle's IDE (PIDE) to give immediate feedback during
needs of tracking the linking in documents.
\<^item> \isadof is deeply integrated into the Isabelle's IDE (PIDE) to give immediate feedback during
editing and other forms of document evolution.
\<close>
text\<open>
Semantic macros, as required by our document model, are called \<^emph>\<open>document antiquotations\<close>
in the Isabelle literature~@{cite "wenzel:isabelle-isar:2020"}. While Isabelle's code-antiquotations
in the Isabelle literature~@{cite "wenzel:isabelle-isar:2019"}. While Isabelle's code-antiquotations
are an old concept going back to Lisp that found their way via SML and OCaml into modern
proof systems, special annotation syntaxes inside documentation comments have their roots in
documentation generators such as Javadoc. Their use, however, as a mechanism to embed
@ -41,57 +28,61 @@ text\<open>
IDE support.
\<close>
section\<open>\<^isadof>: A User-Defined Plugin in Isabelle/Isar\<close>
text\<open>
section\<open>\isadof: A User-Defined Plugin in Isabelle/Isar\<close>
text\<open>
A plugin in Isabelle starts with defining the local data and registering it in the framework. As
mentioned before, contexts are structures with independent cells/compartments having three
primitives \<^boxed_sml>\<open>init\<close>, \<^boxed_sml>\<open>extend\<close> and \<^boxed_sml>\<open>merge\<close>. Technically this is done by
instantiating a functor \<^boxed_sml>\<open>Generic_Data\<close>, and the following fairly typical code-fragment
is drawn from \<^isadof>:
primitives \inlinesml+init+, \inlinesml+extend+ and \inlinesml+merge+. Technically this is done by
instantiating a functor \inlinesml+Generic_Data+, and the following fairly typical code-fragment
is drawn from \isadof:
@{boxed_sml [display]
\<open>structure Data = Generic_Data
\begin{sml}
structure Data = Generic_Data
( type T = docobj_tab * docclass_tab * ...
val empty = (initial_docobj_tab, initial_docclass_tab, ...)
val extend = I
fun merge((d1,c1,...),(d2,c2,...)) = (merge_docobj_tab (d1,d2,...),
merge_docclass_tab(c1,c2,...))
);\<close>}
where the table \<^boxed_sml>\<open>docobj_tab\<close> manages document class instances
and \<^boxed_sml>\<open>docclass_tab\<close> the environment for class definitions
(inducing the inheritance relation). Other tables capture, \eg,
);
\end{sml}
where the table \inlinesml+docobj_tab+ manages document classes and \inlinesml+docclass_tab+ the
environment for class definitions (inducing the inheritance relation). Other tables capture, \eg,
the class invariants and inner-syntax antiquotations. Operations follow the MVC-pattern, where
Isabelle/Isar provides the controller part. A typical model operation has the type:
@{boxed_sml [display]
\<open>val opn :: <args_type> -> Context.generic -> Context.generic\<close>}
\begin{sml}
val opn :: <args_type> -> Context.generic -> Context.generic
\end{sml}
representing a transformation on system contexts. For example, the operation of declaring a local
reference in the context is presented as follows:
@{boxed_sml [display]
\<open>fun declare_object_local oid ctxt =
\begin{sml}
fun declare_object_local oid ctxt =
let fun decl {tab,maxano} = {tab=Symtab.update_new(oid,NONE) tab,
maxano=maxano}
in (Data.map(apfst decl)(ctxt)
handle Symtab.DUP _ =>
error("multiple declaration of document reference"))
end\<close>}
where \<^boxed_sml>\<open>Data.map\<close> is the update function resulting from the instantiation of the
functor \<^boxed_sml>\<open>Generic_Data\<close>. This code fragment uses operations from a library structure
\<^boxed_sml>\<open>Symtab\<close> that were used to update the appropriate table for document objects in
end
\end{sml}
where \inlinesml+Data.map+ is the update function resulting from the instantiation of the
functor \inlinesml|Generic_Data|. This code fragment uses operations from a library structure
\inlinesml+Symtab+ that were used to update the appropriate table for document objects in
the plugin-local state. Possible exceptions to the update operation were mapped to a system-global
error reporting function.
Finally, the view-aspects were handled by an API for parsing-combinators. The library structure
\<^boxed_sml>\<open>Scan\<close> provides the operators:
\inlinesml+Scan+ provides the operators:
@{boxed_sml [display]
\<open>op || : ('a -> 'b) * ('a -> 'b) -> 'a -> 'b
\begin{sml}
op || : ('a -> 'b) * ('a -> 'b) -> 'a -> 'b
op -- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e
op >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c
op option : ('a -> 'b * 'a) -> 'a -> 'b option * 'a
op repeat : ('a -> 'b * 'a) -> 'a -> 'b list * 'a \<close>}
op repeat : ('a -> 'b * 'a) -> 'a -> 'b list * 'a
\end{sml}
for alternative, sequence, and piping, as well as combinators for option and repeat. Parsing
combinators have the advantage that they can be integrated into standard programs,
combinators have the advantage that they can be seamlessly integrated into standard programs,
and they enable the dynamic extension of the grammar. There is a more high-level structure
\inlinesml{Parse} providing specific combinators for the command-language Isar:
@ -106,25 +97,26 @@ val attributes =(Parse.$$$ "[" |-- (reference
|--(Parse.enum ","attribute)))[]))--| Parse.$$$ "]"
\end{sml}
The ``model'' \<^boxed_sml>\<open>declare_reference_opn\<close> and ``view'' \<^boxed_sml>\<open>attributes\<close> parts were
The ``model'' \inlineisar+declare_reference_opn+ and ``view'' \inlineisar+attributes+ parts were
combined via the piping operator and registered in the Isar toplevel:
@{boxed_sml [display]
\<open>fun declare_reference_opn (((oid,_),_),_) =
\begin{sml}
fun declare_reference_opn (((oid,_),_),_) =
(Toplevel.theory (DOF_core.declare_object_global oid))
val _ = Outer_Syntax.command <@>{command_keyword "declare_reference"}
"declare document reference"
(attributes >> declare_reference_opn);\<close>}
(attributes >> declare_reference_opn);
\end{sml}
Altogether, this gives the extension of Isabelle/HOL with Isar syntax and semantics for the
new \emph{command}:
@{boxed_theory_text [display]\<open>
\begin{isar}
declare_reference [lal::requirement, alpha="main", beta=42]
\<close>}
\end{isar}
The construction also implicitly generates some markup information; for example, when hovering
over the \<^boxed_theory_text>\<open>declare_reference\<close> command in the IDE, a popup window with the text:
over the \inlineisar|declare_reference| command in the IDE, a popup window with the text:
``declare document reference'' will appear.
\<close>
@ -134,116 +126,139 @@ text\<open>
principle: based on a number of combinators, new user-defined antiquotation syntax and semantics
can be added to the system, working freely on the internal plugin-data. For example, in
@{boxed_sml [display]
\<open>val _ = Theory.setup(
\begin{sml}
val _ = Theory.setup(
Thy_Output.antiquotation <@>{binding docitem}
docitem_antiq_parser
(docitem_antiq_gen default_cid) #>
ML_Antiquotation.inline <@>{binding docitem_value}
ML_antiq_docitem_value)\<close>}
the text antiquotation \<^boxed_sml>\<open>docitem\<close> is declared and bound to a parser for the argument
ML_antiq_docitem_value)
\end{sml}
the text antiquotation \inlineisar+docitem+ is declared and bound to a parser for the argument
syntax and the overall semantics. This code defines a generic antiquotation to be used in text
elements such as
@{boxed_theory_text [display]\<open>
text\<open>as defined in @{docitem \<open>d1\<close>} ...\<close>
\<close>}
\begin{isar}
text\<Open>as defined in <@>{docitem \<Open>d1\<Close>} ...\<Close>
\end{isar}
The subsequent registration \<^boxed_sml>\<open>docitem_value\<close> binds code to an ML-antiquotation usable
The subsequent registration \inlineisar+docitem_value+ binds code to an ML-antiquotation usable
in an ML context for user-defined extensions; it permits access to the current ``value''
of a document element, \<^ie>, a term with the entire update history.
of a document element, \ie, a term with the entire update history.
It is possible to generate antiquotations \emph{dynamically}, as a consequence of a class
definition in ODL. The processing of the ODL class \<^typ>\<open>definition\<close> also \emph{generates}
a text antiquotation \<^boxed_theory_text>\<open>@{"definition" \<open>d1\<close>}\<close>, which works similar to
\<^boxed_theory_text>\<open>@{docitem \<open>d1\<close>}\<close> except for an additional type-check that assures that
\<^boxed_theory_text>\<open>d1\<close> is a reference to a definition. These type-checks support the subclass hierarchy.
definition in ODL. The processing of the ODL class \inlineisar+d$$efinition+ also \emph{generates}
a text antiquotation \inlineisar+<@>{definition \<Open>d1\<Close>}+, which works similar to
\inlineisar+<@>{docitem \<Open>d1\<Close>}+ except for an additional type-check that assures that
\inlineisar+d1+ is a reference to a definition. These type-checks support the subclass hierarchy.
\<close>
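text\<open>
From the user's perspective, this mechanism can be sketched as follows; the class name
\inlineisar+concept+, its attribute, and the labels are purely illustrative:
\begin{isar}
doc_class concept =
   tag :: "string option" <= "None"
text*[c1::concept]\<Open>Some informal definition text.\<Close>
text\<Open>As introduced in <@>{concept \<Open>c1\<Close>}, ...\<Close>
\end{isar}
Here, processing the class declaration generates the antiquotation <@>{concept ...}, which
checks, in addition to the existence of \<Open>c1\<Close>, that the referenced instance indeed belongs
to (a subclass of) \inlineisar+concept+.
\<close>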
section\<open>Implementing Second-level Type-Checking\<close>
text\<open>
For expressions over attribute values, for which we chose to use HOL syntax so that users do not
need to learn another syntax, we implemented our own pass over type-checked terms. Stored in the
late-binding table \<^boxed_sml>\<open>ISA_transformer_tab\<close>, we register for each inner-syntax-annotation
late-binding table \inlineisar+ISA_transformer_tab+, we register for each inner-syntax-annotation
(ISA's), a function of type
@{boxed_sml [display]
\<open> theory -> term * typ * Position.T -> term option\<close>}
\begin{sml}
theory -> term * typ * Position.T -> term option
\end{sml}
Executed in a second pass of term parsing, ISA's may just return \<^boxed_theory_text>\<open>None\<close>. This is
adequate for ISA's just performing some checking in the logical context \<^boxed_theory_text>\<open>theory\<close>;
Executed in a second pass of term parsing, ISA's may just return \inlineisar+None+. This is
adequate for ISA's just performing some checking in the logical context \inlineisar+theory+;
ISA's of this kind report errors by exceptions. In contrast, \<^emph>\<open>transforming\<close> ISA's will
yield a term; this is adequate, for example, for replacing a string-reference by the term denoted
by it. This late-binding table is also used to generate standard inner-syntax-antiquotations from
a \<^boxed_theory_text>\<open>doc_class\<close>.
a \inlineisar+doc_class+.
\<close>
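text\<open>
At the user level, such inner-syntax antiquotations occur inside the HOL expressions used as
attribute values. A hypothetical sketch (command arguments, class, and attribute are
illustrative only, not the exact surface syntax):
\begin{isar}
update_instance*[c1::concept, related := "[<@>{concept \<Open>c2\<Close>}]"]
\end{isar}
During the second term-parsing pass, the ISA registered for <@>{concept ...} may then check
that \<Open>c2\<Close> indeed references an instance of \inlineisar+concept+, or transform the
annotation into the term it denotes.
\<close>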
section\<open>Programming Class Invariants\<close>
text\<open>
See \<^technical>\<open>sec:low_level_inv\<close>.
For the moment, there is no high-level syntax for the definition of class invariants. A
formulation, in SML, of the first class-invariant in @{docref "sec:class_inv"} is straight-forward:
\begin{sml}
fun check_result_inv oid {is_monitor:bool} ctxt =
let val kind = compute_attr_access ctxt "kind" oid <@>{here} <@>{here}
val prop = compute_attr_access ctxt "property" oid <@>{here} <@>{here}
val tS = HOLogic.dest_list prop
in case kind of
<@>{term "proof"} => if not(null tS) then true
else error("class result invariant violation")
| _ => false
end
val _ = Theory.setup (DOF_core.update_class_invariant
"tiny_cert.result" check_result_inv)
\end{sml}
The \inlinesml{setup}-command (last line) registers the \inlineisar+check_result_inv+ function
in the \isadof kernel; it is then activated by any creation or modification of an instance of
\inlineisar+result+. We cannot replace \inlineisar+compute_attr_access+ by the corresponding
antiquotation \inlineisar+<@>{docitem_value kind::oid}+, since \inlineisar+oid+ is
bound to a variable here and can therefore not be statically expanded.
\<close>
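text\<open>
At the document level, the effect of this invariant can be sketched as follows (the concrete
attribute syntax of the hypothetical \inlineisar+tiny_cert+ ontology is simplified): an instance
whose \inlineisar+kind+ is \inlineisar+proof+ but whose \inlineisar+property+ list is empty is
rejected with the error ``class result invariant violation'':
\begin{isar}
text*[r1::result, kind="proof", property="[]"]\<Open>...\<Close>
\end{isar}
\<close>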
section\<open>Implementing Monitors\<close>
text\<open>
Since monitor-clauses have a regular expression syntax, it is natural to implement them as
deterministic automata. These are stored in the \<^boxed_sml>\<open>docobj_tab\<close> for monitor-objects
in the \<^isadof> component. We implemented the functions:
deterministic automata. These are stored in the \inlineisar+docobj_tab+ for monitor-objects
in the \isadof component. We implemented the functions:
@{boxed_sml [display]
\<open> val enabled : automaton -> env -> cid list
val next : automaton -> env -> cid -> automaton\<close>}
where \<^boxed_sml>\<open>env\<close> is basically a map between internal automaton states and class-id's
(\<^boxed_sml>\<open>cid\<close>'s). An automaton is said to be \<^emph>\<open>enabled\<close> for a class-id,
iff it either occurs in its accept-set or its reject-set (see @{docitem "sec:monitors"}). During
\begin{sml}
val enabled : automaton -> env -> cid list
val next : automaton -> env -> cid -> automaton
\end{sml}
where \inlineisar+env+ is basically a map between internal automaton states and class-id's
(\inlineisar+cid+'s). An automaton is said to be \<^emph>\<open>enabled\<close> for a class-id,
iff it either occurs in its accept-set or its reject-set (see @{docref "sec:monitors"}). During
top-down document validation, whenever a text-element is encountered, it is checked if a monitor
is \emph{enabled} for this class; in this case, the \<^boxed_sml>\<open>next\<close>-operation is executed. The
transformed automaton recognizing the rest-language is stored in \<^boxed_sml>\<open>docobj_tab\<close> if
possible;
% TODO: clarify the notion of rest-language
otherwise, if \<^boxed_sml>\<open>next\<close> fails, an error is reported. The automata implementation
is, in large parts, generated from a formalization of functional automata~\cite{nipkow.ea:functional-Automata-afp:2004}.
is \emph{enabled} for this class; in this case, the \inlineisar+next+-operation is executed. The
transformed automaton recognizing the rest-language is stored in \inlineisar+docobj_tab+ if
possible; otherwise, if \inlineisar+next+ fails, an error is reported. The automata implementation
is, in large parts, generated from a formalization of functional automata~\cite{Functional-Automata-AFP}.
\<close>
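text\<open>
A sketch of the user-facing side (class names and the concrete regular-expression notation are
simplified here): a monitor class whose \inlineisar+accepts+ clause requires an introduction,
then one or more technical sections, then a conclusion could be declared roughly as:
\begin{isar}
doc_class report =
   style :: string <= "''thesis''"
   accepts "(introduction ~~ \<lbrace>technical\<rbrace>\<^sup>+ ~~ conclusion)"
\end{isar}
Opening such a monitor with \inlineisar+open_monitor*+ compiles the clause into an automaton;
each subsequent text element for which the monitor is enabled advances it via the
\inlineisar+next+ operation, and \inlineisar+close_monitor*+ ends its scope.
\<close>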
section\<open>The \<^LaTeX>-Core of \<^isadof>\<close>
section\<open>The \LaTeX-Core of \isadof\<close>
text\<open>
The \<^LaTeX>-implementation of \<^isadof> heavily relies on the
``keycommand''~@{cite "chervet:keycommand:2010"} package. In fact, the core \<^isadof> \<^LaTeX>-commands
The \LaTeX-implementation of \isadof heavily relies on the
``keycommand''~@{cite "chervet:keycommand:2010"} package. In fact, the core \isadof \LaTeX-commands
are just wrappers for the corresponding commands from the keycommand package:
@{boxed_latex [display]
\<open>\newcommand\newisadof[1]{%
\begin{ltx}
\newcommand\newisadof[1]{%
\expandafter\newkeycommand\csname isaDof.#1\endcsname}%
\newcommand\renewisadof[1]{%
\expandafter\renewkeycommand\csname isaDof.#1\endcsname}%
\newcommand\provideisadof[1]{%
\expandafter\providekeycommand\csname isaDof.#1\endcsname}%\<close>}
\expandafter\providekeycommand\csname isaDof.#1\endcsname}%
\end{ltx}
The \<^LaTeX>-generator of \<^isadof> maps each \<^boxed_theory_text>\<open>doc_item\<close> to an \<^LaTeX>-environment (recall
@{docitem "text-elements"}). As generic \<^boxed_theory_text>\<open>doc_item\<close>s are derived from the text element,
the environment \inlineltx|isamarkuptext*| builds the core of \<^isadof>'s \<^LaTeX> implementation.
For example, the @{docitem "ass123"} from page \pageref{ass123} is mapped to
The \LaTeX-generator of \isadof maps each \inlineisar{doc_item} to an \LaTeX-environment (recall
@{docref "text-elements"}). As generic \inlineisar{doc_item} are derived from the text element,
the enviornment \inlineltx|{isamarkuptext*}| builds the core of \isadof's \LaTeX{} implementation.
For example, the @{docref "ass123"} from page \pageref{ass123} is mapped to
@{boxed_latex [display]
\<open>\begin{isamarkuptext*}%
\begin{ltx}
\begin{isamarkuptext*}%
[label = {ass122},type = {CENELEC_50128.SRAC},
args={label = {ass122}, type = {CENELEC_50128.SRAC},
CENELEC_50128.EC.assumption_kind = {formal}}
] The overall sampling frequency of the odometer subsystem is therefore
14 khz, which includes sampling, computing and result communication
times ...
\end{isamarkuptext*}\<close>}
\end{isamarkuptext*}
\end{ltx}
This environment is mapped to a plain \<^LaTeX> command via (again, recall @{docitem "text-elements"}):
@{boxed_latex [display]
\<open> \NewEnviron{isamarkuptext*}[1][]{\isaDof[env={text},#1]{\BODY}} \<close>}
This environment is mapped to a plain \LaTeX command via (again, recall @{docref "text-elements"}):
\begin{ltx}
\NewEnviron{isamarkuptext*}[1][]{\isaDof[env={text},#1]{\BODY}}
\end{ltx}
For the command-based setup, \<^isadof> provides a dispatcher that selects the most specific
implementation for a given \<^boxed_theory_text>\<open>doc_class\<close>:
For the command-based setup, \isadof provides a dispatcher that selects the most specific
implementation for a given \inlineisar|doc_class|:
@{boxed_latex [display]
\<open>%% The Isabelle/DOF dispatcher:
\begin{ltx}
%% The Isabelle/DOF dispatcher:
\newkeycommand+[\|]\isaDof[env={UNKNOWN},label=,type={dummyT},args={}][1]{%
\ifcsname isaDof.\commandkey{type}\endcsname%
\csname isaDof.\commandkey{type}\endcsname%
@ -264,7 +279,8 @@ implementation for a given \<^boxed_theory_text>\<open>doc_class\<close>:
definition for "\commandkey{env}" available either.}%
\fi%
\fi%
}\<close>}
}
\end{ltx}
\<close>
(*<*)

View File

@ -1,28 +1,11 @@
(*************************************************************************
* Copyright (C)
* 2019 The University of Exeter
* 2018-2019 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
(*<*)
theory "Isabelle_DOF-Manual"
imports "05_Implementation"
begin
use_template "scrreprt-modern"
use_ontology "technical_report" and "CENELEC_50128"
close_monitor*[this]
check_doc_global
text\<open>Resulting trace in \<^verbatim>\<open>doc_item\<close> ''this'': \<close>
text\<open>Resulting trace in doc\_item ''this'': \<close>
ML\<open>@{trace_attribute this}\<close>
end
(*>*)

View File

@ -1,12 +1,14 @@
session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
options [document = pdf, document_output = "output", quick_and_dirty = true]
theories
"Isabelle_DOF-Manual"
document_files
"isadof.cfg"
"root.bib"
"root.mst"
"preamble.tex"
"lstisadof-manual.sty"
"build"
"lstisadof-manual.sty"
"figures/antiquotations-PIDE.png"
"figures/cicm2018-combined.png"
"figures/cicm2018-dof.png"
@ -19,15 +21,13 @@ session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
"figures/Dogfood-Intro.png"
"figures/Dogfood-IV-jumpInDocCLass.png"
"figures/Dogfood-V-attribute.png"
"figures/Dogfood-VI-linkappl.png"
"figures/IsaArchGlobal.png"
"figures/IsaArchInteract.png"
"figures/IsaArch.odp"
"figures/isabelle-architecture.pdf"
"figures/isabelle-architecture.svg"
"figures/isadof.png"
"figures/PIDE-interaction.pdf"
"figures/srac-as-es-application.png"
"figures/srac-definition.png"
"figures/Isabelle_DOF-logo.pdf"
"figures/header_CSP_pdf.png"
"figures/header_CSP_source.png"
"figures/definition-use-CSP-pdf.png"
"figures/definition-use-CSP.png"
"figures/MyCommentedIsabelle.png"

View File

@ -0,0 +1,46 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
if [ ! -f "$ISABELLE_HOME_USER/DOF/document-template/build_lib.sh" ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
exit 1
fi
cp "$ISABELLE_HOME_USER/DOF/document-template/build_lib.sh" .
source build_lib.sh

Binary image files changed (image previews not shown).

View File

@ -0,0 +1,4 @@
Template: scrreprt-modern
Ontology: technical_report
Ontology: cenelec_50128

View File

@ -1,16 +1,3 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018-2021 The University of Paris-Saclay
%% 2019-2021 The University of Exeter
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
\usepackage{listings}
\usepackage{listingsutf8}
\usepackage{tikz}
@ -80,17 +67,116 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% <isar>
\newcommand{\subscr}[1]{\ensuremath{_{\text{#1}}}}
\newcommand{\supscr}[1]{\ensuremath{^{\text{#1}}}}
\lstdefinestyle{isar}{%
language=%
,basicstyle=\ttfamily%
,showspaces=false%
,showlines=false%
,columns=flexible%
,keepspaces
,morecomment=[s]{(*}{*)}%
% ,moredelim=*[s][\rmfamily]{\{*}{*\}}%
,moredelim = **[is][\beginlstdelim{\{*}{*\}}{black}]{\{*}{*\}}
,showstringspaces=false%
,moredelim=*[is][\supscr]{<bsup>}{<esup>}%
,moredelim=*[is][\subscr]{<bsub>}{<esub>}%
,literate={%
{...}{\,\ldots\,}3%
{<Open>}{\ensuremath{\isacartoucheopen}}1%
{<open>}{\ensuremath{\isacartoucheopen}}1%
%{<@>}{@}1%
{"}{}0%
{é}{\'e}1%
{~}{\ }1%
{::}{:\!:}1%
{<Close>}{\ensuremath{\isacartoucheclose}}1%
{<close>}{\ensuremath{\isacartoucheclose}}1%
{\\<Gamma>}{\ensuremath{\Gamma}}1%
{\\<theta>}{\ensuremath{\theta}}1%
{\\<times>}{\ensuremath{\times}}1%
{\\<equiv>}{\ensuremath{\equiv}}1%
{\\<sigma>}{\ensuremath{\sigma}}1%
{\\<geq>}{\ensuremath{\geq}}1%
{level0}{level\textsubscript{0}}6%
{\\<Rightarrow>}{\ensuremath{\Rightarrow}}1%
{\\<rightarrow>}{\ensuremath{\rightarrow}}1%
{\\<longrightarrow>}{\ensuremath{\rightarrow}}1%
{\\<and>}{\ensuremath{\land}}1%
{\\<or>}{\ensuremath{\lor}}1%
{\\<lfloor>}{\ensuremath{\lfloor}}1%
{\\<rfloor>}{\ensuremath{\rfloor}}1%
%{\\<lparr>}{\ensuremath{\lparr}}1%
%{\\<rparr>}{\ensuremath{\rparr}}1%
{\\<le>}{\ensuremath{\le}}1%
{\\<delta>}{\ensuremath{\delta}}1%
{\\<lambda>}{\ensuremath{\lambda}}1%
{\\<bar>}{\ensuremath{\vert}}1%
{\<sigma>}{\ensuremath{\sigma}}1%
{\\<lparr>}{\ensuremath{\isasymlparr}}1%
{\\<rparr>}{\ensuremath{\isasymrparr}}1%
{\\<leftrightarrow>}{\ensuremath{\leftrightarrow}}1%
{\{*}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{*\}}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<open>}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{\\<Open>}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{\\<close>}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<Close>}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<forall>}{\ensuremath{\forall}}1%
{\\<exists>}{\ensuremath{\exists}}1%
{\\<in>}{\ensuremath{\in}}1%
{\\<delta>}{\ensuremath{\delta}}1%
{\\<real>}{\ensuremath{\mathbb{R}}}1%
{\\<noteq>}{\ensuremath{\neq}}1%
{\\<exists>}{\ensuremath{\exists}}1%
{\\<Forall>}{\ensuremath{\bigwedge\,}}1%
{<string>}{<\ensuremath{\text{\textit{string}}}>}9%
{\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1%
{\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1%
{\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1%
{\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1%
}%
% % Defining "tags" (text-antiquotations) based on 1-keywords
,tag=**[s]{@\{}{\}}%
,tagstyle=\color{CornflowerBlue}%
,markfirstintag=true%
,keywordstyle=\bfseries%
,keywords={}
% Defining 2-keywords
,keywordstyle=[2]{\color{Blue!60}\bfseries}%
,alsoletter={*,-}
,morekeywords=[2]{case, then, show, theory, begin, end, ML,section,subsection,paragraph,chapter,text}%
%,moredelim=[s][\textit]{<}{>}
% Defining 3-keywords
,keywordstyle=[3]{\color{OliveGreen!60}\bfseries}%
,morekeywords=[3]{doc_class,declare_reference,update_instance*,
open_monitor*, close_monitor*, declare_reference*,section*,text*,title*,abstract*}%
% Defining 4-keywords
,keywordstyle=[4]{\color{black!60}\bfseries}%
,morekeywords=[4]{where, imports, keywords}%
% Defining 5-keywords
,keywordstyle=[5]{\color{BrickRed!70}\bfseries}%
,morekeywords=[5]{datatype, by, fun, Definition*, definition,
type_synonym, typedecl,
consts, assumes, and, shows, proof, next, qed, lemma, theorem}%
% Defining 6-keywords
,keywordstyle=[6]{\itshape}%
,morekeywords=[6]{meta-args, ref, expr, class_id}%
%
}%
%%
\providecolor{isar}{named}{blue}
\renewcommand{\isacommand}[1]{\textcolor{OliveGreen!60}{\ttfamily\bfseries #1}}
\newcommand{\inlineisarbox}[1]{#1}
\NewTColorBox[]{isarbox}{}{
\def\inlineisar{\lstinline[style=isar,breaklines=true,mathescape,breakatwhitespace=true]}
\newtcblisting{isar}[1][]{%
listing only%
,boxrule=0pt
,boxsep=0pt
,colback=white!90!isar
,enhanced jigsaw
,borderline west={2pt}{0pt}{isar!60!black}
,sharp corners
,before skip balanced=0.5\baselineskip plus 2pt
% ,before skip=10pt
% ,after skip=10pt
,enlarge top by=0mm
@ -98,7 +184,13 @@
,overlay={\node[draw,fill=isar!60!black,xshift=0pt,anchor=north
east,font=\bfseries\footnotesize\color{white}]
at (frame.north east) {Isar};}
}
,listing options={
style=isar
,basicstyle=\small\ttfamily
,mathescape
,#1
}
}%
%% </isar>
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@ -184,7 +276,6 @@
\lstdefinestyle{lltx}{language=[AlLaTeX]TeX,
,basicstyle=\ttfamily%
,showspaces=false%
,escapechar=ë
,showlines=false%
,morekeywords={newisadof}
% ,keywordstyle=\bfseries%

View File

@ -6,22 +6,23 @@
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
\usepackage{etex}
\ifdef{\reserveinserts}{\reserveinserts{28}}{}
\reserveinserts{28}
\usepackage{dirtree}
\renewcommand*\DTstylecomment{\ttfamily\itshape}
\usepackage{textcomp}
\usepackage{xcolor}
\usepackage{lstisadof-manual}
\usepackage{xspace}
\IfFileExists{hvlogos.sty}{\usepackage{hvlogos}}{\newcommand{\TeXLive}{\TeX Live}\newcommand{\BibTeX}{Bib\TeX}}
\usepackage{dtk-logos}
\usepackage{railsetup}
\setcounter{secnumdepth}{2}
\usepackage{index}
@ -29,20 +30,20 @@
%\makeindex
%\AtEndDocument{\printindex}
\newcommand{\dof}{DOF\xspace}
\newcommand{\isactrlemph}{*}
\newcommand{\ie}{i.e.}
\newcommand{\eg}{e.g.}
\newcommand{\path}[1]{\texttt{\nolinkurl{#1}}}
\title{<TITLE>}
\author{<AUTHOR>}
\newcommand{\dof}{DOF\xspace}
\renewcommand{\listofSRACs}{\relax}
\renewcommand{\listofECs}{\relax}
\pagestyle{headings}
\uppertitleback{
Copyright \copyright{} 2019--2022 University of Exeter, UK\\
\phantom{Copyright \copyright{}} 2018--2022 Universit\'e Paris-Saclay, France\\
Copyright \copyright{} 2019\phantom{--2019} University of Exeter, UK\\
\phantom{Copyright \copyright{}} 2018--2019 Universit\'e Paris-Saclay, France\\
\phantom{Copyright \copyright{}} 2018--2019 The University of Sheffield, UK\\
\smallskip
@ -76,14 +77,13 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
}
\lowertitleback{%
This manual describes \isadof version \isadofversion. The latest official
release is \isadoflatestversion{} (\href{https://doi.org/\isadoflatestdoi}{doi:\isadoflatestdoi}).
The DOI \href{https://doi.org/\isadofgenericdoi}{\isadofgenericdoi} will always point to the latest
release. The latest development version as well as official releases are available at
This manual describes \isadof version \isadofversion.
The latest development version as well as previous releases are available at
\url{\dofurl}.
\paragraph*{Contributors.} We would like to thank the following contributors to \isadof
(in alphabetical order): Idir Ait-Sadoune, Paolo Crisafulli, and Nicolas M{\'e}ric.
(in alphabetical order): Idir Ait-Sadoune, Paolo Crisafulli, and Chantal Keller.
\paragraph*{Acknowledgments.} This work has been partially supported by IRT SystemX, Paris-Saclay,
France, and therefore granted with public funds of the Program ``Investissements d'Avenir.''
@ -109,7 +109,7 @@ France, and therefore granted with public funds of the Program ``Investissements
\hfill
\begin{minipage}{8cm}
\raggedleft\normalsize
Laboratoire des Methodes Formelles (LMF)\\
Laboratoire de Recherche en Informatique (LRI)\\
Universit\'e Paris-Saclay\\
91405 Orsay Cedex\\
France
@ -118,3 +118,4 @@ France, and therefore granted with public funds of the Program ``Investissements
}
\AtBeginDocument{\isabellestyle{literal}\newcommand{\lstnumberautorefname}{Line}}
\renewcommand{\isacommand}[1]{\textcolor{OliveGreen!60}{\ttfamily\bfseries #1}}

View File

@ -3,27 +3,33 @@
@STRING{s-lncs = "LNCS" }
@Misc{ w3c:ontologies:2015,
author = {W3C},
author = {W3C},
title = {Ontologies},
organisation = {W3c},
url = {https://www.w3.org/standards/semanticweb/ontology},
year = 2015
}
@Misc{ ibm:doors:2019,
author = {IBM},
@Misc{ doors,
author = {IBM},
title = {{IBM} Engineering Requirements Management {DOORS} Family},
note = {\url{https://www.ibm.com/us-en/marketplace/requirements-management}},
year = 2019
}
@Manual{ wenzel:isabelle-isar:2020,
title = {The Isabelle/Isar Reference Manual},
author = {Makarius Wenzel},
year = 2020,
note = {Part of the Isabelle distribution.}
@Manual{ wenzel:isabelle-isar:2019,
title = {The Isabelle/Isar Reference Manual},
author = {Makarius Wenzel},
OPTorganization = {},
OPTaddress = {},
OPTedition = {},
OPTmonth = {},
year = {2019},
note = {Part of the Isabelle distribution.},
OPTannote = {}
}
@InCollection{ brucker.ea:isabelledof:2019,
abstract = {DOF is a novel framework for defining ontologies and
enforcing them during document development and evolution. A
@ -41,7 +47,7 @@
ontological feedback during the editing of a document.
In this paper, we give an in-depth presentation of the
design concepts of DOF's Ontology Definition Language
design concepts of DOF{\^a}s Ontology Definition Language
(ODL) and key aspects of the technology of its
implementation. Isabelle/DOF is the first ontology language
supporting machine-checked links between the formal and
@ -60,16 +66,14 @@
publisher = {Springer-Verlag},
address = {Heidelberg},
series = {Lecture Notes in Computer Science},
number = {11724},
isbn = {3-540-25109-X},
doi = {10.1007/978-3-030-30446-1_15},
editor = {Peter C. {\"O}lveczky and Gwen Sala{\"u}n},
editor = {Peter {\"O}lveczky and Gwen Sala{\"u}n},
pdf = {https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf},
title = {{Isabelle/DOF}: Design and Implementation},
classification= {conference},
areas = {formal methods, software},
categories = {isadof},
year = {2019},
year = 2019,
public = {yes}
}
@ -103,7 +107,7 @@
publisher = {Springer-Verlag},
address = {Heidelberg},
series = {Lecture Notes in Computer Science},
number = {11006},
number = 11006,
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018},
title = {Using the {Isabelle} Ontology Framework: Linking the
Formal with the Informal},
@ -111,35 +115,16 @@
areas = {formal methods, software},
categories = {isadof},
public = {yes},
year = {2018},
doi = {10.1007/978-3-319-96812-4_3},
year = 2018,
pdf = {https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf}
}
@InCollection{ taha.ea:philosophers:2020,
keywords = {CSP, Isabelle/HOL, Process-Algebra,Formal Verification, Refinement},
author = {Safouan Taha and Burkhart Wolff and Lina Ye},
booktitle = {International Conference on Integrated Formal Methods (IFM)},
language = {USenglish},
publisher = {Springer-Verlag},
address = {Heidelberg},
series = {Lecture Notes in Computer Science},
number = {to appear},
title = {Philosophers may dine --- definitively!},
classification= {conference},
areas = {formal methods, software},
public = {yes},
year = {2020}
}
@Book{ boulanger:cenelec-50128:2015,
author = {Boulanger, Jean-Louis},
title = {{CENELEC} 50128 and {IEC} 62279 Standards},
publisher = {Wiley-ISTE},
year = 2015,
address = {Boston}
address = {Boston},
}
@Booklet{ cc:cc-part3:2006,
@ -199,7 +184,7 @@
timestap = {2008-05-26}
}
@InProceedings{wenzel:asynchronous:2014,
@InProceedings{ wenzel:asynchronous:2014,
author = {Makarius Wenzel},
title = {Asynchronous User Interaction and Tool Integration in
{Isabelle}/{PIDE}},
@ -257,7 +242,7 @@
volume = 8558,
publisher = pub-springer,
year = 2014,
doi = {10.1007/978-3-319-08970-6}
doi = {10.1007/978-3-319-08970-6},
}
@InProceedings{ bezzecchi.ea:making:2018,
@ -299,37 +284,39 @@
location = {Toulouse}
}
@InCollection{ wenzel.ea:building:2007,
abstract = {We present the generic system framework of
Isabelle/Isar underlying recent versions of Isabelle. Among
other things, Isar provides an infrastructure for Isabelle
plug-ins, comprising extensible state components and
extensible syntax that can be bound to tactical ML
programs. Thus the Isabelle/Isar architecture may be
understood as an extension and refinement of the
traditional LCF approach, with explicit infrastructure for
building derivative systems. To demonstrate the technical
potential of the framework, we apply it to a concrete
formal methods tool: the HOL-Z 3.0 environment, which is
geared towards the analysis of Z specifications and formal
proof of forward-refinements.},
author = {Makarius Wenzel and Burkhart Wolff},
booktitle = {TPHOLs 2007},
editor = {Klaus Schneider and Jens Brandt},
language = {USenglish},
@InCollection{ wenzel.ea:building:2007,
abstract = {We present the generic system framework of
Isabelle/Isar underlying recent versions of Isabelle. Among
other things, Isar provides an infrastructure for Isabelle
plug-ins, comprising extensible state components and
extensible syntax that can be bound to tactical ML
programs. Thus the Isabelle/Isar architecture may be
understood as an extension and refinement of the
traditional LCF approach, with explicit infrastructure for
building derivative systems. To demonstrate the technical
potential of the framework, we apply it to a concrete
formal methods tool: the HOL-Z 3.0 environment, which is
geared towards the analysis of Z specifications and formal
proof of forward-refinements.},
author = {Makarius Wenzel and Burkhart Wolff},
booktitle = {TPHOLs 2007},
editor = {Klaus Schneider and Jens Brandt},
language = {USenglish},
acknowledgement={none},
pages = {352--367},
publisher = pub-springer,
address = pub-springer:adr,
number = 4732,
series = s-lncs,
title = {Building Formal Method Tools in the {Isabelle}/{Isar}
Framework},
doi = {10.1007/978-3-540-74591-4_26},
year = 2007
pages = {352--367},
publisher = pub-springer,
address = pub-springer:adr,
number = 4732,
series = s-lncs,
title = {Building Formal Method Tools in the {Isabelle}/{Isar}
Framework},
doi = {10.1007/978-3-540-74591-4_26},
year = 2007
}
@Misc{ biendarra.ea:defining:2019,
%%%%%%%%%%%%%%%%%%%%%%
@Misc{ datarefman19,
title = {Defining (Co)datatypes and Primitively (Co)recursive
Functions in Isabelle/HOL},
author = {Julian Biendarra and Jasmin Christian Blanchette and
@ -339,30 +326,43 @@
year = 2019
}
@Misc{ kraus:defining:2020,
@Misc{ functions19,
title = {Defining Recursive Functions in Isabelle/HOL},
author = {Alexander Kraus},
note = {\url{https://isabelle.in.tum.de/doc/functions.pdf}},
year = 2020
year = 2019
}
@Misc{ nipkow:whats:2020,
@Misc{ nipkowMain19,
title = {What's in Main},
author = {Tobias Nipkow},
note = {\url{https://isabelle.in.tum.de/doc/main.pdf}},
year = 2020
year = 2019
}
@InProceedings{ wenzel:system:2014,
@InProceedings{ DBLP:conf/itp/Wenzel14,
author = {Makarius Wenzel},
title = {Asynchronous User Interaction and Tool Integration in
Isabelle/PIDE},
booktitle = {Interactive Theorem Proving (ITP)},
pages = {515--530},
year = 2014,
doi = {10.1007/978-3-319-08970-6_33},
timestamp = {Sun, 21 May 2017 00:18:59 +0200},
biburl = {https://dblp.org/rec/bib/conf/itp/Wenzel14},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
@InProceedings{ DBLP:journals/corr/Wenzel14,
author = {Makarius Wenzel},
title = {System description: Isabelle/{jEdit} in 2014},
booktitle = {UITP},
pages = {84--94},
year = 2014,
doi = {10.4204/EPTCS.167.10}
doi = {10.4204/EPTCS.167.10},
}
@InProceedings{ barras.ea:pervasive:2013,
@InProceedings{ DBLP:conf/mkm/BarrasGHRTWW13,
author = {Bruno Barras and Lourdes Del Carmen
Gonz{\'{a}}lez{-}Huesca and Hugo Herbelin and Yann
R{\'{e}}gis{-}Gianas and Enrico Tassi and Makarius Wenzel
@ -372,10 +372,10 @@
booktitle = {MKM},
pages = {359--363},
year = 2013,
doi = {10.1007/978-3-642-39320-4_29}
doi = {10.1007/978-3-642-39320-4_29},
}
@Article{ faithfull.ea:coqoon:2018,
@Article{ Faithfull:2018:COQ:3204179.3204223,
author = {Faithfull, Alexander and Bengtson, Jesper and Tassi,
Enrico and Tankink, Carst},
title = {Coqoon},
@ -388,12 +388,55 @@
issn = {1433-2779},
pages = {125--137},
numpages = 13,
url = {https://doi.org/10.1007/s10009-017-0457-2},
doi = {10.1007/s10009-017-0457-2},
acmid = 3204223,
publisher = {Springer-Verlag},
address = {Berlin, Heidelberg}
}
@InCollection{ brucker.wolff19:isadof-design-impl:2019,
abstract = {DOF is a novel framework for \emph{defining} ontologies
and \emph{enforcing} them during document development and
document evolution. A major goal of DOF is the integrated
development of formal certification documents (\eg, for
Common Criteria or CENELEC 50128) that require consistency
across both formal and informal arguments.
To support a consistent development of formal and informal
parts of a document, we provide Isabelle/DOF, an
implementation of DOF on top of Isabelle/HOL. \isadof is
integrated into Isabelle's IDE, which allows for smooth
ontology development as well as immediate ontological
feedback during the editing of a document.
In this paper, we give an in-depth presentation of the
design concepts of DOF's Ontology Definition Language (ODL)
and key aspects of the technology of its implementation.
\isadof is the first ontology language supporting
machine-checked links between the formal and informal parts
in an LCF-style interactive theorem proving environment.
Sufficiently annotated, large documents can easily be
developed collaboratively, while \emph{ensuring their
consistency}, and the impact of changes (in the formal and
the semi-formal content) is tracked automatically.},
address = {Heidelberg},
author = {Achim D. Brucker and Burkhart Wolff},
booktitle = {International Conference on Software Engineering and
Formal Methods (SEFM)},
doi = {10.1007/978-3-319-96812-4_3},
keywords = {Isabelle, HOL, Ontologies, Certification},
language = {USenglish},
location = {Oslo, Norway},
number = {TO APPEAR},
pdf = {https://www.lri.fr/~wolff/papers/conf/2019-sefm-isa_dof-framework.pdf},
publisher = {Springer-Verlag},
series = {Lecture Notes in Computer Science},
title = {{I}sabelle/{DOF}: {D}esign and {I}mplementation},
year = 2019
}
@InProceedings{ abrial:steam-boiler:1996,
author = {Abrial, Jean-Raymond},
title = {Steam-Boiler Control Specification Problem},
@ -445,24 +488,25 @@
the development, deployment and maintenance activities.}
}
@Article{ kraus.ea:regular-sets-afp:2010,
@Article{ Regular-Sets-AFP,
author = {Alexander Krauss and Tobias Nipkow},
title = {Regular Sets and Expressions},
journal = {Archive of Formal Proofs},
month = may,
year = 2010,
note = {\url{https://isa-afp.org/entries/Regular-Sets.html}, Formal
note = {\url{http://isa-afp.org/entries/Regular-Sets.html}, Formal
proof development},
issn = {2150-914x}
}
@Article{ nipkow.ea:functional-Automata-afp:2004,
@Article{ Functional-Automata-AFP,
author = {Tobias Nipkow},
title = {Functional Automata},
journal = {Archive of Formal Proofs},
month = mar,
year = 2004,
note = {\url{https://isa-afp.org/entries/Functional-Automata.html},
note = {\url{http://isa-afp.org/entries/Functional-Automata.html},
Formal proof development},
issn = {2150-914x}
}
@ -473,11 +517,11 @@
year = 2019
}
@Booklet{ wenzel:system-manual:2020,
@Booklet{ wenzel:system-manual:2019,
author = {Makarius Wenzel},
title = {The {Isabelle} System Manual},
year = 2020,
note = {Part of the Isabelle distribution.}
year = 2019,
note = {Part of the Isabelle distribution.}
}
@Booklet{ chervet:keycommand:2010,
@ -508,6 +552,5 @@
@Book{ eijkhout:latex-cs:2012,
author = {Victor Eijkhout},
title = {The Computer Science of TeX and LaTeX},
publisher = {Texas Advanced Computing Center},
year = 2012
year = 2012,
}

View File

@ -1,17 +1,18 @@
session "TR_MyCommentedIsabelle" = "Isabelle_DOF" +
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
options [document = pdf, document_output = "output",quick_and_dirty = true]
theories
"TR_MyCommentedIsabelle"
document_files
"root.bib"
"isadof.cfg"
"preamble.tex"
"prooftree.sty"
"build"
"figures/markup-demo.png"
"figures/text-element.pdf"
"figures/isabelle-architecture.pdf"
"figures/pure-inferences-I.pdf"
"figures/pure-inferences-II.pdf"
"figures/document-model.pdf"
"figures/MyCommentedIsabelle.png"

View File

@ -0,0 +1,46 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Saclay. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
exit 1
fi
cp $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh .
source build_lib.sh

Binary file not shown (image, 162 KiB).

View File

@ -0,0 +1,2 @@
Template: scrreprt
Ontology: technical_report

View File

@ -5,16 +5,16 @@
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
\renewcommand{\isasymtheta}{\texorpdfstring{\isamath{\vartheta}}{ϑ}}
\usepackage{prooftree}
\title{<TITLE>}
\author{<AUTHOR>}
\newcommand{\eg}{e.\,g.}
\newcommand{\ie}{i.\,e.}

284
install Executable file
View File

@ -0,0 +1,284 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield.
# 2019-2019 The University of Exeter.
# 2018-2019 The University of Paris-Saclay.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
#set -e
shopt -s nocasematch
# get global configuration
source .config
print_help()
{
echo "Usage: install [OPTION] "
echo ""
echo "Run ..."
echo ""
echo " --help, -h display this help message"
echo " --isabelle, -i isabelle isabelle command used for installation"
echo " (default: $ISABELLE)"
echo " --skip-patch-and-afp, -s skip installation of Isabelle/DOF patch for"
echo " Isabelle and required AFP entries. "
echo " USE AT YOUR OWN RISK (default: $SKIP)"
}
exit_error() {
echo ""
echo " *** Isabelle/DOF installation FAILED, please check the README.md for help ***"
echo ""
exit 1
}
check_isabelle_version() {
echo "* Checking Isabelle version:"
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
echo " WARNING:"
echo " The version of Isabelle (i.e., $ACTUAL_ISABELLE_VERSION) you are using"
echo " IS NOT SUPPORTED"
echo " by the current version of Isabelle/DOF. Please install a supported"
echo " version of Isabelle and rerun the install script, providing the"
echo " the \"isabelle\" command as argument."
echo " Isabelle ($ISABELLE_VERSION) can be obtained from:"
echo " $ISABELLE_URL"
echo
read -p " Still continue (y/N)? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]];
then
echo " Continuing installation on your OWN risk."
else
exit_error
fi
else
echo " Success: found supported Isabelle version ($ISABELLE_VERSION)"
fi
}
check_pdftex() {
echo "* Checking (La)TeX installation:"
OLDDIR=`pwd`
DIR=`mktemp -d`
cd $DIR;
pdftex -interaction=nonstopmode \\expanded{Success}\\end > /dev/null
if [ $? -eq 0 ]; then
echo " Success: pdftex supports \\expanded{} primitive."
else
cd $OLDDIR
echo " WARNING:"
echo " The version of pdf(La)TeX you are using is outdated (and does"
echo " not support the \\expanded primitive). It is not supported by the"
echo " current version of Isabelle/DOF. Please install a supported TeX"
echo " distribution (e.g., TeXLive 2019 or later)."
echo
read -p " Still continue (y/N)? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]];
then
echo " Continuing installation on your OWN risk."
else
exit_error
fi
fi
cd $OLDDIR
}
check_afp_entries() {
echo "* Checking availability of AFP entries:"
missing=""
required="Regular-Sets Functional-Automata"
for afp in $required; do
res=`$ISABELLE build -n $afp 2>/dev/null || true`
if [ "$res" != "" ]; then
echo " Success: found APF entry $afp."
else
echo " Warning: could not find AFP entry $afp."
missing="$missing $afp"
fi
done
if [ "$missing" != "" ]; then
echo " Trying to install AFP (this might take a few *minutes*) ...."
extract=""
for e in $missing; do
extract="$extract $AFP_DATE/thys/$e"
done
mkdir -p .afp
if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then
for e in $missing; do
echo " Registering $e in $ISABELLE_HOME_USER/ROOTS"
touch $ISABELLE_HOME_USER/ROOTS
grep -q $PWD/.afp/$AFP_DATE/thys/$e $ISABELLE_HOME_USER/ROOTS || echo "$PWD/.afp/$AFP_DATE/thys/$e" >> $ISABELLE_HOME_USER/ROOTS
done
echo " AFP installation successful."
else
echo " FAILURE: could not find AFP entries: $missing."
echo " Please obtain the AFP from"
echo " $AFP_URL"
echo " and follow the following instructions:"
echo " https://www.isa-afp.org/using.html"
exit_error
fi
fi
}
check_isa_dof_patch() {
echo "* Check availability of Isabelle/DOF patch:"
src="src/patches/thy_output.ML"
dst="$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"
if command -v cmp > /dev/null 2>&1 && cmp -s "$src" "$dst" ; then
echo " Success: latest Isabelle/DOF patch already applied"
if isabelle process -e 'Thy_Output.set_meta_args_parser' &> /dev/null ; then
true
else
echo " Warning: Isabelle/HOL needs to be rebuild to activate patch."
fi
else
command -v cmp >/dev/null 2>&1 || echo " Warning: cmp not available, cannot check if patch is already applied."
echo " Warning: Isabelle/DOF patch is not available or outdated."
echo " Trying to patch system ...."
if [ ! -f "$dst.backup-by-isadof-installer" ]; then
cp -f "$dst" "$dst.backup-by-isadof-installer" || true;
fi
if (cp -f $src $dst) &> /dev/null; then
echo " Applied patch successfully, Isabelle/HOL will be rebuilt during"
echo " the next start of Isabelle."
else
echo " FAILURE: Could not apply Isabelle/DOF patch."
echo " Please copy $src to $dst, e.g.:"
echo " cp -f $src $dst"
echo " and rebuild Isabelle/HOL."
exit_error
fi
fi
}
check_old_installation(){
echo "* Searching for existing installation:"
if [[ -d "$ISABELLE_HOME_USER/DOF" ]]; then
echo " Found old installation, moving it to $ISABELLE_HOME_USER/DOF.bak."
rm -rf "$ISABELLE_HOME_USER/DOF.bak"
mv "$ISABELLE_HOME_USER/DOF" "$ISABELLE_HOME_USER/DOF.bak"
else
echo " No old installation found."
fi
}
install_and_register(){
echo "* Installing Isabelle/DOF"
DIR="$ISABELLE_HOME_USER/DOF/Tools"
echo " - Installing Tools in $DIR"
mkdir -p "$DIR"
cp $GEN_DIR/Tools/* "$DIR"
chmod 755 "$DIR"/*
DIR="$ISABELLE_HOME_USER/DOF/document-template"
echo " - Installing document templates in $DIR"
mkdir -p "$DIR"
cp $GEN_DIR/scripts/* "$DIR"
cp $GEN_DIR/document-templates/* "$DIR"
cp $GEN_DIR/DOF/*/*.sty "$DIR"
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
sed -i -e "s|%%% CONFIG %%%| \
\\\\renewcommand{\\\\dof@isabelleversion}{$ISABELLE_SHORT_VERSION} \
\\\\renewcommand{\\\\isabellefullversion}{$ISABELLE_VERSION\\\\xspace} \
\\\\renewcommand{\\\\dof@version}{$DOF_VERSION} \
\\\\renewcommand{\\\\isabelleurl}{$ISABELLE_URL} \
\\\\renewcommand{\\\\dofurl}{$DOF_URL} \
\\\\renewcommand{\\\\dof@artifacturl}{https://$DOF_ARTIFACT_HOST/$DOF_ARTIFACT_DIR}|" \
"$DIR/DOF-core.sty"
DIR="$ISABELLE_HOME_USER/DOF/latex"
echo " - Installing LaTeX styles in $DIR"
mkdir -p "$DIR"
cp $GEN_DIR/ontologies/*/*.sty "$DIR"
DIR="$ISABELLE_HOME_USER/etc"
echo " - Registering Isabelle/DOF"
mkdir -p "$DIR"
if [[ $ISABELLE_TOOLS = *DOF* ]]; then
echo " * Tools already registered in $DIR/settings"
else
echo " * Registering tools in $DIR/settings"
echo 'ISABELLE_TOOLS=$ISABELLE_TOOLS:$ISABELLE_HOME_USER/DOF/Tools' \
>> "$DIR/settings"
fi
sed -i -e "s|<isadofurl>|$DOF_URL|" $ISABELLE_HOME_USER/DOF/*/*
grep -q $PWD\$ $ISABELLE_HOME_USER/ROOTS || echo "$PWD" >> $ISABELLE_HOME_USER/ROOTS
}
ISABELLE=`which isabelle`
SKIP="false"
while [ $# -gt 0 ]
do
case "$1" in
--isabelle|-i)
ISABELLE="$2";
shift;;
--skip-patch-and-afp|-s)
SKIP="true";;
--help|-h)
print_help
exit 0;;
*) print_help
exit 1;;
esac
shift
done
ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
GEN_DIR=src
PROG=`echo $0 | sed 's|.*/||'`;
VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
for i in $VARS; do
export "$i"
done
echo ""
echo "Isabelle/DOF Installer"
echo "======================"
check_isabelle_version
check_pdftex
if [ "$SKIP" = "true" ]; then
echo "* Warning: skipping installation of Isabelle patch and AFP entries."
else
check_isa_dof_patch
check_afp_entries
fi
check_old_installation
install_and_register
echo "* Installation successful. Enjoy Isabelle/DOF, you can build the session"
echo " Isabelle_DOF and all example documents by executing:"
echo " $ISABELLE build -D ."
exit 0

View File

@ -1,179 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield.
# 2019-2020 The University of Exeter.
# 2018-2020 The University of Paris-Saclay.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
#set -e
shopt -s nocasematch
print_help()
{
echo "Usage: isabelle env ./install-afp [OPTION] "
echo ""
echo "Warning: This tools is deprecated."
echo ""
echo "Run ..."
echo ""
echo " --help, -h display this help message"
}
exit_error() {
echo ""
echo " *** Local AFP installation FAILED, please check the README.md for help ***"
echo ""
exit 1
}
confirm_usage() {
echo "* From Isabelle2021-1 on, the recommended method for making the whole AFP "
echo " available to Isabelle is the isabelle components -u command."
echo " For doing so, please follow the instructions at: "
echo " https://www.isa-afp.org/help/"
echo ""
echo " Alternatively, you can continue, on your own risk, to install only"
echo " the AFP entries required to run Isabelle/DOF."
echo ""
read -p " Still continue (y/N)? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]];
then
echo " Continuing installation on your OWN risk."
else
exit_error
fi
}
check_isabelle_version() {
echo "* Checking Isabelle version:"
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
echo " WARNING:"
echo " The version of Isabelle (i.e., $ACTUAL_ISABELLE_VERSION) you are using"
echo " IS NOT SUPPORTED"
echo " by the current version of Isabelle/DOF. Please install a supported"
echo " version of Isabelle and rerun the install script, providing the"
echo " the \"isabelle\" command as argument."
echo " Isabelle ($ISABELLE_VERSION) can be obtained from:"
echo " https://isabelle.in.tum.de/website-$ISABELLE_VERSION/"
echo ""
read -p " Still continue (y/N)? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]];
then
echo " Continuing installation on your OWN risk."
else
exit_error
fi
else
echo " Success: found supported Isabelle version ($ISABELLE_VERSION)"
fi
}
check_afp_entries() {
echo "* Checking availability of AFP entries:"
missing=""
required="Regular-Sets Functional-Automata Physical_Quantities"
for afp in $required; do
res=`$ISABELLE_TOOL build -n $afp 2>/dev/null || true`
if [ "$res" != "" ]; then
echo " Success: found APF entry $afp."
else
echo " Warning: could not find AFP entry $afp."
missing="$missing $afp"
fi
done
if [ "$missing" != "" ]; then
echo " Trying to install AFP (this might take a few *minutes*) ...."
extract=""
for e in $missing; do
extract="$extract $AFP_DATE/thys/$e"
done
mkdir -p .afp
if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then
for e in $missing; do
echo " Registering $e"
$ISABELLE_TOOL components -u "$PWD/.afp/$AFP_DATE/thys/$e"
done
echo " AFP installation successful."
else
echo " FAILURE: could not find AFP entries: $missing."
echo " Please obtain the AFP from"
echo " $AFP_URL"
echo " and follow the following instructions:"
echo " https://www.isa-afp.org/using.html"
exit_error
fi
fi
}
while [ $# -gt 0 ]
do
case "$1" in
--help|-h)
print_help
exit 0;;
*) print_help
exit 1;;
esac
shift
done
if [ -z ${ISABELLE_TOOL+x} ];
then
print_help
exit 1
fi
ACTUAL_ISABELLE_VERSION=`$ISABELLE_TOOL version`
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
if [ ${ISABELLE_VERSION} = "Isabelle" ];
then
echo "Error: cannot find Isabelle/DOF configuration, please check that you"
echo " registered Isabelle/DOF as an Isabelle component, e.g., using"
echo " isabelle components -u ."
exit 1
fi
AFP_DATE="$($ISABELLE_TOOL dof_param -b afp_version)"
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
echo ""
echo "Isabelle/DOF AFP Installation Utility"
echo "====================================="
confirm_usage
check_isabelle_version
check_afp_entries
echo "* AFP Installation successful."
echo " You should now be able to enjoy Isabelle/DOF by building its session"
echo " and all example documents by executing:"
echo " $ISABELLE_TOOL build -D ."
exit 0

69
src/DOF/Assert.thy Normal file
View File

@ -0,0 +1,69 @@
section \<open> A little theory implementing an assertion command in Isabelle/HOL. \<close>
text\<open>This command is useful for certification documents, as it allows one to validate
corner cases of (executable) definitions. \<close>
theory Assert
imports Main
keywords "assert" ::thy_decl
begin
subsection\<open>Core\<close>
ML\<open>
local
(* Reimplementation needed because not exported from ML structure Value_Command *)
fun value_maybe_select some_name =
case some_name
of NONE => Value_Command.value
| SOME name => Value_Command.value_select name;
in
(* Reimplementation needed because not exported from ML structure Value_Command *)
val opt_modes =
Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.name --| @{keyword ")"})) [];
(* Reimplementation needed because not exported from ML structure Value_Command *)
val opt_evaluator =
Scan.option (@{keyword "["} |-- Parse.name --| @{keyword "]"})
(* Reimplementation structure Value_Command due to tiny modification of value_cmd. *)
fun assert_cmd some_name modes raw_t ctxt (* state*) =
let
(* val ctxt = Toplevel.context_of state; *)
val t = Syntax.read_term ctxt raw_t;
val t' = value_maybe_select some_name ctxt t;
val ty' = Term.type_of t';
val ty' = case ty' of @{typ "bool"} => ty' | _ => error "Assertion expressions must be boolean.";
val t' = case t' of @{term "True"} => t' | _ => error "Assertion failed.";
val ctxt' = Variable.auto_fixes t' ctxt;
val p = Print_Mode.with_modes modes (fn () =>
Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk,
Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) ();
in Pretty.writeln p end;
val _ =
Outer_Syntax.command @{command_keyword assert} "evaluate and print term"
(opt_evaluator -- opt_modes -- Parse.term
>> (fn ((some_name, modes), t) =>
Toplevel.keep ( (assert_cmd some_name modes t) o Toplevel.context_of) ));
end
\<close>
subsection\<open> Test: \<close>
(*
assert ""
assert "3 = 4"
assert "False"
assert "5 * 5 = 25"
*)
subsection\<open>Example\<close>
assert "True \<and> True "
assert "(5::int) * 5 = 25 "
end

57
src/DOF/AssertLong.thy Normal file
View File

@ -0,0 +1,57 @@
theory AssertLong
imports Main
keywords "assert" ::thy_decl
begin
ML\<open>
fun value_maybe_select some_name =
case some_name
of NONE => Value_Command.value
| SOME name => Value_Command.value_select name;
val TT = Unsynchronized.ref (HOLogic.boolT);
fun value_cmd2 some_name modes raw_t state =
let
val ctxt = Toplevel.context_of state;
val t = Syntax.read_term ctxt raw_t;
val t' = value_maybe_select some_name ctxt t;
val ty' = Term.type_of t';
val t' = case ty' of @{typ "bool"} => t' | _ => error "Assertion expressions must be boolean.";
val t' = case t' of @{term "True"} => t' | _ => error "Assertion failed.";
val ctxt' = Variable.auto_fixes t' ctxt;
val p = Print_Mode.with_modes modes (fn () =>
Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk,
Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) ();
in Pretty.writeln p end;
\<close>
ML\<open>value_cmd2\<close>
definition ASSERT :: "bool \<Rightarrow> bool" where "ASSERT p == (p=True)"
ML\<open>val x = @{code "ASSERT"}\<close>
ML\<open>
val opt_modes =
Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.name --| @{keyword ")"})) [];
val opt_evaluator =
Scan.option (@{keyword "["} |-- Parse.name --| @{keyword "]"})
val _ =
Outer_Syntax.command @{command_keyword assert} "evaluate and print term"
(opt_evaluator -- opt_modes -- Parse.term
>> (fn ((some_name, modes), t) =>
let val _ = writeln t in
(* Toplevel.keep (Value_Command.value_cmd some_name modes (enclose "ASSERT(" ")" t)) *)
Toplevel.keep (value_cmd2 some_name modes t)
end));
\<close>
assert "True"
assert "True \<and> True "
ML\<open>!TT ;
@{term "True"}\<close>

View File

@ -1,37 +1,56 @@
(*************************************************************************
* Copyright (C)
* 2019 The University of Exeter
* 2018-2019 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
chapter \<open>The Document Ontology Common Library for the Isabelle Ontology Framework\<close>
text\<open> Building a fundamental infrastructure for common document elements such as
structuring text-elements (the top classes) and figures (tables are still to do).
The COL provides a number of ontological "macros" like "section*" which
automatically set a number of class-attributes in particular ways without
user interference (see the usage sketch below).
\<close>
text\<open> Offering
\<^item> ...
\<^item>
\<^item> LaTeX support. \<close>
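(* Usage sketch (hypothetical, not part of COL itself): a heading macro defined in this
   theory might be used in a client theory as follows, assuming a user ontology that
   provides a doc_class "introduction"; the instance names "intro" and "motiv" are made up. *)
(*
section*[intro::introduction]\<open>Introduction\<close>
text*[motiv::introduction]\<open>Some text attached to the ontology instance motiv.\<close>
*)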
theory Isa_COL
imports Isa_DOF
keywords "title*" "subtitle*"
"chapter*" "section*"
"subsection*" "subsubsection*"
"paragraph*" "subparagraph*"
"figure*" "side_by_side_figure*" :: document_body
begin
section\<open>Basic Text and Text-Structuring Elements\<close>
section\<open> Library of Standard Text Ontology \<close>
datatype placement = pl_h | (*here*)
pl_t | (*top*)
pl_b | (*bottom*)
pl_ht | (*here -> top*)
pl_hb (*here -> bottom*)
doc_class figure =
relative_width :: "int" (* percent of textwidth *)
src :: "string"
placement :: placement
spawn_columns :: bool <= True
doc_class side_by_side_figure = figure +
anchor :: "string"
caption :: "string"
relative_width2 :: "int" (* percent of textwidth *)
src2 :: "string"
anchor2 :: "string"
caption2 :: "string"
doc_class figure_group =
(* trace :: "doc_class rexp list" <= "[]" automatically generated since monitor clause *)
caption :: "string"
rejects figure_group (* this forbids recursive figure-groups not supported
by the current LaTeX style-file. *)
accepts "\<lbrace>figure\<rbrace>\<^sup>+"
(* dito the future table *)
(* dito the future monitor: table - block *)
text\<open> The attribute @{term "level"} in the subsequent classes enables doc-notation support for section* etc.;
we follow LaTeX terminology on levels
@ -42,606 +61,43 @@ we follow LaTeX terminology on levels
\<^enum> subsubsection = Some 3
\<^enum> ...
for scholarly paper: invariant level > 0. \<close>
for scholarly paper: invariant level > 0 \<close>
doc_class text_element =
doc_class text_element =
level :: "int option" <= "None"
referentiable :: bool <= "False"
variants :: "String.literal set" <= "{STR ''outline'', STR ''document''}"
doc_class "chapter" = text_element +
level :: "int option" <= "Some 0"
doc_class "section" = text_element +
level :: "int option" <= "Some 1"
doc_class "subsection" = text_element +
level :: "int option" <= "Some 2"
doc_class "subsubsection" = text_element +
level :: "int option" <= "Some 3"
section\<open>Some attempt to model standardized links to Standard Isabelle Formal Content\<close>
doc_class assertions =
properties :: "term list"
doc_class "thms" =
properties :: "thm list"
subsection\<open>Ontological Macros\<close>
doc_class formal_item =
item :: "(assertions + thms)"
ML\<open>
doc_class definitions =
requires :: "formal_item list"
establishes :: "thms list"
structure Onto_Macros =
struct
local open ODL_Meta_Args_Parser in
(* *********************************************************************** *)
(* Ontological Macro Command Support *)
(* *********************************************************************** *)
doc_class formal_content =
style :: "string option"
accepts "\<lbrace>formal_item\<rbrace>\<^sup>+"
(* {markdown = true} sets the parsing process such that in the text-core markdown elements are
accepted. *)
fun enriched_text_element_cmd level =
let fun transform doc_attrs = case level of
NONE => doc_attrs
| SOME(NONE) => (("level",@{here}),"None")::doc_attrs
| SOME(SOME x) => (("level",@{here}),"Some("^ Int.toString x ^"::int)")::doc_attrs
in Monitor_Command_Parser.gen_enriched_document_cmd {inline=true} I transform end;
(*
val enriched_document_command_macro =
let fun transform_cid X = (writeln (@{make_string} X); X)
in gen_enriched_document_command {inline=true} transform_cid I end;
*)
local
fun transform_cid thy NONE X = X
|transform_cid thy (SOME ncid) NONE = (SOME(ncid,@{here}))
|transform_cid thy (SOME cid) (SOME (sub_cid,pos)) =
let val cid_long = DOF_core.read_cid_global thy cid
val sub_cid_long = DOF_core.read_cid_global thy sub_cid
in if DOF_core.is_subclass_global thy sub_cid_long cid_long
then (SOME (sub_cid,pos))
else (* (SOME (sub_cid,pos)) *)
(* BUG : check reveals problem of Definition* misuse. *)
error("class "^sub_cid_long^
" must be sub-class of "^cid_long)
end
in
fun enriched_formal_statement_command ncid (S: (string * string) list) =
let fun transform_attr doc_attrs = (map (fn(cat,tag) => ((cat,@{here}),tag)) S) @
(("formal_results",@{here}),"([]::thm list)")::doc_attrs
in fn margs => fn thy =>
Monitor_Command_Parser.gen_enriched_document_cmd {inline=true}
(transform_cid thy ncid) transform_attr margs thy
end;
fun enriched_document_cmd_exp ncid (S: (string * string) list) =
(* expands ncid into supertype-check. *)
let fun transform_attr attrs = (map (fn(cat,tag) => ((cat,@{here}),tag)) S) @ attrs
in fn margs => fn thy =>
Monitor_Command_Parser.gen_enriched_document_cmd {inline=true} (transform_cid thy ncid)
transform_attr margs thy
end;
end (* local *)
fun heading_command (name, pos) descr level =
Monitor_Command_Parser.document_command (name, pos) descr
{markdown = false, body = true} (enriched_text_element_cmd level);
val _ = heading_command \<^command_keyword>\<open>title*\<close> "section heading" NONE;
val _ = heading_command \<^command_keyword>\<open>subtitle*\<close> "section heading" NONE;
val _ = heading_command \<^command_keyword>\<open>chapter*\<close> "section heading" (SOME (SOME 0));
val _ = heading_command \<^command_keyword>\<open>section*\<close> "section heading" (SOME (SOME 1));
val _ = heading_command \<^command_keyword>\<open>subsection*\<close> "subsection heading" (SOME (SOME 2));
val _ = heading_command \<^command_keyword>\<open>subsubsection*\<close> "subsubsection heading" (SOME (SOME 3));
val _ = heading_command \<^command_keyword>\<open>paragraph*\<close> "paragraph heading" (SOME (SOME 4));
val _ = heading_command \<^command_keyword>\<open>subparagraph*\<close> "subparagraph heading" (SOME (SOME 5));
end
end
\<close>
section\<open> Library of Standard Text Ontology \<close>
datatype placement = pl_h | (*here*)
pl_t | (*top*)
pl_b | (*bottom*)
pl_ht | (*here -> top*)
pl_hb (*here -> bottom*)
ML\<open>(Symtab.defined (#docclass_tab(DOF_core.get_data_global @{theory}))) "side_by_side_figure"\<close>
print_doc_classes
doc_class figure =
relative_width :: "int" (* percent of textwidth *)
src :: "string"
placement :: placement
spawn_columns :: bool <= True
doc_class figure2 = figure +
caption :: string
doc_class side_by_side_figure = figure +
anchor :: "string"
caption :: "string"
relative_width2 :: "int" (* percent of textwidth *)
src2 :: "string"
anchor2 :: "string"
caption2 :: "string"
print_doc_classes
doc_class figure_group =
(* trace :: "doc_class rexp list" <= "[]" automatically generated since monitor clause *)
caption :: "string"
rejects figure_group (* this forbids recursive figure-groups not supported
by the current LaTeX style-file. *)
accepts "\<lbrace>figure\<rbrace>\<^sup>+"
print_doc_classes
section\<open>Layout Trimming Commands (with syntactic checks)\<close>
ML\<open>
local
val scan_cm = Scan.ahead (Basic_Symbol_Pos.$$$ "c" |-- Basic_Symbol_Pos.$$$ "m" ) ;
val scan_pt = Scan.ahead (Basic_Symbol_Pos.$$$ "p" |-- Basic_Symbol_Pos.$$$ "t" ) ;
val scan_blank = Scan.repeat ( Basic_Symbol_Pos.$$$ " "
|| Basic_Symbol_Pos.$$$ "\t"
|| Basic_Symbol_Pos.$$$ "\n");
in
val scan_latex_measure = (scan_blank
|-- Scan.option (Basic_Symbol_Pos.$$$ "-")
|-- Symbol_Pos.scan_nat
|-- (Scan.option ((Basic_Symbol_Pos.$$$ ".") |-- Symbol_Pos.scan_nat))
|-- scan_blank
|-- (scan_cm || scan_pt)
|-- scan_blank
) ;
fun check_latex_measure _ src =
let val _ = ((Scan.catch scan_latex_measure (Symbol_Pos.explode(Input.source_content src)))
handle Fail _ => error ("syntax error in LaTeX measure") )
in () end
val parse_latex_measure = Parse.embedded_input >> (fn src => (check_latex_measure () (* dummy arg *) src;
(fst o Input.source_content) src ) )
end\<close>
setup\<open> DOF_lib.define_macro \<^binding>\<open>vs\<close> "\\vspace{" "}" (check_latex_measure) \<close>
setup\<open> DOF_lib.define_macro \<^binding>\<open>hs\<close> "\\hspace{" "}" (check_latex_measure) \<close>
(*<*)
text\<open>Tests: \<^vs>\<open>-0.14cm\<close>\<close>
ML\<open> check_latex_measure @{context} (Input.string "-0.14 cm") \<close>
define_macro* vs2 \<rightleftharpoons> \<open>\vspace{\<close> _ \<open>}\<close> (check_latex_measure) (* checkers NYI on Isar-level *)
define_macro* hs2 \<rightleftharpoons> \<open>\hspace{\<close> _ \<open>}\<close> (* works fine without checker.*)
(*>*)
subsection\<open>Figures\<close>
ML\<open>open Args\<close>
ML\<open>
(* *********************************************************************** *)
(* Ontological Macro Command Support *)
(* *********************************************************************** *)
val _ = Onto_Macros.heading_command \<^command_keyword>\<open>figure*\<close> "figure" NONE;
val _ = Onto_Macros.heading_command \<^command_keyword>\<open>side_by_side_figure*\<close> "multiple figures" NONE;
\<close>
(*<*)
(*
ML\<open>ML_Context.expression\<close>
fun setup source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (" @ ML_Lex.read_source source @ ML_Lex.read ")")
|> Context.theory_map;
setup\<open>\<close>
*)
(*>*)
subsubsection\<open>Figure Content\<close>
text\<open>The intermediate development goal is to separate the ontological, top-level construct
\<open>figure*\<close>, which will remain a referentiable, ontological document unit, from the more versatile
\<^emph>\<open>import\<close> of a figure. The hope is that this opens the way for more orthogonality and
abstraction from the LaTeX engine.
\<close>
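(* Rough usage sketch (hypothetical): assumes a file "figures/overview.pdf" in the
   document directory; the parameters width, scale and caption are those accepted by
   the parser defined below. *)
(*
text\<open> @{fig_content (width=60, caption=\<open>An overview figure.\<close>) \<open>figures/overview.pdf\<close>} \<close>
*)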
ML\<open>
type fig_content = {relative_width : int, (* percent of textwidth, default 100 *)
scale : int, (* percent, default 100 *)
caption : Input.source (* default empty *)}
val mt_fig_content = {relative_width = 100,
scale = 100,
caption = Input.empty }: fig_content
(* dumb as a dirt road: plain record-update boilerplate. *)
fun upd_relative_width key {relative_width,scale,caption } : fig_content =
{relative_width = key,scale = scale,caption = caption}: fig_content
fun upd_scale key {relative_width,scale,caption } : fig_content =
{relative_width = relative_width,scale = key,caption = caption}: fig_content
fun upd_caption key {relative_width,scale,caption} : fig_content =
{relative_width = relative_width,scale = scale,caption= key}: fig_content
val widthN = "width"
val scaleN = "scale"
val captionN = "caption";
fun fig_content_modes (ctxt, toks) =
let val (y, toks') = ((((Scan.optional
(Args.parens
(Parse.list1
( (Args.$$$ widthN |-- Args.$$$ "=" -- Parse.int
>> (fn (_, k) => upd_relative_width k))
|| (Args.$$$ scaleN |-- Args.$$$ "=" -- Parse.int
>> (fn (_, k) => upd_scale k))
|| (Args.$$$ captionN |-- Args.$$$ "=" -- Parse.document_source
>> (fn (_, k) => upd_caption k))
))) [K mt_fig_content])
: (fig_content -> fig_content) list parser)
>> (foldl1 (op #>)))
: (fig_content -> fig_content) parser)
(toks)
in (y, (ctxt, toks')) end
fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) =
Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
(check ctxt NONE source;
Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source))
|> Latex.macro "isatt"));
fun fig_content_antiquotation name scan =
(Document_Output.antiquotation_raw_embedded name
(scan : ((fig_content -> fig_content) * Input.source) context_parser)
(fn ctxt =>
(fn (cfg_trans,file:Input.source) =>
let val {relative_width,scale,caption} = cfg_trans mt_fig_content
val _ = if relative_width < 0 orelse scale<0 then error("negative parameter.")
else ()
val wdth_s = if relative_width = 100 then ""
else "width="^Real.toString((Real.fromInt relative_width)
/ (Real.fromInt 100))^"\textwidth"
val scale_s= if scale = 100 then ""
else "scale="^Real.toString((Real.fromInt scale) / (Real.fromInt 100))
val arg = enclose "[" "]" (commas [wdth_s,scale_s])
val lab = Document_Output.output_document ctxt {markdown = false} caption
val path = Resources.check_file ctxt NONE file
val _ = writeln("file "^Path.file_name path)
(* ToDo: must be declared source of type png or jpeg or pdf, ... *)
in file
|> (Latex.string o Input.string_of)
|> (XML.enclose ("\\includegraphics"^arg^"{") "}")
|> (fn X => X @ Latex.macro "capture" lab)
end
)
));
val _ = fig_content_antiquotation
: binding
-> ((fig_content -> fig_content) * Input.source) context_parser
-> theory -> theory
val _ = Theory.setup
( fig_content_antiquotation \<^binding>\<open>fig_content\<close>
(fig_content_modes -- Scan.lift(Parse.path_input)))
\<close>
subsection\<open>Tables\<close>
(* TODO ! ! ! *)
(* dito the future monitor: table - block *)
(* some studies *)
text\<open>Tables are (sub) document-elements represented inside the documentation antiquotation
language. The technology used is similar to the existing railroad-diagram support
(cf. \<^url>\<open>https://isabelle.in.tum.de/doc/isar-ref.pdf\<close>, Sec. 4.5).
However, tables are not directly based on the idiosyncrasies of Knuth-based language design ---
However, tables come with a more abstract structure model than conventional typesetting in the
LaTeX tradition. It is based on the following principles (a usage sketch follows this text block):
\<^item> The core unit of a table is a \<^emph>\<open>cell\<close> having a \<^emph>\<open>configuration\<close>, i.e. a
number of attributes specifying its width, height, borderline, etc.
A cell may be \<^emph>\<open>elementary\<close>, i.e. containing structured text or \<^emph>\<open>compound\<close>,
i.e. containing a sub-table.
\<^item> A \<^emph>\<open>table\<close> contains either a list of \<^emph>\<open>rows\<close> or a list of \<^emph>\<open>columns\<close>, which are both
lists of cells.
\<^item> Tables, rows and columns possess their own configurations.
\<^item> Concerning the layout, \<^emph>\<open>propagation\<close> laws of configurations control that
information flows top-down from tables to rows or columns, from rows/columns to cells,
from left to right within rows and from top to bottom in columns; propagation produces
the desired presentation effect of tables that cells appear somewhat uniform in it.
\<^item> Since rows are lists of cells, configurations are also a list of attributes.
Attributes of the same kind may appear repeatedly. If the sub-list of attributes
of the same kind is shorter than the list of cells it is referring to, then
the last element in this sub-list is duplicated as many times as necessary. This feature
of configuration propagation is called \<^emph>\<open>filling\<close>.
\<^item> Lists of rows and lists of cells consist of the same number of cells.
\<^item> Since propagation and filling induce a congruence relation on table trees, a normalisation
process is a necessary pre-requisite for the compilation to LaTeX.
\<close>
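(* Minimal usage sketch, mirroring the experimental test at the end of this theory;
   the attribute names are those defined below, and the generated LaTeX is still a prototype. *)
(*
text\<open> @{table_inline [display] (cell_placing = center, cell_width = \<open>2.0cm\<close>)
        \<open>\<open>\<open>A\<close> \<open>B\<close>\<close>
         \<open>\<open>1\<close> \<open>2\<close>\<close>\<close>} \<close>
*)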
ML\<open>
local
fun mk_line st1 st2 [a] = [a @ Latex.string st2]
|mk_line st1 st2 (a::S) = [a @ Latex.string st1] @ mk_line st1 st2 S;
(* tab attributes for global setup *)
type cell_config = {cell_placing : string list,
cell_height : string list,
cell_width : string list,
cell_bgnd_color : string list,
cell_line_color : string list,
cell_line_width : string list}
val mt_cell_config = {cell_placing = [],
cell_height = [],
cell_width = [],
cell_bgnd_color= [],
cell_line_color= [],
cell_line_width= [] }: cell_config
(* dumb as a dirt road: plain record-update boilerplate. *)
fun upd_cell_placing key
{cell_placing,cell_height,cell_width, cell_bgnd_color,
cell_line_color,cell_line_width} : cell_config =
{cell_placing = cell_placing @ [key], cell_height = cell_height,
cell_width = cell_width, cell_bgnd_color= cell_bgnd_color,
cell_line_color= cell_line_color, cell_line_width= cell_line_width }
: cell_config
fun upd_cell_height num
{cell_placing,cell_height,cell_width, cell_bgnd_color,
cell_line_color,cell_line_width} : cell_config =
{cell_placing = cell_placing , cell_height = cell_height @ [num],
cell_width = cell_width, cell_bgnd_color= cell_bgnd_color,
cell_line_color= cell_line_color,cell_line_width= cell_line_width }
: cell_config
fun upd_cell_width num
{cell_placing,cell_height,cell_width, cell_bgnd_color,
cell_line_color,cell_line_width} : cell_config =
{cell_placing = cell_placing , cell_height = cell_height,
cell_width = cell_width@[num],cell_bgnd_color= cell_bgnd_color,
cell_line_color= cell_line_color, cell_line_width= cell_line_width }
: cell_config
fun upd_cell_bgnd_color str
{cell_placing,cell_height,cell_width, cell_bgnd_color,
cell_line_color,cell_line_width} : cell_config =
{cell_placing = cell_placing , cell_height = cell_height,
cell_width = cell_width, cell_bgnd_color= cell_bgnd_color@[str],
cell_line_color= cell_line_color, cell_line_width= cell_line_width }
: cell_config
fun upd_cell_line_color str
{cell_placing,cell_height,cell_width, cell_bgnd_color,
cell_line_color,cell_line_width} : cell_config =
{cell_placing = cell_placing , cell_height = cell_height,
cell_width = cell_width, cell_bgnd_color= cell_bgnd_color,
cell_line_color= cell_line_color@[str], cell_line_width= cell_line_width }
: cell_config
fun upd_cell_line_width num
{cell_placing,cell_height,cell_width, cell_bgnd_color,
cell_line_color,cell_line_width} : cell_config =
{cell_placing = cell_placing , cell_height = cell_height,
cell_width = cell_width, cell_bgnd_color = cell_bgnd_color,
cell_line_color = cell_line_color, cell_line_width = cell_line_width@[num] }
: cell_config
(*global default configs *)
val (tab_cell_placing, tab_cell_placing_setup)
= Attrib.config_string \<^binding>\<open>tab_cell_placing\<close> (K "center");
val (tab_cell_height, tab_cell_height_setup)
= Attrib.config_string \<^binding>\<open>tab_cell_height\<close> (K "0.0cm");
val (tab_cell_width, tab_cell_width_setup)
= Attrib.config_string \<^binding>\<open>tab_cell_width\<close> (K "0.0cm");
val (tab_cell_bgnd_color, tab_cell_bgnd_color_setup)
= Attrib.config_string \<^binding>\<open>tab_cell_bgnd_height\<close> (K "white");
val (tab_cell_line_color, tab_cell_line_color_setup)
= Attrib.config_string \<^binding>\<open>tab_cell_line_color\<close> (K "black");
val (tab_cell_line_width, tab_cell_line_width_setup)
= Attrib.config_string \<^binding>\<open>tab_cell_line_height\<close> (K "0.0cm");
fun default_cell_config ctxt = {cell_placing = [Config.get ctxt tab_cell_placing],
cell_height = [Config.get ctxt tab_cell_height],
cell_width = [Config.get ctxt tab_cell_width],
cell_bgnd_color = [Config.get ctxt tab_cell_bgnd_color],
cell_line_color = [Config.get ctxt tab_cell_line_color],
cell_line_width = [Config.get ctxt tab_cell_line_width]}
: cell_config
val _ = Theory.setup( tab_cell_placing_setup
#> tab_cell_height_setup
#> tab_cell_width_setup
#> tab_cell_bgnd_color_setup
#> tab_cell_line_color_setup
#> tab_cell_line_width_setup
)
(*syntax for local tab specifier *)
val cell_placingN = "cell_placing"
val cell_heightN = "cell_height"
val cell_widthN = "cell_width"
val cell_bgnd_colorN = "cell_bgnd_color"
val cell_line_colorN = "cell_line_color"
val cell_line_widthN = "cell_line_width"
val placing_scan = Args.$$$ "left" || Args.$$$ "center" || Args.$$$ "right"
val color_scan = Args.$$$ "none" || Args.$$$ "red" || Args.$$$ "green"
|| Args.$$$ "blue" || Args.$$$ "black"
(*
val _ = Scan.lift
fun lift scan (st, xs) =
let val (y, xs') = scan xs
in (y, (st, xs')) end;
*)
fun tabitem_modes (ctxt, toks) =
let val (y, toks') = ((((Scan.optional
(Args.parens
(Parse.list1
( (Args.$$$ cell_placingN |-- Args.$$$ "=" -- placing_scan
>> (fn (_, k) => upd_cell_placing k))
|| (Args.$$$ cell_heightN |-- Args.$$$ "=" -- parse_latex_measure
>> (fn (_, k) => upd_cell_height k))
|| (Args.$$$ cell_widthN |-- Args.$$$ "=" -- parse_latex_measure
>> (fn (_, k) => upd_cell_width k))
|| (Args.$$$ cell_bgnd_colorN |-- Args.$$$ "=" -- color_scan
>> (fn (_, k) => upd_cell_bgnd_color k))
|| (Args.$$$ cell_line_colorN |-- Args.$$$ "=" -- color_scan
>> (fn (_, k) => upd_cell_line_color k))
|| (Args.$$$ cell_line_widthN |-- Args.$$$ "=" -- parse_latex_measure
>> (fn (_, k) => upd_cell_line_width k))
))) [K (default_cell_config (Context.the_proof ctxt))])
: (cell_config -> cell_config) list parser)
>> (foldl1 (op #>)))
: (cell_config -> cell_config) parser)
(toks)
in (y, (ctxt, toks')) end
datatype table_tree = mk_tab of cell_config * cell_group
| mk_cell of cell_config * Input.source
and cell_group = mk_row of cell_config * table_tree list
| mk_column of cell_config * table_tree list
val tab_config_parser = tabitem_modes : ((cell_config -> cell_config) ) context_parser
val table_parser = tab_config_parser -- Scan.repeat1(Scan.repeat1(Scan.lift Args.cartouche_input))
fun table_antiquotation name scan =
Document_Output.antiquotation_raw_embedded name
scan
(fn ctxt =>
(fn (cfg_trans,content:Input.source list list) =>
let val cfg = cfg_trans mt_cell_config
val _ = writeln ("XXX"^ @{make_string} cfg)
fun check _ = () (* ToDo *)
val _ = check content
in content
|> (map(map (Document_Output.output_document ctxt {markdown = false})
#> mk_line "&" "\\\\"
#> List.concat )
#> List.concat)
|> XML.enclose "\\table[allerhandquatsch]{" "}"
end
)
);
fun cell_antiquotation name scan =
Document_Output.antiquotation_raw_embedded name
scan
(fn ctxt =>
(fn (cfg_trans,content:Input.source) =>
let val cfg = cfg_trans mt_cell_config
val _ = writeln ("XXX"^ @{make_string} cfg)
in content |> Document_Output.output_document ctxt {markdown = false}
end
)
)
fun row_antiquotation name scan =
Document_Output.antiquotation_raw_embedded name
scan
(fn ctxt =>
(fn (cfg_trans,content:Input.source list) =>
let val cfg = cfg_trans mt_cell_config
val _ = writeln ("XXX"^ @{make_string} cfg)
in content |> (map (Document_Output.output_document ctxt {markdown = false})
#> List.concat)
end
)
)
fun column_antiquotation name scan =
Document_Output.antiquotation_raw_embedded name
scan
(fn ctxt =>
(fn (cfg_trans,content:Input.source list) =>
let val cfg = cfg_trans mt_cell_config
val _ = writeln ("XXX"^ @{make_string} cfg)
in content |> (map (Document_Output.output_document ctxt {markdown = false})
#> List.concat)
end
)
)
in
val _ = Theory.setup
( table_antiquotation \<^binding>\<open>table_inline\<close>
table_parser
#> table_antiquotation \<^binding>\<open>subtab\<close> table_parser
#> cell_antiquotation \<^binding>\<open>cell\<close>
(tab_config_parser--Scan.lift Args.cartouche_input)
#> row_antiquotation \<^binding>\<open>row\<close>
(tab_config_parser--Scan.repeat1(Scan.lift Args.cartouche_input))
#> column_antiquotation \<^binding>\<open>column\<close>
(tab_config_parser--Scan.repeat1(Scan.lift Args.cartouche_input))
);
end
\<close>
text\<open> @{file "../ROOT"} \<close>
define_shortcut* clearpage \<rightleftharpoons> \<open>\clearpage{}\<close>
hf \<rightleftharpoons> \<open>\hfill\<close>
br \<rightleftharpoons> \<open>\break\<close>
declare[[tab_cell_placing="left",tab_cell_height="18.0cm"]]
doc_class concept =
tag :: "string" <= "''''"
properties :: "thm list" <= "[]"
section\<open>Tests\<close>
(*<*)
text\<open> @{fig_content [display] (scale = 80, width=80, caption=\<open>this is \<^term>\<open>\<sigma>\<^sub>i+2\<close> \<dots>\<close>)
\<open>../../examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/isabelle-architecture.pdf\<close>}\<close>
text\<open> @{table_inline [display] (cell_placing = center,cell_height =\<open>12.0cm\<close>,
cell_height =\<open>13pt\<close>, cell_width = \<open>12.0cm\<close>,
cell_bgnd_color=black,cell_line_color=red,cell_line_width=\<open>12.0cm\<close>)
\<open>\<open>\<^cell>\<open>dfg\<close> \<^col>\<open>dfg\<close> \<^row>\<open>dfg\<close> @{cell (cell_height =\<open>12.0cm\<close>) \<open>abracadabra\<close>}\<close>
\<open>\<open>1\<close> \<open>2\<close> \<open>3\<sigma>\<close>\<close>
\<close>}
\<^cell>\<open>dfg\<close> @{row \<open>is technical\<close> \<open> \<open>\<sigma> * a\<^sub>4\<close> \<close>}\<close>
(*>*)
ML\<open>@{term "side_by_side_figure"};
@{typ "doc_class rexp"};
DOF_core.SPY;
@{typ "doc_class rexp"};
DOF_core.SPY;
\<close>
text\<open>@{term_ \<open>3 + 4::int\<close>} @{value_ \<open>3 + 4::int\<close>} \<close>
end

File diff suppressed because it is too large.

93
src/DOF/RegExp.thy Normal file
View File

@ -0,0 +1,93 @@
theory RegExp
imports "Functional-Automata.Execute"
begin
term Atom
value "Star (Times(Plus (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))"
notation Star ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
notation Plus (infixr "||" 55)
notation Times (infixr "~~" 60)
notation Atom ("\<lfloor>_\<rfloor>" 65)
(*
datatype 'a rexp = Empty ("<>")
| Atom 'a ("\<lfloor>_\<rfloor>" 65)
| Alt "('a rexp)" "('a rexp)" (infixr "||" 55)
| Conc "('a rexp)" "('a rexp)" (infixr "~~" 60)
| Star "('a rexp)" ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
*)
definition rep1 :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrace>(_)\<rbrace>\<^sup>+")
where "\<lbrace>A\<rbrace>\<^sup>+ \<equiv> A ~~ \<lbrace>A\<rbrace>\<^sup>*"
definition opt :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrakk>(_)\<rbrakk>")
where "\<lbrakk>A\<rbrakk> \<equiv> A || One"
value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))"
text\<open>or, equivalently, in the nicer notation:\<close>
value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*"
section\<open>Definition of a semantic function: the ``language'' of the regular expression\<close>
text\<open> This is just a reminder - already defined in @{theory Regular_Exp} as @{term lang}.\<close>
text\<open>In the following, we give a semantics for our regular expressions, which so far have
just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'',
i.e. we give a direct meaning for regular expressions in some universe of ``denotations''.
This universe of denotations is in our concrete case:\<close>
definition enabled :: "('a,'\<sigma> set)da \<Rightarrow> '\<sigma> set \<Rightarrow> 'a list \<Rightarrow> 'a list"
where "enabled A \<sigma> = filter (\<lambda>x. next A x \<sigma> \<noteq> {}) "
text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close>
fun L :: "'a rexp => 'a lang"
where L_Emp : "L Zero = {}"
|L_One: "L One = {[]}"
|L_Atom: "L (\<lfloor>a\<rfloor>) = {[a]}"
|L_Un: "L (el || er) = (L el) \<union> (L er)"
|L_Conc: "L (el ~~ er) = {xs@ys | xs ys. xs \<in> L el \<and> ys \<in> L er}"
|L_Star: "L (Star e) = Regular_Set.star(L e)"
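text\<open>As a small worked example (our own illustration, not part of the original theory),
unfolding the equations above yields
  \<open>L ((\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>) = {''ac'', ''bc''}\<close>,
i.e. exactly the two words obtained by concatenating one of the atoms with \<open>c\<close>.\<close>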
text\<open>A more useful definition is the sub-language definition:\<close>
fun L\<^sub>s\<^sub>u\<^sub>b :: "'a::order rexp => 'a lang"
where L\<^sub>s\<^sub>u\<^sub>b_Emp: "L\<^sub>s\<^sub>u\<^sub>b Zero = {}"
|L\<^sub>s\<^sub>u\<^sub>b_One: "L\<^sub>s\<^sub>u\<^sub>b One = {[]}"
|L\<^sub>s\<^sub>u\<^sub>b_Atom: "L\<^sub>s\<^sub>u\<^sub>b (\<lfloor>a\<rfloor>) = {z . \<exists>x. x \<le> a \<and> z=[x]}"
|L\<^sub>s\<^sub>u\<^sub>b_Un: "L\<^sub>s\<^sub>u\<^sub>b (el || er) = (L\<^sub>s\<^sub>u\<^sub>b el) \<union> (L\<^sub>s\<^sub>u\<^sub>b er)"
|L\<^sub>s\<^sub>u\<^sub>b_Conc: "L\<^sub>s\<^sub>u\<^sub>b (el ~~ er) = {xs@ys | xs ys. xs \<in> L\<^sub>s\<^sub>u\<^sub>b el \<and> ys \<in> L\<^sub>s\<^sub>u\<^sub>b er}"
|L\<^sub>s\<^sub>u\<^sub>b_Star: "L\<^sub>s\<^sub>u\<^sub>b (Star e) = Regular_Set.star(L\<^sub>s\<^sub>u\<^sub>b e)"
definition XX where "XX = (rexp2na example_expression)"
definition YY where "YY = na2da(rexp2na example_expression)"
(* reminder from execute *)
value "NA.accepts (rexp2na example_expression) [0,1,1,0,0,1]"
value "DA.accepts (na2da (rexp2na example_expression)) [0,1,1,0,0,1]"
definition zero where "zero = (0::nat)"
definition one where "one = (1::nat)"
typ "'a set"
export_code zero one Suc Int.nat nat_of_integer int_of_integer
Zero One Atom Plus Times Star
rexp2na na2da enabled
NA.accepts DA.accepts
example_expression
in SML
module_name RegExpChecker file "RegExpChecker.sml"
SML_file "RegExpChecker.sml"
no_notation Atom ("\<lfloor>_\<rfloor>")
end

339
src/DOF/RegExpChecker.sml Normal file
View File

@ -0,0 +1,339 @@
structure RegExpChecker : sig
type 'a equal
type num
type int
datatype nat = Zero_nat | Suc of nat
type 'a set
datatype 'a rexp = Zero | Onea | Atom of 'a | Plus of 'a rexp * 'a rexp |
Times of 'a rexp * 'a rexp | Star of 'a rexp
val nat : int -> nat
val accepts : 'a * (('b -> 'a -> 'a) * ('a -> bool)) -> 'b list -> bool
val acceptsa :
'a equal -> 'a * (('b -> 'a -> 'a set) * ('a -> bool)) -> 'b list -> bool
val na2da :
'a equal ->
'a * (('b -> 'a -> 'a set) * ('a -> bool)) ->
'a set * (('b -> 'a set -> 'a set) * ('a set -> bool))
val rexp2na :
'a equal ->
'a rexp ->
bool list * (('a -> bool list -> (bool list) set) * (bool list -> bool))
val one : nat
val zero : nat
val enabled :
'a set * (('b -> 'a set -> 'a set) * ('a set -> bool)) ->
'a set -> 'b list -> 'b list
val example_expression : nat rexp
val nat_of_integer : IntInf.int -> nat
val int_of_integer : IntInf.int -> int
end = struct
fun equal_boola p true = p
| equal_boola p false = not p
| equal_boola true p = p
| equal_boola false p = not p;
type 'a equal = {equal : 'a -> 'a -> bool};
val equal = #equal : 'a equal -> 'a -> 'a -> bool;
val equal_bool = {equal = equal_boola} : bool equal;
fun eq A_ a b = equal A_ a b;
fun equal_lista A_ [] (x21 :: x22) = false
| equal_lista A_ (x21 :: x22) [] = false
| equal_lista A_ (x21 :: x22) (y21 :: y22) =
eq A_ x21 y21 andalso equal_lista A_ x22 y22
| equal_lista A_ [] [] = true;
fun equal_list A_ = {equal = equal_lista A_} : ('a list) equal;
datatype num = One | Bit0 of num | Bit1 of num;
datatype int = Zero_int | Pos of num | Neg of num;
datatype nat = Zero_nat | Suc of nat;
datatype 'a set = Set of 'a list | Coset of 'a list;
datatype 'a rexp = Zero | Onea | Atom of 'a | Plus of 'a rexp * 'a rexp |
Times of 'a rexp * 'a rexp | Star of 'a rexp;
fun dup (Neg n) = Neg (Bit0 n)
| dup (Pos n) = Pos (Bit0 n)
| dup Zero_int = Zero_int;
fun plus_nat (Suc m) n = plus_nat m (Suc n)
| plus_nat Zero_nat n = n;
val one_nat : nat = Suc Zero_nat;
fun nat_of_num (Bit1 n) = let
val m = nat_of_num n;
in
Suc (plus_nat m m)
end
| nat_of_num (Bit0 n) = let
val m = nat_of_num n;
in
plus_nat m m
end
| nat_of_num One = one_nat;
fun nat (Pos k) = nat_of_num k
| nat Zero_int = Zero_nat
| nat (Neg k) = Zero_nat;
fun uminus_int (Neg m) = Pos m
| uminus_int (Pos m) = Neg m
| uminus_int Zero_int = Zero_int;
fun plus_num (Bit1 m) (Bit1 n) = Bit0 (plus_num (plus_num m n) One)
| plus_num (Bit1 m) (Bit0 n) = Bit1 (plus_num m n)
| plus_num (Bit1 m) One = Bit0 (plus_num m One)
| plus_num (Bit0 m) (Bit1 n) = Bit1 (plus_num m n)
| plus_num (Bit0 m) (Bit0 n) = Bit0 (plus_num m n)
| plus_num (Bit0 m) One = Bit1 m
| plus_num One (Bit1 n) = Bit0 (plus_num n One)
| plus_num One (Bit0 n) = Bit1 n
| plus_num One One = Bit0 One;
val one_int : int = Pos One;
fun bitM One = One
| bitM (Bit0 n) = Bit1 (bitM n)
| bitM (Bit1 n) = Bit1 (Bit0 n);
fun sub (Bit0 m) (Bit1 n) = minus_int (dup (sub m n)) one_int
| sub (Bit1 m) (Bit0 n) = plus_int (dup (sub m n)) one_int
| sub (Bit1 m) (Bit1 n) = dup (sub m n)
| sub (Bit0 m) (Bit0 n) = dup (sub m n)
| sub One (Bit1 n) = Neg (Bit0 n)
| sub One (Bit0 n) = Neg (bitM n)
| sub (Bit1 m) One = Pos (Bit0 m)
| sub (Bit0 m) One = Pos (bitM m)
| sub One One = Zero_int
and plus_int (Neg m) (Neg n) = Neg (plus_num m n)
| plus_int (Neg m) (Pos n) = sub n m
| plus_int (Pos m) (Neg n) = sub m n
| plus_int (Pos m) (Pos n) = Pos (plus_num m n)
| plus_int Zero_int l = l
| plus_int k Zero_int = k
and minus_int (Neg m) (Neg n) = sub n m
| minus_int (Neg m) (Pos n) = Neg (plus_num m n)
| minus_int (Pos m) (Neg n) = Pos (plus_num m n)
| minus_int (Pos m) (Pos n) = sub m n
| minus_int Zero_int l = uminus_int l
| minus_int k Zero_int = k;
fun list_ex p [] = false
| list_ex p (x :: xs) = p x orelse list_ex p xs;
fun bex (Set xs) p = list_ex p xs;
fun snd (x1, x2) = x2;
fun fst (x1, x2) = x1;
fun next a = fst (snd a);
fun foldl f a [] = a
| foldl f a (x :: xs) = foldl f (f a x) xs;
fun foldl2 f xs a = foldl (fn aa => fn b => f b aa) a xs;
fun delta a = foldl2 (next a);
fun filter p [] = []
| filter p (x :: xs) = (if p x then x :: filter p xs else filter p xs);
fun membera A_ [] y = false
| membera A_ (x :: xs) y = eq A_ x y orelse membera A_ xs y;
fun member A_ x (Coset xs) = not (membera A_ xs x)
| member A_ x (Set xs) = membera A_ xs x;
fun removeAll A_ x [] = []
| removeAll A_ x (y :: xs) =
(if eq A_ x y then removeAll A_ x xs else y :: removeAll A_ x xs);
fun inserta A_ x xs = (if membera A_ xs x then xs else x :: xs);
fun insert A_ x (Coset xs) = Coset (removeAll A_ x xs)
| insert A_ x (Set xs) = Set (inserta A_ x xs);
fun fold f (x :: xs) s = fold f xs (f x s)
| fold f [] s = s;
fun sup_set A_ (Coset xs) a = Coset (filter (fn x => not (member A_ x a)) xs)
| sup_set A_ (Set xs) a = fold (insert A_) xs a;
val bot_set : 'a set = Set [];
fun sup_seta A_ (Set xs) = fold (sup_set A_) xs bot_set;
fun map f [] = []
| map f (x21 :: x22) = f x21 :: map f x22;
fun image f (Set xs) = Set (map f xs);
fun deltaa A_ a [] p = insert A_ p bot_set
| deltaa A_ aa (a :: w) p =
sup_seta A_ (image (deltaa A_ aa w) (next aa a p));
fun null [] = true
| null (x :: xs) = false;
fun start a = fst a;
fun fin a = snd (snd a);
fun accepts a = (fn w => fin a (delta a w (start a)));
fun acceptsa A_ a w = bex (deltaa A_ a w (start a)) (fin a);
fun or x =
(fn (ql, (dl, fl)) => fn (qr, (dr, fr)) =>
([], ((fn a => fn b =>
(case b
of [] =>
sup_set (equal_list equal_bool)
(image (fn aa => true :: aa) (dl a ql))
(image (fn aa => false :: aa) (dr a qr))
| true :: s => image (fn aa => true :: aa) (dl a s)
| false :: s => image (fn aa => false :: aa) (dr a s))),
(fn a =>
(case a of [] => fl ql orelse fr qr | true :: s => fl s
| false :: s => fr s)))))
x;
fun is_empty (Set xs) = null xs;
fun na2da A_ a =
(insert A_ (start a) bot_set,
((fn aa => fn q => sup_seta A_ (image (next a aa) q)),
(fn q => bex q (fin a))));
fun atom A_ a =
([true],
((fn b => fn s =>
(if equal_lista equal_bool s [true] andalso eq A_ b a
then insert (equal_list equal_bool) [false] bot_set else bot_set)),
(fn s => equal_lista equal_bool s [false])));
fun conc x =
(fn (ql, (dl, fl)) => fn (qr, (dr, fr)) =>
(true :: ql,
((fn a => fn b =>
(case b of [] => bot_set
| true :: s =>
sup_set (equal_list equal_bool)
(image (fn aa => true :: aa) (dl a s))
(if fl s then image (fn aa => false :: aa) (dr a qr)
else bot_set)
| false :: s => image (fn aa => false :: aa) (dr a s))),
(fn a =>
(case a of [] => false
| left :: s =>
left andalso (fl s andalso fr qr) orelse
not left andalso fr s)))))
x;
fun plus x =
(fn (q, (d, f)) =>
(q, ((fn a => fn s =>
sup_set (equal_list equal_bool) (d a s)
(if f s then d a q else bot_set)),
f)))
x;
val epsilon :
bool list * (('a -> bool list -> (bool list) set) * (bool list -> bool))
= ([], ((fn _ => fn _ => bot_set), null));
fun star a = or epsilon (plus a);
fun rexp2na A_ Zero = ([], ((fn _ => fn _ => bot_set), (fn _ => false)))
| rexp2na A_ Onea = epsilon
| rexp2na A_ (Atom a) = atom A_ a
| rexp2na A_ (Plus (r, s)) = or (rexp2na A_ r) (rexp2na A_ s)
| rexp2na A_ (Times (r, s)) = conc (rexp2na A_ r) (rexp2na A_ s)
| rexp2na A_ (Star r) = star (rexp2na A_ r);
fun apsnd f (x, y) = (x, f y);
val one : nat = one_nat;
val zero : nat = Zero_nat;
fun enabled a sigma = filter (fn x => not (is_empty (next a x sigma)));
val example_expression : nat rexp =
let
val r0 = Atom Zero_nat;
val r1 = Atom one_nat;
in
Times (Star (Plus (Times (r1, r1), r0)), Star (Plus (Times (r0, r0), r1)))
end;
fun sgn_integer k =
(if ((k : IntInf.int) = (0 : IntInf.int)) then (0 : IntInf.int)
else (if IntInf.< (k, (0 : IntInf.int)) then (~1 : IntInf.int)
else (1 : IntInf.int)));
fun divmod_integer k l =
(if ((k : IntInf.int) = (0 : IntInf.int))
then ((0 : IntInf.int), (0 : IntInf.int))
else (if ((l : IntInf.int) = (0 : IntInf.int)) then ((0 : IntInf.int), k)
else (apsnd o (fn a => fn b => IntInf.* (a, b)) o sgn_integer) l
(if (((sgn_integer k) : IntInf.int) = (sgn_integer l))
then IntInf.divMod (IntInf.abs k, IntInf.abs l)
else let
val (r, s) =
IntInf.divMod (IntInf.abs k, IntInf.abs l);
in
(if ((s : IntInf.int) = (0 : IntInf.int))
then (IntInf.~ r, (0 : IntInf.int))
else (IntInf.- (IntInf.~ r, (1 : IntInf.int)),
IntInf.- (IntInf.abs l, s)))
end)));
fun nat_of_integer k =
(if IntInf.<= (k, (0 : IntInf.int)) then Zero_nat
else let
val (l, j) = divmod_integer k (2 : IntInf.int);
val la = nat_of_integer l;
val lb = plus_nat la la;
in
(if ((j : IntInf.int) = (0 : IntInf.int)) then lb
else plus_nat lb one_nat)
end);
fun times_num (Bit1 m) (Bit1 n) =
Bit1 (plus_num (plus_num m n) (Bit0 (times_num m n)))
| times_num (Bit1 m) (Bit0 n) = Bit0 (times_num (Bit1 m) n)
| times_num (Bit0 m) (Bit1 n) = Bit0 (times_num m (Bit1 n))
| times_num (Bit0 m) (Bit0 n) = Bit0 (Bit0 (times_num m n))
| times_num One n = n
| times_num m One = m;
fun times_int (Neg m) (Neg n) = Pos (times_num m n)
| times_int (Neg m) (Pos n) = Neg (times_num m n)
| times_int (Pos m) (Neg n) = Neg (times_num m n)
| times_int (Pos m) (Pos n) = Pos (times_num m n)
| times_int Zero_int l = Zero_int
| times_int k Zero_int = Zero_int;
fun int_of_integer k =
(if IntInf.< (k, (0 : IntInf.int))
then uminus_int (int_of_integer (IntInf.~ k))
else (if ((k : IntInf.int) = (0 : IntInf.int)) then Zero_int
else let
val (l, j) = divmod_integer k (2 : IntInf.int);
val la = times_int (Pos (Bit0 One)) (int_of_integer l);
in
(if ((j : IntInf.int) = (0 : IntInf.int)) then la
else plus_int la one_int)
end));
end; (*struct RegExpChecker*)

View File

@ -1,23 +1,7 @@
(*************************************************************************
* Copyright (C)
* 2019 The University of Exeter
* 2018-2019 The University of Paris-Saclay
* 2018 The University of Sheffield
*
* License:
* This program can be redistributed and/or modified under the terms
* of the 2-clause BSD-style license.
*
* SPDX-License-Identifier: BSD-2-Clause
*************************************************************************)
chapter\<open>The High-Level Interface to the Automata-Library\<close>
theory RegExpInterface
imports "Functional-Automata.Execute"
keywords
"reflect_ML_exports" :: thy_decl
imports "Functional-Automata.Execute"
begin
@ -68,13 +52,13 @@ This universe of denotations is in our concrete case:\<close>
text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close>
fun Lang :: "'a rexp => 'a lang"
where L_Emp : "Lang Zero = {}"
|L_One: "Lang One = {[]}"
|L_Atom: "Lang (\<lfloor>a\<rfloor>) = {[a]}"
|L_Un: "Lang (el || er) = (Lang el) \<union> (Lang er)"
|L_Conc: "Lang (el ~~ er) = {xs@ys | xs ys. xs \<in> Lang el \<and> ys \<in> Lang er}"
|L_Star: "Lang (Star e) = Regular_Set.star(Lang e)"
fun L :: "'a rexp => 'a lang"
where L_Emp : "L Zero = {}"
|L_One: "L One = {[]}"
|L_Atom: "L (\<lfloor>a\<rfloor>) = {[a]}"
|L_Un: "L (el || er) = (L el) \<union> (L er)"
|L_Conc: "L (el ~~ er) = {xs@ys | xs ys. xs \<in> L el \<and> ys \<in> L er}"
|L_Star: "L (Star e) = Regular_Set.star(L e)"
text\<open>A more useful definition is the sub-language definition:\<close>
@ -93,6 +77,7 @@ definition YY where "YY = na2da(rexp2na example_expression)"
value "NA.accepts (rexp2na example_expression) [0,1,1,0,0,1]"
value "DA.accepts (na2da (rexp2na example_expression)) [0,1,1,0,0,1]"
section\<open>HOL - Adaptions and Export to SML\<close>
definition enabled :: "('a,'\<sigma> set)da \<Rightarrow> '\<sigma> set \<Rightarrow> 'a list \<Rightarrow> 'a list"
@ -102,57 +87,29 @@ definition enabled :: "('a,'\<sigma> set)da \<Rightarrow> '\<sigma> set \<Right
definition zero where "zero = (0::nat)"
definition one where "one = (1::nat)"
export_code zero one Suc Int.nat nat_of_integer int_of_integer (* for debugging *)
export_code zero one Suc Int.nat nat_of_integer int_of_integer (* for debugging *)
example_expression (* for debugging *)
Zero One Atom Plus Times Star (* regexp abstract syntax *)
rexp2na na2da enabled (* low-level automata interface *)
NA.accepts DA.accepts
in SML module_name RegExpChecker
subsection\<open>Infrastructure for Reflecting exported SML code\<close>
ML\<open>
fun reflect_local_ML_exports args trans = let
fun eval_ML_context ctxt = let
fun is_sml_file f = String.isSuffix ".ML" (Path.implode (#path f))
val files = (map (Generated_Files.check_files_in (Context.proof_of ctxt)) args)
val ml_files = filter is_sml_file (map #1 (maps Generated_Files.get_files_in files))
val ml_content = map (fn f => Syntax.read_input (Bytes.content (#content f))) ml_files
fun eval ml_content = fold (fn sml => (ML_Context.exec
(fn () => ML_Context.eval_source ML_Compiler.flags sml)))
ml_content
in
(eval ml_content #> Local_Theory.propagate_ml_env) ctxt
end
in
Toplevel.generic_theory eval_ML_context trans
end
val files_in_theory =
(Parse.underscore >> K [] || Scan.repeat1 Parse.path_binding) --
Scan.option (\<^keyword>\<open>(\<close> |-- Parse.!!! (\<^keyword>\<open>in\<close>
|-- Parse.theory_name --| \<^keyword>\<open>)\<close>));
val _ =
Outer_Syntax.command \<^command_keyword>\<open>reflect_ML_exports\<close>
"evaluate generated Standard ML files"
(Parse.and_list1 files_in_theory >> (fn args => reflect_local_ML_exports args));
\<close>
reflect_ML_exports _
in SML module_name RegExpChecker
file "RegExpChecker.sml" (* writing it to a file *)
(* potentially susceptible to race conditions ... *)
SML_file "RegExpChecker.sml" (* reads and eval generated file
into SML toplevel *)
SML_export \<open>structure RegExpChecker = RegExpChecker\<close> (* copies from SML toplevel into
Isabelle/ML toplevel *)
section\<open>The Abstract Interface For Monitor Expressions\<close>
text\<open>Here comes the crux: the reflection of the HOL-Automata module into an SML module
with an abstract interface that hides some generation artefacts, such as the internal states
of the deterministic automata ...\<close>
ML\<open>
structure RegExpInterface : sig
@ -178,7 +135,7 @@ local open RegExpChecker in
type automaton = state * ((Int.int -> state -> state) * (state -> bool))
val add_atom = fold_aterms (fn Const (c as (_, \<^Type>\<open>rexp _\<close>)) => insert (op=) c | _=> I);
val add_atom = fold_aterms (fn Const(c as(_,Type(@{type_name "rexp"},_)))=> insert (op=) c |_=>I);
fun alphabet termS = rev(map fst (fold add_atom termS []));
fun ext_alphabet env termS =
let val res = rev(map fst (fold add_atom termS [])) @ env;
@ -187,14 +144,14 @@ local open RegExpChecker in
else ()
in res end;
fun conv \<^Const_>\<open>Regular_Exp.rexp.Zero _\<close> _ = Zero
|conv \<^Const_>\<open>Regular_Exp.rexp.One _\<close> _ = Onea
|conv \<^Const_>\<open>Regular_Exp.rexp.Times _ for X Y\<close> env = Times(conv X env, conv Y env)
|conv \<^Const_>\<open>Regular_Exp.rexp.Plus _ for X Y\<close> env = Plus(conv X env, conv Y env)
|conv \<^Const_>\<open>Regular_Exp.rexp.Star _ for X\<close> env = Star(conv X env)
|conv \<^Const_>\<open>RegExpInterface.opt _ for X\<close> env = Plus(conv X env, Onea)
|conv \<^Const_>\<open>RegExpInterface.rep1 _ for X\<close> env = Times(conv X env, Star(conv X env))
|conv (Const (s, \<^Type>\<open>rexp _\<close>)) env =
fun conv (Const(@{const_name "Regular_Exp.rexp.Zero"},_)) _ = Zero
|conv (Const(@{const_name "Regular_Exp.rexp.One"},_)) _ = Onea
|conv (Const(@{const_name "Regular_Exp.rexp.Times"},_) $ X $ Y) env = Times(conv X env, conv Y env)
|conv (Const(@{const_name "Regular_Exp.rexp.Plus"},_) $ X $ Y) env = Plus(conv X env, conv Y env)
|conv (Const(@{const_name "Regular_Exp.rexp.Star"},_) $ X) env = Star(conv X env)
|conv (Const(@{const_name "RegExpInterface.opt"},_) $ X) env = Plus(conv X env, Onea)
|conv (Const(@{const_name "RegExpInterface.rep1"},_) $ X) env = Times(conv X env, Star(conv X env))
|conv (Const (s, Type(@{type_name "rexp"},_))) env =
let val n = find_index (fn x => x = s) env
val _ = if n<0 then error"conversion error of regexp." else ()
in Atom(n) end
@ -239,4 +196,3 @@ end (* struct *)
no_notation Atom ("\<lfloor>_\<rfloor>")
end

View File

@ -6,15 +6,16 @@
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
\NeedsTeXFormat{LaTeX2e}\relax
\ProvidesPackage{DOF-COL}
[00/00/0000 Document-Type Support Framework for Isabelle.]
[0000/00/00 Unreleased v0.0.0+%
Document-Type Support Framework for Isabelle.]
\RequirePackage{DOF-core}
@ -54,7 +55,6 @@
\newisadof{side_by_side_figure.Isa_COL.side_by_side_figure}%
[label=,type=%
,Isa_COL.figure.relative_width=%
,Isa_COL.figure.placement=%
,Isa_COL.figure.src=%
,Isa_COL.side_by_side_figure.anchor=%
,Isa_COL.side_by_side_figure.caption=%

View File

@ -1,25 +0,0 @@
%% Copyright (C) 2021 University of Exeter
%% 2021 University of Paris-Saclay
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
\NeedsTeXFormat{LaTeX2e}\relax
\ProvidesPackage{DOF-amssymb}
[00/00/0000 Document-Type Support Framework for Isabelle (amssymb wrapper for lualatex/pdflatex).]
\usepackage{ifxetex,ifluatex}
\ifnum 0\ifxetex 1\fi\ifluatex 1\fi=0 % if pdftex
\usepackage{amssymb}
\else % if luatex or xetex
\usepackage{unicode-math}
\usepackage{latexsym}
\fi

View File

@ -6,43 +6,36 @@
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
\NeedsTeXFormat{LaTeX2e}\relax
\ProvidesPackage{DOF-core}
[00/00/0000 Document-Type Support Framework for Isabelle.]
\usepackage[T1]{fontenc}
\usepackage[utf8]{inputenc}
\usepackage[english,USenglish]{babel}
[0000/00/00 Unreleased v0.0.0+%
Document-Type Support Framework for Isabelle.]
\RequirePackage{keycommand}
\RequirePackage{environ}
\RequirePackage{graphicx}
\RequirePackage{xcolor}
\RequirePackage{xspace}
\RequirePackage{etoolbox}
\RequirePackage{fp}
\usepackage{amsmath}
\usepackage{DOF-amssymb}
\usepackage{isabelle}
\usepackage{isabellesym}
\newcommand{\isabelleurl}{UNDEFINED}
\newcommand{\dofurl}{UNDEFINED}
\newcommand{\dof@isabelleversion}{UNDEFINED}
\newcommand{\isabellefullversion}{UNDEFINED\xspace}
\newcommand{\dof@version}{UNDEFINED}
\newcommand{\dof@artifacturl}{UNDEFINED}
\isabellestyle{it}
%%% CONFIG %%%
\RequirePackage{dof-config}
\newcommand{\isabelleversion}{\dof@isabelleversion\xspace}
\newcommand{\dofversion}{\dof@version\xspace}
\newcommand{\isadofversion}{\dofversion/\isabelleversion}
\newcommand{\isadoflatestversion}{\doflatestversion/\isabellelatestversion}
\newcommand{\isadofversion}{\dofversion/\isabelleversion\xspace}
\newcommand{\isadofdir}{Isabelle_DOF-\dof@version_\dof@isabelleversion}
\newcommand{\isadofdirn}{Isabelle\_DOF-\dof@version\_\dof@isabelleversion}
\newcommand{\isadofarchive}{\isadofdir.tar.xz}
@ -156,12 +149,5 @@
% begin: label and ref
\newisadof{label}[label=,type=][1]{\label{#1}}
\newisadof{ref}[label=,type=][1]{\autoref{#1}}
\newisadof{macroDef}[label=,type=][1]{MMM \label{#1}} %% place_holder
\newisadof{macroExp}[label=,type=][1]{MMM \autoref{#1}} %% place_holder
% end: label and ref
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title{No Title Given}
\author{No Author Given}
\input{ontologies}
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}%

View File

@ -1,19 +1,16 @@
session "Isabelle_DOF" = "Functional-Automata" +
options [document = pdf, document_output = "output", document_build = dof]
options [document = pdf, document_output = "output"]
sessions
"Regular-Sets"
directories
"DOF"
"ontologies"
"ontologies/CENELEC_50128"
"ontologies/Conceptual"
"ontologies/scholarly_paper"
"ontologies/small_math"
"ontologies/technical_report"
"ontologies/CC_v3_1_R5"
theories
"DOF/Isa_DOF"
"ontologies/ontologies"
export_classpath
session "Isabelle_DOF-tests" = "Isabelle_DOF" +
options [document = false]
theories
"tests/AssnsLemmaThmEtc"
"tests/Concept_ExampleInvariant"
"tests/Concept_Example"
"tests/InnerSyntaxAntiquotations"
"tests/Attributes"

View File

@ -1 +0,0 @@
tests

150
src/Tools/mkroot_DOF Executable file
View File

@ -0,0 +1,150 @@
#!/usr/bin/env bash
# Copyright (c) 2019 University of Exeter
# 2018-2019 University of Paris-Saclay
# 2018-2019 The University of Sheffield
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
#
# DESCRIPTION: prepare session root directory with DOF setup
## diagnostics
PRG="$(basename "$0")"
function usage()
{
echo
echo "Usage: isabelle $PRG [OPTIONS] [DIR]"
echo
echo " Options are:"
echo " -h print this help text and exit"
echo " -n NAME alternative session name (default: DIR base name)"
echo " -o ONTOLOGY (default: $DEFAULT_ONTOLOGY)"
echo " Available ontologies:"
for t in "$ISABELLE_HOME_USER/DOF/latex/"*.sty; do
if [[ $t =~ DOF-(.*).sty$ ]]; then
echo " * ${BASH_REMATCH[1]}"
fi
done
echo " -t TEMPLATE (default: $DEFAULT_TEMPLATE)"
echo " Available document templates:"
for t in "$ISABELLE_HOME_USER/DOF/document-template/"*.tex; do
if [[ $t =~ root-(.*).tex$ ]]; then
echo " * ${BASH_REMATCH[1]}"
fi
done
echo
echo " Prepare session root DIR (default: current directory)."
echo
exit 1
}
function fail()
{
echo "$1" >&2
exit 2
}
## process command line
# options
NAME=""
DEFAULT_TEMPLATE="scrartcl"
DEFAULT_ONTOLOGY="scholarly_paper"
TEMPLATE="$DEFAULT_TEMPLATE"
ONTOLOGY="$DEFAULT_ONTOLOGY"
while getopts "t:o:n:dh" OPT
do
case "$OPT" in
h)
usage
exit 1
;;
n)
NAME="$OPTARG"
;;
o)
if [ ! -f "$ISABELLE_HOME_USER/DOF/latex/DOF-$OPTARG.sty" ]; then
echo "ERROR: Ontology $OPTARG not available!"
exit 1
fi
ONTOLOGY="$ONTOLOGY $OPTARG"
;;
t)
TEMPLATE="$OPTARG"
if [ ! -f "$ISABELLE_HOME_USER/DOF/document-template/root-$TEMPLATE.tex" ]; then
echo "ERROR: Template $TEMPLATE not available!"
exit 1
fi
;;
\?)
usage
exit 0
;;
esac
done
shift $(($OPTIND - 1))
# args
if [ "$#" -eq 0 ]; then
DIR="."
elif [ "$#" -eq 1 ]; then
DIR="$1"
shift
else
usage
fi
if [ -z "$NAME" ]; then
NAME="$DIR"
fi
$ISABELLE_TOOL mkroot -n "$NAME" "$DIR"
echo " \"preamble.tex\"" >> "$DIR"/ROOT
echo " \"build\"" >> "$DIR"/ROOT
sed -i -e "s/root.tex/isadof.cfg/" "$DIR"/ROOT
sed -i -e "s/HOL/Isabelle_DOF/" "$DIR"/ROOT
rm -f "$DIR"/document/root.tex
# Creating isadof.cfg
echo "Template: $TEMPLATE" > "$DIR"/document/isadof.cfg
cp "$ISABELLE_HOME_USER/DOF/document-template/build" "$DIR"/document/
for o in $ONTOLOGY; do
echo "Ontology: $o" >> "$DIR"/document/isadof.cfg;
done
# Creating preamble.tex
TITLE=$(echo "$NAME" | tr _ - | tr -d '\\')
AUTHOR=$(echo "By $USER" | tr _ - | tr -d '\\')
echo "%% This is a placeholder for user-specific configuration and packages." >> "$DIR"/document/preamble.tex
echo "\\title{$TITLE}{}{}{}{}{}{}" >> "$DIR"/document/preamble.tex
echo "\\author{$AUTHOR}{}{}{}{}{}" >> "$DIR"/document/preamble.tex

View File

@ -1,16 +1,16 @@
%% Copyright (c) 2019-2022 University of Exeter
%% 2018-2022 University of Paris-Saclay
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Saclay
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% Warning: Do Not Edit!
%% =====================
@ -23,11 +23,28 @@
%% preamble.tex.
\documentclass[submission,copyright,creativecommons]{eptcs}
\usepackage{DOF-core}
\bibliographystyle{eptcs}% the mandatory bibstyle
\usepackage[T1]{fontenc}
\usepackage[utf8]{inputenc}
\usepackage[USenglish]{babel}
\usepackage{isabelle}
%\usepackage{underscore}
\usepackage{xcolor}
\usepackage{isabellesym}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage[numbers, sort&compress, sectionbib]{natbib}
\IfFileExists{DOF-core.sty}{}{%
\PackageError{DOF-core}{Isabelle/DOF not installed.
This is an Isabelle_DOF project. The document preparation requires
the Isabelle_DOF framework. Please obtain the framework by cloning
the Isabelle_DOF git repository, i.e.:
"git clone <isadofurl>"
You can install the framework as follows:
"cd Isabelle_DOF/document-generator && ./install"}{%
For further help, see <isadofurl>}
}
\newcommand{\subtitle}[1]{%
\PackageError{DOF-eptcs-UNSUPPORTED}
{The LaTeX class eptcs does not support subtitles.}
@ -37,6 +54,7 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% provide an alternative definition of
% begin: scholarly_paper.author
\RequirePackage{DOF-core}
\newcommand{\dofeptcsinstitute}[1]{\mbox{}\\\protect\scriptsize%
\protect\begin{tabular}[t]{@{\protect\footnotesize}c@{}}%
#1%
@ -62,15 +80,15 @@
\makeatother
% end: scholarly_paper.author
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\input{ontologies}
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}%
\begin{document}
\maketitle
\input{session}
% optional bibliography
\IfFileExists{root.bib}{%
\bibliography{root}
}{}
\IfFileExists{root.bib}{{\bibliography{root}}}{}
\end{document}
%%% Local Variables:

View File

@ -1,33 +1,47 @@
%% Copyright (c) 2019-2022 University of Exeter
%% 2018-2022 University of Paris-Saclay
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Saclay
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF using the lipics class.
%% Note that lipics is *not* distributed as part of Isabelle/DOF;
%% you need to download lipics.cls from
%% Note that lipics cannot be distributed as part of Isabelle/DOF; you need
%% to download lipics.cls from
%% https://www.dagstuhl.de/en/publications/lipics/instructions-for-authors/
%% and add it manually to the preamble.tex and the ROOT file.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
\documentclass[a4paper,UKenglish,cleveref, autoref,thm-restate]{lipics-v2021}
\documentclass[a4paper,USenglish,cleveref, autoref]{lipics-v2019}
\bibliographystyle{plainurl}% the mandatory bibstyle
\usepackage{isabelle}
\usepackage{isabellesym}
% \usepackage{amsmath}
% \usepackage{amssymb}
\usepackage[numbers, sort&compress, sectionbib]{natbib}
\IfFileExists{DOF-core.sty}{}{%
\PackageError{DOF-core}{Isabelle/DOF not installed.
This is an Isabelle_DOF project. The document preparation requires
the Isabelle_DOF framework. Please obtain the framework by cloning
the Isabelle_DOF git repository, i.e.:
"git clone <isadofurl>"
You can install the framework as follows:
"cd Isabelle_DOF/document-generator && ./install"}{%
For further help, see <isadofurl>}
}
\usepackage{DOF-core}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% provide an alternative definition of
@ -53,21 +67,24 @@
\makeatother
% end: scholarly_paper.author
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\input{ontologies}
\renewcommand{\DOFauthor}{}
\renewcommand{\DOFinstitute}{}
\expandafter\newcommand\csname 2authand\endcsname{}
\expandafter\newcommand\csname 3authand\endcsname{}
\expandafter\newcommand\csname 4authand\endcsname{}
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}%
\begin{document}
\maketitle
\input{session}
% optional bibliography
\IfFileExists{root.bib}{%
\small
{\small
\bibliography{root}
}{}
}}{}
\end{document}
%%% Local Variables:

View File

@ -1,29 +1,52 @@
%% Copyright (c) 2019-2022 University of Exeter
%% 2018-2022 University of Paris-Saclay
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Saclay
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF using the lncs class.
%% Note that lncs cannot be distributed as part of Isabelle/DOF; you need
%% to download lncs.cls from
%% https://www.springer.com/gp/computer-science/lncs/conference-proceedings-guidelines
%% and add it manually to the preamble.tex and the ROOT file.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
\RequirePackage{ifvtex}
\documentclass{llncs}
\usepackage{DOF-core}
\bibliographystyle{splncs04}
\usepackage[T1]{fontenc}
\usepackage[utf8]{inputenc}
\usepackage[USenglish]{babel}
\usepackage{isabelle}
\usepackage{xcolor}
\usepackage{isabellesym}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage[numbers, sort&compress, sectionbib]{natbib}
\IfFileExists{DOF-core.sty}{}{%
\PackageError{DOF-core}{Isabelle/DOF not installed.
This is an Isabelle_DOF project. The document preparation requires
the Isabelle_DOF framework. Please obtain the framework by cloning
the Isabelle_DOF git repository, i.e.:
"git clone <isadofurl>"
You can install the framework as follows:
"cd Isabelle_DOF/document-generator && ./install"}{%
For further help, see <isadofurl>}
}
\input{ontologies}
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}%
\usepackage{graphicx}
\usepackage{hyperref}
\setcounter{tocdepth}{3}
\hypersetup{%
@ -35,7 +58,8 @@
} % more detailed digital TOC (aka bookmarks)
\sloppy
\allowdisplaybreaks[4]
\urlstyle{rm}
\isabellestyle{it}
\usepackage[caption]{subfig}
\usepackage[size=footnotesize]{caption}
@ -50,6 +74,17 @@
\renewcommand{\floatpagefraction}{0.7} % require fuller float pages
\renewcommand{\dblfloatpagefraction}{0.7} % require fuller float pages
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Overrides the (rightfully issued) warning by Koma Script that \rm
%%% etc. should not be used (they have been deprecated for more than
%%% a decade)
\DeclareOldFontCommand{\rm}{\normalfont\rmfamily}{\mathrm}
\DeclareOldFontCommand{\sf}{\normalfont\sffamily}{\mathsf}
\DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt}
\DeclareOldFontCommand{\bf}{\normalfont\bfseries}{\mathbf}
\DeclareOldFontCommand{\it}{\normalfont\itshape}{\mathit}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\selectlanguage{USenglish}%
\renewcommand{\bibname}{References}%
@ -65,10 +100,17 @@
\maketitle
\maketitle
\input{session}
% optional bibliography
\IfFileExists{root.bib}{{\small\bibliography{root}}}{}
\IfFileExists{root.bib}{%
{\small \renewcommand{\doi}[1]{}
\newcommand{\urlprefix}{}
\bibliographystyle{spmpscinat}
\bibliography{root}
}}{}
\end{document}
%%% Local Variables:

View File

@ -1,16 +1,16 @@
%% Copyright (c) 2019-2022 University of Exeter
%% 2018-2022 University of Paris-Saclay
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Saclay
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1.3c of the License, or (at your option) any later version.
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
%%
%% SPDX-License-Identifier: LPPL-1
%% Warning: Do Not Edit!
%% =====================
@ -20,17 +20,31 @@
%% preamble.tex.
\RequirePackage{ifvtex}
\documentclass[abstract=true,fontsize=11pt,DIV=12,paper=a4]{scrartcl}
\usepackage{DOF-core}
\documentclass[fontsize=11pt,DIV=12,paper=a4]{scrartcl}
\usepackage[T1]{fontenc}
\usepackage[utf8]{inputenc}
\usepackage{textcomp}
\bibliographystyle{abbrvnat}
\usepackage[english]{babel}
\RequirePackage[caption]{subfig}
\usepackage{isabelle}
\usepackage{isabellesym}
\IfFileExists{DOF-core.sty}{}{%
\PackageError{DOF-core}{Isabelle/DOF not installed.
This is an Isabelle_DOF project. The document preparation requires
the Isabelle_DOF framework. Please obtain the framework by cloning
the Isabelle_DOF git repository, i.e.:
"git clone <isadofurl>"
You can install the framework as follows:
"cd Isabelle_DOF/document-generator && ./install"}{%
For further help, see <isadofurl>}
}
\input{ontologies}
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}%
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage[numbers, sort&compress, sectionbib]{natbib}
\usepackage{graphicx}
\usepackage{hyperref}
\setcounter{tocdepth}{3}
\hypersetup{%
@ -42,16 +56,30 @@
} % more detailed digital TOC (aka bookmarks)
\sloppy
\allowdisplaybreaks[4]
\urlstyle{rm}
\isabellestyle{it}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Overrides the (rightfully issued) warning by Koma Script that \rm
%%% etc. should not be used (they have been deprecated for more than
%%% a decade)
\DeclareOldFontCommand{\rm}{\normalfont\rmfamily}{\mathrm}
\DeclareOldFontCommand{\sf}{\normalfont\sffamily}{\mathsf}
\DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt}
\DeclareOldFontCommand{\bf}{\normalfont\bfseries}{\mathbf}
\DeclareOldFontCommand{\it}{\normalfont\itshape}{\mathit}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\newenvironment{frontmatter}{}{}
\begin{document}
\begin{frontmatter}
\maketitle
\tableofcontents
\end{frontmatter}
\input{session}
% optional bibliography
\IfFileExists{root.bib}{{\bibliography{root}}}{}
\IfFileExists{root.bib}{%
\bibliography{root}
}{}
\end{document}
%%% Local Variables:

Some files were not shown because too many files have changed in this diff.