Compare commits
228 Commits
2021-ITP-P
...
main
Author | SHA1 | Date |
---|---|---|
Nicolas Méric | 8513f7d267 | |
Nicolas Méric | 2b1a9d009e | |
Nicolas Méric | cd758d2c44 | |
Nicolas Méric | 8496963fec | |
Nicolas Méric | 72d8000f7b | |
Nicolas Méric | 17ec11b297 | |
Nicolas Méric | a96e17abf3 | |
Nicolas Méric | 74b60e47d5 | |
Nicolas Méric | a42dd4ea6c | |
Nicolas Méric | b162a24749 | |
Nicolas Méric | a9432c7b52 | |
Nicolas Méric | 9f28d4949e | |
Nicolas Méric | 885c23a138 | |
Nicolas Méric | a589d4cd47 | |
Burkhart Wolff | e1f143d151 | |
Burkhart Wolff | fd60cf2312 | |
Nicolas Méric | 73dfcd6c1e | |
Nicolas Méric | c0afe1105e | |
Burkhart Wolff | e414b97afb | |
Nicolas Méric | 0b2d28b547 | |
Nicolas Méric | 37d7ed7d17 | |
Nicolas Méric | 312734afbd | |
Burkhart Wolff | 8cee80d78e | |
Makarius Wenzel | ec0d525426 | |
Makarius Wenzel | 791990039b | |
Makarius Wenzel | 78d61390fe | |
Makarius Wenzel | ffcf1f3240 | |
Makarius Wenzel | 5471d873a9 | |
Makarius Wenzel | df37250a00 | |
Makarius Wenzel | 185daeb577 | |
Makarius Wenzel | 8037fd15f2 | |
Makarius Wenzel | afcd78610b | |
Makarius Wenzel | b8a9ef5118 | |
Makarius Wenzel | a4e75c8b12 | |
Makarius Wenzel | d20e9ccd22 | |
Makarius Wenzel | f2ee5d3780 | |
Makarius Wenzel | 44cae2e631 | |
Makarius Wenzel | 7b2bf35353 | |
Makarius Wenzel | e8c7fa6018 | |
Makarius Wenzel | b12e61511d | |
Makarius Wenzel | 3cac42e6cb | |
Makarius Wenzel | aee8ba1df1 | |
Makarius Wenzel | d93e1383d4 | |
Makarius Wenzel | 3d5d1e7476 | |
Makarius Wenzel | 4264e7cd15 | |
Makarius Wenzel | 96f4077c53 | |
Makarius Wenzel | d7fb39d7eb | |
Makarius Wenzel | b95826962f | |
Makarius Wenzel | 912d4bb49e | |
Makarius Wenzel | a6c1a2baa4 | |
Makarius Wenzel | bb5963c6e2 | |
Makarius Wenzel | cc3e2a51a4 | |
Makarius Wenzel | 9e4e5b49eb | |
Makarius Wenzel | b65ecbdbef | |
Makarius Wenzel | 3be2225dcf | |
Makarius Wenzel | f44f0af01c | |
Makarius Wenzel | 9a11baf840 | |
Makarius Wenzel | 48c167aa23 | |
Makarius Wenzel | 700a9bbfee | |
Makarius Wenzel | 73299941ad | |
Makarius Wenzel | 5a8c438c41 | |
Makarius Wenzel | 7772c73aaa | |
Makarius Wenzel | ca18453043 | |
Makarius Wenzel | 1a122b1a87 | |
Makarius Wenzel | 47d95c467e | |
Makarius Wenzel | bf3085d4c0 | |
Makarius Wenzel | 068e6e0411 | |
Makarius Wenzel | 09e9980691 | |
Makarius Wenzel | 94ce3fdec2 | |
Makarius Wenzel | 44819bff02 | |
Makarius Wenzel | a6ab1e101e | |
Makarius Wenzel | c29ec9641a | |
Nicolas Méric | 06833aa190 | |
Nicolas Méric | 4f0c7e1e95 | |
Nicolas Méric | 0040949cf8 | |
Nicolas Méric | e68c332912 | |
Burkhart Wolff | b2c4f40161 | |
Burkhart Wolff | 309952e0ce | |
Burkhart Wolff | 830e1b440a | |
Burkhart Wolff | 2149db9efc | |
Burkhart Wolff | 1547ace64b | |
Burkhart Wolff | 39acd61dfd | |
Burkhart Wolff | 29770b17ee | |
Achim D. Brucker | b4f4048cff | |
Achim D. Brucker | eac94f2a01 | |
Achim D. Brucker | ab1877ce8e | |
Achim D. Brucker | fc575a5be5 | |
Achim D. Brucker | 4e47c38860 | |
Achim D. Brucker | 943af164f4 | |
Achim D. Brucker | 873151b4f3 | |
Achim D. Brucker | 82645c2e8e | |
Achim D. Brucker | f09a2df943 | |
Achim D. Brucker | cfdbd18bfa | |
Achim D. Brucker | 0b807ea4bc | |
Makarius Wenzel | 516f5d2f79 | |
Makarius Wenzel | 5ac41a72ac | |
Makarius Wenzel | 15feeb7d92 | |
Makarius Wenzel | 0c8a0e1d63 | |
Burkhart Wolff | 0aec98b95a | |
Burkhart Wolff | 43871ced48 | |
Burkhart Wolff | 0fa1048d6d | |
Burkhart Wolff | 33490f8f15 | |
Burkhart Wolff | 01632b5251 | |
Burkhart Wolff | 8a54831295 | |
Burkhart Wolff | 427226f593 | |
Achim D. Brucker | f14c0bebbb | |
Achim D. Brucker | 7f500dc257 | |
Burkhart Wolff | c05bb0bf4d | |
Burkhart Wolff | 66f78001eb | |
Burkhart Wolff | 5a06d3618b | |
Burkhart Wolff | e63ef4e189 | |
Burkhart Wolff | bba7d9d5c5 | |
Burkhart Wolff | 07a9c10001 | |
Burkhart Wolff | 5779c729a4 | |
Burkhart Wolff | 03f2836f5d | |
Burkhart Wolff | d2703b0dbd | |
Achim D. Brucker | 9f2e2b53a4 | |
Achim D. Brucker | 4caee16cb6 | |
Achim D. Brucker | 6ee7058d51 | |
Burkhart Wolff | 583636404f | |
Burkhart Wolff | 8a9684590a | |
Burkhart Wolff | 81c4ae2c13 | |
Achim D. Brucker | 2c1b56d277 | |
Achim D. Brucker | f40d33b9ed | |
Achim D. Brucker | 6a94728747 | |
Achim D. Brucker | 99facb109c | |
Achim D. Brucker | f6d97db0d3 | |
Achim D. Brucker | 4a6fa93644 | |
Achim D. Brucker | 6ca0b0fd21 | |
Achim D. Brucker | 65ae177fbc | |
Achim D. Brucker | b93ff8f65c | |
Achim D. Brucker | adf87dfde4 | |
Achim D. Brucker | df5d037942 | |
Achim D. Brucker | f2f48f2340 | |
Achim D. Brucker | 6839f63129 | |
Achim D. Brucker | 3febf83b3c | |
Achim D. Brucker | fb8dbfac49 | |
Achim D. Brucker | 45e4a11a74 | |
Burkhart Wolff | c8a3c58f7f | |
Achim D. Brucker | 1939ffeea4 | |
Achim D. Brucker | 74093dfaae | |
Achim D. Brucker | d2b6cb81aa | |
Achim D. Brucker | b24ede4400 | |
Achim D. Brucker | 205aa5a6b1 | |
Achim D. Brucker | c8f3bfc65d | |
Achim D. Brucker | 44f9317b35 | |
Achim D. Brucker | 6c2a0d6876 | |
Achim D. Brucker | 909dda1ea2 | |
Achim D. Brucker | 367d8f28ad | |
Achim D. Brucker | d3f41dca9e | |
Achim D. Brucker | ae3d35e363 | |
Achim D. Brucker | 41a6c22822 | |
Achim D. Brucker | 4ac7c84403 | |
Achim D. Brucker | 38f6516ad9 | |
Achim D. Brucker | 03b721f014 | |
Achim D. Brucker | c5752ba4a2 | |
Achim D. Brucker | 5721398340 | |
Achim D. Brucker | 6c0d325673 | |
Achim D. Brucker | b40069bedd | |
Achim D. Brucker | 70b2647e7c | |
Achim D. Brucker | c1efddf252 | |
Achim D. Brucker | 9ded308371 | |
Achim D. Brucker | f63d922096 | |
Achim D. Brucker | 11b309da02 | |
Achim D. Brucker | 1444f8f48b | |
Achim D. Brucker | e6ca682114 | |
Achim D. Brucker | 15fb6fdc2d | |
Achim D. Brucker | 9d5c71d4e1 | |
Burkhart Wolff | 013296f25e | |
Achim D. Brucker | d10b277c60 | |
Achim D. Brucker | 7c50ffb3af | |
Achim D. Brucker | 3a9826901a | |
Achim D. Brucker | a54373ad2f | |
Achim D. Brucker | aa7d0aec09 | |
Achim D. Brucker | 31778374ed | |
Achim D. Brucker | 0d55da68de | |
Achim D. Brucker | a973707a73 | |
Achim D. Brucker | b83f7a8abb | |
Achim D. Brucker | e138855623 | |
Achim D. Brucker | 5582644068 | |
Achim D. Brucker | 5278608b89 | |
Achim D. Brucker | 59658cea6f | |
Achim D. Brucker | ef674b5ae2 | |
Achim D. Brucker | ac8c939179 | |
Burkhart Wolff | c16ec333f1 | |
Burkhart Wolff | d1e4fd173b | |
Burkhart Wolff | 43c857af2c | |
Burkhart Wolff | 0cc010cecc | |
Burkhart Wolff | ba7bd6dc03 | |
Burkhart Wolff | 43b0a3049f | |
Nicolas Méric | 03fd491d5d | |
Nicolas Méric | 9673359688 | |
Nicolas Méric | 5d1b271336 | |
Nicolas Méric | 83c790d66a | |
Nicolas Méric | 9981c31966 | |
Nicolas Méric | 319b39905f | |
Nicolas Méric | c00c6ed31d | |
Nicolas Méric | ae3300ac2c | |
Achim D. Brucker | 61f167c29c | |
Achim D. Brucker | 2833deff90 | |
Achim D. Brucker | a8424979eb | |
Achim D. Brucker | f6f6f32b50 | |
Achim D. Brucker | 15e71fe189 | |
Achim D. Brucker | 45c23b4330 | |
Achim D. Brucker | 995feb6685 | |
Nicolas Méric | d8fde4b4f4 | |
Achim D. Brucker | 41e6c9ed02 | |
Achim D. Brucker | cbad96aba5 | |
Achim D. Brucker | 82c9a07c1a | |
Achim D. Brucker | ae8b91ac4e | |
Achim D. Brucker | 0f3f5d4b56 | |
Achim D. Brucker | fee83a2a29 | |
Achim D. Brucker | a0993b6eea | |
Achim D. Brucker | 64b4eca5ea | |
Achim D. Brucker | 2e4fb5d174 | |
Achim D. Brucker | 317c5a7759 | |
Achim D. Brucker | 12f1b230e6 | |
Achim D. Brucker | 530783c23b | |
Nicolas Méric | 1457c1cb85 | |
Nicolas Méric | e3caad804b | |
Nicolas Méric | 17df6a271b | |
Nicolas Méric | a331b80095 | |
Nicolas Méric | 74420a932f | |
Nicolas Méric | 8e1702d2da | |
Achim D. Brucker | 609f09e919 | |
Achim D. Brucker | 0f5e5bf6f6 | |
Achim D. Brucker | 5c886d49b4 | |
Nicolas Méric | b1f73e9235 |
|
@ -0,0 +1,2 @@
|
|||
core.autocrlf false
|
||||
core.eol lf
|
|
@ -15,3 +15,4 @@ It may also contain additional tools and script that are useful for preparing a
|
|||
* pdflatex
|
||||
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/)
|
||||
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)
|
||||
* [Isabelle_DOF-Unreleased_Isabelle2022.tar.xz](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/Isabelle_DOF-Unreleased_Isabelle2022.tar.xz)
|
||||
|
|
|
@ -1,20 +1,32 @@
|
|||
pipeline:
|
||||
build:
|
||||
image: docker.io/logicalhacking/isabelle2021-1
|
||||
image: docker.io/logicalhacking/isabelle2022
|
||||
commands:
|
||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||
- mkdir -p $ARTIFACT_DIR
|
||||
- export `isabelle getenv ISABELLE_HOME_USER`
|
||||
- mkdir -p $ISABELLE_HOME_USER/etc
|
||||
- echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
|
||||
- isabelle components -u `pwd`
|
||||
- isabelle build -D . -o browser_info
|
||||
- isabelle mkroot_DOF DOF_test
|
||||
- isabelle components -u .
|
||||
- isabelle build -D . -o browser_info
|
||||
- isabelle dof_mkroot -q DOF_test
|
||||
- isabelle build -D DOF_test
|
||||
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
|
||||
- cd $ARTIFACT_DIR
|
||||
- cd ../..
|
||||
- ln -s * latest
|
||||
archive:
|
||||
image: docker.io/logicalhacking/isabelle2022
|
||||
commands:
|
||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||
- mkdir -p $ARTIFACT_DIR
|
||||
- export ISABELLE_VERSION=`isabelle version`
|
||||
- ./.woodpecker/mk_release -d
|
||||
- cp Isabelle_DOF-Unreleased_$ISABELLE_VERSION.tar.xz $ARTIFACT_DIR/../
|
||||
when:
|
||||
matrix:
|
||||
LATEX: lualatex
|
||||
deploy:
|
||||
image: docker.io/drillster/drone-rsync
|
||||
settings:
|
||||
|
@ -35,9 +47,9 @@ pipeline:
|
|||
from_secret: email
|
||||
from: ci@logicalhacking.com
|
||||
when:
|
||||
status: [ changed, failure ]
|
||||
status: [ failure ]
|
||||
|
||||
matrix:
|
||||
LATEX:
|
||||
- pdflatex
|
||||
- lualatex
|
||||
- pdflatex
|
||||
|
|
|
@ -46,7 +46,9 @@ print_help()
|
|||
echo " --tag tag, -t tag use tag for release archive"
|
||||
echo " (default: use master branch)"
|
||||
echo " --publish, -p publish generated artefact"
|
||||
echo " (use master: $PUBLISH)"
|
||||
echo " (default: $PUBLISH)"
|
||||
echo " --quick-and-dirty, -d only build required artifacts, no complete test"
|
||||
echo " (default: $DIRTY)"
|
||||
}
|
||||
|
||||
check_isabelle_version() {
|
||||
|
@ -68,7 +70,7 @@ clone_repo()
|
|||
echo " * Switching to tag $TAG"
|
||||
(cd $ISADOF_WORK_DIR && git checkout $TAG)
|
||||
else
|
||||
echo " * Not tag specified, using master branch"
|
||||
echo " * No tag specified, using master branch"
|
||||
fi
|
||||
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/etc/settings
|
||||
|
||||
|
@ -78,9 +80,30 @@ build_and_install_manuals()
|
|||
{
|
||||
echo "* Building manual"
|
||||
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||
|
||||
if [ "$DIRTY" = "true" ]; then
|
||||
if [ -z ${ARTIFACT_DIR+x} ]; then
|
||||
echo " * Quick and Dirty Mode (local build)"
|
||||
$ISABELLE build -d . Isabelle_DOF-Manual 2018-cicm-isabelle_dof-applications
|
||||
mkdir -p $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||
cp examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||
mkdir -p $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/
|
||||
cp examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/;
|
||||
else
|
||||
echo " * Quick and Dirty Mode (running on CI)"
|
||||
mkdir -p $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||
cp $ARTIFACT_DIR/browser_info/Unsorted/2018-cicm-isabelle_dof-applications/document.pdf \
|
||||
$ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||
|
||||
mkdir -p $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/
|
||||
cp $ARTIFACT_DIR/browser_info/Unsorted/Isabelle_DOF-Manual/document.pdf \
|
||||
$ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/;
|
||||
fi
|
||||
else
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||
fi
|
||||
mkdir -p $ISADOF_WORK_DIR/doc
|
||||
echo "Isabelle/DOF Manuals!" > $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
|
@ -104,7 +127,6 @@ create_archive()
|
|||
(mv $ISADOF_WORK_DIR $ISADOF_DIR)
|
||||
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
|
||||
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
|
||||
rm -rf $BUILD_DIR
|
||||
}
|
||||
|
||||
sign_archive()
|
||||
|
@ -126,9 +148,8 @@ ISABELLE=`which isabelle`
|
|||
USE_TAG="false"
|
||||
SIGN="false"
|
||||
PUBLISH="false"
|
||||
DIRTY="false"
|
||||
BUILD_DIR=`mktemp -d`
|
||||
ISABELLE_HOME_USER=`mktemp -d`
|
||||
export ISABELLE_HOME_USER
|
||||
ISADOF_WORK_DIR="$BUILD_DIR/Isabelle_DOF"
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
|
@ -144,6 +165,8 @@ do
|
|||
SIGN="true";;
|
||||
--publish|-p)
|
||||
PUBLISH="true";;
|
||||
--quick-and-dirty|-d)
|
||||
DIRTY="true";;
|
||||
--help|-h)
|
||||
print_help
|
||||
exit 0;;
|
||||
|
@ -156,14 +179,24 @@ done
|
|||
|
||||
clone_repo
|
||||
|
||||
$ISABELLE components -x `pwd`
|
||||
$ISABELLE components -u $ISADOF_WORK_DIR
|
||||
ISADOF_MAIN_DIR=`pwd`
|
||||
|
||||
VARS=`$ISABELLE getenv ISABELLE_VERSION DOF_VERSION ISABELLE_HOME_USER`
|
||||
if [ "$DIRTY" = "true" ]; then
|
||||
echo "Running in Quick and Dirty mode!"
|
||||
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||
else
|
||||
$ISABELLE components -x $ISADOF_MAIN_DIR
|
||||
$ISABELLE components -u $ISADOF_WORK_DIR
|
||||
fi
|
||||
|
||||
VARS=`$ISABELLE getenv ISABELLE_TOOL`
|
||||
for i in $VARS; do
|
||||
export "$i"
|
||||
done
|
||||
|
||||
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
|
||||
DOF_VERSION="$($ISABELLE_TOOL dof_param -b dof_version)"
|
||||
|
||||
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
|
||||
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
|
||||
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
||||
|
@ -171,6 +204,11 @@ ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
|||
check_isabelle_version
|
||||
build_and_install_manuals
|
||||
|
||||
if [ "$DIRTY" != "true" ]; then
|
||||
$ISABELLE components -x $ISADOF_WORK_DIR
|
||||
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||
fi
|
||||
|
||||
create_archive
|
||||
|
||||
if [ "$SIGN" = "true" ]; then
|
||||
|
@ -181,10 +219,6 @@ if [ "$PUBLISH" = "true" ]; then
|
|||
publish_archive
|
||||
fi
|
||||
|
||||
$ISABELLE components -x $ISADOF_WORK_DIR
|
||||
$ISABELLE components -u `pwd`
|
||||
|
||||
rm -rf ISABELLE_HOME_USER
|
||||
rm -rf ISADOF_WORK_DIR
|
||||
rm -rf $BUILD_DIR
|
||||
|
||||
exit 0
|
||||
|
|
34
CHANGELOG.md
|
@ -5,23 +5,30 @@ All notable changes to this project will be documented in this file.
|
|||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## Unreleased
|
||||
## [Unreleased]
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
- removed explicit use of build script. Requires removing "build" script entry
|
||||
from ROOT files.
|
||||
- lipics support is now official, requires document option "document_comment_latex=true"
|
||||
in the ROOT file.
|
||||
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
||||
"isabelle components" command. The installation script is now only an convenience
|
||||
means for installing the required AFP entries.
|
||||
|
||||
## 1.2.0 - 2022-03-26
|
||||
- Updated Isabelle version to Isabelle 2022
|
||||
|
||||
## 1.1.0 - 2021-03-20
|
||||
## [1.3.0] - 2022-07-08
|
||||
|
||||
### Changed
|
||||
|
||||
- The project-specific configuration is not part of the `ROOT` file, the formerly
|
||||
used `isadof.cfg` is obsolete and no longer supported.
|
||||
- Removed explicit use of `document/build` script. Requires removing the `build` script
|
||||
entry from ROOT files.
|
||||
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
||||
`isabelle components` command. The installation script is now only a convenient way
|
||||
of installing the required AFP entries.
|
||||
- `mkroot_DOF` has been renamed to `dof_mkroot` (and reimplemented in Scala).
|
||||
|
||||
## [1.2.0] - 2022-03-26
|
||||
|
||||
## [1.1.0] - 2021-03-20
|
||||
|
||||
### Added
|
||||
|
||||
|
@ -38,4 +45,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
|
||||
- First public release
|
||||
|
||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...HEAD
|
||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.3.0/Isabelle2021...HEAD
|
||||
[1.3.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...v1.3.0/Isabelle2021-1
|
||||
[1.2.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.1.0/Isabelle2021...v1.2.0/Isabelle2021
|
||||
[1.1.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.0.0/Isabelle2019...v1.1.0/Isabelle2021
|
||||
|
|
|
@ -1,40 +1,35 @@
|
|||
# [Isabelle/DOF](https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF): Document Preparation Setup
|
||||
|
||||
Isabelle/DOF is a novel Document Ontology Framework on top of Isabelle.
|
||||
Isabelle/DOF allows for both conventional typesetting as well as formal
|
||||
development. The manual for [Isabelle/DOF 1.2.0/Isabelle2021 is available
|
||||
online.](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.pdf)
|
||||
Isabelle/DOF allows for both conventional typesetting and formal development.
|
||||
The manual for [Isabelle/DOF 1.3.0/Isabelle2021-1 is available
|
||||
online.](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.pdf)
|
||||
|
||||
## Pre-requisites
|
||||
|
||||
Isabelle/DOF has three major pre-requisites:
|
||||
Isabelle/DOF has three major prerequisites:
|
||||
|
||||
* **Isabelle:** Isabelle/DOF requires [Isabelle 2021-1](http://isabelle.in.tum.de/website-Isabelle2021-1/).
|
||||
Please download the Isabelle 2021-1 distribution for your operating
|
||||
system from the [Isabelle
|
||||
website](http://isabelle.in.tum.de/website-Isabelle2021-1/).
|
||||
* **AFP:** Isabelle/DOF requires two entries from the [Archive of
|
||||
Formal Proofs (AFP)](https://www.isa-afp.org/). Please install the
|
||||
AFP following the instructions given at
|
||||
<https://www.isa-afp.org/using.html>. For your convenience, we also
|
||||
provide a script that only installs the two entries required by
|
||||
Isabelle/DOF into the local Isabelle/DOF directory. You can use this
|
||||
script as follows:
|
||||
```console
|
||||
foo@bar:~$ isabelle env ./install-afp
|
||||
```
|
||||
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
|
||||
[TeX Live 2022](https://www.tug.org/texlive/) with all available updates applied.
|
||||
* **Isabelle:** Isabelle/DOF requires [Isabelle
|
||||
2022](https://isabelle.in.tum.de/website-Isabelle2022/). Please download the
|
||||
Isabelle 2022 distribution for your operating system from the [Isabelle
|
||||
website](https://isabelle.in.tum.de/website-Isabelle2022/).
|
||||
* **AFP:** Isabelle/DOF requires several entries from the [Archive of Formal Proofs
|
||||
(AFP)](https://www.isa-afp.org/). Please install the AFP following the
|
||||
instructions given at <https://www.isa-afp.org/using.html>.
|
||||
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
|
||||
[TeX Live 2022](https://www.tug.org/texlive/) with all available updates
|
||||
applied.
|
||||
|
||||
## Installation
|
||||
|
||||
Isabelle/DOF is provided as a Isabelle component. After installing the
|
||||
pre-requisites the Isabelle/Archive needs to be unpacked and
|
||||
registered. Change into the directory you unpacked Isabelle/DOF (this
|
||||
should be the directory containing this `README.md` file) and execute
|
||||
Isabelle/DOF is provided as an Isabelle component. After installing the
|
||||
prerequisites, change into the directory containing Isabelle/DOF (this should be
|
||||
the directory containing this `README.md` file) and execute (if you executed
|
||||
this command already during the installation of the prerequisites, you can skip
|
||||
it now):
|
||||
|
||||
```console
|
||||
foo@bar:~$ isabelle components -u `pwd`
|
||||
foo@bar:~$ isabelle components -u .
|
||||
```
|
||||
|
||||
The final step for the installation is:
|
||||
|
@ -49,25 +44,24 @@ This will compile Isabelle/DOF and run the example suite.
|
|||
|
||||
### Opening an Example
|
||||
|
||||
If you want to work with or extend one of the examples, e.g., you can
|
||||
open it similar to any standard Isabelle theory:
|
||||
If you want to work with or extend one of the examples, e.g., you can open it
|
||||
similar to any standard Isabelle theory:
|
||||
|
||||
```console
|
||||
isabelle jedit -d . -l Isabelle_DOF examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
|
||||
```
|
||||
|
||||
This will open an example of a scientific paper using the pre-compiled
|
||||
session ``Isabelle_DOF``, i.e., you will not be able to edit the
|
||||
ontology definitions. If you want to edit the ontology definition,
|
||||
just open the theory file with the default HOL session:
|
||||
This will open an example of a scientific paper using the pre-compiled session
|
||||
``Isabelle_DOF``, i.e., you will not be able to edit the ontology definitions.
|
||||
If you want to edit the ontology definition, just open the theory file with the
|
||||
default HOL session:
|
||||
|
||||
```console
|
||||
isabelle jedit -d . -l HOL examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
|
||||
```
|
||||
|
||||
While this gives you more flexibility, it might "clutter" your editing
|
||||
experience, as a lot of internal theories are loaded into Isabelle's
|
||||
editor.
|
||||
experience, as a lot of internal theories are loaded into Isabelle's editor.
|
||||
|
||||
### Creating a New Project
|
||||
|
||||
|
@ -75,55 +69,55 @@ The DOF-plugin provides an alternative to Isabelle's ``mkroot`` command.
|
|||
Isabelle projects that use DOF need to be created using
|
||||
|
||||
```console
|
||||
foo@bar:~$ isabelle mkroot_DOF
|
||||
foo@bar:~$ isabelle dof_mkroot
|
||||
```
|
||||
|
||||
The ``mkroot_DOF`` command takes the same parameter as the standard
|
||||
``mkroot`` command of Isabelle. Thereafter, the normal Isabelle
|
||||
command for building documents can be used.
|
||||
The ``dof_mkroot`` command takes the same parameter as the standard ``mkroot``
|
||||
command of Isabelle. Thereafter, the normal Isabelle command for building
|
||||
documents can be used.
|
||||
|
||||
Using the ``-o`` option, different ontology setups can be
|
||||
selected and using the ``-t`` option, different LaTeX setups
|
||||
can be selected. For example,
|
||||
Using the ``-o`` option, different ontology setups can be selected and using the
|
||||
``-t`` option, different LaTeX setups can be selected. For example,
|
||||
|
||||
```console
|
||||
foo@bar:~$ isabelle mkroot_DOF -o scholarly_paper -t scrartcl
|
||||
foo@bar:~$ isabelle dof_mkroot -o scholarly_paper -t scrartcl
|
||||
```
|
||||
|
||||
creates a setup using the scholarly_paper ontology and the article
|
||||
class from the KOMA-Script bundle.
|
||||
creates a setup using the scholarly_paper ontology and the article class from
|
||||
the KOMA-Script bundle.
|
||||
|
||||
The help (option ``-h``) show a list of all supported ontologies and
|
||||
document templates:
|
||||
The help (option ``-h``) show a list of all supported ontologies and document
|
||||
templates:
|
||||
|
||||
```console
|
||||
foo@bar:~$ isabelle mkroot_DOF -h
|
||||
foo@bar:~$ isabelle dof_mkroot -h
|
||||
|
||||
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
|
||||
Usage: isabelle dof_mkroot [OPTIONS] [DIRECTORY]
|
||||
|
||||
Options are:
|
||||
-h print this help text and exit
|
||||
-n NAME alternative session name (default: DIR base name)
|
||||
-o ONTOLOGY (default: scholarly_paper)
|
||||
Available ontologies:
|
||||
* cenelec_50128
|
||||
* mathex
|
||||
* scholarly_paper
|
||||
-t TEMPLATE (default: scrartcl)
|
||||
Available document templates:
|
||||
* lncs
|
||||
* scrartcl
|
||||
* scrreprt
|
||||
* scrreprt-modern
|
||||
-I init Mercurial repository and add generated files
|
||||
-h print help
|
||||
-n NAME alternative session name (default: directory base name)
|
||||
-o NAMES list of ontologies, separated by blanks
|
||||
(default: "technical_report scholarly_paper")
|
||||
-q quiet mode: less verbosity
|
||||
-t NAME template (default: "scrreprt-modern")
|
||||
|
||||
Prepare session root DIR (default: current directory).
|
||||
Create session root directory for Isabelle/DOF (default: current directory).
|
||||
```
|
||||
|
||||
## Releases
|
||||
|
||||
For releases, signed archives including a PDF version of the Isabelle/DOF manual are
|
||||
For releases, signed archives including a PDF version of the Isabelle/DOF manual
|
||||
are available:
|
||||
|
||||
* Isabelle/DOF 1.3.0/Isabelle2021-1
|
||||
* [Isabelle_DOF-1.3.0_Isabelle2021-1.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.pdf)
|
||||
* [Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz)
|
||||
* [Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz.asc)
|
||||
|
||||
### Older Releases
|
||||
|
||||
* Isabelle/DOF 1.2.0/Isabelle2021
|
||||
* [Isabelle_DOF-1.2.0_Isabelle2021.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.pdf)
|
||||
* [Isabelle_DOF-1.2.0_Isabelle2021.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.tar.xz)
|
||||
|
@ -153,7 +147,7 @@ Main contacts:
|
|||
* Idir Ait-Sadoune
|
||||
* Paolo Crisafulli
|
||||
* Chantal Keller
|
||||
* Nicolas Méric
|
||||
* Nicolas Méric
|
||||
|
||||
## License
|
||||
|
||||
|
@ -163,28 +157,32 @@ SPDX-License-Identifier: BSD-2-Clause
|
|||
|
||||
## Publications
|
||||
|
||||
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart
|
||||
Wolff. [Using The Isabelle Ontology Framework: Linking the Formal
|
||||
with the Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
|
||||
In Conference on Intelligent Computer Mathematics (CICM). Lecture
|
||||
Notes in Computer Science (11006), Springer-Verlag, 2018.
|
||||
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart Wolff.
|
||||
[Using The Isabelle Ontology Framework: Linking the Formal with the
|
||||
Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
|
||||
In Conference on Intelligent Computer Mathematics (CICM). Lecture Notes in
|
||||
Computer Science (11006), Springer-Verlag, 2018.
|
||||
[doi:10.1007/978-3-319-96812-4_3](https://doi.org/10.1007/978-3-319-96812-4_3).
|
||||
|
||||
* Achim D. Brucker and Burkhart Wolff. [Isabelle/DOF: Design and
|
||||
Implementation](https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf).
|
||||
In Software Engineering and Formal Methods (SEFM). Lecture Notes in
|
||||
Computer Science (11724), Springer-Verlag, 2019.
|
||||
In Software Engineering and Formal Methods (SEFM). Lecture Notes in Computer
|
||||
Science (11724), Springer-Verlag, 2019.
|
||||
[doi:10.1007/978-3-030-30446-1_15](https://doi.org/10.1007/978-3-030-30446-1_15).
|
||||
|
||||
* Achim D. Brucker, Burkhart Wolff. [Using Ontologies in Formal Developments Targeting Certification](https://www.brucker.ch/bibliography/download/2019/brucker.ea-ontologies-certification-2019.pdf). In
|
||||
Integrated Formal Methods (IFM). Lecture Notes in Computer Science (11918). Springer-Verlag 2019.
|
||||
* Achim D. Brucker, Burkhart Wolff. [Using Ontologies in Formal Developments
|
||||
Targeting
|
||||
Certification](https://www.brucker.ch/bibliography/download/2019/brucker.ea-ontologies-certification-2019.pdf).
|
||||
In Integrated Formal Methods (IFM). Lecture Notes in Computer Science (11918).
|
||||
Springer-Verlag 2019.
|
||||
[doi:10.1007/978-3-030-34968-4_4](http://dx.doi.org/10.1007/978-3-030-34968-4_4)
|
||||
|
||||
* Sergio Bezzecchi, Paolo Crisafulli, Charlotte Pichot, and Burkhart Wolff. [Making Agile Development
|
||||
Processes fit for V-style Certification Procedures.](https://hal.archives-ouvertes.fr/hal-01702815/document)
|
||||
In ERTS 2018. <https://hal.archives-ouvertes.fr/hal-01702815>
|
||||
* Sergio Bezzecchi, Paolo Crisafulli, Charlotte Pichot, and Burkhart Wolff.
|
||||
[Making Agile Development Processes fit for V-style Certification
|
||||
Procedures.](https://hal.archives-ouvertes.fr/hal-01702815/document). In ERTS
|
||||
2018. <https://hal.archives-ouvertes.fr/hal-01702815>
|
||||
|
||||
## Upstream Repository
|
||||
|
||||
The upstream git repository, i.e., the single source of truth, for this project is hosted
|
||||
at <https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.
|
||||
The upstream git repository, i.e., the single source of truth, for this project
|
||||
is hosted at <https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.
|
||||
|
|
|
@ -4,9 +4,10 @@ no_build = false
|
|||
requirements = \
|
||||
env:ISABELLE_SCALA_JAR
|
||||
sources = \
|
||||
src/scala/dof.scala \
|
||||
src/scala/dof_document_build.scala \
|
||||
src/scala/dof_mkroot.scala \
|
||||
src/scala/dof_tools.scala
|
||||
services = \
|
||||
isabelle_dof.DOF_Tools \
|
||||
isabelle_dof.DOF_Document_Build$Engine
|
||||
isabelle.dof.DOF_Tools \
|
||||
isabelle.dof.DOF_Document_Build$Engine
|
||||
|
|
12
etc/options
|
@ -1,12 +0,0 @@
|
|||
(* :mode=isabelle-options: *)
|
||||
|
||||
section "Isabelle/DOF"
|
||||
|
||||
public option dof_url : string = "https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
|
||||
-- "Isabelle/DOF source repository"
|
||||
|
||||
option dof_artifact_dir : string = "releases/Isabelle_DOF/Isabelle_DOF"
|
||||
-- "Isabelle/DOF release artifacts"
|
||||
|
||||
option dof_artifact_host : string = "artifacts.logicalhacking.com"
|
||||
|
23
etc/settings
|
@ -1,25 +1,4 @@
|
|||
# -*- shell-script -*- :mode=shellscript:
|
||||
|
||||
ISABELLE_DOF_HOME="$COMPONENT"
|
||||
ISABELLE_TOOLS="$ISABELLE_TOOLS":"$ISABELLE_DOF_HOME/src/Tools"
|
||||
ISABELLE_DOCS="$ISABELLE_DOF_HOME/doc":"$ISABELLE_DOCS"
|
||||
|
||||
# Isabelle/DOF Version Information
|
||||
DOF_VERSION="Unreleased" # "Unreleased" for development, semantic version for releases
|
||||
DOF_DATE="00/00/00"
|
||||
DOF_LATEST_VERSION="1.2.0"
|
||||
DOF_LATEST_ISABELLE="Isabelle2021"
|
||||
DOF_LATEST_DOI="10.5281/zenodo.6385695"
|
||||
DOF_GENERIC_DOI="10.5281/zenodo.3370482"
|
||||
#
|
||||
# Isabelle and AFP Configuration
|
||||
ISABELLE_VERSION="Isabelle2021-1"
|
||||
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2021-1/"
|
||||
AFP_DATE="afp-2021-12-28"
|
||||
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
|
||||
#
|
||||
# Isabelle/DOF Repository Configuration
|
||||
DOF_URL="https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
|
||||
DOF_ARTIFACT_DIR="releases/Isabelle_DOF/Isabelle_DOF"
|
||||
DOF_ARTIFACT_HOST="artifacts.logicalhacking.com"
|
||||
#
|
||||
ISABELLE_DOCS="$ISABELLE_DOF_HOME/doc:$ISABELLE_DOCS"
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
session "mini_odo" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
sessions
|
||||
"Physical_Quantities"
|
||||
theories
|
||||
"mini_odo"
|
||||
document_files
|
||||
"isadof.cfg"
|
||||
"preamble.tex"
|
||||
"root.bib"
|
||||
"root.mst"
|
||||
|
@ -12,3 +13,4 @@ session "mini_odo" = "Isabelle_DOF" +
|
|||
"figures/odometer.jpeg"
|
||||
"figures/three-phase-odo.pdf"
|
||||
"figures/wheel-df.png"
|
||||
|
||||
|
|
0
examples/CENELEC_50128/mini_odo/document/figures/df-numerics-encshaft.png
Executable file → Normal file
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 27 KiB |
Before Width: | Height: | Size: 407 KiB After Width: | Height: | Size: 407 KiB |
0
examples/CENELEC_50128/mini_odo/document/figures/three-phase-odo.pdf
Executable file → Normal file
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
|
@ -1,3 +0,0 @@
|
|||
Template: scrreprt-modern
|
||||
Ontology: technical_report
|
||||
Ontology: cenelec_50128
|
|
@ -14,11 +14,16 @@
|
|||
(*<*)
|
||||
theory
|
||||
mini_odo
|
||||
imports
|
||||
imports
|
||||
"Isabelle_DOF.CENELEC_50128"
|
||||
"Isabelle_DOF.technical_report"
|
||||
"Physical_Quantities.SI" "Physical_Quantities.SI_Pretty"
|
||||
begin
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology technical_report and CENELEC_50128
|
||||
declare[[strict_monitor_checking=true]]
|
||||
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
|
||||
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
|
||||
(*>*)
|
||||
|
||||
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
|
||||
|
@ -27,14 +32,34 @@ subtitle*[subtitle::subtitle]\<open>Case Study: An Odometer-Subsystem\<close>
|
|||
chapter*[casestudy::technical]\<open>An Odometer-Subsystem\<close>
|
||||
text\<open>
|
||||
In our case study, we will follow the phases of analysis, design, and implementation of the
|
||||
odometry function of a train. This software processes data from an odometer to compute the position,
|
||||
speed, and acceleration of a train. This system provides the basis for many
|
||||
safety critical decisions, \eg, the opening of the doors. Due to its relatively small size, it
|
||||
odometry function of a train. This \<^cenelec_term>\<open>SF\<close> processes data from an odometer to compute
|
||||
the position, speed, and acceleration of a train. This system provides the basis for many
|
||||
safety critical decisions, \<^eg>, the opening of the doors. Due to its relatively small size, it
|
||||
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
|
||||
physical model of the environment, the physical and architectural model of the odometer including
|
||||
the problem of numerical sampling, and the boundaries of efficient computations. The interplay
|
||||
between environment and measuring-device as well as the implementation problems on a platform
|
||||
with limited resources makes the odometer a fairly typical safety critical embedded system.
|
||||
physical model of the environment, the physical and architectural model of the odometer,
|
||||
but also the \<^cenelec_term>\<open>SFRS\<close> aspects including the problem of numerical sampling and the
|
||||
boundaries of efficient computations. The interplay between environment and measuring-device as
|
||||
well as the implementation problems on a platform with limited resources makes the odometer a
|
||||
fairly typical \<^cenelec_term>\<open>safety\<close> critical \<^cenelec_term>\<open>component\<close> of an embedded system.
|
||||
|
||||
The case-study is presented in form of an \<^emph>\<open>integrated source\<close> in \<^isadof> containing all four
|
||||
reports from the phases:
|
||||
\<^item> \<^term>\<open>software_requirements\<close> with deliverable \<^doc_class>\<open>SWRS\<close>
|
||||
(or long:\<^typ>\<open>software_requirements_specification\<close>(-report))
|
||||
\<^item> \<^term>\<open>software_architecture_and_design\<close> with deliverable \<^doc_class>\<open>SWDS\<close>
|
||||
(or long: \<^typ>\<open>software_design_specification\<close>(-report))
|
||||
\<^item> \<^term>\<open>software_component_design\<close> with deliverable \<^doc_class>\<open>SWCDVR\<close>
|
||||
(or long: \<^typ>\<open>software_component_design_verification\<close>(-report).)
|
||||
\<^item> \<^term>\<open>component_implementation_and_testing\<close> with deliverable \<^doc_class>\<open>SWADVR\<close>
|
||||
(or long: \<^typ>\<open>software_architecture_and_design_verification\<close>(-report))
|
||||
|
||||
The objective of this case study is to demonstrate deep-semantical ontologoies in
|
||||
software developments targeting certifications, and in particular, how \<^isadof>'s
|
||||
integrated source concept permits to assure \<^cenelec_term>\<open>traceability\<close>.
|
||||
|
||||
\<^bold>\<open>NOTE\<close> that this case study has aspects that were actually covered by CENELEC 50126 -
|
||||
the 'systems'-counterpart covering hardware aspects. Recall that the CENELEC 50128 covers
|
||||
software.
|
||||
|
||||
Due to space reasons, we will focus on the analysis part of the integrated
|
||||
document; the design and code parts will only be outlined in a final resume. The
|
||||
|
@ -45,7 +70,8 @@ text\<open>
|
|||
development.
|
||||
\<close>
|
||||
|
||||
section\<open>System Requirements as an \<^emph>\<open>Integrated Source\<close>\<close>
|
||||
section\<open>A CENELEC-conform development as an \<^emph>\<open>Integrated Source\<close>\<close>
|
||||
|
||||
text\<open>Accurate information of a train's location along a track is in an important prerequisite
|
||||
to safe railway operation. Position, speed and acceleration measurement usually lies on a
|
||||
set of independent measurements based on different physical principles---as a way to enhance
|
||||
|
@ -74,17 +100,23 @@ text\<open>
|
|||
|
||||
This model is already based on several fundamental assumptions relevant for the correct
|
||||
functioning of the system and for its integration into the system as a whole. In
|
||||
particular, we need to make the following assumptions explicit:\vspace{-.3cm}
|
||||
\<^item> that the wheel is perfectly circular with a given, constant radius,
|
||||
\<^item> that the slip between the trains wheel and the track negligible,
|
||||
\<^item> the distance between all teeth of a wheel is the same and constant, and
|
||||
\<^item> the sampling rate of positions is a given constant.
|
||||
particular, we need to make the following assumptions explicit: \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
|
||||
text*["perfect-wheel"::assumption]
|
||||
\<open>\<^item> the wheel is perfectly circular with a given, constant radius. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["no-slip"::assumption]
|
||||
\<open>\<^item> the slip between the trains wheel and the track negligible. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["constant-teeth-dist"::assumption]
|
||||
\<open>\<^item> the distance between all teeth of a wheel is the same and constant, and \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["constant-sampling-rate"::assumption]
|
||||
\<open>\<^item> the sampling rate of positions is a given constant.\<close>
|
||||
|
||||
text\<open>
|
||||
These assumptions have to be traced throughout the certification process as
|
||||
|
||||
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
|
||||
also reflected by their tracing throughout the body of certification documents. This may result
|
||||
in operational regulations, \eg, regular checks for tolerable wheel defects. As for the
|
||||
in operational regulations, \<^eg>, regular checks for tolerable wheel defects. As for the
|
||||
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
|
||||
slip can be neglected: the device can only produce reliable results under certain physical
|
||||
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
|
||||
|
@ -93,57 +125,53 @@ text\<open>
|
|||
\<close>
|
||||
|
||||
subsection\<open>Capturing ``System Architecture.''\<close>
|
||||
|
||||
figure*["three-phase"::figure,relative_width="70",src="''figures/three-phase-odo''"]
|
||||
\<open>An odometer with three sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.\<close>
|
||||
|
||||
text\<open>
|
||||
\begin{figure}
|
||||
\centering
|
||||
\includegraphics[width=.70\textwidth]{figures/three-phase-odo}
|
||||
\begin{picture}(0,0)
|
||||
\put(-112,44){\includegraphics[width=.30\textwidth]{figures/odometer}}
|
||||
\end{picture}
|
||||
\caption{An odometer with three sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3}.}\label{three-phase}
|
||||
\end{figure}
|
||||
The requirements analysis also contains a sub-document \<^emph>\<open>system architecture description\<close>
|
||||
(CENELEC notion) that contains technical drawing of the odometer, a timing diagrams
|
||||
(see \autoref{three-phase}), and tables describing the encoding of the position
|
||||
for the possible signal transitions of the sensors \inlineisar{C1}, \inlineisar{C2}, and $C3.$
|
||||
The requirements analysis also contains a document \<^doc_class>\<open>SYSAD\<close>
|
||||
(\<^typ>\<open>system_architecture_description\<close>) that contains technical drawing of the odometer,
|
||||
a timing diagram (see \<^figure>\<open>three-phase\<close>), and tables describing the encoding of the position
|
||||
for the possible signal transitions of the sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing ``System Interfaces.''\<close>
|
||||
text\<open>
|
||||
The requirements analysis also contains a sub-document \<^emph>\<open>functions and interfaces\<close>
|
||||
(CENELEC notion) describing the technical format of the output of the odometry function.
|
||||
This section, \eg, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
|
||||
``Estimation of the speed (in mm/sec) evaluated over the latest $N_{\text{avg}}$ samples''
|
||||
where the speed refers to the physical speed of the train and $N_{\text{avg}}$ a parameter of the
|
||||
The requirements analysis also contains a sub-document \<^doc_class>\<open>FnI\<close> (\<^typ>\<open>functions_and_interfaces\<close>)
|
||||
describing the technical format of the output of the odometry function.
|
||||
This section, \<^eg>, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
|
||||
``Estimation of the speed (in mm/sec) evaluated over the latest \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> samples''
|
||||
where the speed refers to the physical speed of the train and \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> a parameter of the
|
||||
sub-system configuration. \<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["df-numerics-encshaft"::figure]
|
||||
declare_reference*["df-numerics-encshaft"::figure]
|
||||
(*>*)
|
||||
subsection\<open>Capturing ``Required Performances.''\<close>
|
||||
text\<open>
|
||||
The given analysis document is relatively implicit on the expected precision of the measurements;
|
||||
however, certain interface parameters like \inlineisar*Odometric_Position_TimeStamp*
|
||||
(a counter on the number of samplings) and \inlineisar*Relative_Position* are defined by as
|
||||
however, certain interface parameters like \<open>Odometric_Position_TimeStamp\<close>
|
||||
(a counter on the number of samplings) and \<open>Relative_Position\<close> are defined by as
|
||||
unsigned 32 bit integer. These definitions imply that exported constraints concerning the acceptable
|
||||
time of service as well the maximum distance before a necessary reboot of the subsystem.
|
||||
For our case-study, we assume maximum deviation of the \inlineisar*Relative_Position* to the
|
||||
For our case-study, we assume maximum deviation of the \<open>Relative_Position\<close> to the
|
||||
theoretical distance.
|
||||
|
||||
The requirement analysis document describes the physical environment, the architecture
|
||||
of the measuring device, and the required format and precision of the measurements of the odometry
|
||||
function as represented (see @{figure (unchecked) "df-numerics-encshaft"}).\<close>
|
||||
|
||||
figure*["df-numerics-encshaft"::figure,relative_width="76",src="''figures/df-numerics-encshaft''"]
|
||||
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
|
||||
|
||||
|
||||
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
|
||||
text\<open>
|
||||
\enlargethispage{\baselineskip}
|
||||
The design provides a function that manages an internal first-in-first-out buffer of
|
||||
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
|
||||
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
|
||||
recovery, another one, fatal, signaling, \eg, a defect of the receiver hardware),
|
||||
recovery, another one, fatal, signaling, \<^eg>, a defect of the receiver hardware),
|
||||
calculating the relative position, speed and acceleration.
|
||||
\<close>
|
||||
|
||||
|
@ -161,89 +189,123 @@ text\<open>
|
|||
in AutoCorres.
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
definition wheel_diameter ::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s ::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||
(*>*)
|
||||
|
||||
|
||||
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
|
||||
text\<open>
|
||||
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
|
||||
documents as well as code into an integrated Isabelle document, we entered into the phase of
|
||||
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
|
||||
the definitions:
|
||||
\begin{isar}
|
||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
definition wheel_diameter::real ("w$_d$") where "w$_d$ \<equiv> SOME x. x > 0"
|
||||
definition wheel_circumference::real ("w$_{\text{circ}}$") where "w$_{\text{circ}}$ \<equiv> pi * w$_d$"
|
||||
definition \<delta>s$_{\text{res}}$::real where "\<delta>s$_{\text{res}}$ \<equiv> w$_{\text{circ}}$ / (2 * 3 * tpw)"
|
||||
\end{isar}
|
||||
Here, \inlineisar{real} refers to the real numbers as defined in the HOL-Analysis
|
||||
library, which provides concepts such as Cauchy Sequences, limits,
|
||||
differentiability, and a very substantial part of classical Calculus. \inlineisar{SOME} is the
|
||||
Hilbert choice operator from HOL; the definitions of the model parameters admit all possible positive values as uninterpreted
|
||||
constants. Our perfect-wheel assumption is translated into a calculation of the circumference of the
|
||||
wheel, while \inlineisar{\<delta>s<bsub>res<esub>}, the resolution of the odometer, can be calculated
|
||||
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
|
||||
the definitions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
definition wheel_diameter::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||
\<close>}
|
||||
|
||||
Here, \<open>real\<close> refers to the real numbers as defined in the HOL-Analysis library, which provides
|
||||
concepts such as Cauchy Sequences, limits, differentiability, and a very substantial part of
|
||||
classical Calculus. \<open>SOME\<close> is the Hilbert choice operator from HOL; the definitions of the
|
||||
model parameters admit all possible positive values as uninterpreted constants. Our
|
||||
\<^assumption>\<open>perfect-wheel\<close> is translated into a calculation of the circumference of the
|
||||
wheel, while \<open>\<delta>s\<^sub>r\<^sub>e\<^sub>s\<close>, the resolution of the odometer, can be calculated
|
||||
from the these parameters. HOL-Analysis permits to formalize the fundamental physical observables:
|
||||
\begin{isar}
|
||||
type_synonym distance_function = "real\<Rightarrow>real"
|
||||
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
|
||||
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real"
|
||||
where "Accel f \<equiv> deriv (deriv f)"
|
||||
\end{isar}
|
||||
which permits to constrain the central observable \inlineisar|distance_function| in a
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
type_synonym distance_function = "real[s] \<Rightarrow> real[m]"
|
||||
consts Speed::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
consts Accel::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>2]"
|
||||
consts Speed\<^sub>M\<^sub>a\<^sub>x::"real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
|
||||
(* Non - SI conform common abrbreviations *)
|
||||
definition "kmh \<equiv> kilo *\<^sub>Q metre \<^bold>/ hour :: 'a::{field,ring_char_0}[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
definition "kHz \<equiv> kilo *\<^sub>Q hertz :: 'a::{field,ring_char_0}[s\<^sup>-\<^sup>1]"
|
||||
|
||||
(*>*)
|
||||
text\<open>
|
||||
@{theory_text [display]\<open>
|
||||
type_synonym distance_function = "real[s]\<Rightarrow>real[m]"
|
||||
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
|
||||
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Accel f \<equiv> deriv (deriv f)"
|
||||
\<close>}
|
||||
|
||||
which permits to constrain the central observable \<open>distance_function\<close> in a
|
||||
way that they describe the space of ``normal behavior'' where we expect the odometer to produce
|
||||
reliable measurements over a \inlineisar|distance_function df|.
|
||||
reliable measurements over a \<open>distance_function df\<close> .
|
||||
|
||||
The essence of the physics of the train is covered by the following definition:
|
||||
\begin{isar}
|
||||
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
|
||||
where normally_behaved_distance_function df =
|
||||
( \<forall> t. df(t) \<in> \<real>$_{\ge 0}$ \<and> (\<forall> t \<in> \<real>$_{\le 0}$. df(t) = 0)
|
||||
\<and> df differentiable on$_{\text{R}}$ \<and> (Speed df)differentiable on$_{\text{R}}$
|
||||
\<and> (Accel df)differentiable on$_{\ensuremath{R}}$
|
||||
\<and> (\<forall> t. (Speed df) t \<in> {-Speed$_{\text{Max}}$ .. Speed$_{\text{Max}}$})
|
||||
\<and> (\<forall> t. (Accel df) t \<in> {-\<bar>Accel$_{\text{Max}}$\<bar> .. \<bar>Accel$_{\text{Max}}$\<bar>}))
|
||||
\end{isar}
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
|
||||
where normally_behaved_distance_function df =
|
||||
( \<forall> t. df(t) \<in> \<real>\<^sub>\<ge>\<^sub>0 \<and> (\<forall> t \<in> \<real>\<real>\<^sub>\<ge>\<^sub>0. df(t) = 0)
|
||||
\<and> df differentiable on \<real>\<^sub>\<ge>\<^sub>0 \<and> (Speed df)differentiable on \<real>\<^sub>\<ge>\<^sub>0$
|
||||
\<and> (Accel df)differentiable on \<real>\<^sub>\<ge>\<^sub>0
|
||||
\<and> (\<forall> t. (Speed df) t \<in> {Speed\<^sub>M\<^sub>i\<^sub>n .. Speed\<^sub>M\<^sub>a\<^sub>x})
|
||||
\<and> (\<forall> t. (Accel df) t \<in> {Accel\<^sub>M\<^sub>i\<^sub>n .. Accel\<^sub>M\<^sub>a\<^sub>x}))
|
||||
\<close>}
|
||||
|
||||
which constrains the distance functions in the bounds described of the informal descriptions and
|
||||
states them as three-fold differentiable function in certain bounds concerning speed and acceleration.
|
||||
Note that violations, in particular of the constraints on speed and acceleration, \<^emph>\<open>do\<close> occur in practice.
|
||||
In such cases, the global system adapts recovery strategies that are out of the scope of our model.
|
||||
Concepts like \inlineisar+shaft_encoder_state+ (a triple with the sensor values
|
||||
\inlineisar{C1}, \inlineisar{C2}, \inlineisar{C3}) were formalized as types, while tables were
|
||||
states them as three-fold differentiable function in certain bounds concerning speed and
|
||||
acceleration. Note that violations, in particular of the constraints on speed and acceleration,
|
||||
\<^emph>\<open>do\<close> occur in practice. In such cases, the global system adapts recovery strategies that are out
|
||||
of the scope of our model. Concepts like \<open>shaft_encoder_state\<close> (a triple with the sensor values
|
||||
\<open>C1\<close>, \<open>C2\<close>, \<open>C3\<close>) were formalized as types, while tables were
|
||||
defined as recursive functions:
|
||||
\enlargethispage{2\baselineskip}\begin{isar}
|
||||
fun phase$_0$ :: "nat \<Rightarrow> shaft_encoder_state" where
|
||||
"phase$_0$ (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|
||||
|"phase$_0$ (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|
||||
|"phase$_0$ (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|
||||
|"phase$_0$ (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|
||||
|"phase$_0$ (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|
||||
|"phase$_0$ (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|
||||
|"phase$_0$ x = phase$_0$(x - 6)"
|
||||
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase$_0$(x-1)
|
||||
\end{isar}
|
||||
We now define shaft encoder sequences as
|
||||
translations of distance functions:
|
||||
\begin{isar}
|
||||
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
|
||||
where "encoding df init$_{\text{pos}}$ \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s$_{\text{res}}$\<rfloor> + init$_{\text{pos}}$)"
|
||||
\end{isar}
|
||||
where \inlineisar+init$_{\text{pos}}$+ is the initial position of the wheel.
|
||||
\inlineisar+sampling+'s were constructed from encoding sequences over discretized time points:
|
||||
\begin{isar}
|
||||
definition $\!\!$sampling::"distance$\!$_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft$\!$_encoder$\!$_state"
|
||||
where "sampling df init$_{\text{pos}}$ \<delta>t \<equiv> \<lambda>n::nat. encoding df init$_{\text{pos}}$ (n * \<delta>t)"
|
||||
\end{isar}
|
||||
The sampling interval \inlineisar+\<delta>t+ (the inverse of the sampling frequency) is a critical
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
fun phase\<^sub>0 :: "nat \<Rightarrow> shaft_encoder_state" where
|
||||
"phase\<^sub>0 (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 x = phase\<^sub>0(x - 6)"
|
||||
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase\<^sub>0(x-1)
|
||||
\<close>}
|
||||
|
||||
We now define shaft encoder sequences as translations of distance functions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
|
||||
where "encoding df init\<^sub>p\<^sub>o\<^sub>s \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s\<^sub>r\<^sub>e\<^sub>s\<rfloor> + init\<^sub>p\<^sub>o\<^sub>s)"
|
||||
\<close>}
|
||||
|
||||
where \<open>init\<^sub>p\<^sub>o\<^sub>s\<close> is the initial position of the wheel.
|
||||
\<open>sampling\<close>'s were constructed from encoding sequences over discretized time points:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition sampling::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft_encoder_state"
|
||||
where "sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t \<equiv> \<lambda>n::nat. encoding df initinit\<^sub>p\<^sub>o\<^sub>s (n * \<delta>t)"
|
||||
\<close>}
|
||||
|
||||
parameter of the configuration of a system.
|
||||
|
||||
Finally, we can formally define the required performances. From the interface description
|
||||
and the global model parameters such as wheel diameter, the number of teeth per wheel, the sampling
|
||||
frequency etc., we can infer the maximal time of service as well the maximum distance the
|
||||
device can measure.
|
||||
As an example configuration, choosing 1m for
|
||||
\inlineisar+w$_d$+, 100 for \inlineisar+tpw+, 80km/h \inlineisar+Speed$_{\text{Max}}$+,
|
||||
and 14400Hz for the sampling frequency, results in an odometer resolution of 2.3mm,
|
||||
a maximum distance of 9878km, and a maximal system up-time of 123.4 hours.
|
||||
Finally, we can formally define the required performances. From the interface description
|
||||
and the global model parameters such as wheel diameter, the number of teeth per wheel, the
|
||||
sampling frequency etc., we can infer the maximal time of service as well the maximum distance
|
||||
the device can measure. As an example configuration, choosing:
|
||||
|
||||
\<^item> \<^term>\<open>(1 *\<^sub>Q metre):: real[m]\<close> for \<^term>\<open>w\<^sub>d\<close> (wheel-diameter),
|
||||
\<^item> \<^term>\<open>100 :: real\<close> for \<^term>\<open>tpw\<close> (teeth per wheel),
|
||||
\<^item> \<^term>\<open>80 *\<^sub>Q kmh :: real[m\<cdot>s\<^sup>-\<^sup>1]\<close> for \<^term>\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close>,
|
||||
\<^item> \<^term>\<open>14.4 *\<^sub>Q kHz :: real[s\<^sup>-\<^sup>1]\<close> for the sampling frequency,
|
||||
|
||||
results in an odometer resolution of \<^term>\<open>2.3 *\<^sub>Q milli *\<^sub>Q metre\<close>, a maximum distance of
|
||||
\<^term>\<open>9878 *\<^sub>Q kilo *\<^sub>Q metre\<close>, and a maximal system up-time of \<^term>\<open>123.4 *\<^sub>Q hour\<close>s.
|
||||
The required precision of an odometer can be defined by a constant describing
|
||||
the maximally allowed difference between \inlineisar+df(n*\<delta>t)+ and
|
||||
\inlineisar+sampling df init$_{\text{pos}}$ \<delta>t n+ for all \inlineisar+init$_{\text{pos}}$ \<in>{0..5}+.
|
||||
the maximally allowed difference between \<open>df(n*\<delta>t)\<close> and
|
||||
\<open>sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t n\<close> for all \<open>init\<^sub>p\<^sub>o\<^sub>s \<in>{0..5}\<close>.
|
||||
\<close>
|
||||
(*<*)
|
||||
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
|
||||
|
@ -253,41 +315,50 @@ ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
|
|||
|
||||
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
|
||||
text\<open>The original documents contained already various statements that motivate certain safety
|
||||
properties of the device. For example, the \inlineisar+Phase+-table excludes situations in which
|
||||
all sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3} are all ``off'' or situations in
|
||||
properties of the device. For example, the \<open>Phase\<close>-table excludes situations in which
|
||||
all sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close> are all ``off'' or situations in
|
||||
which sensors are ``on,'' reflecting a physical or electrical error in the odometer. It can be
|
||||
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed from the
|
||||
above definitions:
|
||||
\begin{isar}
|
||||
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
|
||||
proof (cases x)
|
||||
case 0 then show ?thesis by (simp add: Phase_def)
|
||||
next
|
||||
case (Suc n) then show ?thesis
|
||||
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
|
||||
qed
|
||||
\end{isar}
|
||||
for all positions \inlineisar+x+. Similarly, it is proved that the table is indeed
|
||||
cyclic: \inlineisar+ phase$_0$ x = phase$_0$(x mod 6)+ and locally injective:
|
||||
\inlineisar+\<forall>x<6. \<forall>y<6. phase$_0$ x = phase$_0$ y \<longrightarrow> x = y+.
|
||||
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
|
||||
that we would like to present in more detail.
|
||||
\begin{isar}
|
||||
theorem minimal_sampling :
|
||||
assumes * : normally_behaved_distance_function df
|
||||
and ** : \<delta>t * Speed$_{\text{Max}}$ < \<delta>s$_{\text{res}}$
|
||||
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
|
||||
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
|
||||
sampling df init$_{\text{pos}}$ \<delta>X = (sampling df init$_{\text{pos}}$ \<delta>t) o f
|
||||
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed
|
||||
from the above definitions:
|
||||
|
||||
\end{isar}
|
||||
This theorem states for \inlineisar+normally_behaved_distance_function+s that there is
|
||||
@{theory_text [display]\<open>
|
||||
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
|
||||
proof (cases x)
|
||||
case 0 then show ?thesis by (simp add: Phase_def)
|
||||
next
|
||||
case (Suc n) then show ?thesis
|
||||
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
|
||||
qed
|
||||
\<close>}
|
||||
|
||||
for all positions \<open>x\<close>. Similarly, it is proved that the table is indeed cyclic:
|
||||
|
||||
\<open>phase\<^sub>0 x = phase\<^sub>0(x mod 6)\<close>
|
||||
|
||||
and locally injective:
|
||||
|
||||
\<open>\<forall>x<6. \<forall>y<6. phase\<^sub>0 x = phase\<^sub>0 y \<longrightarrow> x = y\<close>
|
||||
|
||||
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
|
||||
that we would like to present in more detail.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
theorem minimal_sampling :
|
||||
assumes * : normally_behaved_distance_function df
|
||||
and ** : \<delta>t * Speed\<^sub>M\<^sub>a\<^sub>x < \<delta>s\<^sub>r\<^sub>e\<^sub>s
|
||||
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
|
||||
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
|
||||
sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>X = (sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t) o f
|
||||
|
||||
\<close>}
|
||||
|
||||
This theorem states for \<open>normally_behaved_distance_function\<close>s that there is
|
||||
a minimal sampling frequency assuring the safety of the measurements; samplings on
|
||||
some \inlineisar$df$ gained from this minimal sampling frequency can be ``pumped up''
|
||||
some \<open>df\<close> gained from this minimal sampling frequency can be ``pumped up''
|
||||
to samplings of these higher sampling frequencies; they do not contain more information.
|
||||
Of particular interest is the second assumption, labelled ``\inlineisar|**|,'' which
|
||||
establishes a lower bound from \inlineisar+w$_{\text{circ}}$+, \inlineisar+tpw+,
|
||||
\inlineisar+Speed$_{\text{Max}}$+ for the sampling frequency. Methodologically, this represents
|
||||
Of particular interest is the second assumption, labelled ``\<open>**\<close>'' which
|
||||
establishes a lower bound from \<open>w\<^sub>0\<close>, \<open>tpw\<close>,
|
||||
\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close> for the sampling frequency. Methodologically, this represents
|
||||
an exported constraint that can not be represented \<^emph>\<open>inside\<close> the design model: it means that the
|
||||
computations have to be fast enough on the computing platform in order to assure that the
|
||||
calculations are valid. It was in particular this exported constraint that forced us to give up
|
||||
|
@ -301,6 +372,7 @@ standard~@{cite "bsi:50128:2014"}, 7.2.4.22 and are usually addressed in an own
|
|||
\<close>
|
||||
|
||||
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
|
||||
|
||||
text\<open>
|
||||
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
|
||||
like any other modeling activity, not a simple one-to-one translation of some
|
||||
|
@ -312,125 +384,144 @@ text\<open>
|
|||
|
||||
section*[lhf::text_section]
|
||||
\<open>Tracking Concepts and Definitions\<close>
|
||||
|
||||
text\<open>
|
||||
\isadof is designed to annotate text elements with structured meta-information and to reference
|
||||
\<^isadof> is designed to annotate text elements with structured meta-information and to reference
|
||||
these text elements throughout the integrated source. A classical application of this capability
|
||||
is the annotation of concepts and terms definitions---be them informal, semi-formal or formal---and
|
||||
their consistent referencing. In the context of our CENELEC ontology, \eg, we can translate the
|
||||
their consistent referencing. In the context of our CENELEC ontology, \<^eg>, we can translate the
|
||||
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
|
||||
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
|
||||
of the concept ``traceability'' in paragraphs 3.1.46 (a notion referenced 31 times in the standard),
|
||||
which we translated directly into:
|
||||
\begin{isar}
|
||||
Definition*[traceability::concept]<open> degree to which relationship
|
||||
can be established between two or more products of a development
|
||||
process, especially those having a predecessor/successor or
|
||||
master/subordinate relationship to one another. <close>
|
||||
\end{isar}
|
||||
In the integrated source of the odometry study, we can reference in a text element to this
|
||||
of the concept \<^cenelec_term>\<open>traceability\<close> in paragraphs 3.1.46 (a notion referenced 31 times in
|
||||
the standard), which we translated directly into:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
Definition*[traceability, short_name="''traceability''"]
|
||||
\<open>degree to which relationship can be established between two or more products of a
|
||||
development process, especially those having a predecessor/successor or
|
||||
master/subordinate relationship to one another.\<close>
|
||||
\<close>}
|
||||
|
||||
In the integrated source of the odometry study, we can reference in a text element to this
|
||||
concept as follows:
|
||||
\begin{isar}
|
||||
text*[...]<open> ... to assure <@>{concept traceability} for
|
||||
<@>{requirement bitwiseAND}, we prove ... <close>
|
||||
\end{isar}
|
||||
The presentation of this document element inside \isadof is immediately hyperlinked against the
|
||||
\inlineisar+Definition*+ element shown above; this serves as documentation of
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[...]\<open> ... to assure <@>{cenelec_term traceability} for
|
||||
<@>{requirement bitwiseAND}, we prove ... \<close>
|
||||
\<close>}
|
||||
|
||||
|
||||
\<^isadof> also uses the underlying ontology to generate the navigation markup inside the IDE, \<^ie>
|
||||
the presentation of this document element inside \<^isadof> is immediately hyperlinked against the
|
||||
@{theory_text \<open> Definition* \<close>}-element shown above; this serves as documentation of
|
||||
the standard for the development team working on the integrated source. The PDF presentation
|
||||
of such links depends on the actual configurations for the document generation; We will explain
|
||||
this later.
|
||||
CENELEC foresees also a number of roles, phases, safety integration levels, etc., which were
|
||||
directly translated into HOL enumeration types usable in ontological concepts of ODL.
|
||||
\begin{isar}
|
||||
datatype role =
|
||||
PM (* Program Manager *) | RQM (* Requirements Manager *)
|
||||
| DES (* Designer *) | IMP (* Implementer *) |
|
||||
| VER (* Verifier *) | VAL (* Validator *) | ...
|
||||
datatype phase =
|
||||
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
|
||||
| SR (* Software Requirement *) | SA (* Software Architecture *)
|
||||
| SDES (* Software Design *) | ...
|
||||
\end{isar}
|
||||
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
|
||||
a classification of \<^emph>\<open>verification and testing techniques\<close>:
|
||||
\begin{isar}
|
||||
datatype vnt_technique =
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
datatype role =
|
||||
PM (* Program Manager *) | RQM (* Requirements Manager *)
|
||||
| DES (* Designer *) | IMP (* Implementer *) |
|
||||
| VER (* Verifier *) | VAL (* Validator *) | ...
|
||||
datatype phase =
|
||||
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
|
||||
| SR (* Software Requirement *) | SA (* Software Architecture *)
|
||||
| SDES (* Software Design *) | ...
|
||||
\<close>}
|
||||
|
||||
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
|
||||
a classification of \<^emph>\<open>verification and testing techniques\<close>:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
datatype vnt_technique =
|
||||
formal_proof "thm list" | stat_analysis
|
||||
| dyn_analysis dyn_ana_kind | ...
|
||||
\end{isar}
|
||||
In contrast to the standard, we can parameterize \inlineisar+formal_proof+ with a list of
|
||||
theorems, an entity known in the Isabelle kernel. Here, \isadof assures for text elements
|
||||
\<close>}
|
||||
|
||||
In contrast to the standard, we can parameterize \<open>formal_proof\<close> with a list of
|
||||
theorems, an entity known in the Isabelle kernel. Here, \<^isadof> assures for text elements
|
||||
annotated with theorem names, that they refer indeed to established theorems in the Isabelle
|
||||
environment. Additional checks could be added to make sure that these theorems have a particular
|
||||
form.
|
||||
|
||||
While we claim that this possibility to link to theorems (and test-results) is unique in the
|
||||
world of systems attempting to assure traceability, referencing a particular (proven) theorem is
|
||||
definitively not sufficient to satisfy the claimed requirement. Human evaluators will always have
|
||||
to check that the provided theorem \<open>adequately\<close> represents the claim; we do not in the slightest
|
||||
suggest that their work is superfluous. Our framework allows to statically check that tests or proofs
|
||||
have been provided, at places where the ontology requires them to be, and both assessors and developers
|
||||
can rely on this check and navigate through related information easily. It does not guarantee that
|
||||
intended concepts for, \eg, safety or security have been adequately modeled.
|
||||
world of systems attempting to assure \<^cenelec_term>\<open>traceability\<close>, referencing a particular
|
||||
(proven) theorem is definitively not sufficient to satisfy the claimed requirement. Human
|
||||
evaluators will always have to check that the provided theorem \<open>adequately\<close> represents the claim;
|
||||
we do not in the slightest suggest that their work is superfluous. Our framework allows to
|
||||
statically check that tests or proofs have been provided, at places where the ontology requires
|
||||
them to be, and both assessors and developers can rely on this check and navigate through
|
||||
related information easily. It does not guarantee that intended concepts for, \<^eg>, safety
|
||||
or security have been adequately modeled.
|
||||
\<close>
|
||||
|
||||
section*[moe::text_section]
|
||||
\<open>Major Ontological Entities: Requirements and Evidence\<close>
|
||||
text\<open>
|
||||
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \inlineisar*doc_class*
|
||||
based on some generic basic library \inlineisar*text_element* providing basic layout attributes.
|
||||
\begin{isar}
|
||||
doc_class requirement = text_element +
|
||||
long_name :: "string option"
|
||||
is_concerned :: "role set"
|
||||
\end{isar}
|
||||
where the \inlineisar*roles* are exactly the ones defined in the previous section and represent
|
||||
the groups of stakeholders in the CENELEC process. Therefore, the \inlineisar+is_concerned+-attribute
|
||||
allows expressing who ``owns'' this text-element. \isadof supports a role-based
|
||||
presentation, \eg, different presentation styles of the
|
||||
integrated source may decide to highlight, to omit, to defer into an annex, text entities
|
||||
according to the role-set.
|
||||
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \<^theory_text>\<open>doc_class\<close>
|
||||
based on the generic basic library \<^doc_class>\<open>text_element\<close> providing basic layout attributes.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class requirement = text_element +
|
||||
long_name :: "string option"
|
||||
is_concerned :: "role set"
|
||||
\<close>}
|
||||
|
||||
the groups of stakeholders in the CENELEC process. Therefore, the \<open>is_concerned\<close>-attribute
|
||||
allows expressing who ``owns'' this text-element. \<^isadof> supports a role-based
|
||||
presentation, \<^eg>, different presentation styles of the integrated source may decide to highlight,
|
||||
to omit, to defer into an annex, text entities according to the role-set.
|
||||
|
||||
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
|
||||
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
|
||||
\begin{isar}
|
||||
doc_class sub_requirement =
|
||||
decomposes :: "requirement"
|
||||
relates_to :: "requirement set"
|
||||
\end{isar}\<close>
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class sub_requirement =
|
||||
decomposes :: "requirement"
|
||||
relates_to :: "requirement set"
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
|
||||
text\<open>An example for making explicit implicit principles,
|
||||
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.:\vspace{-1.5mm}
|
||||
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.: \<^vs>\<open>-0.15cm\<close>
|
||||
|
||||
\begin{quote}\small
|
||||
The objective of software verification is to examine and arrive at a judgment based on
|
||||
evidence that output items (process, documentation, software or application) of a specific
|
||||
development phase fulfill the requirements and plans with respect to completeness, correctness
|
||||
and consistency.
|
||||
\end{quote}\vspace{-1.5mm}
|
||||
The terms \<^emph>\<open>judgment\<close> and \<^emph>\<open>evidence\<close> are used as a kind of leitmotif throughout the CENELEC
|
||||
standard, but they are neither explained nor even listed in the general glossary. However, the
|
||||
standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that different stakeholders
|
||||
should have in the process. Our version to express this key concept of judgment, \eg, by
|
||||
the following concept:
|
||||
\begin{isar}
|
||||
doc_class judgement =
|
||||
refers_to :: requirement
|
||||
evidence :: "vnt_technique list"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\end{isar}
|
||||
\end{quote} \<^vs>\<open>-0.15cm\<close>
|
||||
|
||||
The terms \<^onto_class>\<open>judgement\<close> based on \<^term>\<open>evidence\<close> are used as a kind of leitmotif throughout
|
||||
the CENELEC standard, but they are neither explained nor even listed in the general glossary.
|
||||
However, the standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that
|
||||
different stakeholders should have in the process. Our version to express this key concept of
|
||||
\<^onto_class>\<open>judgement\<close>, \<^eg>, by the following concept:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class judgement =
|
||||
refers_to :: requirement
|
||||
evidence :: "vnt_technique list"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\<close>}
|
||||
|
||||
As one can see, the role set is per default set to the verification team, the assessors and the
|
||||
validation team.
|
||||
|
||||
There are different views possible here: an alternative would be to define \inlineisar+evidence+
|
||||
as ontological concept with \inlineisar+vnt_technique+'s (rather than an attribute of judgement)
|
||||
and consider the basis of judgments as a relation between requirements and evidence:
|
||||
\begin{isar}
|
||||
doc_class judgement =
|
||||
based_on :: "(requirement \<times> evidence) set"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\end{isar}
|
||||
There are different views possible here: an alternative would be to define \<^term>\<open>evidence\<close>
|
||||
as ontological concept with \<^typ>\<open>vnt_technique\<close>'s (rather than an attribute of judgement)
|
||||
and consider the basis of a summary containing the relation between requirements and evidence:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class summary =
|
||||
based_on :: "(requirement \<times> evidence) set"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\<close>}
|
||||
|
||||
More experimentation will be needed to find out what kind of ontological modeling is most
|
||||
adequate for developers in the context of \isadof.
|
||||
|
@ -442,60 +533,66 @@ text\<open>From the variety of different possibilities for adding CENELEC annota
|
|||
integrated source, we will, in the following, point out three scenarios.\<close>
|
||||
|
||||
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
|
||||
text\<open>In our case, the SR-team early on detected a property necessary
|
||||
text\<open>In our case, the \<^term>\<open>SR\<close>-team early on detected a property necessary
|
||||
for error-detection of the device (c.f. @{technical verific}):
|
||||
\enlargethispage{2\baselineskip}\begin{isar}
|
||||
text*[encoder_props::requirement]<open> The requirement specification team ...
|
||||
C1 & C2 & C3 = 0 (bitwise logical AND operation)
|
||||
C1 | C2 | C3 = 1 (bitwise logical OR operation) <close>
|
||||
\end{isar}
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[encoder_props::requirement]\<open> The requirement specification team identifies the property:
|
||||
C1 & C2 & C3 = 0 (bitwise logical AND operation)
|
||||
C1 | C2 | C3 = 1 (bitwise logical OR operation) \<close>
|
||||
\<close>}
|
||||
|
||||
After the Isabelle proofs shown in @{technical verific}, we can either register the theorems
|
||||
directly in an evidence statement:
|
||||
|
||||
\begin{isar}
|
||||
text*[J1::judgement, refers_to="<@>{docitem <open>encoder_props<close>}",
|
||||
evidence="[formal_proof[<@>{thm <open>Encoder_Property_1<close>},
|
||||
<@>{thm <open>Encoder_Property_2<close>}]]"]
|
||||
<open>The required encoder properties are in fact verified to be consistent
|
||||
with the formalization of <@>{term "phase$_0$"}.<close>
|
||||
\end{isar}
|
||||
The references \inlineisar|<@>{...}|, called antiquotation, allow us not only to reference to
|
||||
@{theory_text [display]\<open>
|
||||
text*[J1::judgement, refers_to="@{docitem <open>encoder_props<close>}",
|
||||
evidence="[formal_proof[@{thm <open>Encoder_Property_1<close>},
|
||||
@{thm <open>Encoder_Property_2<close>}]]"]
|
||||
\<open>The required encoder properties are in fact verified to be consistent
|
||||
with the formalization of @{term "phase\<^sub>0"}.\<close>
|
||||
\<close>}
|
||||
|
||||
The references \<open>@{...}\<close>, called antiquotation, allow us not only to reference to
|
||||
formal concepts, they are checked for consistency and there are also antiquotations that
|
||||
print the formally checked content (\eg, the statement of a theorem).
|
||||
print the formally checked content (\<^eg>, the statement of a theorem).
|
||||
\<close>
|
||||
|
||||
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
|
||||
text\<open>By definition, the main purpose of the requirement specification is the
|
||||
identification of the safety requirements. As an example, we state the required precision of an
|
||||
odometric function: for any normally behaved distance function \inlineisar+df+, and any representable
|
||||
and valid sampling sequence that can be constructed for \inlineisar+df+, we require that the
|
||||
difference between the physical distance and distance calculable from the
|
||||
@{term Odometric_Position_Count} is bound by the minimal resolution of the odometer.
|
||||
\begin{isar}
|
||||
text*[R5::safety_requirement]<open>We can now state ... <close>
|
||||
definition
|
||||
Odometric_Position_Count_precise::(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool
|
||||
where Odometric_Position_Count_precise odofunction \<equiv>
|
||||
(\<forall> df. \<forall>S. normally_behaved_distance_function df
|
||||
\<longrightarrow> representable S
|
||||
\<longrightarrow> valid_sampling S df
|
||||
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
|
||||
in \<bar>df((length S - 1)*\<delta>t$_{\text{odo}}$) - (\<delta>s$_{\text{res}}$ * pos)\<bar> \<le> \<delta>s$_{\text{res}}$))
|
||||
|
||||
update_instance*[R5::safety_requirement,
|
||||
formal_definition:="[<@>{thm <open>Odometric_Position_Count_precise_def<close>}]"]
|
||||
\end{isar}
|
||||
By \inlineisar+update_instance*+, we book the property \inlineisar+Position_Count_precise_def+ as
|
||||
\inlineisar+safety_requirement+, a specific sub-class of \inlineisar+requirement+s
|
||||
text\<open>By definition, the main purpose of the requirement specification is the identification of
|
||||
the safety requirements. As an example, we state the required precision of an odometric function:
|
||||
for any normally behaved distance function \<open>df\<close>, and any representable and valid
|
||||
sampling sequence that can be constructed for \<open>df\<close>, we require that the difference
|
||||
between the physical distance and distance calculable from the @{term Odometric_Position_Count}
|
||||
is bound by the minimal resolution of the odometer.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[R5::safety_requirement]\<open>We can now state ... \<close>
|
||||
definition Odometric_Position_Count_precise :: "(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool"
|
||||
where "Odometric_Position_Count_precise odofunction \<equiv>
|
||||
(\<forall> df. \<forall>S. normally_behaved_distance_function df
|
||||
\<longrightarrow> representable S
|
||||
\<longrightarrow> valid_sampling S df
|
||||
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
|
||||
in \<bar>df((length S - 1)*\<delta>t\<^sub>o\<^sub>d\<^sub>o) - (\<delta>s\<^sub>r\<^sub>e\<^sub>s * pos)\<bar> \<le> \<delta>s\<^sub>r\<^sub>e\<^sub>s))"
|
||||
|
||||
update_instance*[R5::safety_requirement,
|
||||
formal_definition:="[@{thm \<open>Odometric_Position_Count_precise_def\<close>}]"]
|
||||
\<close>}
|
||||
|
||||
By \<^theory_text>\<open>update_instance*\<close>, we book the property \<open>Position_Count_precise_def\<close> as
|
||||
\<^onto_class>\<open>safety_requirement\<close>, a specific sub-class of \<^onto_class>\<open>requirement\<close>s
|
||||
requesting a formal definition in Isabelle.\<close>
|
||||
|
||||
subsection\<open>Exporting Derived Requirements.\<close>
|
||||
|
||||
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
|
||||
for a major theorem necessary for the safety requirements; this was in our development the case for
|
||||
the condition labelled ``\inlineisar|**|'' in @{docitem verific}. The current CENELEC standard clearly separates
|
||||
the condition labelled ``\<open>**\<close>'' in @{docitem verific}. The current CENELEC standard clearly separates
|
||||
``requirement specifications'' from ``verification reports,'' which is probably motivated
|
||||
by the overall concern of organizational separation and of document consistency. While this
|
||||
document organization is possible in \isadof, it is in our experience often counter-productive
|
||||
document organization is possible in \<^isadof>, it is in our experience often counter-productive
|
||||
in practice: organizations tend to defend their documents because the impact of changes is more and more
|
||||
difficult to oversee. This effect results in a dramatic development slow-down and an increase of
|
||||
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
|
||||
|
@ -510,58 +607,58 @@ different PDF versions and for each version, document specific consistency guara
|
|||
automatically enforced.
|
||||
|
||||
In our case study, we define this condition as predicate, declare an explanation of it as
|
||||
\inlineisar+SRAC+ (CENELEC for: safety-related application condition; ontologically, this is a
|
||||
derived class from \inlineisar+requirement+.) and add the definition of the predicate into the
|
||||
\<^onto_class>\<open>SRAC\<close> (CENELEC for: safety-related application condition; ontologically, this is a
|
||||
derived class from \<^onto_class>\<open>requirement\<close>.) and add the definition of the predicate into the
|
||||
document instance as described in the previous section.\<close>
|
||||
|
||||
|
||||
|
||||
|
||||
text\<open>\appendix\<close>
|
||||
chapter\<open>Appendix\<close>
|
||||
text\<open>
|
||||
\<^item> \inlineisar|<@>{thm refl}|: @{thm refl}
|
||||
\<^item> \inlineisar|<@>{thm [source] refl}|: @{thm [source] refl}
|
||||
\<^item> \inlineisar|<@>{thm[mode=Rule] conjI}|: @{thm[mode=Rule] conjI}
|
||||
\<^item> \inlineisar|<@>{file "mini_odo.thy"}|: @{file "mini_odo.thy"}
|
||||
\<^item> \inlineisar|<@>{value "3+4::int"}|: @{value "3+4::int"}
|
||||
\<^item> \inlineisar|<@>{const hd}|: @{const hd}
|
||||
\<^item> \inlineisar|<@>{theory HOL.List}|: @{theory HOL.List}
|
||||
\<^item> \inlineisar|<@>{term "3"}|: @{term "3"}
|
||||
\<^item> \inlineisar|<@>{type bool}|: @{type bool}
|
||||
\<^item> \inlineisar|<@>{term [show_types] "f x = a + x"}|: @{term [show_types] "f x = a + x"}
|
||||
\<^item> \<open>@{thm refl}\<close> : @{thm refl}
|
||||
\<^item> \<open>@{thm [source] refl}\<close> : @{thm [source] refl}
|
||||
\<^item> \<open>@{thm[mode=Rule] conjI}\<close> : @{thm[mode=Rule] conjI}
|
||||
\<^item> \<open>@{file "mini_odo.thy"}\<close> : @{file "mini_odo.thy"}
|
||||
\<^item> \<open>@{value "3+4::int"}\<close> : @{value "3+4::int"}
|
||||
\<^item> \<open>@{const hd}\<close> : @{const hd}
|
||||
\<^item> \<open>@{theory HOL.List}\<close> : @{theory HOL.List}
|
||||
\<^item> \<open>@{term "3"}\<close> : @{term "3"}
|
||||
\<^item> \<open>@{type bool}\<close> : @{type bool}
|
||||
\<^item> \<open>@{term [show_types] "f x = a + x"}\<close> : @{term [show_types] "f x = a + x"}
|
||||
\<close>
|
||||
|
||||
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
|
||||
text\<open>Examples for declaration of typed doc-classes "assumption" (sic!) and "hypothesis" (sic!!),
|
||||
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
|
||||
text*[ass2::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
|
||||
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
|
||||
text*[hyp1::hypothesis] \<open> \<open>P \<noteq> NP\<close> \<close>
|
||||
|
||||
text\<open>A real example fragment from a larger project, declaring a text-element as a
|
||||
"safety-related application condition", a concept defined in the
|
||||
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
|
||||
text\<open>
|
||||
A real example fragment from a larger project, declaring a text-element as a
|
||||
"safety-related application condition", a concept defined in the
|
||||
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
|
||||
|
||||
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass2\<close>} we establish the following: ... \<close>
|
||||
|
||||
text*[ass122::SRAC, long_name="Some ''ass122''"] \<open> The overall sampling frequency of the odometer
|
||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
||||
result communication times... \<close>
|
||||
text*[ass122::SRAC, long_name="Some ''ass122''"]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 khz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[ass123::SRAC] \<open> The overall sampling frequency of the odometer
|
||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
||||
result communication times... \<close>
|
||||
text*[ass123::SRAC]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 khz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[ass124::EC, long_name="Some ''ass124''"] \<open> The overall sampling frequency of the odometer
|
||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
||||
result communication times... \<close>
|
||||
text*[ass124::EC, long_name="Some ''ass124''"]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 khz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
|
||||
that governs the external test-execution via, eg., a makefile or specific calls
|
||||
to a test-environment or test-engine. \<close>
|
||||
text*[t10::test_result]
|
||||
\<open> This is a meta-test. This could be an ML-command that governs the external
|
||||
test-execution via, \<^eg>, a makefile or specific calls to a test-environment or test-engine. \<close>
|
||||
|
||||
|
||||
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
|
||||
meta-information and status. \<close>
|
||||
text \<open> Finally some examples of references to doc-items, i.e. text-elements
|
||||
with declared meta-information and status. \<close>
|
||||
|
||||
text \<open> As established by @{test_result (unchecked) \<open>t10\<close>},
|
||||
@{test_result (define) \<open>t10\<close>} \<close>
|
||||
text \<open> the @{test_result \<open>t10\<close>}
|
||||
|
@ -569,10 +666,10 @@ text \<open> the @{test_result \<open>t10\<close>}
|
|||
text \<open> represent a justification of the safety related applicability
|
||||
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
|
||||
|
||||
(* due to notational conventions for antiquotations, one may even write: *)
|
||||
(* CRASHES WITH LaTeX backend error. *)
|
||||
text \<open> represent a justification of the safety related applicability
|
||||
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>.\<close>
|
||||
text \<open> due to notational conventions for antiquotations, one may even write:
|
||||
|
||||
"represent a justification of the safety related applicability
|
||||
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>."\<close>
|
||||
|
||||
|
||||
(*<*)
|
||||
|
|
15
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/IsaDofApplications.thy
Executable file → Normal file
|
@ -16,6 +16,9 @@ theory IsaDofApplications
|
|||
imports "Isabelle_DOF.scholarly_paper"
|
||||
begin
|
||||
|
||||
use_template "lncs"
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
open_monitor*[this::article]
|
||||
declare[[strict_monitor_checking=false]]
|
||||
|
||||
|
@ -71,7 +74,7 @@ abstract*[abs::abstract, keywordlist="[''Ontology'',''Ontological Modeling'',''I
|
|||
\<close>
|
||||
|
||||
section*[intro::introduction]\<open> Introduction \<close>
|
||||
text*[introtext::introduction]\<open>
|
||||
text*[introtext::introduction, level = "Some 1"]\<open>
|
||||
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the
|
||||
most pervasive challenge in the digitization of knowledge and its
|
||||
propagation. This challenge incites numerous research efforts
|
||||
|
@ -120,7 +123,7 @@ declare_reference*[ontomod::text_section]
|
|||
declare_reference*[ontopide::text_section]
|
||||
declare_reference*[conclusion::text_section]
|
||||
(*>*)
|
||||
text*[plan::introduction]\<open> The plan of the paper is follows: we start by introducing the underlying
|
||||
text*[plan::introduction, level="Some 1"]\<open> The plan of the paper is follows: we start by introducing the underlying
|
||||
Isabelle system (@{text_section (unchecked) \<open>bgrnd\<close>}) followed by presenting the
|
||||
essentials of \<^isadof> and its ontology language (@{text_section (unchecked) \<open>isadof\<close>}).
|
||||
It follows @{text_section (unchecked) \<open>ontomod\<close>}, where we present three application
|
||||
|
@ -130,7 +133,7 @@ conclusions and discuss related work in @{text_section (unchecked) \<open>conclu
|
|||
|
||||
section*[bgrnd::text_section,main_author="Some(@{docitem ''bu''}::author)"]
|
||||
\<open> Background: The Isabelle System \<close>
|
||||
text*[background::introduction]\<open>
|
||||
text*[background::introduction, level="Some 1"]\<open>
|
||||
While Isabelle is widely perceived as an interactive theorem prover
|
||||
for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we
|
||||
would like to emphasize the view that Isabelle is far more than that:
|
||||
|
@ -159,7 +162,7 @@ figure*[architecture::figure,relative_width="100",src="''figures/isabelle-archit
|
|||
asynchronous communication between the Isabelle system and
|
||||
the IDE (right-hand side). \<close>
|
||||
|
||||
text*[blug::introduction]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
|
||||
text*[blug::introduction, level="Some 1"]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
|
||||
comes with many layers, with Standard ML (SML) at the bottom layer as implementation
|
||||
language. The architecture actually foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which
|
||||
resides in the SML structure \<^ML_structure>\<open>Context\<close>. This structure provides a kind of container called
|
||||
|
@ -191,7 +194,7 @@ For the antiquotation \inlineisar+\at{value "fac 5"}+ we assume the usual defin
|
|||
\inlineisar+fac+ in HOL.
|
||||
\<close>
|
||||
|
||||
text*[anti]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
|
||||
text*[anti::introduction, level = "Some 1"]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
|
||||
displayed and can be used for calculations before actually being typeset. When editing,
|
||||
Isabelle's PIDE offers auto-completion and error-messages while typing the above
|
||||
\<^emph>\<open>semi-formal\<close> content. \<close>
|
||||
|
@ -256,7 +259,7 @@ enforcing a sequence of text-elements that must belong to the corresponding clas
|
|||
To start using \<^isadof>, one creates an Isabelle project (with the name
|
||||
\inlinebash{IsaDofApplications}):
|
||||
\begin{bash}
|
||||
isabelle mkroot_DOF -o scholarly_paper -t lncs -d IsaDofApplications
|
||||
isabelle dof_mkroot -o scholarly_paper -t lncs IsaDofApplications
|
||||
\end{bash}
|
||||
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
|
||||
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof,
|
||||
quick_and_dirty = true]
|
||||
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
|
||||
theories
|
||||
IsaDofApplications
|
||||
document_files
|
||||
"isadof.cfg"
|
||||
"root.bib"
|
||||
"authorarchive.sty"
|
||||
"preamble.tex"
|
||||
"lstisadof.sty"
|
||||
"vector_iD_icon.pdf"
|
||||
"figures/isabelle-architecture.pdf"
|
||||
"figures/Dogfood-Intro.png"
|
||||
"figures/InteractiveMathSheet.png"
|
||||
|
|
|
@ -0,0 +1,339 @@
|
|||
%% Copyright (C) 2008-2019 Achim D. Brucker, https://www.brucker.ch
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{authorarchive}
|
||||
[0000/00/00 Unreleased v1.1.1+%
|
||||
Self-archiving information for scientific publications.]
|
||||
%
|
||||
\PassOptionsToPackage{hyphens}{url}
|
||||
%
|
||||
\RequirePackage{ifthen}
|
||||
\RequirePackage[inline]{enumitem}
|
||||
\RequirePackage{graphicx}
|
||||
\RequirePackage{eso-pic}
|
||||
\RequirePackage{intopdf}
|
||||
\RequirePackage{kvoptions}
|
||||
\RequirePackage{hyperref}
|
||||
\RequirePackage{calc}
|
||||
\RequirePackage{qrcode}
|
||||
\RequirePackage{hvlogos}
|
||||
%
|
||||
%Better url breaking
|
||||
\g@addto@macro{\UrlBreaks}{\UrlOrds}
|
||||
%
|
||||
% Option declarations
|
||||
% -------------------
|
||||
\SetupKeyvalOptions{
|
||||
family=AA,
|
||||
prefix=AA@
|
||||
}
|
||||
%
|
||||
\DeclareStringOption[.]{bibtexdir}
|
||||
\DeclareStringOption[https://duckduckgo.com/?q=]{baseurl}
|
||||
\DeclareStringOption[.pdf]{suffix}
|
||||
\DeclareStringOption[UNKNOWN PUBLISHER]{publisher}[]
|
||||
\DeclareStringOption[UNKNOWN YEAR]{year}[]
|
||||
\DeclareStringOption[]{key}[]
|
||||
\DeclareStringOption[]{doi}[]
|
||||
\DeclareStringOption[]{doiText}[]
|
||||
\DeclareStringOption[]{publisherurl}[]
|
||||
\DeclareStringOption[UNKNOWN START PAGE]{startpage}[]
|
||||
\DeclareStringOption[UNKNOWN PUBLICATION]{publication}[]
|
||||
|
||||
\DeclareBoolOption{ACM}
|
||||
\DeclareBoolOption{acmart}
|
||||
\DeclareBoolOption{ENTCS}
|
||||
\DeclareBoolOption{IEEE}
|
||||
\DeclareBoolOption{LNCS}
|
||||
\DeclareBoolOption{LNI}
|
||||
\DeclareBoolOption{nocopyright}
|
||||
\DeclareBoolOption{nourl}
|
||||
\DeclareBoolOption{nobib}
|
||||
\DeclareBoolOption{orcidicon}
|
||||
%\ProcessOptions\relax
|
||||
|
||||
|
||||
% Default option rule
|
||||
\DeclareDefaultOption{%
|
||||
\ifx\CurrentOptionValue\relax
|
||||
\PackageWarningNoLine{\@currname}{%
|
||||
Unknown option `\CurrentOption'\MessageBreak
|
||||
is passed to package `authorarchive'%
|
||||
}%
|
||||
% Pass the option to package color.
|
||||
% Again it is better to expand \CurrentOption.
|
||||
\expandafter\PassOptionsToPackage\expandafter{\CurrentOption}{color}%
|
||||
\else
|
||||
% Package color does not take options with values.
|
||||
% We provide the standard LaTeX error.
|
||||
\@unknownoptionerror
|
||||
\fi
|
||||
}
|
||||
\ProcessKeyvalOptions*
|
||||
|
||||
% Provide command for dynamic configuration seutp
|
||||
\def\authorsetup{\kvsetkeys{AA}}
|
||||
|
||||
% Load local configuration
|
||||
\InputIfFileExists{authorarchive.config}{}{}
|
||||
|
||||
|
||||
\newlength\AA@x
|
||||
\newlength\AA@y
|
||||
\newlength\AA@width
|
||||
|
||||
\def\AA@bibBibTeX{\AA@bibtexdir/\AA@key.bib}
|
||||
\def\AA@bibBibTeXLong{\AA@bibtexdir/\AA@key.bibtex}
|
||||
\def\AA@bibWord{\AA@bibtexdir/\AA@key.word.xml}
|
||||
\def\AA@bibEndnote{\AA@bibtexdir/\AA@key.enw}
|
||||
\def\AA@bibRIS{\AA@bibtexdir/\AA@key.ris}
|
||||
|
||||
\newboolean{AA@bibExists}
|
||||
\setboolean{AA@bibExists}{false}
|
||||
\IfFileExists{\AA@bibBibTeX}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibBibTeXLong}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibWord}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibEndnote}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibRIS}{\setboolean{AA@bibExists}{true}}{}
|
||||
|
||||
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
|
||||
|
||||
\newcommand{\authorcrfont}{\footnotesize}
|
||||
\newcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{.2cm-\paperheight}){#1}}}
|
||||
\newcommand{\authorwidth}[1]{\setlength{\AA@width}{#1}}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
|
||||
\def\AA@pageinfo{}
|
||||
\ifthenelse{\equal{\AA@startpage}{UNKNOWN START PAGE}}{%
|
||||
}{%
|
||||
\setcounter{page}{\AA@startpage}%
|
||||
\def\AA@pageinfo{pp. \thepage--\pageref{\aa@lastpage}, }
|
||||
}
|
||||
|
||||
|
||||
|
||||
%%%% sig-alternate.cls
|
||||
\ifAA@ACM%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=ACM}
|
||||
}{}
|
||||
\global\boilerplate={}
|
||||
\global\copyrightetc={}
|
||||
\renewcommand{\conferenceinfo}[2]{}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
|
||||
\setlength\AA@y{-\textheight+\topmargin+\headheight-\footskip} % -\voffset-\topmargin-\headheight-\footskip}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{\AA@y}){#1}}
|
||||
\setlength{\AA@width}{\columnwidth}
|
||||
\fi
|
||||
%
|
||||
%%%% acmart.cls
|
||||
\ifAA@acmart%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=ACM}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{0.2cm-\paperheight}){#1}}}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\fi
|
||||
%
|
||||
%%%% LNCS
|
||||
\ifAA@LNCS%
|
||||
\ifAA@orcidicon%
|
||||
\renewcommand{\orcidID}[1]{\href{https://orcid.org/#1}{%
|
||||
\textsuperscript{\,\includegraphics[height=2\fontcharht\font`A]{vector_iD_icon}}}}
|
||||
\else\relax\fi%
|
||||
%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=Springer-Verlag}
|
||||
}{}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\@ifclasswith{llncs}{a4paper}{%
|
||||
\ExplSyntaxOn
|
||||
\@ifundefined{pdfmanagement_add:nnn}{%
|
||||
\pdfpagesattr{/CropBox [92 114 523 780]}%
|
||||
}{%
|
||||
\pdfmanagement_add:nnn {Pages}{CropBox}{[92~114~523~780]}
|
||||
}%
|
||||
\ExplSyntaxOff
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},40){#1}}%
|
||||
}{%
|
||||
\ExplSyntaxOn
|
||||
\@ifundefined{pdfmanagement_add:nnn}{%
|
||||
\pdfpagesattr{/CropBox [92 65 523 731]}% LNCS page: 152x235 mm
|
||||
}{%
|
||||
\pdfmanagement_add:nnn {Pages}{CropBox}{[92~62~523~731]}
|
||||
}%
|
||||
\ExplSyntaxOff
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},23){#1}}
|
||||
}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\setcounter{tocdepth}{2}
|
||||
\fi
|
||||
%
|
||||
%%%% LNI
|
||||
\ifAA@LNI%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=GI}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},35){#1}}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\pdfpagesattr{/CropBox [70 65 526.378 748.15]} % TODO
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\setcounter{tocdepth}{2}
|
||||
\fi
|
||||
%
|
||||
%%%% ENTCS
|
||||
\ifAA@ENTCS%
|
||||
\addtolength{\voffset}{1cm}
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=Elsevier Science B.~V.}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{-.5cm-\the\ht\AA@authoratBox}){#1}}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\fi
|
||||
%
|
||||
%%%% IEEE
|
||||
\ifAA@IEEE%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=IEEE}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},6){#1}}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\setcounter{tocdepth}{2}
|
||||
\fi
|
||||
%
|
||||
|
||||
\hypersetup{%
|
||||
draft = false,
|
||||
bookmarksopen = true,
|
||||
bookmarksnumbered= true,
|
||||
pdfauthor = {\@author},
|
||||
pdftitle = {\@title},
|
||||
}
|
||||
|
||||
\@ifpackageloaded{totpages}{%
|
||||
\def\aa@lastpage{TotPages}
|
||||
}{%
|
||||
\RequirePackage{lastpage}
|
||||
\def\aa@lastpage{LastPage}
|
||||
}
|
||||
\newsavebox{\AA@authoratBox}
|
||||
|
||||
\AddToShipoutPicture*{%
|
||||
\setlength{\unitlength}{1mm}%
|
||||
\savebox{\AA@authoratBox}{%
|
||||
\parbox{1.4cm}{%
|
||||
\bgroup%
|
||||
\normallineskiplimit=0pt%
|
||||
\ifAA@nourl%
|
||||
\ifx\AA@doi\@empty\relax%
|
||||
\else%
|
||||
\qrcode[hyperlink,height=1.17cm,padding]{https://doi.org/\AA@doi}%
|
||||
\fi%
|
||||
\else%
|
||||
\qrcode[hyperlink,height=1.17cm,padding]{\AA@baseurl/\AA@key\AA@suffix}%
|
||||
\fi%
|
||||
\egroup%
|
||||
}%
|
||||
\ifAA@nourl\ifx\AA@doi\@empty\addtolength{\AA@width}{1.4cm}\fi\fi
|
||||
\parbox{\AA@width-1.4cm}{\authorcrfont%
|
||||
\ifAA@LNCS%
|
||||
\AA@publication, \AA@pageinfo \AA@year. %
|
||||
\ifAA@nocopyright\else
|
||||
\textcopyright~\AA@year~\AA@publisher.
|
||||
\fi
|
||||
This is the author's
|
||||
version of the work. It is posted
|
||||
\ifAA@nourl\relax\else%
|
||||
at \url{\AA@baseurl/\AA@key\AA@suffix} %
|
||||
\fi
|
||||
\ifAA@nocopyright\relax\else
|
||||
by permission of \AA@publisher{}
|
||||
\fi
|
||||
for your personal use.
|
||||
\ifx\AA@doi\@empty%
|
||||
\relax
|
||||
\else
|
||||
The final publication is available at Springer via
|
||||
\ifx\AA@doiText\@empty%
|
||||
\url{https://doi.org/\AA@doi}.
|
||||
\else
|
||||
\href{https://doi.org/\AA@doi}{\AA@doiText}.
|
||||
\fi
|
||||
\fi
|
||||
\else
|
||||
\ifAA@nocopyright\relax\else
|
||||
\textcopyright~\AA@year~\AA@publisher. %
|
||||
\fi%
|
||||
This is the author's
|
||||
version of the work. It is posted
|
||||
\ifAA@nourl\relax\else%
|
||||
at \url{\AA@baseurl/\AA@key\AA@suffix} %
|
||||
\fi
|
||||
\ifAA@nocopyright\relax\else
|
||||
by permission of \AA@publisher{} %
|
||||
\fi
|
||||
for your personal use. Not for redistribution. The definitive
|
||||
version was published in \emph{\AA@publication}, \AA@pageinfo \AA@year%
|
||||
\ifx\AA@doi\@empty%
|
||||
\ifx\AA@publisherurl\@empty%
|
||||
.%
|
||||
\else
|
||||
\url{\AA@publisherurl}.%
|
||||
\fi
|
||||
\else
|
||||
\ifx\AA@doiText\@empty%
|
||||
, doi: \href{https://doi.org/\AA@doi}{\AA@doi}.%
|
||||
\else
|
||||
, doi: \href{https://doi.org/\AA@doi}{\AA@doiText}.%
|
||||
\fi
|
||||
\fi
|
||||
\fi
|
||||
\ifAA@nobib\relax\else%
|
||||
\ifthenelse{\boolean{AA@bibExists}}{%
|
||||
\hfill
|
||||
\begin{itemize*}[label={}, itemjoin={,}]
|
||||
\IfFileExists{\AA@bibBibTeX}{%
|
||||
\item \attachandlink{\AA@bibBibTeX}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}%
|
||||
}{%
|
||||
\IfFileExists{\AA@bibBibTeXLong}{%
|
||||
\item \attachandlink[\AA@key.bib]{\AA@bibBibTeXLong}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibBibTeX{} (and no \AA@bibBibTeXLong) found. Not embedded reference in BibTeX format.}%
|
||||
}%
|
||||
}%
|
||||
\IfFileExists{\AA@bibWord}{%
|
||||
\item \attachandlink{\AA@bibWord}[application/xml]{XML entry of this paper (e.g., for Word 2007 and later)}{Word}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibWord{} found. Not embedded reference for Word 2007 and later.}%
|
||||
}%
|
||||
\IfFileExists{\AA@bibEndnote}{%
|
||||
\item \attachandlink{\AA@bibEndnote}[application/x-endnote-refer]{Endnote entry of this paper}{EndNote}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibEndnote{} found. Not embedded reference in Endnote format.}%
|
||||
}%
|
||||
\IfFileExists{\AA@bibRIS}{%
|
||||
\item \attachandlink{\AA@bibRIS}[application/x-research-info-systems]{RIS entry of this paper}{RIS}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibRIS{} found. Not embedded reference in RIS format.}%
|
||||
}%
|
||||
\end{itemize*}\\
|
||||
}{%
|
||||
\PackageError{authorarchive}{No bibliographic files found. Specify option 'nobib' if this is intended.}
|
||||
}
|
||||
\fi
|
||||
}
|
||||
}
|
||||
\authorat{\raisebox{\the\ht\AA@authoratBox}{\usebox{\AA@authoratBox}}}
|
||||
}
|
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-II-bgnd1.png
Executable file → Normal file
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 14 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-III-bgnd-text_section.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-IV-jumpInDocCLass.png
Executable file → Normal file
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-Intro.png
Executable file → Normal file
Before Width: | Height: | Size: 85 KiB After Width: | Height: | Size: 85 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-V-attribute.png
Executable file → Normal file
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-figures.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/InteractiveMathSheet.png
Executable file → Normal file
Before Width: | Height: | Size: 75 KiB After Width: | Height: | Size: 75 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/antiquotations-PIDE.png
Executable file → Normal file
Before Width: | Height: | Size: 96 KiB After Width: | Height: | Size: 96 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/isabelle-architecture.pdf
Executable file → Normal file
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/isabelle-architecture.svg
Executable file → Normal file
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 57 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/srac-as-es-application.png
Executable file → Normal file
Before Width: | Height: | Size: 67 KiB After Width: | Height: | Size: 67 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/srac-definition.png
Executable file → Normal file
Before Width: | Height: | Size: 50 KiB After Width: | Height: | Size: 50 KiB |
|
@ -1,2 +0,0 @@
|
|||
Template: scrartcl
|
||||
Ontology: scholarly_paper
|
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/lstisadof.sty
Executable file → Normal file
29
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/preamble.tex
Executable file → Normal file
|
@ -53,23 +53,18 @@
|
|||
\usepackage[size=footnotesize]{caption}
|
||||
|
||||
|
||||
\subject{Example of an Academic Paper\footnote{%
|
||||
This document is an example setup for writing an academic paper. While
|
||||
it is optimized for the Springer's LNCS class, it uses a Koma Script
|
||||
LaTeX class to avoid the re-distribution of \texttt{llncs.cls},
|
||||
which would violate Springer's copyright. This example has been
|
||||
published at CICM 2018:
|
||||
\protect\begin{quote}
|
||||
Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and
|
||||
Burkhart Wolff. Using The Isabelle Ontology Framework: Linking
|
||||
the Formal with the Informal. In Conference on Intelligent
|
||||
Computer Mathematics (CICM). Lecture Notes in Computer Science
|
||||
(11006), Springer-Verlag, 2018.
|
||||
\protect\end{quote}
|
||||
Note that the content of this example is not updated and, hence,
|
||||
might not be correct with respect to the latest version of
|
||||
\isadof{}.
|
||||
}}
|
||||
\usepackage[LNCS,
|
||||
orcidicon,
|
||||
key=brucker.ea-isabelle-ontologies-2018,
|
||||
year=2018,
|
||||
publication={F. Rabe et al. (Eds.): CICM 2018, LNAI 11006},
|
||||
nobib,
|
||||
startpage={1},
|
||||
doi={10.1007/978-3-319-96812-4_3},
|
||||
doiText={10.1007/978-3-319-96812-4\_3},
|
||||
]{authorarchive}
|
||||
\authorrunning{A. D. Brucker et al.}
|
||||
\pagestyle{headings}
|
||||
|
||||
|
||||
\title{<TITLE>}
|
||||
|
|
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/root.bib
Executable file → Normal file
|
@ -3,6 +3,5 @@ session "2020-iFM-csp" = "Isabelle_DOF" +
|
|||
theories
|
||||
"paper"
|
||||
document_files
|
||||
"isadof.cfg"
|
||||
"root.bib"
|
||||
"preamble.tex"
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
Template: scrartcl
|
||||
Ontology: scholarly_paper
|
|
@ -6870,7 +6870,7 @@ isbn="978-3-540-48509-4"
|
|||
title = {{Isabelle's} Logic: {HOL}},
|
||||
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
|
||||
year = 2009,
|
||||
misc = {\url{http://isabelle.in.tum.de/library/HOL/}}
|
||||
misc = {\url{https://isabelle.in.tum.de/library/HOL/}}
|
||||
}
|
||||
|
||||
@InProceedings{ garson.ea:security:2008,
|
||||
|
@ -11000,7 +11000,7 @@ isbn="978-1-4471-3182-3"
|
|||
journal = {Archive of Formal Proofs},
|
||||
month = apr,
|
||||
year = 2019,
|
||||
note = {\url{http://isa-afp.org/entries/HOL-CSP.html}},
|
||||
note = {\url{https://isa-afp.org/entries/HOL-CSP.html}},
|
||||
ISSN = {2150-914x},
|
||||
}
|
||||
|
||||
|
|
|
@ -3,6 +3,8 @@ theory "paper"
|
|||
imports "Isabelle_DOF.scholarly_paper"
|
||||
begin
|
||||
|
||||
use_template "scrartcl"
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
open_monitor*[this::article]
|
||||
|
||||
|
@ -50,7 +52,7 @@ abstract*[abs, keywordlist="[\<open>Shallow Embedding\<close>,\<open>Process-Alg
|
|||
\<close>
|
||||
text\<open>\<close>
|
||||
section*[introheader::introduction,main_author="Some(@{docitem ''bu''}::author)"]\<open> Introduction \<close>
|
||||
text*[introtext::introduction]\<open>
|
||||
text*[introtext::introduction, level="Some 1"]\<open>
|
||||
Communicating Sequential Processes (\<^csp>) is a language
|
||||
to specify and verify patterns of interaction of concurrent systems.
|
||||
Together with CCS and LOTOS, it belongs to the family of \<^emph>\<open>process algebras\<close>.
|
||||
|
@ -152,7 +154,7 @@ processes \<open>Skip\<close> (successful termination) and \<open>Stop\<close> (
|
|||
\<open>\<T>(Skip) = \<T>(Stop) = {[]}\<close>.
|
||||
Note that the trace sets, representing all \<^emph>\<open>partial\<close> history, is in general prefix closed.\<close>
|
||||
|
||||
text*[ex1::math_example, status=semiformal] \<open>
|
||||
text*[ex1::math_example, status=semiformal, level="Some 1"] \<open>
|
||||
Let two processes be defined as follows:
|
||||
|
||||
\<^enum> \<open>P\<^sub>d\<^sub>e\<^sub>t = (a \<rightarrow> Stop) \<box> (b \<rightarrow> Stop)\<close>
|
||||
|
@ -352,7 +354,7 @@ Roscoe and Brooks @{cite "Roscoe1992AnAO"} finally proposed another ordering, ca
|
|||
that completeness could at least be assured for read-operations. This more complex ordering
|
||||
is based on the concept \<^emph>\<open>refusals after\<close> a trace \<open>s\<close> and defined by \<open>\<R> P s \<equiv> {X | (s, X) \<in> \<F> P}\<close>.\<close>
|
||||
|
||||
Definition*[process_ordering, short_name="''process ordering''"]\<open>
|
||||
Definition*[process_ordering, level= "Some 2", short_name="''process ordering''"]\<open>
|
||||
We define \<open>P \<sqsubseteq> Q \<equiv> \<psi>\<^sub>\<D> \<and> \<psi>\<^sub>\<R> \<and> \<psi>\<^sub>\<M> \<close>, where
|
||||
\<^enum> \<open>\<psi>\<^sub>\<D> = \<D> P \<supseteq> \<D> Q \<close>
|
||||
\<^enum> \<open>\<psi>\<^sub>\<R> = s \<notin> \<D> P \<Rightarrow> \<R> P s = \<R> Q s\<close>
|
||||
|
@ -528,10 +530,10 @@ To handle termination better, we added two new processes \<open>CHAOS\<^sub>S\<^
|
|||
\<close>
|
||||
|
||||
(*<*) (* a test ...*)
|
||||
text*[X22 ::math_content ]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
text*[X32::"definition", mcc=defn]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X42]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X52::"definition"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
text*[X22 ::math_content, level="Some 2" ]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
text*[X32::"definition", level="Some 2", mcc=defn]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X42, level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X52::"definition", level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
|
||||
text\<open> The \<open>RUN\<close>-process defined @{math_content X22} represents the process that accepts all
|
||||
events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in two variants shown in
|
||||
|
@ -539,11 +541,11 @@ events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in
|
|||
stops or accepts any offered event, whereas \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P\<close> can additionally terminate.\<close>
|
||||
(*>*)
|
||||
|
||||
Definition*[X2]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
Definition*[X3]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X4]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close>\<close>
|
||||
Definition*[X5]\<open>\<open>DF A \<equiv> \<mu> X. (\<sqinter> x \<in> A \<rightarrow> X)\<close> \<close>
|
||||
Definition*[X6]\<open>\<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. ((\<sqinter> x \<in> A \<rightarrow> X) \<sqinter> SKIP)\<close> \<close>
|
||||
Definition*[X2, level="Some 2"]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
Definition*[X3, level="Some 2"]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X4, level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close>\<close>
|
||||
Definition*[X5, level="Some 2"]\<open>\<open>DF A \<equiv> \<mu> X. (\<sqinter> x \<in> A \<rightarrow> X)\<close> \<close>
|
||||
Definition*[X6, level="Some 2"]\<open>\<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. ((\<sqinter> x \<in> A \<rightarrow> X) \<sqinter> SKIP)\<close> \<close>
|
||||
|
||||
text\<open>In the following, we denote \<open> \<R>\<P> = {DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P, DF, RUN, CHAOS, CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P}\<close>.
|
||||
All five reference processes are divergence-free.
|
||||
|
@ -605,16 +607,16 @@ handled separately. One contribution of our work is establish their precise rela
|
|||
the Failure/Divergence Semantics of \<^csp>.\<close>
|
||||
|
||||
(* bizarre: Definition* does not work for this single case *)
|
||||
text*[X10::"definition"]\<open> \<open>deadlock\<^sub>-free P \<equiv> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close> \<close>
|
||||
text*[X10::"definition", level="Some 2"]\<open> \<open>deadlock\<^sub>-free P \<equiv> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close> \<close>
|
||||
|
||||
text\<open>\<^noindent> A process \<open>P\<close> is deadlock-free if and only if after any trace \<open>s\<close> without \<open>\<surd>\<close>, the union of \<open>\<surd>\<close>
|
||||
and all events of \<open>P\<close> can never be a refusal set associated to \<open>s\<close>, which means that \<open>P\<close> cannot
|
||||
be deadlocked after any non-terminating trace.
|
||||
\<close>
|
||||
|
||||
Theorem*[T1, short_name="\<open>DF definition captures deadlock-freeness\<close>"]
|
||||
Theorem*[T1, short_name="\<open>DF definition captures deadlock-freeness\<close>", level="Some 2"]
|
||||
\<open> \hfill \break \<open>deadlock_free P \<longleftrightarrow> (\<forall>s\<in>\<T> P. tickFree s \<longrightarrow> (s, {\<surd>}\<union>events_of P) \<notin> \<F> P)\<close> \<close>
|
||||
Definition*[X11]\<open> \<open>livelock\<^sub>-free P \<equiv> \<D> P = {} \<close> \<close>
|
||||
Definition*[X11, level="Some 2"]\<open> \<open>livelock\<^sub>-free P \<equiv> \<D> P = {} \<close> \<close>
|
||||
|
||||
text\<open> Recall that all five reference processes are livelock-free.
|
||||
We also have the following lemmas about the
|
||||
|
@ -628,7 +630,7 @@ text\<open>
|
|||
Finally, we proved the following theorem that confirms the relationship between the two vital
|
||||
properties:
|
||||
\<close>
|
||||
Theorem*[T2, short_name="''DF implies LF''"]
|
||||
Theorem*[T2, short_name="''DF implies LF''", level="Some 2"]
|
||||
\<open> \<open>deadlock_free P \<longrightarrow> livelock_free P\<close> \<close>
|
||||
|
||||
text\<open>
|
||||
|
@ -795,7 +797,7 @@ This normal form is closed under deterministic and communication operators.
|
|||
The advantage of this format is that we can mimick the well-known product automata construction
|
||||
for an arbitrary number of synchronized processes under normal form.
|
||||
We only show the case of the synchronous product of two processes: \<close>
|
||||
text*[T3::"theorem", short_name="\<open>Product Construction\<close>"]\<open>
|
||||
text*[T3::"theorem", short_name="\<open>Product Construction\<close>", level="Some 2"]\<open>
|
||||
Parallel composition translates to normal form:
|
||||
@{cartouche [display,indent=5]\<open>(P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>1,\<upsilon>\<^sub>1\<rbrakk> \<sigma>\<^sub>1) || (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>2,\<upsilon>\<^sub>2\<rbrakk> \<sigma>\<^sub>2) =
|
||||
P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2). \<tau>\<^sub>1 \<sigma>\<^sub>1 \<inter> \<tau>\<^sub>2 \<sigma>\<^sub>2 , \<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2).\<lambda>e.(\<upsilon>\<^sub>1 \<sigma>\<^sub>1 e, \<upsilon>\<^sub>2 \<sigma>\<^sub>2 e)\<rbrakk> (\<sigma>\<^sub>1,\<sigma>\<^sub>2)\<close>}
|
||||
|
@ -815,7 +817,7 @@ states via the closure \<open>\<RR>\<close>, which is defined inductively over:
|
|||
Thus, normalization leads to a new characterization of deadlock-freeness inspired
|
||||
from automata theory. We formally proved the following theorem:\<close>
|
||||
|
||||
text*[T4::"theorem", short_name="\<open>DF vs. Reacheability\<close>"]
|
||||
text*[T4::"theorem", short_name="\<open>DF vs. Reacheability\<close>", level="Some 2"]
|
||||
\<open> If each reachable state \<open>s \<in> (\<RR> \<tau> \<upsilon>)\<close> has outgoing transitions,
|
||||
the \<^csp> process is deadlock-free:
|
||||
@{cartouche [display,indent=10] \<open>\<forall>\<sigma> \<in> (\<RR> \<tau> \<upsilon> \<sigma>\<^sub>0). \<tau> \<sigma> \<noteq> {} \<Longrightarrow> deadlock_free (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>,\<upsilon>\<rbrakk> \<sigma>\<^sub>0)\<close>}
|
||||
|
|
|
@ -54,7 +54,7 @@ text\<open>
|
|||
Depending on your operating system and depending if you put Isabelle's \<^boxed_bash>\<open>bin\<close> directory
|
||||
in your \<^boxed_bash>\<open>PATH\<close>, you will need to invoke \<^boxed_bash>\<open>isabelle\<close> using its
|
||||
full qualified path, \<^eg>:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë /usr/local/Isabelleë\isabelleversionë/bin/isabelle version
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë /usr/local/Isabelleë\isabelleversion/bin/isabelleë version
|
||||
ë\isabellefullversionë\<close>}
|
||||
\<close>
|
||||
|
||||
|
@ -69,22 +69,25 @@ text\<open>
|
|||
subsubsection*[isadof::technical]\<open>Installing \<^isadof>\<close>
|
||||
text\<open>
|
||||
In the following, we assume that you already downloaded the \<^isadof> distribution
|
||||
(\href{\isadofarchiveurl}{\isadofarchiven}) from the \<^isadof> web site. The main steps for
|
||||
installing are extracting the \<^isadof> distribution and calling its \<^boxed_bash>\<open>install-afp\<close> script.
|
||||
(\href{\isadofarchiveurl}{\isadofarchiven}) from the \<^isadof> web site.
|
||||
We start by extracting the \<^isadof> archive:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë tar xf ë\href{\isadofarchiveurl}{\isadofarchiven}ë\<close>}
|
||||
This will create a directory \texttt{\isadofdirn} containing \<^isadof> distribution.
|
||||
|
||||
\<^isadof> depends on the the AFP (\<^url>\<open>https://www.isa-afp.org\<close>), namely the AFP
|
||||
Next, we need to register \<^isadof> as an Isabelle component:
|
||||
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle components -u ë\mbox{\isadofdirn}ë\<close>}
|
||||
|
||||
Moreover, \<^isadof> depends on the the AFP (\<^url>\<open>https://www.isa-afp.org\<close>), namely the AFP
|
||||
entries ``Functional Automata''~@{cite "nipkow.ea:functional-Automata-afp:2004"} and
|
||||
``Regular Sets and Expressions''~@{cite "kraus.ea:regular-sets-afp:2010"}. You can either
|
||||
install the complete AFP, following the instructions given at \<^url>\<open>https://www.isa-afp.org/using.html\<close>),
|
||||
or use the provided \<^boxed_bash>\<open>install\<close> script to install a minimal AFP
|
||||
setup into the local \<^isadof> directory. The script needs to be prefixed with the
|
||||
\<^boxed_bash>\<open>isabelle env\<close> command:
|
||||
\<^boxed_bash>\<open>isabelle eëënv\<close> command:
|
||||
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë cd ë\isadofdirnë
|
||||
ë\prompt{\isadofdirn}ë isabelle env ./install-afp
|
||||
ë\prompt{\isadofdirn}ë isabelle eëënv ë\mbox{./install-afp}ë
|
||||
|
||||
Isabelle/DOF Installer
|
||||
======================
|
||||
|
@ -115,15 +118,17 @@ session and all example documents, execute:
|
|||
subsection*[first_project::technical]\<open>Creating an \<^isadof> Project\<close>
|
||||
text\<open>
|
||||
\<^isadof> provides its own variant of Isabelle's
|
||||
\<^boxed_bash>\<open>mkroot\<close> tool, called \<^boxed_bash>\<open>mkroot_DOF\<close>\index{mkroot\_DOF}:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle mkroot_DOF myproject
|
||||
\<^boxed_bash>\<open>mkroot\<close> tool, called \<^boxed_bash>\<open>dof_mkroot\<close>\index{mkroot\_DOF}:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle dof_mkroot myproject
|
||||
|
||||
Preparing session "myproject" iëën "myproject"
|
||||
creating "myproject/ROOT"
|
||||
creating "myproject/document/root.tex"
|
||||
creating "myproject/myproject.thy"
|
||||
creating "myproject/document/preamble.tex"
|
||||
|
||||
Now use the following coëëmmand line to build the session:
|
||||
isabelle build -D myproject \<close>}
|
||||
Now use the following commanëëd line to build the session:
|
||||
|
||||
isabelle build -D myproject\<close>}
|
||||
The created project uses the default configuration (the ontology for writing academic papers
|
||||
(scholarly\_paper) using a report layout based on the article class (\<^boxed_latex>\<open>scrartcl\<close>) of
|
||||
the KOMA-Script bundle~@{cite "kohm:koma-script:2019"}). The directory \<^boxed_bash>\<open>myproject\<close>
|
||||
|
@ -139,21 +144,23 @@ The directory \<^boxed_bash>\<open>myproject\<close> contains the following fil
|
|||
.1 .
|
||||
.2 myproject.
|
||||
.3 document.
|
||||
.4 isadof.cfg\DTcomment{\<^isadof> configuration}.
|
||||
.4 preamble.tex\DTcomment{Manual \<^LaTeX>-configuration}.
|
||||
.3 myproject.thy\DTcomment{Example theory file}.
|
||||
.3 ROOT\DTcomment{Isabelle build-configuration}.
|
||||
}
|
||||
\end{minipage}
|
||||
\end{center}
|
||||
The \<^isadof> configuration (\<^boxed_bash>\<open>isadof.cfg\<close>) specifies the required
|
||||
ontologies and the document template using a YAML syntax.\<^footnote>\<open>Isabelle power users will recognize that
|
||||
|
||||
The main two configuration files\<^footnote>\<open>Isabelle power users will recognize that
|
||||
\<^isadof>'s document setup does not make use of a file \<^boxed_bash>\<open>root.tex\<close>: this file is
|
||||
replaced by built-in document templates.\<close> The main two configuration files for
|
||||
users are:
|
||||
replaced by built-in document templates.\<close> for users are:
|
||||
\<^item> The file \<^boxed_bash>\<open>ROOT\<close>\<^index>\<open>ROOT\<close>, which defines the Isabelle session. New theory files as well as new
|
||||
files required by the document generation (\<^eg>, images, bibliography database using \<^BibTeX>, local
|
||||
\<^LaTeX>-styles) need to be registered in this file. For details of Isabelle's build system, please
|
||||
consult the Isabelle System Manual~@{cite "wenzel:system-manual:2020"}.
|
||||
consult the Isabelle System Manual~@{cite "wenzel:system-manual:2020"}. In addition to the
|
||||
standard features of, this file also contains \<^isadof> specific configurations:
|
||||
\<^item> \<^boxed_bash>\<open>document_build=dof\<close> needs to be present, to tell Isabelle, to use the
|
||||
Isabelle/DOF backend for the document generation.
|
||||
\<^item> The file \<^boxed_bash>\<open>preamble.tex\<close>\<^index>\<open>preamble.tex\<close>, which allows users to add additional
|
||||
\<^LaTeX>-packages or to add/modify \<^LaTeX>-commands.
|
||||
\<close>
|
||||
|
@ -166,33 +173,24 @@ text\<open>
|
|||
obtains and adds the necessary \<^LaTeX> class file.
|
||||
This is due to licensing restrictions).\<close>
|
||||
text\<open>
|
||||
This can be configured by using the command-line options of \<^boxed_bash>\<open>mkroot_DOF\<close>. In
|
||||
This can be configured by using the command-line options of \<^boxed_bash>\<open>dof_mkroot\<close>. In
|
||||
Particular, \<^boxed_bash>\<open>-o\<close> allows selecting the ontology and \<^boxed_bash>\<open>-t\<close> allows selecting
|
||||
the document template. The built-in help (using \<^boxed_bash>\<open>-h\<close>) shows all available options
|
||||
as well as a complete list of the available document templates and ontologies:
|
||||
the document template. The built-in help (using \<^boxed_bash>\<open>-h\<close>) shows all available options:
|
||||
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle mkroot_DOF -h
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle dof_mkroot -h
|
||||
|
||||
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
|
||||
Usage: isabelle dof_mkroot [OPTIONS] [DIRECTORY]
|
||||
|
||||
Options are:
|
||||
-h print this hëëelp text and exëëit
|
||||
-n NAME alternative session name (default: DIR base name)
|
||||
-o ONTOLOGY (default: scholarly_paper)
|
||||
Available ontologies:
|
||||
* CENELEC_50128
|
||||
* math_exam
|
||||
* scholarly_paper
|
||||
* technical_report
|
||||
-t TEMPLATE (default: scrartcl)
|
||||
Available document templates:
|
||||
* lncs
|
||||
* scrartcl
|
||||
* scrreprt-modern
|
||||
* scrreprt
|
||||
|
||||
Prepare session root DIR (default: current directory). \<close>}
|
||||
-I init Mercurial repository and add generated files
|
||||
-h print help
|
||||
-n NAME alternative session name (default: directory base name)
|
||||
-o NAMES list of ontologies, separated by blanks
|
||||
(default: "technical_report scholarly_paper")
|
||||
-q quiet mode: less verbosity
|
||||
-t NAME template (default: "scrreprt-modern")
|
||||
|
||||
Create session root directory for Isabelle/DOF (default: current directory).\<close>}
|
||||
\<close>
|
||||
|
||||
section*[writing_doc::technical]\<open>Writing Documents: General Remarks\<close>
|
||||
|
@ -327,15 +325,16 @@ doc_class text_section = text_element +
|
|||
\<close>}
|
||||
|
||||
The \<^const>\<open>Isa_COL.text_element.level\<close>-attibute \<^index>\<open>level\<close> enables doc-notation support for
|
||||
headers, chapters, sections, and subsections; we follow here the \<^LaTeX> terminology on levels to which \<^isadof> is currently targeting at.
|
||||
The values are interpreted accordingly to the \<^LaTeX> standard. The correspondence between the levels
|
||||
and the structural entities is summarized as follows:
|
||||
headers, chapters, sections, and subsections; we follow here the \<^LaTeX> terminology on levels to
|
||||
which \<^isadof> is currently targeting at. The values are interpreted accordingly to the \<^LaTeX>
|
||||
standard. The correspondence between the levels and the structural entities is summarized
|
||||
as follows:
|
||||
|
||||
\<^item> part \<^index>\<open>part\<close> \<open>Some -1\<close> \vspace{-0.3cm}
|
||||
\<^item> chapter \<^index>\<open>chapter\<close> \<open>Some 0\<close> \vspace{-0.3cm}
|
||||
\<^item> section \<^index>\<open>section\<close> \<open>Some 1\<close> \vspace{-0.3cm}
|
||||
\<^item> subsection \<^index>\<open>subsection\<close> \<open>Some 2\<close> \vspace{-0.3cm}
|
||||
\<^item> subsubsection \<^index>\<open>subsubsection\<close> \<open>Some 3\<close> \vspace{-0.1cm}
|
||||
\<^item> part \<^index>\<open>part\<close> \<open>Some -1\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> chapter \<^index>\<open>chapter\<close> \<open>Some 0\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> section \<^index>\<open>section\<close> \<open>Some 1\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> subsection \<^index>\<open>subsection\<close> \<open>Some 2\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> subsubsection \<^index>\<open>subsubsection\<close> \<open>Some 3\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
|
||||
Additional means assure that the following invariant is maintained in a document
|
||||
conforming to \<^verbatim>\<open>scholarly_paper\<close>:
|
||||
|
@ -343,6 +342,8 @@ conforming to \<^verbatim>\<open>scholarly_paper\<close>:
|
|||
\center {\<open>level > 0\<close>}
|
||||
\<close>
|
||||
|
||||
text\<open>\<^vs>\<open>1.0cm\<close>\<close>
|
||||
|
||||
text\<open> The rest of the ontology introduces concepts for \<^typ>\<open>introduction\<close>, \<^typ>\<open>conclusion\<close>,
|
||||
\<^typ>\<open>related_work\<close>, \<^typ>\<open>bibliography\<close> etc. More details can be found in \<^verbatim>\<open>scholarly_paper\<close>
|
||||
contained in the theory \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close>. \<close>
|
||||
|
@ -505,9 +506,6 @@ side_by_side_figure*["hyperlinks"::side_by_side_figure,anchor="''fig:Dogfood-IV-
|
|||
caption2="''Exploring an attribute (hyperlinked to the class).''",
|
||||
relative_width2="47",
|
||||
src2="''figures/Dogfood-V-attribute''"]\<open>Navigation via generated hyperlinks.\<close>
|
||||
(* TODO: Update the images in fig:Dogfood-IV-jumpInDocCLass side_by_side_figure
|
||||
to align them. This has to wait that the exploration of an attribute is again possible.
|
||||
See: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/issues/10 *)
|
||||
|
||||
text\<open>
|
||||
From these class definitions, \<^isadof> also automatically generated editing
|
||||
|
@ -694,8 +692,29 @@ text\<open>There are options to display sub-parts of formulas etc., but it is a
|
|||
of tight-checking that the information must be given complete and exactly in the syntax of
|
||||
Isabelle. This may be over-precise and a burden to readers not familiar with Isabelle, which may
|
||||
motivate authors to choose the aforementioned freeform-style.
|
||||
|
||||
Additionally, documents antiquotations were added to check and evaluate terms with
|
||||
term antiquotations:
|
||||
\<^item> \<^theory_text>\<open>@{term_ \<open>term\<close> }\<close> parses and type-checks \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>@{term_ \<open>@{cenelec-term \<open>FT\<close>}\<close>}\<close> will parse and check
|
||||
that \<open>FT\<close> is indeed an instance of the class \<^typ>\<open>cenelec_term\<close>,
|
||||
\<^item> \<^theory_text>\<open>@{value_ \<open>term\<close> }\<close> performs the evaluation of \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>@{value_ \<open>mcc @{cenelec-term \<open>FT\<close>}\<close>}\<close>
|
||||
will print the value of the \<^const>\<open>mcc\<close> attribute of the instance \<open>FT\<close>.
|
||||
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator but this
|
||||
argument must be specified after a default optional argument already defined
|
||||
by the text antiquotation implementation.
|
||||
So one must use the following syntax if he does not want to specify the first optional argument:
|
||||
\<open>@{value_ [] [nbe] \<open>r @{F \<open>xcv4\<close>}\<close>}\<close>. Note the empty brackets.
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["subsec:onto-term-ctxt"::technical]
|
||||
(*>*)
|
||||
|
||||
text\<open>They are text-contexts equivalents to the \<^theory_text>\<open>term*\<close> and \<^theory_text>\<open>value*\<close> commands
|
||||
for term-contexts introduced in @{technical (unchecked) \<open>subsec:onto-term-ctxt\<close>}\<close>
|
||||
|
||||
subsection\<open>A Technical Report with Tight Checking\<close>
|
||||
text\<open>An example of tight checking is a small programming manual developed by the
|
||||
second author in order to document programming trick discoveries while implementing in Isabelle.
|
||||
|
|
|
@ -235,8 +235,7 @@ A document class\<^bindex>\<open>document class\<close> can be defined using the
|
|||
We call document classes with an \<open>accepts_clause\<close>
|
||||
\<^emph>\<open>monitor classes\<close> or \<^emph>\<open>monitors\<close> for short.
|
||||
\<^rail>\<open> (@@{command "doc_class"}| @@{command "onto_class"}) class_id '=' (class_id '+')? (attribute_decl+) \<newline>
|
||||
(invariant_decl *)
|
||||
(accepts_clause rejects_clause?)?\<close>
|
||||
(invariant_decl *) (rejects_clause accepts_clause)? \<newline> (accepts_clause *)\<close>
|
||||
\<^item> \<open>attribute_decl\<close>:\<^index>\<open>attribute\_decl@\<open>attribute_decl\<close>\<close>
|
||||
\<^rail>\<open> name '::' '"' type '"' default_clause? \<close>
|
||||
\<^item> \<open>invariant_decl\<close>:\<^index>\<open>invariant\_decl@\<open>invariant_decl\<close>\<close>
|
||||
|
@ -245,10 +244,10 @@ A document class\<^bindex>\<open>document class\<close> can be defined using the
|
|||
disambiguate the argument of the expression
|
||||
and the \<^boxed_text>\<open>\<sigma>\<close> symbol is reserved to reference the instance of the class itself.
|
||||
\<^rail>\<open> 'invariant' (name '::' '"' term '"' + 'and') \<close>
|
||||
\<^item> \<open>accepts_clause\<close>:\<^index>\<open>accepts\_clause@\<open>accepts_clause\<close>\<close>
|
||||
\<^rail>\<open> 'accepts' '"' regexpr '"'\<close>
|
||||
\<^item> \<open>rejects_clause\<close>:\<^index>\<open>rejects\_clause@\<open>rejects_clause\<close>\<close>
|
||||
\<^rail>\<open> 'rejects' (class_id * ',') \<close>
|
||||
\<^item> \<open>accepts_clause\<close>:\<^index>\<open>accepts\_clause@\<open>accepts_clause\<close>\<close>
|
||||
\<^rail>\<open> 'accepts' ('"' regexpr '"' + 'and')\<close>
|
||||
\<^item> \<open>default_clause\<close>:\<^index>\<open>default\_clause@\<open>default_clause\<close>\<close>
|
||||
\<^rail>\<open> '<=' '"' expr '"' \<close>
|
||||
\<^item> \<open>regexpr\<close>:\<^index>\<open>regexpr@\<open>regexpr\<close>\<close>
|
||||
|
@ -331,14 +330,14 @@ is currently only available in the SML API's of the kernel.
|
|||
symbolic evaluation using the simplifier, \<open>nbe\<close> for \<^emph>\<open>normalization by
|
||||
evaluation\<close> and \<^emph>\<open>code\<close> for code generation in SML.
|
||||
\<^item> \<open>upd_meta_args\<close> :
|
||||
\<^rail>\<open> (obj_id ('::' class_id) ((',' attribute ('=' | '+=') HOL_term) * ))\<close>
|
||||
\<^rail>\<open> (obj_id ('::' class_id) ((',' attribute (':=' | '+=') HOL_term) * ))\<close>
|
||||
\<^item> \<open>annotated_text_element\<close> :
|
||||
\<^rail>\<open>
|
||||
( @@{command "text*"} '[' meta_args ']' '\<open>' formal_text '\<close>'
|
||||
| @@{command "ML*"} ('[' meta_args ']')? '\<open>' SML_code '\<close>'
|
||||
| @@{command "term*"} ('[' meta_args ']')? '\<open>' HOL_term '\<close>'
|
||||
| @@{command "value*"} \<newline> ('[' meta_args ']' ('[' evaluator ']')?)? '\<open>' HOL_term '\<close>'
|
||||
| @@{command "assert*"} \<newline> ('[' meta_args ']' ('[' evaluator ']')?)? '\<open>' HOL_term '\<close>'
|
||||
| (@@{command "value*"}
|
||||
| @@{command "assert*"}) \<newline> ('[' meta_args ']')? ('[' evaluator ']')? '\<open>' HOL_term '\<close>'
|
||||
)
|
||||
\<close>
|
||||
\<^rail>\<open>
|
||||
|
@ -404,13 +403,24 @@ a referentiable meta-object of class \<^theory_text>\<open>cid\<close> is create
|
|||
The SML-code is type-checked and executed
|
||||
in the context of the SML toplevel of the Isabelle system as in the corresponding
|
||||
\<^theory_text>\<open>ML\<open> \<dots> SML-code \<dots> \<close>\<close>-command.
|
||||
Additionally, ML antiquotations were added to check and evaluate terms with
|
||||
term antiquotations:
|
||||
\<^item> \<^theory_text>\<open>@{term_ \<open>term\<close> }\<close> parses and type-checks \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>@{term_ \<open>@{cenelec-term \<open>FT\<close>}\<close>}\<close> will parse and check
|
||||
that \<open>FT\<close> is indeed an instance of the class \<^typ>\<open>cenelec_term\<close>,
|
||||
\<^item> \<^theory_text>\<open>@{value_ \<open>term\<close> }\<close> performs the evaluation of \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>@{value_ \<open>mcc @{cenelec-term \<open>FT\<close>}\<close>}\<close>
|
||||
will print the value of the \<^const>\<open>mcc\<close> attribute of the instance \<open>FT\<close>.
|
||||
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator:
|
||||
\<open>@{value_ [nbe] \<open>mcc @{cenelec-term \<open>FT\<close>}\<close>}\<close> forces \<open>value_\<close> to evaluate
|
||||
the term using normalization by evaluation (see @{cite "wenzel:isabelle-isar:2020"}).
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["text-elements-expls"::technical]
|
||||
(*>*)
|
||||
|
||||
subsection\<open>Ontological Term-Contexts and their Management\<close>
|
||||
subsection*["subsec:onto-term-ctxt"::technical]\<open>Ontological Term-Contexts and their Management\<close>
|
||||
text\<open>The major commands providing term-contexts are
|
||||
\<^theory_text>\<open>term*[oid::cid, ...] \<open> \<dots> HOL-term \<dots> \<close>\<close>,
|
||||
\<^theory_text>\<open>value*[oid::cid, ...] \<open> \<dots> HOL-term \<dots> \<close>\<close> and
|
||||
|
@ -418,8 +428,9 @@ text\<open>The major commands providing term-contexts are
|
|||
Wrt. creation, track-ability and checking they are analogous to the ontological text and
|
||||
code-commands. However the argument terms may contain term-antiquotations stemming from an
|
||||
ontology definition. Both term-contexts were type-checked and \<^emph>\<open>validated\<close> against
|
||||
the global context (so: in the term \<open>@{A \<open>oid\<close>}\<close>, \<open>oid\<close> is indeed a string which refers
|
||||
to a meta-object belonging to the document class \<open>A\<close>, for example).
|
||||
the global context (so: in the term @{term_ \<open>@{author \<open>bu\<close>}\<close>}, \<open>bu\<close>
|
||||
is indeed a string which refers to a meta-object belonging
|
||||
to the document class \<^typ>\<open>author\<close>, for example).
|
||||
The term-context in the \<open>value*\<close>-command and \<^emph>\<open>assert*\<close>-command is additionally expanded
|
||||
(\<^eg> replaced) by a term denoting the meta-object.
|
||||
This expansion happens \<^emph>\<open>before\<close> evaluation of the term, thus permitting
|
||||
|
@ -434,7 +445,6 @@ Note unspecified attribute values were represented by free fresh variables which
|
|||
to choose either the normalization-by-evaluation strategy \<^theory_text>\<open>nbe\<close> or a proof attempt via
|
||||
the \<^theory_text>\<open>auto\<close> method. A failure of these strategies will be reported and regarded as non-validation
|
||||
of this meta-object. The latter leads to a failure of the entire command.
|
||||
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
|
@ -1009,9 +1019,6 @@ text\<open>
|
|||
these types. They are just types declared in HOL,
|
||||
which are ``inhabited'' by special constant symbols carrying strings, for
|
||||
example of the format \<^boxed_theory_text>\<open>@{thm <string>}\<close>.
|
||||
% TODO:
|
||||
% Update meta-types implementation explanation to the new implementation
|
||||
% in repository commit 08c101c5440eee2a087068581952026e88c39f6a
|
||||
When HOL
|
||||
expressions were used to denote values of \<^boxed_theory_text>\<open>doc_class\<close>
|
||||
instance attributes, this requires additional checks after
|
||||
|
@ -1043,11 +1050,19 @@ subsection*["sec:class_inv"::technical]\<open>ODL Class Invariants\<close>
|
|||
text\<open>
|
||||
Ontological classes as described so far are too liberal in many situations.
|
||||
There is a first high-level syntax implementation for class invariants.
|
||||
These invariants can be checked when an instance of the class is defined.
|
||||
To enable the checking of the invariants, the \<^boxed_theory_text>\<open>invariants_checking\<close>
|
||||
These invariants are checked when an instance of the class is defined,
|
||||
and trigger warnings.
|
||||
The checking is enabled by default but can be disabled with the
|
||||
\<^boxed_theory_text>\<open>invariants_checking\<close> theory attribute:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
declare[[invariants_checking = false]]\<close>}
|
||||
|
||||
To enable the strict checking of the invariants,
|
||||
that is to trigger errors instead of warnings,
|
||||
the \<^boxed_theory_text>\<open>invariants_strict_checking\<close>
|
||||
theory attribute must be set:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
declare[[invariants_checking = true]]\<close>}
|
||||
declare[[invariants_strict_checking = true]]\<close>}
|
||||
|
||||
For example, let's define the following two classes:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
|
@ -1107,8 +1122,15 @@ text\<open>
|
|||
All specified constraints are already checked in the IDE of \<^dof> while editing.
|
||||
The invariant \<^boxed_theory_text>\<open>author_finite\<close> enforces that the user sets the
|
||||
\<^boxed_theory_text>\<open>authored_by\<close> set.
|
||||
The invariants \<^theory_text>\<open>author_finite\<close> and \<^theory_text>\<open>establish_defined\<close> can not be checked directly
|
||||
and need a little help.
|
||||
We can set the \<open>invariants_checking_with_tactics\<close> theory attribute to help the checking.
|
||||
It will enable a basic tactic, using unfold and auto:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
declare[[invariants_checking_with_tactics = true]]\<close>}
|
||||
There are still some limitations with this high-level syntax.
|
||||
For now, the high-level syntax does not support monitors (see \<^technical>\<open>sec:monitors\<close>).
|
||||
For now, the high-level syntax does not support the checking of
|
||||
specific monitor behaviors (see \<^technical>\<open>sec:monitors\<close>).
|
||||
For example, one would like to delay a final error message till the
|
||||
closing of a monitor.
|
||||
For this use-case you can use low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>).
|
||||
|
@ -1150,6 +1172,20 @@ text\<open>
|
|||
instances of \<open>S\<close>.
|
||||
\<close>
|
||||
text\<open>
|
||||
Should the specified ranges of admissible instances not be observed, warnings will be triggered.
|
||||
To forbid the violation of the specified ranges,
|
||||
one can enable the \<^boxed_theory_text>\<open>strict_monitor_checking\<close> theory attribute:
|
||||
@{boxed_theory_text [display]\<open>declare[[strict_monitor_checking = true]]\<close>}
|
||||
It is possible to enable the tracing of free classes occurring inside the scope of a monitor by
|
||||
enabling the \<^boxed_theory_text>\<open>free_class_in_monitor_checking\<close>
|
||||
theory attribute:
|
||||
@{boxed_theory_text [display]\<open>declare[[free_class_in_monitor_checking = true]]\<close>}
|
||||
Then a warning will be triggered when defining an instance of a free class
|
||||
inside the scope of a monitor.
|
||||
To forbid free classes inside the scope of a monitor, one can enable the
|
||||
\<^boxed_theory_text>\<open>free_class_in_monitor_strict_checking\<close> theory attribute:
|
||||
@{boxed_theory_text [display]\<open>declare[[free_class_in_monitor_strict_checking = true]]\<close>}
|
||||
|
||||
Monitored document sections can be nested and overlap; thus, it is
|
||||
possible to combine the effect of different monitors. For example, it
|
||||
would be possible to refine the \<^boxed_theory_text>\<open>example\<close> section by its own
|
||||
|
@ -1167,8 +1203,17 @@ text\<open>
|
|||
header delimiting the borders of its representation. Class invariants
|
||||
on monitors allow for specifying structural properties on document
|
||||
sections.
|
||||
For now, the high-level syntax of invariants is not supported for monitors and you must use
|
||||
the low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>.\<close>
|
||||
For now, the high-level syntax of invariants does not support the checking of
|
||||
specific monitor behaviors like the one just described and you must use
|
||||
the low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>).
|
||||
|
||||
Low-level invariants checking can be set up to be triggered
|
||||
when opening a monitor, when closing a monitor, or both
|
||||
by using the \<^ML>\<open>DOF_core.update_class_eager_invariant\<close>,
|
||||
\<^ML>\<open>DOF_core.update_class_lazy_invariant\<close>, or \<^ML>\<open>DOF_core.update_class_invariant\<close> commands
|
||||
respectively, to add the invariants to the theory context
|
||||
(See \<^technical>\<open>sec:low_level_inv\<close> for an example).
|
||||
\<close>
|
||||
|
||||
|
||||
subsection*["sec:low_level_inv"::technical]\<open>ODL Low-level Class Invariants\<close>
|
||||
|
@ -1185,9 +1230,9 @@ text\<open>
|
|||
fun check_result_inv oid {is_monitor:bool} ctxt =
|
||||
let
|
||||
val kind =
|
||||
AttributeAccess.compute_attr_access ctxt "evidence" oid <@>{here} <@>{here}
|
||||
ISA_core.compute_attr_access ctxt "evidence" oid NONE <@>{here}
|
||||
val prop =
|
||||
AttributeAccess.compute_attr_access ctxt "property" oid <@>{here} <@>{here}
|
||||
ISA_core.compute_attr_access ctxt "property" oid NONE <@>{here}
|
||||
val tS = HOLogic.dest_list prop
|
||||
in case kind of
|
||||
<@>{term "proof"} => if not(null tS) then true
|
||||
|
@ -1236,10 +1281,11 @@ value*\<open>filter (\<lambda>\<sigma>. the (text_section.level \<sigma>) > 1)
|
|||
|
||||
section*[infrastructure::technical]\<open>Technical Infrastructure\<close>
|
||||
|
||||
text\<open>
|
||||
text\<open>
|
||||
%FIXME update story concerning "list"
|
||||
The list of fully supported (\<^ie>, supporting both interactive ontological modeling and
|
||||
document generation) ontologies and the list of supported document templates can be
|
||||
obtained by calling \<^boxed_bash>\<open>isabelle mkroot_DOF -h\<close> (see \<^technical>\<open>first_project\<close>).
|
||||
obtained by calling \<^boxed_bash>\<open>isabelle dof_mkroot -h\<close> (see \<^technical>\<open>first_project\<close>).
|
||||
Note that the postfix \<^boxed_bash>\<open>-UNSUPPORTED\<close> denotes experimental ontologies or templates
|
||||
for which further manual setup steps might be required or that are not fully tested. Also note
|
||||
that the \<^LaTeX>-class files required by the templates need to be already installed on your
|
||||
|
@ -1323,6 +1369,7 @@ text\<open>
|
|||
text\<open>
|
||||
Developing a new document template ``\<^boxed_bash>\<open>bar\<close>'' requires the following steps:
|
||||
\<^item> develop a new \<^LaTeX>-template \<^boxed_bash>\<open>src/document-templates/root-bar.tex\<close>
|
||||
\<^item> add a suitable \<^theory_text>\<open>define_template\<close> command to theory \<^theory>\<open>Isabelle_DOF.Isa_DOF\<close>.
|
||||
\<^item> activation of the new document template by executing the \<^boxed_bash>\<open>install\<close> script. You can skip the lengthy
|
||||
checks for the AFP entries and the installation of the Isabelle patch by using the
|
||||
\<^boxed_bash>\<open>--skip-afp\<close> option:
|
||||
|
@ -1363,14 +1410,7 @@ text\<open>
|
|||
\usepackage{isabelle,isabellesym,amssymb} % Required (by Isabelle)
|
||||
\usepackage{amsmath} % Used by some ontologies
|
||||
\bibliographystyle{abbrv}
|
||||
\IfFileExists{DOF-core.sty}{}{ % Required by Isabelle/DOF
|
||||
\PackageError{DOF-core}{The doëëcument preparation
|
||||
requires the Isabelle/DOF framework.}{For further help, see
|
||||
ë\url{\dofurl}ë
|
||||
}
|
||||
\input{ontologies} % This will include the document specific
|
||||
% ontologies from isadof.cfg
|
||||
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}
|
||||
\input{dof-common} % setup shared between all Isabelle/DOF templates
|
||||
\usepackage{graphicx} % Required for images.
|
||||
\usepackage[caption]{subfig}
|
||||
\usepackage[size=footnotesize]{caption}
|
||||
|
@ -1396,7 +1436,7 @@ text\<open>
|
|||
|
||||
subsubsection\<open>Getting Started\<close>
|
||||
text\<open>
|
||||
In general, we recommend creating a test project (\<^eg>, using \<^boxed_bash>\<open>isabelle mkroot_DOF\<close>)
|
||||
In general, we recommend creating a test project (\<^eg>, using \<^boxed_bash>\<open>isabelle dof_mkroot\<close>)
|
||||
to develop new document templates or ontology representations. The default setup of the \<^isadof>
|
||||
build system generated a \<^path>\<open>output/document\<close> directory with a self-contained \<^LaTeX>-setup. In
|
||||
this directory, you can directly use \<^LaTeX> on the main file, called \<^path>\<open>root.tex\<close>:
|
||||
|
|
|
@ -53,7 +53,6 @@ text\<open>
|
|||
\<open>structure Data = Generic_Data
|
||||
( type T = docobj_tab * docclass_tab * ...
|
||||
val empty = (initial_docobj_tab, initial_docclass_tab, ...)
|
||||
val extend = I
|
||||
fun merge((d1,c1,...),(d2,c2,...)) = (merge_docobj_tab (d1,d2,...),
|
||||
merge_docclass_tab(c1,c2,...))
|
||||
);\<close>}
|
||||
|
|
2
examples/technical_report/Isabelle_DOF-Manual/Isabelle_DOF-Manual.thy
Executable file → Normal file
|
@ -15,6 +15,8 @@
|
|||
theory "Isabelle_DOF-Manual"
|
||||
imports "05_Implementation"
|
||||
begin
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology "technical_report" and "CENELEC_50128"
|
||||
close_monitor*[this]
|
||||
check_doc_global
|
||||
text\<open>Resulting trace in \<^verbatim>\<open>doc_item\<close> ''this'': \<close>
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof,
|
||||
quick_and_dirty = true]
|
||||
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
|
||||
theories
|
||||
"Isabelle_DOF-Manual"
|
||||
document_files
|
||||
"isadof.cfg"
|
||||
"root.bib"
|
||||
"root.mst"
|
||||
"preamble.tex"
|
||||
|
|
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-II-bgnd1.png
Executable file → Normal file
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 10 KiB |
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-III-bgnd-text_section.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 10 KiB |
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-IV-jumpInDocCLass.png
Executable file → Normal file
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 17 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-Intro.png
Executable file → Normal file
Before Width: | Height: | Size: 85 KiB After Width: | Height: | Size: 85 KiB |
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-V-attribute.png
Executable file → Normal file
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 13 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-figures.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/Isabelle_DOF-logo.pdf
Executable file → Normal file
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/MyCommentedIsabelle.png
Executable file → Normal file
Before Width: | Height: | Size: 162 KiB After Width: | Height: | Size: 162 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/antiquotations-PIDE.png
Executable file → Normal file
Before Width: | Height: | Size: 43 KiB After Width: | Height: | Size: 43 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/cicm2018-combined.png
Executable file → Normal file
Before Width: | Height: | Size: 214 KiB After Width: | Height: | Size: 214 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/cicm2018-dof.png
Executable file → Normal file
Before Width: | Height: | Size: 135 KiB After Width: | Height: | Size: 135 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/cicm2018-pdf.png
Executable file → Normal file
Before Width: | Height: | Size: 73 KiB After Width: | Height: | Size: 73 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/definition-use-CSP-pdf.png
Executable file → Normal file
Before Width: | Height: | Size: 70 KiB After Width: | Height: | Size: 70 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/document-hierarchy.pdf
Executable file → Normal file
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/document-hierarchy.svg
Executable file → Normal file
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 36 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/header_CSP_pdf.png
Executable file → Normal file
Before Width: | Height: | Size: 203 KiB After Width: | Height: | Size: 203 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/isabelle-architecture.pdf
Executable file → Normal file
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/isabelle-architecture.svg
Executable file → Normal file
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 57 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/isadof.png
Executable file → Normal file
Before Width: | Height: | Size: 81 KiB After Width: | Height: | Size: 81 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/srac-as-es-application.png
Executable file → Normal file
Before Width: | Height: | Size: 37 KiB After Width: | Height: | Size: 37 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/srac-definition.png
Executable file → Normal file
Before Width: | Height: | Size: 26 KiB After Width: | Height: | Size: 26 KiB |
|
@ -1,4 +0,0 @@
|
|||
Template: scrreprt-modern
|
||||
Ontology: technical_report
|
||||
Ontology: cenelec_50128
|
||||
|
0
examples/technical_report/Isabelle_DOF-Manual/document/lstisadof-manual.sty
Executable file → Normal file
|
@ -451,7 +451,7 @@
|
|||
journal = {Archive of Formal Proofs},
|
||||
month = may,
|
||||
year = 2010,
|
||||
note = {\url{http://isa-afp.org/entries/Regular-Sets.html}, Formal
|
||||
note = {\url{https://isa-afp.org/entries/Regular-Sets.html}, Formal
|
||||
proof development},
|
||||
issn = {2150-914x}
|
||||
}
|
||||
|
@ -462,7 +462,7 @@
|
|||
journal = {Archive of Formal Proofs},
|
||||
month = mar,
|
||||
year = 2004,
|
||||
note = {\url{http://isa-afp.org/entries/Functional-Automata.html},
|
||||
note = {\url{https://isa-afp.org/entries/Functional-Automata.html},
|
||||
Formal proof development},
|
||||
issn = {2150-914x}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
session "TR_MyCommentedIsabelle" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof,
|
||||
quick_and_dirty = true]
|
||||
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
|
||||
theories
|
||||
"TR_MyCommentedIsabelle"
|
||||
document_files
|
||||
"root.bib"
|
||||
"isadof.cfg"
|
||||
"preamble.tex"
|
||||
"prooftree.sty"
|
||||
"figures/markup-demo.png"
|
||||
|
|
30
examples/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy
Executable file → Normal file
|
@ -1,7 +1,7 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2021 The University of Exeter
|
||||
* 2018-2021 The University of Paris-Saclay
|
||||
* 2019-2022 The University of Exeter
|
||||
* 2018-2022 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
|
@ -14,16 +14,18 @@
|
|||
(*<*)
|
||||
theory TR_MyCommentedIsabelle
|
||||
imports "Isabelle_DOF.technical_report"
|
||||
|
||||
begin
|
||||
|
||||
use_template "scrreprt"
|
||||
use_ontology "technical_report"
|
||||
|
||||
define_shortcut* isabelle \<rightleftharpoons> \<open>Isabelle/HOL\<close>
|
||||
|
||||
open_monitor*[this::report]
|
||||
(*>*)
|
||||
|
||||
title*[tit::title]\<open>My Personal, Eclectic Isabelle Programming Manual\<close>
|
||||
subtitle*[stit::subtitle]\<open>Version : Isabelle 2020\<close>
|
||||
subtitle*[stit::subtitle]\<open>Version : Isabelle 2022\<close>
|
||||
text*[bu::author,
|
||||
email = "''wolff@lri.fr''",
|
||||
affiliation = "\<open>Université Paris-Saclay, LRI, France\<close>"]\<open>Burkhart Wolff\<close>
|
||||
|
@ -62,7 +64,7 @@ text\<open> \<^vs>\<open>-0.5cm\<close>
|
|||
Isabelle and Isabelle/HOL, a complementary text to the unfortunately somewhat outdated
|
||||
"The Isabelle Cookbook" in \<^url>\<open>https://nms.kcl.ac.uk/christian.urban/Cookbook/\<close>.
|
||||
The present text is also complementary to the current version of
|
||||
\<^url>\<open>https://isabelle.in.tum.de/dist/Isabelle2021/doc/isar-ref.pdf\<close>
|
||||
\<^url>\<open>https://isabelle.in.tum.de/doc/isar-ref.pdf\<close>
|
||||
"The Isabelle/Isar Implementation" by Makarius Wenzel in that it focusses on subjects
|
||||
not covered there, or presents alternative explanations for which I believe, based on my
|
||||
experiences with students and Phds, that they are helpful.
|
||||
|
@ -364,7 +366,7 @@ subsection*[t213::example]\<open>Mechanism 2 : Extending the Global Context \<op
|
|||
|
||||
text\<open>A central mechanism for constructing user-defined data is by the \<^ML_functor>\<open>Generic_Data\<close>-functor.
|
||||
A plugin needing some data \<^verbatim>\<open>T\<close> and providing it with implementations for an
|
||||
\<^verbatim>\<open>empty\<close>, and operations \<^verbatim>\<open>merge\<close> and \<^verbatim>\<open>extend\<close>, can construct a lense with operations
|
||||
\<^verbatim>\<open>empty\<close>, and operation \<^verbatim>\<open>merge\<close>, can construct a lense with operations
|
||||
\<^verbatim>\<open>get\<close> and \<^verbatim>\<open>put\<close> that attach this data into the generic system context. Rather than using
|
||||
unsynchronized SML mutable variables, this is the mechanism to introduce component local
|
||||
data in Isabelle, which allows to manage this data for the necessary backtrack and synchronization
|
||||
|
@ -373,14 +375,12 @@ text\<open>A central mechanism for constructing user-defined data is by the \<^M
|
|||
ML \<open>
|
||||
datatype X = mt
|
||||
val init = mt;
|
||||
val ext = I
|
||||
fun merge (X,Y) = mt
|
||||
|
||||
structure Data = Generic_Data
|
||||
(
|
||||
type T = X
|
||||
val empty = init
|
||||
val extend = ext
|
||||
val merge = merge
|
||||
);
|
||||
\<close>
|
||||
|
@ -1183,7 +1183,7 @@ text\<open> The extensibility of Isabelle as a system framework depends on a num
|
|||
\<^item> \<^ML>\<open>Toplevel.theory: (theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
adjoins a theory transformer.
|
||||
\<^item> \<^ML>\<open>Toplevel.generic_theory: (generic_theory -> generic_theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.presentation -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.exit: Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.ignored: Position.T -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
|
||||
|
@ -1845,6 +1845,7 @@ text\<open>The second part is much more high-level, and can be found under \<^ML
|
|||
This is perhaps meant with the fairly cryptic comment:
|
||||
"Quasi-inner syntax based on outer tokens: concrete argument syntax of
|
||||
attributes, methods etc." at the beginning of this structure.\<close>
|
||||
ML\<open>open Args\<close>
|
||||
|
||||
text\<open> Some more combinators
|
||||
\<^item>\<^ML>\<open>Args.symbolic : Token.T parser\<close>
|
||||
|
@ -1865,12 +1866,11 @@ Common Isar Syntax
|
|||
\<^item>\<^ML>\<open>Args.name_position: (string * Position.T) parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_inner_syntax: string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_input: Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_token: Token.T parser \<close>
|
||||
|
||||
|
||||
Common Isar Syntax
|
||||
\<^item>\<^ML>\<open>Args.text_input: Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text : string parser\<close>
|
||||
\<^item>\<^ML>\<open>Parse.embedded_input: Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Parse.embedded : string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.binding : Binding.binding parser\<close>
|
||||
|
||||
Common Stuff related to Inner Syntax Parsing
|
||||
|
@ -1893,8 +1893,7 @@ Common Isar Syntax
|
|||
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_term : (string -> term) -> term parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.embedded_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.typ_abbrev : typ context_parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.typ: typ context_parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.term: term context_parser\<close>
|
||||
|
@ -1903,8 +1902,6 @@ Common Isar Syntax
|
|||
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_term: (string -> term) -> term parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
|
||||
Syntax for some major Pure commands in Isar
|
||||
\<^item>\<^ML>\<open>Args.prop: term context_parser\<close>
|
||||
|
@ -2132,7 +2129,6 @@ Since Isabelle2018, an own AST is provided for the LaTeX syntax, analogously to
|
|||
\<^item>\<^ML>\<open>Latex.string: string -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Latex.text: string * Position.T -> Latex.text\<close>
|
||||
|
||||
\<^item>\<^ML>\<open>Latex.output_name: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.output_ascii: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.output_symbols: Symbol.symbol list -> string\<close>
|
||||
|
||||
|
|
0
examples/technical_report/TR_my_commented_isabelle/document/figures/document-model.pdf
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/figures/isabelle-architecture.pdf
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/figures/markup-demo.png
Executable file → Normal file
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 13 KiB |
0
examples/technical_report/TR_my_commented_isabelle/document/figures/pure-inferences-I.pdf
Executable file → Normal file
Before Width: | Height: | Size: 91 KiB After Width: | Height: | Size: 91 KiB |
0
examples/technical_report/TR_my_commented_isabelle/document/figures/pure-inferences-II.pdf
Executable file → Normal file
Before Width: | Height: | Size: 31 KiB After Width: | Height: | Size: 31 KiB |
0
examples/technical_report/TR_my_commented_isabelle/document/figures/text-element.pdf
Executable file → Normal file
|
@ -1,2 +0,0 @@
|
|||
Template: scrreprt
|
||||
Ontology: technical_report
|
0
examples/technical_report/TR_my_commented_isabelle/document/preamble.tex
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/prooftree.sty
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/root.bib
Executable file → Normal file
67
install-afp
|
@ -30,10 +30,13 @@
|
|||
#set -e
|
||||
shopt -s nocasematch
|
||||
|
||||
|
||||
print_help()
|
||||
{
|
||||
echo "Usage: isabelle env ./install-afp [OPTION] "
|
||||
echo ""
|
||||
echo "Warning: This tools is deprecated."
|
||||
echo ""
|
||||
echo "Run ..."
|
||||
echo ""
|
||||
echo " --help, -h display this help message"
|
||||
|
@ -43,11 +46,29 @@ print_help()
|
|||
|
||||
exit_error() {
|
||||
echo ""
|
||||
echo " *** Isabelle/DOF installation FAILED, please check the README.md for help ***"
|
||||
echo " *** Local AFP installation FAILED, please check the README.md for help ***"
|
||||
echo ""
|
||||
exit 1
|
||||
}
|
||||
|
||||
confirm_usage() {
|
||||
echo "* From Isabelle2021-1 on, the recommended method for making the whole AFP "
|
||||
echo " available to Isabelle is the isabelle components -u command."
|
||||
echo " For doing so, please follow the instructions at: "
|
||||
echo " https://www.isa-afp.org/help/"
|
||||
echo ""
|
||||
echo " Alternatively, you can continue, on your own risk, to install only"
|
||||
echo " the AFP entries required to run Isabelle/DOF."
|
||||
echo ""
|
||||
read -p " Still continue (y/N)? " -n 1 -r
|
||||
echo
|
||||
if [[ $REPLY =~ ^[Yy]$ ]];
|
||||
then
|
||||
echo " Continuing installation on your OWN risk."
|
||||
else
|
||||
exit_error
|
||||
fi
|
||||
}
|
||||
check_isabelle_version() {
|
||||
echo "* Checking Isabelle version:"
|
||||
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
|
||||
|
@ -58,8 +79,8 @@ check_isabelle_version() {
|
|||
echo " version of Isabelle and rerun the install script, providing the"
|
||||
echo " the \"isabelle\" command as argument."
|
||||
echo " Isabelle ($ISABELLE_VERSION) can be obtained from:"
|
||||
echo " $ISABELLE_URL"
|
||||
echo
|
||||
echo " https://isabelle.in.tum.de/website-$ISABELLE_VERSION/"
|
||||
echo ""
|
||||
read -p " Still continue (y/N)? " -n 1 -r
|
||||
echo
|
||||
if [[ $REPLY =~ ^[Yy]$ ]];
|
||||
|
@ -76,7 +97,7 @@ check_isabelle_version() {
|
|||
check_afp_entries() {
|
||||
echo "* Checking availability of AFP entries:"
|
||||
missing=""
|
||||
required="Regular-Sets Functional-Automata"
|
||||
required="Regular-Sets Functional-Automata Physical_Quantities"
|
||||
for afp in $required; do
|
||||
res=`$ISABELLE_TOOL build -n $afp 2>/dev/null || true`
|
||||
if [ "$res" != "" ]; then
|
||||
|
@ -110,12 +131,6 @@ check_afp_entries() {
|
|||
fi
|
||||
}
|
||||
|
||||
register(){
|
||||
echo " - Registering Isabelle/DOF"
|
||||
$ISABELLE_TOOL components -u "$PWD"
|
||||
}
|
||||
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
|
@ -128,15 +143,37 @@ do
|
|||
shift
|
||||
done
|
||||
|
||||
if [ -z ${ISABELLE_TOOL+x} ];
|
||||
then
|
||||
print_help
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ACTUAL_ISABELLE_VERSION=`$ISABELLE_TOOL version`
|
||||
|
||||
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
|
||||
|
||||
if [ ${ISABELLE_VERSION} = "Isabelle" ];
|
||||
then
|
||||
echo "Error: cannot find Isabelle/DOF configuration, please check that you"
|
||||
echo " registered Isabelle/DOF as an Isabelle component, e.g., using"
|
||||
echo " isabelle components -u ."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
|
||||
AFP_DATE="$($ISABELLE_TOOL dof_param -b afp_version)"
|
||||
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
|
||||
|
||||
echo ""
|
||||
echo "Isabelle/DOF Installer"
|
||||
echo "======================"
|
||||
echo "Isabelle/DOF AFP Installation Utility"
|
||||
echo "====================================="
|
||||
confirm_usage
|
||||
check_isabelle_version
|
||||
check_afp_entries
|
||||
register
|
||||
echo "* Installation successful. Enjoy Isabelle/DOF, you can build the session"
|
||||
echo " Isabelle/DOF and all example documents by executing:"
|
||||
echo "* AFP Installation successful."
|
||||
echo " You should now be able to enjoy Isabelle/DOF by building its session"
|
||||
echo " and all example documents by executing:"
|
||||
echo " $ISABELLE_TOOL build -D ."
|
||||
exit 0
|
||||
|
|