Compare commits
228 Commits
2021-ITP-P
...
main
Author | SHA1 | Date |
---|---|---|
|
8513f7d267 | |
|
2b1a9d009e | |
|
cd758d2c44 | |
|
8496963fec | |
|
72d8000f7b | |
|
17ec11b297 | |
|
a96e17abf3 | |
|
74b60e47d5 | |
|
a42dd4ea6c | |
|
b162a24749 | |
|
a9432c7b52 | |
|
9f28d4949e | |
|
885c23a138 | |
|
a589d4cd47 | |
|
e1f143d151 | |
|
fd60cf2312 | |
|
73dfcd6c1e | |
|
c0afe1105e | |
|
e414b97afb | |
|
0b2d28b547 | |
|
37d7ed7d17 | |
|
312734afbd | |
|
8cee80d78e | |
|
ec0d525426 | |
|
791990039b | |
|
78d61390fe | |
|
ffcf1f3240 | |
|
5471d873a9 | |
|
df37250a00 | |
|
185daeb577 | |
|
8037fd15f2 | |
|
afcd78610b | |
|
b8a9ef5118 | |
|
a4e75c8b12 | |
|
d20e9ccd22 | |
|
f2ee5d3780 | |
|
44cae2e631 | |
|
7b2bf35353 | |
|
e8c7fa6018 | |
|
b12e61511d | |
|
3cac42e6cb | |
|
aee8ba1df1 | |
|
d93e1383d4 | |
|
3d5d1e7476 | |
|
4264e7cd15 | |
|
96f4077c53 | |
|
d7fb39d7eb | |
|
b95826962f | |
|
912d4bb49e | |
|
a6c1a2baa4 | |
|
bb5963c6e2 | |
|
cc3e2a51a4 | |
|
9e4e5b49eb | |
|
b65ecbdbef | |
|
3be2225dcf | |
|
f44f0af01c | |
|
9a11baf840 | |
|
48c167aa23 | |
|
700a9bbfee | |
|
73299941ad | |
|
5a8c438c41 | |
|
7772c73aaa | |
|
ca18453043 | |
|
1a122b1a87 | |
|
47d95c467e | |
|
bf3085d4c0 | |
|
068e6e0411 | |
|
09e9980691 | |
|
94ce3fdec2 | |
|
44819bff02 | |
|
a6ab1e101e | |
|
c29ec9641a | |
|
06833aa190 | |
|
4f0c7e1e95 | |
|
0040949cf8 | |
|
e68c332912 | |
|
b2c4f40161 | |
|
309952e0ce | |
|
830e1b440a | |
|
2149db9efc | |
|
1547ace64b | |
|
39acd61dfd | |
|
29770b17ee | |
|
b4f4048cff | |
|
eac94f2a01 | |
|
ab1877ce8e | |
|
fc575a5be5 | |
|
4e47c38860 | |
|
943af164f4 | |
|
873151b4f3 | |
|
82645c2e8e | |
|
f09a2df943 | |
|
cfdbd18bfa | |
|
0b807ea4bc | |
|
516f5d2f79 | |
|
5ac41a72ac | |
|
15feeb7d92 | |
|
0c8a0e1d63 | |
|
0aec98b95a | |
|
43871ced48 | |
|
0fa1048d6d | |
|
33490f8f15 | |
|
01632b5251 | |
|
8a54831295 | |
|
427226f593 | |
|
f14c0bebbb | |
|
7f500dc257 | |
|
c05bb0bf4d | |
|
66f78001eb | |
|
5a06d3618b | |
|
e63ef4e189 | |
|
bba7d9d5c5 | |
|
07a9c10001 | |
|
5779c729a4 | |
|
03f2836f5d | |
|
d2703b0dbd | |
|
9f2e2b53a4 | |
|
4caee16cb6 | |
|
6ee7058d51 | |
|
583636404f | |
|
8a9684590a | |
|
81c4ae2c13 | |
|
2c1b56d277 | |
|
f40d33b9ed | |
|
6a94728747 | |
|
99facb109c | |
|
f6d97db0d3 | |
|
4a6fa93644 | |
|
6ca0b0fd21 | |
|
65ae177fbc | |
|
b93ff8f65c | |
|
adf87dfde4 | |
|
df5d037942 | |
|
f2f48f2340 | |
|
6839f63129 | |
|
3febf83b3c | |
|
fb8dbfac49 | |
|
45e4a11a74 | |
|
c8a3c58f7f | |
|
1939ffeea4 | |
|
74093dfaae | |
|
d2b6cb81aa | |
|
b24ede4400 | |
|
205aa5a6b1 | |
|
c8f3bfc65d | |
|
44f9317b35 | |
|
6c2a0d6876 | |
|
909dda1ea2 | |
|
367d8f28ad | |
|
d3f41dca9e | |
|
ae3d35e363 | |
|
41a6c22822 | |
|
4ac7c84403 | |
|
38f6516ad9 | |
|
03b721f014 | |
|
c5752ba4a2 | |
|
5721398340 | |
|
6c0d325673 | |
|
b40069bedd | |
|
70b2647e7c | |
|
c1efddf252 | |
|
9ded308371 | |
|
f63d922096 | |
|
11b309da02 | |
|
1444f8f48b | |
|
e6ca682114 | |
|
15fb6fdc2d | |
|
9d5c71d4e1 | |
|
013296f25e | |
|
d10b277c60 | |
|
7c50ffb3af | |
|
3a9826901a | |
|
a54373ad2f | |
|
aa7d0aec09 | |
|
31778374ed | |
|
0d55da68de | |
|
a973707a73 | |
|
b83f7a8abb | |
|
e138855623 | |
|
5582644068 | |
|
5278608b89 | |
|
59658cea6f | |
|
ef674b5ae2 | |
|
ac8c939179 | |
|
c16ec333f1 | |
|
d1e4fd173b | |
|
43c857af2c | |
|
0cc010cecc | |
|
ba7bd6dc03 | |
|
43b0a3049f | |
|
03fd491d5d | |
|
9673359688 | |
|
5d1b271336 | |
|
83c790d66a | |
|
9981c31966 | |
|
319b39905f | |
|
c00c6ed31d | |
|
ae3300ac2c | |
|
61f167c29c | |
|
2833deff90 | |
|
a8424979eb | |
|
f6f6f32b50 | |
|
15e71fe189 | |
|
45c23b4330 | |
|
995feb6685 | |
|
d8fde4b4f4 | |
|
41e6c9ed02 | |
|
cbad96aba5 | |
|
82c9a07c1a | |
|
ae8b91ac4e | |
|
0f3f5d4b56 | |
|
fee83a2a29 | |
|
a0993b6eea | |
|
64b4eca5ea | |
|
2e4fb5d174 | |
|
317c5a7759 | |
|
12f1b230e6 | |
|
530783c23b | |
|
1457c1cb85 | |
|
e3caad804b | |
|
17df6a271b | |
|
a331b80095 | |
|
74420a932f | |
|
8e1702d2da | |
|
609f09e919 | |
|
0f5e5bf6f6 | |
|
5c886d49b4 | |
|
b1f73e9235 |
|
@ -0,0 +1,2 @@
|
||||||
|
core.autocrlf false
|
||||||
|
core.eol lf
|
|
@ -15,3 +15,4 @@ It may also contain additional tools and script that are useful for preparing a
|
||||||
* pdflatex
|
* pdflatex
|
||||||
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/)
|
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/)
|
||||||
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)
|
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)
|
||||||
|
* [Isabelle_DOF-Unreleased_Isabelle2022.tar.xz](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/Isabelle_DOF-Unreleased_Isabelle2022.tar.xz)
|
||||||
|
|
|
@ -1,20 +1,32 @@
|
||||||
pipeline:
|
pipeline:
|
||||||
build:
|
build:
|
||||||
image: docker.io/logicalhacking/isabelle2021-1
|
image: docker.io/logicalhacking/isabelle2022
|
||||||
commands:
|
commands:
|
||||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||||
- mkdir -p $ARTIFACT_DIR
|
- mkdir -p $ARTIFACT_DIR
|
||||||
- export `isabelle getenv ISABELLE_HOME_USER`
|
- export `isabelle getenv ISABELLE_HOME_USER`
|
||||||
- mkdir -p $ISABELLE_HOME_USER/etc
|
- mkdir -p $ISABELLE_HOME_USER/etc
|
||||||
- echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
|
- echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
|
||||||
- isabelle components -u `pwd`
|
|
||||||
- isabelle build -D . -o browser_info
|
- isabelle build -D . -o browser_info
|
||||||
- isabelle mkroot_DOF DOF_test
|
- isabelle components -u .
|
||||||
|
- isabelle build -D . -o browser_info
|
||||||
|
- isabelle dof_mkroot -q DOF_test
|
||||||
- isabelle build -D DOF_test
|
- isabelle build -D DOF_test
|
||||||
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
|
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
|
||||||
- cd $ARTIFACT_DIR
|
- cd $ARTIFACT_DIR
|
||||||
- cd ../..
|
- cd ../..
|
||||||
- ln -s * latest
|
- ln -s * latest
|
||||||
|
archive:
|
||||||
|
image: docker.io/logicalhacking/isabelle2022
|
||||||
|
commands:
|
||||||
|
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||||
|
- mkdir -p $ARTIFACT_DIR
|
||||||
|
- export ISABELLE_VERSION=`isabelle version`
|
||||||
|
- ./.woodpecker/mk_release -d
|
||||||
|
- cp Isabelle_DOF-Unreleased_$ISABELLE_VERSION.tar.xz $ARTIFACT_DIR/../
|
||||||
|
when:
|
||||||
|
matrix:
|
||||||
|
LATEX: lualatex
|
||||||
deploy:
|
deploy:
|
||||||
image: docker.io/drillster/drone-rsync
|
image: docker.io/drillster/drone-rsync
|
||||||
settings:
|
settings:
|
||||||
|
@ -35,9 +47,9 @@ pipeline:
|
||||||
from_secret: email
|
from_secret: email
|
||||||
from: ci@logicalhacking.com
|
from: ci@logicalhacking.com
|
||||||
when:
|
when:
|
||||||
status: [ changed, failure ]
|
status: [ failure ]
|
||||||
|
|
||||||
matrix:
|
matrix:
|
||||||
LATEX:
|
LATEX:
|
||||||
- pdflatex
|
|
||||||
- lualatex
|
- lualatex
|
||||||
|
- pdflatex
|
||||||
|
|
|
@ -46,7 +46,9 @@ print_help()
|
||||||
echo " --tag tag, -t tag use tag for release archive"
|
echo " --tag tag, -t tag use tag for release archive"
|
||||||
echo " (default: use master branch)"
|
echo " (default: use master branch)"
|
||||||
echo " --publish, -p publish generated artefact"
|
echo " --publish, -p publish generated artefact"
|
||||||
echo " (use master: $PUBLISH)"
|
echo " (default: $PUBLISH)"
|
||||||
|
echo " --quick-and-dirty, -d only build required artifacts, no complete test"
|
||||||
|
echo " (default: $DIRTY)"
|
||||||
}
|
}
|
||||||
|
|
||||||
check_isabelle_version() {
|
check_isabelle_version() {
|
||||||
|
@ -68,7 +70,7 @@ clone_repo()
|
||||||
echo " * Switching to tag $TAG"
|
echo " * Switching to tag $TAG"
|
||||||
(cd $ISADOF_WORK_DIR && git checkout $TAG)
|
(cd $ISADOF_WORK_DIR && git checkout $TAG)
|
||||||
else
|
else
|
||||||
echo " * Not tag specified, using master branch"
|
echo " * No tag specified, using master branch"
|
||||||
fi
|
fi
|
||||||
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/etc/settings
|
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/etc/settings
|
||||||
|
|
||||||
|
@ -78,9 +80,30 @@ build_and_install_manuals()
|
||||||
{
|
{
|
||||||
echo "* Building manual"
|
echo "* Building manual"
|
||||||
|
|
||||||
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
if [ "$DIRTY" = "true" ]; then
|
||||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
if [ -z ${ARTIFACT_DIR+x} ]; then
|
||||||
|
echo " * Quick and Dirty Mode (local build)"
|
||||||
|
$ISABELLE build -d . Isabelle_DOF-Manual 2018-cicm-isabelle_dof-applications
|
||||||
|
mkdir -p $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||||
|
cp examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/document.pdf \
|
||||||
|
$ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||||
|
mkdir -p $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/
|
||||||
|
cp examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
|
||||||
|
$ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/;
|
||||||
|
else
|
||||||
|
echo " * Quick and Dirty Mode (running on CI)"
|
||||||
|
mkdir -p $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||||
|
cp $ARTIFACT_DIR/browser_info/Unsorted/2018-cicm-isabelle_dof-applications/document.pdf \
|
||||||
|
$ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/
|
||||||
|
|
||||||
|
mkdir -p $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/
|
||||||
|
cp $ARTIFACT_DIR/browser_info/Unsorted/Isabelle_DOF-Manual/document.pdf \
|
||||||
|
$ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/;
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
||||||
|
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||||
|
fi
|
||||||
mkdir -p $ISADOF_WORK_DIR/doc
|
mkdir -p $ISADOF_WORK_DIR/doc
|
||||||
echo "Isabelle/DOF Manuals!" > $ISADOF_WORK_DIR/doc/Contents
|
echo "Isabelle/DOF Manuals!" > $ISADOF_WORK_DIR/doc/Contents
|
||||||
|
|
||||||
|
@ -104,7 +127,6 @@ create_archive()
|
||||||
(mv $ISADOF_WORK_DIR $ISADOF_DIR)
|
(mv $ISADOF_WORK_DIR $ISADOF_DIR)
|
||||||
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
|
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
|
||||||
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
|
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
|
||||||
rm -rf $BUILD_DIR
|
|
||||||
}
|
}
|
||||||
|
|
||||||
sign_archive()
|
sign_archive()
|
||||||
|
@ -126,9 +148,8 @@ ISABELLE=`which isabelle`
|
||||||
USE_TAG="false"
|
USE_TAG="false"
|
||||||
SIGN="false"
|
SIGN="false"
|
||||||
PUBLISH="false"
|
PUBLISH="false"
|
||||||
|
DIRTY="false"
|
||||||
BUILD_DIR=`mktemp -d`
|
BUILD_DIR=`mktemp -d`
|
||||||
ISABELLE_HOME_USER=`mktemp -d`
|
|
||||||
export ISABELLE_HOME_USER
|
|
||||||
ISADOF_WORK_DIR="$BUILD_DIR/Isabelle_DOF"
|
ISADOF_WORK_DIR="$BUILD_DIR/Isabelle_DOF"
|
||||||
while [ $# -gt 0 ]
|
while [ $# -gt 0 ]
|
||||||
do
|
do
|
||||||
|
@ -144,6 +165,8 @@ do
|
||||||
SIGN="true";;
|
SIGN="true";;
|
||||||
--publish|-p)
|
--publish|-p)
|
||||||
PUBLISH="true";;
|
PUBLISH="true";;
|
||||||
|
--quick-and-dirty|-d)
|
||||||
|
DIRTY="true";;
|
||||||
--help|-h)
|
--help|-h)
|
||||||
print_help
|
print_help
|
||||||
exit 0;;
|
exit 0;;
|
||||||
|
@ -156,14 +179,24 @@ done
|
||||||
|
|
||||||
clone_repo
|
clone_repo
|
||||||
|
|
||||||
$ISABELLE components -x `pwd`
|
ISADOF_MAIN_DIR=`pwd`
|
||||||
$ISABELLE components -u $ISADOF_WORK_DIR
|
|
||||||
|
|
||||||
VARS=`$ISABELLE getenv ISABELLE_VERSION DOF_VERSION ISABELLE_HOME_USER`
|
if [ "$DIRTY" = "true" ]; then
|
||||||
|
echo "Running in Quick and Dirty mode!"
|
||||||
|
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||||
|
else
|
||||||
|
$ISABELLE components -x $ISADOF_MAIN_DIR
|
||||||
|
$ISABELLE components -u $ISADOF_WORK_DIR
|
||||||
|
fi
|
||||||
|
|
||||||
|
VARS=`$ISABELLE getenv ISABELLE_TOOL`
|
||||||
for i in $VARS; do
|
for i in $VARS; do
|
||||||
export "$i"
|
export "$i"
|
||||||
done
|
done
|
||||||
|
|
||||||
|
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
|
||||||
|
DOF_VERSION="$($ISABELLE_TOOL dof_param -b dof_version)"
|
||||||
|
|
||||||
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
|
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
|
||||||
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
|
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
|
||||||
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
||||||
|
@ -171,6 +204,11 @@ ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
||||||
check_isabelle_version
|
check_isabelle_version
|
||||||
build_and_install_manuals
|
build_and_install_manuals
|
||||||
|
|
||||||
|
if [ "$DIRTY" != "true" ]; then
|
||||||
|
$ISABELLE components -x $ISADOF_WORK_DIR
|
||||||
|
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||||
|
fi
|
||||||
|
|
||||||
create_archive
|
create_archive
|
||||||
|
|
||||||
if [ "$SIGN" = "true" ]; then
|
if [ "$SIGN" = "true" ]; then
|
||||||
|
@ -181,10 +219,6 @@ if [ "$PUBLISH" = "true" ]; then
|
||||||
publish_archive
|
publish_archive
|
||||||
fi
|
fi
|
||||||
|
|
||||||
$ISABELLE components -x $ISADOF_WORK_DIR
|
rm -rf $BUILD_DIR
|
||||||
$ISABELLE components -u `pwd`
|
|
||||||
|
|
||||||
rm -rf ISABELLE_HOME_USER
|
|
||||||
rm -rf ISADOF_WORK_DIR
|
|
||||||
|
|
||||||
exit 0
|
exit 0
|
||||||
|
|
34
CHANGELOG.md
|
@ -5,23 +5,30 @@ All notable changes to this project will be documented in this file.
|
||||||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
## Unreleased
|
## [Unreleased]
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|
||||||
- removed explicit use of build script. Requires removing "build" script entry
|
|
||||||
from ROOT files.
|
|
||||||
- lipics support is now official, requires document option "document_comment_latex=true"
|
|
||||||
in the ROOT file.
|
|
||||||
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
|
||||||
"isabelle components" command. The installation script is now only an convenience
|
|
||||||
means for installing the required AFP entries.
|
|
||||||
|
|
||||||
## 1.2.0 - 2022-03-26
|
- Updated Isabelle version to Isabelle 2022
|
||||||
|
|
||||||
## 1.1.0 - 2021-03-20
|
## [1.3.0] - 2022-07-08
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- The project-specific configuration is not part of the `ROOT` file, the formerly
|
||||||
|
used `isadof.cfg` is obsolete and no longer supported.
|
||||||
|
- Removed explicit use of `document/build` script. Requires removing the `build` script
|
||||||
|
entry from ROOT files.
|
||||||
|
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
||||||
|
`isabelle components` command. The installation script is now only a convenient way
|
||||||
|
of installing the required AFP entries.
|
||||||
|
- `mkroot_DOF` has been renamed to `dof_mkroot` (and reimplemented in Scala).
|
||||||
|
|
||||||
|
## [1.2.0] - 2022-03-26
|
||||||
|
|
||||||
|
## [1.1.0] - 2021-03-20
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
|
@ -38,4 +45,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
||||||
|
|
||||||
- First public release
|
- First public release
|
||||||
|
|
||||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...HEAD
|
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.3.0/Isabelle2021...HEAD
|
||||||
|
[1.3.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...v1.3.0/Isabelle2021-1
|
||||||
|
[1.2.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.1.0/Isabelle2021...v1.2.0/Isabelle2021
|
||||||
|
[1.1.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.0.0/Isabelle2019...v1.1.0/Isabelle2021
|
||||||
|
|
|
@ -1,40 +1,35 @@
|
||||||
# [Isabelle/DOF](https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF): Document Preparation Setup
|
# [Isabelle/DOF](https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF): Document Preparation Setup
|
||||||
|
|
||||||
Isabelle/DOF is a novel Document Ontology Framework on top of Isabelle.
|
Isabelle/DOF is a novel Document Ontology Framework on top of Isabelle.
|
||||||
Isabelle/DOF allows for both conventional typesetting as well as formal
|
Isabelle/DOF allows for both conventional typesetting and formal development.
|
||||||
development. The manual for [Isabelle/DOF 1.2.0/Isabelle2021 is available
|
The manual for [Isabelle/DOF 1.3.0/Isabelle2021-1 is available
|
||||||
online.](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.pdf)
|
online.](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.pdf)
|
||||||
|
|
||||||
## Pre-requisites
|
## Pre-requisites
|
||||||
|
|
||||||
Isabelle/DOF has three major pre-requisites:
|
Isabelle/DOF has three major prerequisites:
|
||||||
|
|
||||||
* **Isabelle:** Isabelle/DOF requires [Isabelle 2021-1](http://isabelle.in.tum.de/website-Isabelle2021-1/).
|
* **Isabelle:** Isabelle/DOF requires [Isabelle
|
||||||
Please download the Isabelle 2021-1 distribution for your operating
|
2022](https://isabelle.in.tum.de/website-Isabelle2022/). Please download the
|
||||||
system from the [Isabelle
|
Isabelle 2022 distribution for your operating system from the [Isabelle
|
||||||
website](http://isabelle.in.tum.de/website-Isabelle2021-1/).
|
website](https://isabelle.in.tum.de/website-Isabelle2022/).
|
||||||
* **AFP:** Isabelle/DOF requires two entries from the [Archive of
|
* **AFP:** Isabelle/DOF requires several entries from the [Archive of Formal Proofs
|
||||||
Formal Proofs (AFP)](https://www.isa-afp.org/). Please install the
|
(AFP)](https://www.isa-afp.org/). Please install the AFP following the
|
||||||
AFP following the instructions given at
|
instructions given at <https://www.isa-afp.org/using.html>.
|
||||||
<https://www.isa-afp.org/using.html>. For your convenience, we also
|
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
|
||||||
provide a script that only installs the two entries required by
|
[TeX Live 2022](https://www.tug.org/texlive/) with all available updates
|
||||||
Isabelle/DOF into the local Isabelle/DOF directory. You can use this
|
applied.
|
||||||
script as follows:
|
|
||||||
```console
|
|
||||||
foo@bar:~$ isabelle env ./install-afp
|
|
||||||
```
|
|
||||||
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
|
|
||||||
[TeX Live 2022](https://www.tug.org/texlive/) with all available updates applied.
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
Isabelle/DOF is provided as a Isabelle component. After installing the
|
Isabelle/DOF is provided as an Isabelle component. After installing the
|
||||||
pre-requisites the Isabelle/Archive needs to be unpacked and
|
prerequisites, change into the directory containing Isabelle/DOF (this should be
|
||||||
registered. Change into the directory you unpacked Isabelle/DOF (this
|
the directory containing this `README.md` file) and execute (if you executed
|
||||||
should be the directory containing this `README.md` file) and execute
|
this command already during the installation of the prerequisites, you can skip
|
||||||
|
it now):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
foo@bar:~$ isabelle components -u `pwd`
|
foo@bar:~$ isabelle components -u .
|
||||||
```
|
```
|
||||||
|
|
||||||
The final step for the installation is:
|
The final step for the installation is:
|
||||||
|
@ -49,25 +44,24 @@ This will compile Isabelle/DOF and run the example suite.
|
||||||
|
|
||||||
### Opening an Example
|
### Opening an Example
|
||||||
|
|
||||||
If you want to work with or extend one of the examples, e.g., you can
|
If you want to work with or extend one of the examples, e.g., you can open it
|
||||||
open it similar to any standard Isabelle theory:
|
similar to any standard Isabelle theory:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
isabelle jedit -d . -l Isabelle_DOF examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
|
isabelle jedit -d . -l Isabelle_DOF examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
|
||||||
```
|
```
|
||||||
|
|
||||||
This will open an example of a scientific paper using the pre-compiled
|
This will open an example of a scientific paper using the pre-compiled session
|
||||||
session ``Isabelle_DOF``, i.e., you will not be able to edit the
|
``Isabelle_DOF``, i.e., you will not be able to edit the ontology definitions.
|
||||||
ontology definitions. If you want to edit the ontology definition,
|
If you want to edit the ontology definition, just open the theory file with the
|
||||||
just open the theory file with the default HOL session:
|
default HOL session:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
isabelle jedit -d . -l HOL examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
|
isabelle jedit -d . -l HOL examples/scholarly_paper/2018_cicm/IsaDofApplications.thy
|
||||||
```
|
```
|
||||||
|
|
||||||
While this gives you more flexibility, it might "clutter" your editing
|
While this gives you more flexibility, it might "clutter" your editing
|
||||||
experience, as a lot of internal theories are loaded into Isabelle's
|
experience, as a lot of internal theories are loaded into Isabelle's editor.
|
||||||
editor.
|
|
||||||
|
|
||||||
### Creating a New Project
|
### Creating a New Project
|
||||||
|
|
||||||
|
@ -75,55 +69,55 @@ The DOF-plugin provides an alternative to Isabelle's ``mkroot`` command.
|
||||||
Isabelle projects that use DOF need to be created using
|
Isabelle projects that use DOF need to be created using
|
||||||
|
|
||||||
```console
|
```console
|
||||||
foo@bar:~$ isabelle mkroot_DOF
|
foo@bar:~$ isabelle dof_mkroot
|
||||||
```
|
```
|
||||||
|
|
||||||
The ``mkroot_DOF`` command takes the same parameter as the standard
|
The ``dof_mkroot`` command takes the same parameter as the standard ``mkroot``
|
||||||
``mkroot`` command of Isabelle. Thereafter, the normal Isabelle
|
command of Isabelle. Thereafter, the normal Isabelle command for building
|
||||||
command for building documents can be used.
|
documents can be used.
|
||||||
|
|
||||||
Using the ``-o`` option, different ontology setups can be
|
Using the ``-o`` option, different ontology setups can be selected and using the
|
||||||
selected and using the ``-t`` option, different LaTeX setups
|
``-t`` option, different LaTeX setups can be selected. For example,
|
||||||
can be selected. For example,
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
foo@bar:~$ isabelle mkroot_DOF -o scholarly_paper -t scrartcl
|
foo@bar:~$ isabelle dof_mkroot -o scholarly_paper -t scrartcl
|
||||||
```
|
```
|
||||||
|
|
||||||
creates a setup using the scholarly_paper ontology and the article
|
creates a setup using the scholarly_paper ontology and the article class from
|
||||||
class from the KOMA-Script bundle.
|
the KOMA-Script bundle.
|
||||||
|
|
||||||
The help (option ``-h``) show a list of all supported ontologies and
|
The help (option ``-h``) show a list of all supported ontologies and document
|
||||||
document templates:
|
templates:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
foo@bar:~$ isabelle mkroot_DOF -h
|
foo@bar:~$ isabelle dof_mkroot -h
|
||||||
|
|
||||||
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
|
Usage: isabelle dof_mkroot [OPTIONS] [DIRECTORY]
|
||||||
|
|
||||||
Options are:
|
Options are:
|
||||||
-h print this help text and exit
|
-I init Mercurial repository and add generated files
|
||||||
-n NAME alternative session name (default: DIR base name)
|
-h print help
|
||||||
-o ONTOLOGY (default: scholarly_paper)
|
-n NAME alternative session name (default: directory base name)
|
||||||
Available ontologies:
|
-o NAMES list of ontologies, separated by blanks
|
||||||
* cenelec_50128
|
(default: "technical_report scholarly_paper")
|
||||||
* mathex
|
-q quiet mode: less verbosity
|
||||||
* scholarly_paper
|
-t NAME template (default: "scrreprt-modern")
|
||||||
-t TEMPLATE (default: scrartcl)
|
|
||||||
Available document templates:
|
|
||||||
* lncs
|
|
||||||
* scrartcl
|
|
||||||
* scrreprt
|
|
||||||
* scrreprt-modern
|
|
||||||
|
|
||||||
Prepare session root DIR (default: current directory).
|
Create session root directory for Isabelle/DOF (default: current directory).
|
||||||
```
|
```
|
||||||
|
|
||||||
## Releases
|
## Releases
|
||||||
|
|
||||||
For releases, signed archives including a PDF version of the Isabelle/DOF manual are
|
For releases, signed archives including a PDF version of the Isabelle/DOF manual
|
||||||
are available:
|
are available:
|
||||||
|
|
||||||
|
* Isabelle/DOF 1.3.0/Isabelle2021-1
|
||||||
|
* [Isabelle_DOF-1.3.0_Isabelle2021-1.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.pdf)
|
||||||
|
* [Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz)
|
||||||
|
* [Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz.asc](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.3.0_Isabelle2021-1.tar.xz.asc)
|
||||||
|
|
||||||
|
### Older Releases
|
||||||
|
|
||||||
* Isabelle/DOF 1.2.0/Isabelle2021
|
* Isabelle/DOF 1.2.0/Isabelle2021
|
||||||
* [Isabelle_DOF-1.2.0_Isabelle2021.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.pdf)
|
* [Isabelle_DOF-1.2.0_Isabelle2021.pdf](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.pdf)
|
||||||
* [Isabelle_DOF-1.2.0_Isabelle2021.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.tar.xz)
|
* [Isabelle_DOF-1.2.0_Isabelle2021.tar.xz](https://artifacts.logicalhacking.com/releases/Isabelle_DOF/Isabelle_DOF/Isabelle_DOF-1.2.0_Isabelle2021.tar.xz)
|
||||||
|
@ -153,7 +147,7 @@ Main contacts:
|
||||||
* Idir Ait-Sadoune
|
* Idir Ait-Sadoune
|
||||||
* Paolo Crisafulli
|
* Paolo Crisafulli
|
||||||
* Chantal Keller
|
* Chantal Keller
|
||||||
* Nicolas Méric
|
* Nicolas Méric
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
@ -163,28 +157,32 @@ SPDX-License-Identifier: BSD-2-Clause
|
||||||
|
|
||||||
## Publications
|
## Publications
|
||||||
|
|
||||||
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart
|
* Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart Wolff.
|
||||||
Wolff. [Using The Isabelle Ontology Framework: Linking the Formal
|
[Using The Isabelle Ontology Framework: Linking the Formal with the
|
||||||
with the Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
|
Informal](https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf).
|
||||||
In Conference on Intelligent Computer Mathematics (CICM). Lecture
|
In Conference on Intelligent Computer Mathematics (CICM). Lecture Notes in
|
||||||
Notes in Computer Science (11006), Springer-Verlag, 2018.
|
Computer Science (11006), Springer-Verlag, 2018.
|
||||||
[doi:10.1007/978-3-319-96812-4_3](https://doi.org/10.1007/978-3-319-96812-4_3).
|
[doi:10.1007/978-3-319-96812-4_3](https://doi.org/10.1007/978-3-319-96812-4_3).
|
||||||
|
|
||||||
* Achim D. Brucker and Burkhart Wolff. [Isabelle/DOF: Design and
|
* Achim D. Brucker and Burkhart Wolff. [Isabelle/DOF: Design and
|
||||||
Implementation](https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf).
|
Implementation](https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf).
|
||||||
In Software Engineering and Formal Methods (SEFM). Lecture Notes in
|
In Software Engineering and Formal Methods (SEFM). Lecture Notes in Computer
|
||||||
Computer Science (11724), Springer-Verlag, 2019.
|
Science (11724), Springer-Verlag, 2019.
|
||||||
[doi:10.1007/978-3-030-30446-1_15](https://doi.org/10.1007/978-3-030-30446-1_15).
|
[doi:10.1007/978-3-030-30446-1_15](https://doi.org/10.1007/978-3-030-30446-1_15).
|
||||||
|
|
||||||
* Achim D. Brucker, Burkhart Wolff. [Using Ontologies in Formal Developments Targeting Certification](https://www.brucker.ch/bibliography/download/2019/brucker.ea-ontologies-certification-2019.pdf). In
|
* Achim D. Brucker, Burkhart Wolff. [Using Ontologies in Formal Developments
|
||||||
Integrated Formal Methods (IFM). Lecture Notes in Computer Science (11918). Springer-Verlag 2019.
|
Targeting
|
||||||
|
Certification](https://www.brucker.ch/bibliography/download/2019/brucker.ea-ontologies-certification-2019.pdf).
|
||||||
|
In Integrated Formal Methods (IFM). Lecture Notes in Computer Science (11918).
|
||||||
|
Springer-Verlag 2019.
|
||||||
[doi:10.1007/978-3-030-34968-4_4](http://dx.doi.org/10.1007/978-3-030-34968-4_4)
|
[doi:10.1007/978-3-030-34968-4_4](http://dx.doi.org/10.1007/978-3-030-34968-4_4)
|
||||||
|
|
||||||
* Sergio Bezzecchi, Paolo Crisafulli, Charlotte Pichot, and Burkhart Wolff. [Making Agile Development
|
* Sergio Bezzecchi, Paolo Crisafulli, Charlotte Pichot, and Burkhart Wolff.
|
||||||
Processes fit for V-style Certification Procedures.](https://hal.archives-ouvertes.fr/hal-01702815/document)
|
[Making Agile Development Processes fit for V-style Certification
|
||||||
In ERTS 2018. <https://hal.archives-ouvertes.fr/hal-01702815>
|
Procedures.](https://hal.archives-ouvertes.fr/hal-01702815/document). In ERTS
|
||||||
|
2018. <https://hal.archives-ouvertes.fr/hal-01702815>
|
||||||
|
|
||||||
## Upstream Repository
|
## Upstream Repository
|
||||||
|
|
||||||
The upstream git repository, i.e., the single source of truth, for this project is hosted
|
The upstream git repository, i.e., the single source of truth, for this project
|
||||||
at <https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.
|
is hosted at <https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF>.
|
||||||
|
|
|
@ -4,9 +4,10 @@ no_build = false
|
||||||
requirements = \
|
requirements = \
|
||||||
env:ISABELLE_SCALA_JAR
|
env:ISABELLE_SCALA_JAR
|
||||||
sources = \
|
sources = \
|
||||||
|
src/scala/dof.scala \
|
||||||
src/scala/dof_document_build.scala \
|
src/scala/dof_document_build.scala \
|
||||||
src/scala/dof_mkroot.scala \
|
src/scala/dof_mkroot.scala \
|
||||||
src/scala/dof_tools.scala
|
src/scala/dof_tools.scala
|
||||||
services = \
|
services = \
|
||||||
isabelle_dof.DOF_Tools \
|
isabelle.dof.DOF_Tools \
|
||||||
isabelle_dof.DOF_Document_Build$Engine
|
isabelle.dof.DOF_Document_Build$Engine
|
||||||
|
|
12
etc/options
|
@ -1,12 +0,0 @@
|
||||||
(* :mode=isabelle-options: *)
|
|
||||||
|
|
||||||
section "Isabelle/DOF"
|
|
||||||
|
|
||||||
public option dof_url : string = "https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
|
|
||||||
-- "Isabelle/DOF source repository"
|
|
||||||
|
|
||||||
option dof_artifact_dir : string = "releases/Isabelle_DOF/Isabelle_DOF"
|
|
||||||
-- "Isabelle/DOF release artifacts"
|
|
||||||
|
|
||||||
option dof_artifact_host : string = "artifacts.logicalhacking.com"
|
|
||||||
|
|
23
etc/settings
|
@ -1,25 +1,4 @@
|
||||||
# -*- shell-script -*- :mode=shellscript:
|
# -*- shell-script -*- :mode=shellscript:
|
||||||
|
|
||||||
ISABELLE_DOF_HOME="$COMPONENT"
|
ISABELLE_DOF_HOME="$COMPONENT"
|
||||||
ISABELLE_TOOLS="$ISABELLE_TOOLS":"$ISABELLE_DOF_HOME/src/Tools"
|
ISABELLE_DOCS="$ISABELLE_DOF_HOME/doc:$ISABELLE_DOCS"
|
||||||
ISABELLE_DOCS="$ISABELLE_DOF_HOME/doc":"$ISABELLE_DOCS"
|
|
||||||
|
|
||||||
# Isabelle/DOF Version Information
|
|
||||||
DOF_VERSION="Unreleased" # "Unreleased" for development, semantic version for releases
|
|
||||||
DOF_DATE="00/00/00"
|
|
||||||
DOF_LATEST_VERSION="1.2.0"
|
|
||||||
DOF_LATEST_ISABELLE="Isabelle2021"
|
|
||||||
DOF_LATEST_DOI="10.5281/zenodo.6385695"
|
|
||||||
DOF_GENERIC_DOI="10.5281/zenodo.3370482"
|
|
||||||
#
|
|
||||||
# Isabelle and AFP Configuration
|
|
||||||
ISABELLE_VERSION="Isabelle2021-1"
|
|
||||||
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2021-1/"
|
|
||||||
AFP_DATE="afp-2021-12-28"
|
|
||||||
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
|
|
||||||
#
|
|
||||||
# Isabelle/DOF Repository Configuration
|
|
||||||
DOF_URL="https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
|
|
||||||
DOF_ARTIFACT_DIR="releases/Isabelle_DOF/Isabelle_DOF"
|
|
||||||
DOF_ARTIFACT_HOST="artifacts.logicalhacking.com"
|
|
||||||
#
|
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
session "mini_odo" = "Isabelle_DOF" +
|
session "mini_odo" = "Isabelle_DOF" +
|
||||||
options [document = pdf, document_output = "output", document_build = dof]
|
options [document = pdf, document_output = "output", document_build = dof]
|
||||||
|
sessions
|
||||||
|
"Physical_Quantities"
|
||||||
theories
|
theories
|
||||||
"mini_odo"
|
"mini_odo"
|
||||||
document_files
|
document_files
|
||||||
"isadof.cfg"
|
|
||||||
"preamble.tex"
|
"preamble.tex"
|
||||||
"root.bib"
|
"root.bib"
|
||||||
"root.mst"
|
"root.mst"
|
||||||
|
@ -12,3 +13,4 @@ session "mini_odo" = "Isabelle_DOF" +
|
||||||
"figures/odometer.jpeg"
|
"figures/odometer.jpeg"
|
||||||
"figures/three-phase-odo.pdf"
|
"figures/three-phase-odo.pdf"
|
||||||
"figures/wheel-df.png"
|
"figures/wheel-df.png"
|
||||||
|
|
||||||
|
|
0
examples/CENELEC_50128/mini_odo/document/figures/df-numerics-encshaft.png
Executable file → Normal file
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 27 KiB |
Before Width: | Height: | Size: 407 KiB After Width: | Height: | Size: 407 KiB |
0
examples/CENELEC_50128/mini_odo/document/figures/three-phase-odo.pdf
Executable file → Normal file
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
|
@ -1,3 +0,0 @@
|
||||||
Template: scrreprt-modern
|
|
||||||
Ontology: technical_report
|
|
||||||
Ontology: cenelec_50128
|
|
|
@ -14,11 +14,16 @@
|
||||||
(*<*)
|
(*<*)
|
||||||
theory
|
theory
|
||||||
mini_odo
|
mini_odo
|
||||||
imports
|
imports
|
||||||
"Isabelle_DOF.CENELEC_50128"
|
"Isabelle_DOF.CENELEC_50128"
|
||||||
"Isabelle_DOF.technical_report"
|
"Isabelle_DOF.technical_report"
|
||||||
|
"Physical_Quantities.SI" "Physical_Quantities.SI_Pretty"
|
||||||
begin
|
begin
|
||||||
|
use_template "scrreprt-modern"
|
||||||
|
use_ontology technical_report and CENELEC_50128
|
||||||
declare[[strict_monitor_checking=true]]
|
declare[[strict_monitor_checking=true]]
|
||||||
|
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
|
||||||
|
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
|
||||||
(*>*)
|
(*>*)
|
||||||
|
|
||||||
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
|
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
|
||||||
|
@ -27,14 +32,34 @@ subtitle*[subtitle::subtitle]\<open>Case Study: An Odometer-Subsystem\<close>
|
||||||
chapter*[casestudy::technical]\<open>An Odometer-Subsystem\<close>
|
chapter*[casestudy::technical]\<open>An Odometer-Subsystem\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
In our case study, we will follow the phases of analysis, design, and implementation of the
|
In our case study, we will follow the phases of analysis, design, and implementation of the
|
||||||
odometry function of a train. This software processes data from an odometer to compute the position,
|
odometry function of a train. This \<^cenelec_term>\<open>SF\<close> processes data from an odometer to compute
|
||||||
speed, and acceleration of a train. This system provides the basis for many
|
the position, speed, and acceleration of a train. This system provides the basis for many
|
||||||
safety critical decisions, \eg, the opening of the doors. Due to its relatively small size, it
|
safety critical decisions, \<^eg>, the opening of the doors. Due to its relatively small size, it
|
||||||
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
|
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
|
||||||
physical model of the environment, the physical and architectural model of the odometer including
|
physical model of the environment, the physical and architectural model of the odometer,
|
||||||
the problem of numerical sampling, and the boundaries of efficient computations. The interplay
|
but also the \<^cenelec_term>\<open>SFRS\<close> aspects including the problem of numerical sampling and the
|
||||||
between environment and measuring-device as well as the implementation problems on a platform
|
boundaries of efficient computations. The interplay between environment and measuring-device as
|
||||||
with limited resources makes the odometer a fairly typical safety critical embedded system.
|
well as the implementation problems on a platform with limited resources makes the odometer a
|
||||||
|
fairly typical \<^cenelec_term>\<open>safety\<close> critical \<^cenelec_term>\<open>component\<close> of an embedded system.
|
||||||
|
|
||||||
|
The case-study is presented in form of an \<^emph>\<open>integrated source\<close> in \<^isadof> containing all four
|
||||||
|
reports from the phases:
|
||||||
|
\<^item> \<^term>\<open>software_requirements\<close> with deliverable \<^doc_class>\<open>SWRS\<close>
|
||||||
|
(or long:\<^typ>\<open>software_requirements_specification\<close>(-report))
|
||||||
|
\<^item> \<^term>\<open>software_architecture_and_design\<close> with deliverable \<^doc_class>\<open>SWDS\<close>
|
||||||
|
(or long: \<^typ>\<open>software_design_specification\<close>(-report))
|
||||||
|
\<^item> \<^term>\<open>software_component_design\<close> with deliverable \<^doc_class>\<open>SWCDVR\<close>
|
||||||
|
(or long: \<^typ>\<open>software_component_design_verification\<close>(-report).)
|
||||||
|
\<^item> \<^term>\<open>component_implementation_and_testing\<close> with deliverable \<^doc_class>\<open>SWADVR\<close>
|
||||||
|
(or long: \<^typ>\<open>software_architecture_and_design_verification\<close>(-report))
|
||||||
|
|
||||||
|
The objective of this case study is to demonstrate deep-semantical ontologoies in
|
||||||
|
software developments targeting certifications, and in particular, how \<^isadof>'s
|
||||||
|
integrated source concept permits to assure \<^cenelec_term>\<open>traceability\<close>.
|
||||||
|
|
||||||
|
\<^bold>\<open>NOTE\<close> that this case study has aspects that were actually covered by CENELEC 50126 -
|
||||||
|
the 'systems'-counterpart covering hardware aspects. Recall that the CENELEC 50128 covers
|
||||||
|
software.
|
||||||
|
|
||||||
Due to space reasons, we will focus on the analysis part of the integrated
|
Due to space reasons, we will focus on the analysis part of the integrated
|
||||||
document; the design and code parts will only be outlined in a final resume. The
|
document; the design and code parts will only be outlined in a final resume. The
|
||||||
|
@ -45,7 +70,8 @@ text\<open>
|
||||||
development.
|
development.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
section\<open>System Requirements as an \<^emph>\<open>Integrated Source\<close>\<close>
|
section\<open>A CENELEC-conform development as an \<^emph>\<open>Integrated Source\<close>\<close>
|
||||||
|
|
||||||
text\<open>Accurate information of a train's location along a track is in an important prerequisite
|
text\<open>Accurate information of a train's location along a track is in an important prerequisite
|
||||||
to safe railway operation. Position, speed and acceleration measurement usually lies on a
|
to safe railway operation. Position, speed and acceleration measurement usually lies on a
|
||||||
set of independent measurements based on different physical principles---as a way to enhance
|
set of independent measurements based on different physical principles---as a way to enhance
|
||||||
|
@ -74,17 +100,23 @@ text\<open>
|
||||||
|
|
||||||
This model is already based on several fundamental assumptions relevant for the correct
|
This model is already based on several fundamental assumptions relevant for the correct
|
||||||
functioning of the system and for its integration into the system as a whole. In
|
functioning of the system and for its integration into the system as a whole. In
|
||||||
particular, we need to make the following assumptions explicit:\vspace{-.3cm}
|
particular, we need to make the following assumptions explicit: \<^vs>\<open>-0.3cm\<close>\<close>
|
||||||
\<^item> that the wheel is perfectly circular with a given, constant radius,
|
|
||||||
\<^item> that the slip between the trains wheel and the track negligible,
|
|
||||||
\<^item> the distance between all teeth of a wheel is the same and constant, and
|
|
||||||
\<^item> the sampling rate of positions is a given constant.
|
|
||||||
|
|
||||||
|
text*["perfect-wheel"::assumption]
|
||||||
|
\<open>\<^item> the wheel is perfectly circular with a given, constant radius. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||||
|
text*["no-slip"::assumption]
|
||||||
|
\<open>\<^item> the slip between the trains wheel and the track negligible. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||||
|
text*["constant-teeth-dist"::assumption]
|
||||||
|
\<open>\<^item> the distance between all teeth of a wheel is the same and constant, and \<^vs>\<open>-0.3cm\<close>\<close>
|
||||||
|
text*["constant-sampling-rate"::assumption]
|
||||||
|
\<open>\<^item> the sampling rate of positions is a given constant.\<close>
|
||||||
|
|
||||||
|
text\<open>
|
||||||
These assumptions have to be traced throughout the certification process as
|
These assumptions have to be traced throughout the certification process as
|
||||||
|
|
||||||
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
|
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
|
||||||
also reflected by their tracing throughout the body of certification documents. This may result
|
also reflected by their tracing throughout the body of certification documents. This may result
|
||||||
in operational regulations, \eg, regular checks for tolerable wheel defects. As for the
|
in operational regulations, \<^eg>, regular checks for tolerable wheel defects. As for the
|
||||||
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
|
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
|
||||||
slip can be neglected: the device can only produce reliable results under certain physical
|
slip can be neglected: the device can only produce reliable results under certain physical
|
||||||
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
|
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
|
||||||
|
@ -93,57 +125,53 @@ text\<open>
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
subsection\<open>Capturing ``System Architecture.''\<close>
|
subsection\<open>Capturing ``System Architecture.''\<close>
|
||||||
|
|
||||||
|
figure*["three-phase"::figure,relative_width="70",src="''figures/three-phase-odo''"]
|
||||||
|
\<open>An odometer with three sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.\<close>
|
||||||
|
|
||||||
text\<open>
|
text\<open>
|
||||||
\begin{figure}
|
The requirements analysis also contains a document \<^doc_class>\<open>SYSAD\<close>
|
||||||
\centering
|
(\<^typ>\<open>system_architecture_description\<close>) that contains technical drawing of the odometer,
|
||||||
\includegraphics[width=.70\textwidth]{figures/three-phase-odo}
|
a timing diagram (see \<^figure>\<open>three-phase\<close>), and tables describing the encoding of the position
|
||||||
\begin{picture}(0,0)
|
for the possible signal transitions of the sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.
|
||||||
\put(-112,44){\includegraphics[width=.30\textwidth]{figures/odometer}}
|
|
||||||
\end{picture}
|
|
||||||
\caption{An odometer with three sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3}.}\label{three-phase}
|
|
||||||
\end{figure}
|
|
||||||
The requirements analysis also contains a sub-document \<^emph>\<open>system architecture description\<close>
|
|
||||||
(CENELEC notion) that contains technical drawing of the odometer, a timing diagrams
|
|
||||||
(see \autoref{three-phase}), and tables describing the encoding of the position
|
|
||||||
for the possible signal transitions of the sensors \inlineisar{C1}, \inlineisar{C2}, and $C3.$
|
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
subsection\<open>Capturing ``System Interfaces.''\<close>
|
subsection\<open>Capturing ``System Interfaces.''\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
The requirements analysis also contains a sub-document \<^emph>\<open>functions and interfaces\<close>
|
The requirements analysis also contains a sub-document \<^doc_class>\<open>FnI\<close> (\<^typ>\<open>functions_and_interfaces\<close>)
|
||||||
(CENELEC notion) describing the technical format of the output of the odometry function.
|
describing the technical format of the output of the odometry function.
|
||||||
This section, \eg, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
|
This section, \<^eg>, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
|
||||||
``Estimation of the speed (in mm/sec) evaluated over the latest $N_{\text{avg}}$ samples''
|
``Estimation of the speed (in mm/sec) evaluated over the latest \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> samples''
|
||||||
where the speed refers to the physical speed of the train and $N_{\text{avg}}$ a parameter of the
|
where the speed refers to the physical speed of the train and \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> a parameter of the
|
||||||
sub-system configuration. \<close>
|
sub-system configuration. \<close>
|
||||||
|
|
||||||
(*<*)
|
(*<*)
|
||||||
declare_reference*["df-numerics-encshaft"::figure]
|
declare_reference*["df-numerics-encshaft"::figure]
|
||||||
(*>*)
|
(*>*)
|
||||||
subsection\<open>Capturing ``Required Performances.''\<close>
|
subsection\<open>Capturing ``Required Performances.''\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
The given analysis document is relatively implicit on the expected precision of the measurements;
|
The given analysis document is relatively implicit on the expected precision of the measurements;
|
||||||
however, certain interface parameters like \inlineisar*Odometric_Position_TimeStamp*
|
however, certain interface parameters like \<open>Odometric_Position_TimeStamp\<close>
|
||||||
(a counter on the number of samplings) and \inlineisar*Relative_Position* are defined by as
|
(a counter on the number of samplings) and \<open>Relative_Position\<close> are defined by as
|
||||||
unsigned 32 bit integer. These definitions imply that exported constraints concerning the acceptable
|
unsigned 32 bit integer. These definitions imply that exported constraints concerning the acceptable
|
||||||
time of service as well the maximum distance before a necessary reboot of the subsystem.
|
time of service as well the maximum distance before a necessary reboot of the subsystem.
|
||||||
For our case-study, we assume maximum deviation of the \inlineisar*Relative_Position* to the
|
For our case-study, we assume maximum deviation of the \<open>Relative_Position\<close> to the
|
||||||
theoretical distance.
|
theoretical distance.
|
||||||
|
|
||||||
The requirement analysis document describes the physical environment, the architecture
|
The requirement analysis document describes the physical environment, the architecture
|
||||||
of the measuring device, and the required format and precision of the measurements of the odometry
|
of the measuring device, and the required format and precision of the measurements of the odometry
|
||||||
function as represented (see @{figure (unchecked) "df-numerics-encshaft"}).\<close>
|
function as represented (see @{figure (unchecked) "df-numerics-encshaft"}).\<close>
|
||||||
|
|
||||||
figure*["df-numerics-encshaft"::figure,relative_width="76",src="''figures/df-numerics-encshaft''"]
|
figure*["df-numerics-encshaft"::figure,relative_width="76",src="''figures/df-numerics-encshaft''"]
|
||||||
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
|
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
|
||||||
|
|
||||||
|
|
||||||
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
|
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
\enlargethispage{\baselineskip}
|
|
||||||
The design provides a function that manages an internal first-in-first-out buffer of
|
The design provides a function that manages an internal first-in-first-out buffer of
|
||||||
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
|
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
|
||||||
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
|
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
|
||||||
recovery, another one, fatal, signaling, \eg, a defect of the receiver hardware),
|
recovery, another one, fatal, signaling, \<^eg>, a defect of the receiver hardware),
|
||||||
calculating the relative position, speed and acceleration.
|
calculating the relative position, speed and acceleration.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
|
@ -161,89 +189,123 @@ text\<open>
|
||||||
in AutoCorres.
|
in AutoCorres.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
|
(*<*)
|
||||||
|
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||||
|
definition wheel_diameter ::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||||
|
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||||
|
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s ::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||||
|
(*>*)
|
||||||
|
|
||||||
|
|
||||||
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
|
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
|
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
|
||||||
documents as well as code into an integrated Isabelle document, we entered into the phase of
|
documents as well as code into an integrated Isabelle document, we entered into the phase of
|
||||||
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
|
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
|
||||||
the definitions:
|
the definitions:
|
||||||
\begin{isar}
|
|
||||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
@{theory_text [display]\<open>
|
||||||
definition wheel_diameter::real ("w$_d$") where "w$_d$ \<equiv> SOME x. x > 0"
|
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||||
definition wheel_circumference::real ("w$_{\text{circ}}$") where "w$_{\text{circ}}$ \<equiv> pi * w$_d$"
|
definition wheel_diameter::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||||
definition \<delta>s$_{\text{res}}$::real where "\<delta>s$_{\text{res}}$ \<equiv> w$_{\text{circ}}$ / (2 * 3 * tpw)"
|
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||||
\end{isar}
|
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||||
Here, \inlineisar{real} refers to the real numbers as defined in the HOL-Analysis
|
\<close>}
|
||||||
library, which provides concepts such as Cauchy Sequences, limits,
|
|
||||||
differentiability, and a very substantial part of classical Calculus. \inlineisar{SOME} is the
|
Here, \<open>real\<close> refers to the real numbers as defined in the HOL-Analysis library, which provides
|
||||||
Hilbert choice operator from HOL; the definitions of the model parameters admit all possible positive values as uninterpreted
|
concepts such as Cauchy Sequences, limits, differentiability, and a very substantial part of
|
||||||
constants. Our perfect-wheel assumption is translated into a calculation of the circumference of the
|
classical Calculus. \<open>SOME\<close> is the Hilbert choice operator from HOL; the definitions of the
|
||||||
wheel, while \inlineisar{\<delta>s<bsub>res<esub>}, the resolution of the odometer, can be calculated
|
model parameters admit all possible positive values as uninterpreted constants. Our
|
||||||
|
\<^assumption>\<open>perfect-wheel\<close> is translated into a calculation of the circumference of the
|
||||||
|
wheel, while \<open>\<delta>s\<^sub>r\<^sub>e\<^sub>s\<close>, the resolution of the odometer, can be calculated
|
||||||
from the these parameters. HOL-Analysis permits to formalize the fundamental physical observables:
|
from the these parameters. HOL-Analysis permits to formalize the fundamental physical observables:
|
||||||
\begin{isar}
|
\<close>
|
||||||
type_synonym distance_function = "real\<Rightarrow>real"
|
|
||||||
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
|
(*<*)
|
||||||
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real"
|
type_synonym distance_function = "real[s] \<Rightarrow> real[m]"
|
||||||
where "Accel f \<equiv> deriv (deriv f)"
|
consts Speed::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||||
\end{isar}
|
consts Accel::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>2]"
|
||||||
which permits to constrain the central observable \inlineisar|distance_function| in a
|
consts Speed\<^sub>M\<^sub>a\<^sub>x::"real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||||
|
|
||||||
|
(* Non - SI conform common abrbreviations *)
|
||||||
|
definition "kmh \<equiv> kilo *\<^sub>Q metre \<^bold>/ hour :: 'a::{field,ring_char_0}[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||||
|
definition "kHz \<equiv> kilo *\<^sub>Q hertz :: 'a::{field,ring_char_0}[s\<^sup>-\<^sup>1]"
|
||||||
|
|
||||||
|
(*>*)
|
||||||
|
text\<open>
|
||||||
|
@{theory_text [display]\<open>
|
||||||
|
type_synonym distance_function = "real[s]\<Rightarrow>real[m]"
|
||||||
|
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
|
||||||
|
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Accel f \<equiv> deriv (deriv f)"
|
||||||
|
\<close>}
|
||||||
|
|
||||||
|
which permits to constrain the central observable \<open>distance_function\<close> in a
|
||||||
way that they describe the space of ``normal behavior'' where we expect the odometer to produce
|
way that they describe the space of ``normal behavior'' where we expect the odometer to produce
|
||||||
reliable measurements over a \inlineisar|distance_function df|.
|
reliable measurements over a \<open>distance_function df\<close> .
|
||||||
|
|
||||||
The essence of the physics of the train is covered by the following definition:
|
The essence of the physics of the train is covered by the following definition:
|
||||||
\begin{isar}
|
|
||||||
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
|
@{theory_text [display]\<open>
|
||||||
where normally_behaved_distance_function df =
|
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
|
||||||
( \<forall> t. df(t) \<in> \<real>$_{\ge 0}$ \<and> (\<forall> t \<in> \<real>$_{\le 0}$. df(t) = 0)
|
where normally_behaved_distance_function df =
|
||||||
\<and> df differentiable on$_{\text{R}}$ \<and> (Speed df)differentiable on$_{\text{R}}$
|
( \<forall> t. df(t) \<in> \<real>\<^sub>\<ge>\<^sub>0 \<and> (\<forall> t \<in> \<real>\<real>\<^sub>\<ge>\<^sub>0. df(t) = 0)
|
||||||
\<and> (Accel df)differentiable on$_{\ensuremath{R}}$
|
\<and> df differentiable on \<real>\<^sub>\<ge>\<^sub>0 \<and> (Speed df)differentiable on \<real>\<^sub>\<ge>\<^sub>0$
|
||||||
\<and> (\<forall> t. (Speed df) t \<in> {-Speed$_{\text{Max}}$ .. Speed$_{\text{Max}}$})
|
\<and> (Accel df)differentiable on \<real>\<^sub>\<ge>\<^sub>0
|
||||||
\<and> (\<forall> t. (Accel df) t \<in> {-\<bar>Accel$_{\text{Max}}$\<bar> .. \<bar>Accel$_{\text{Max}}$\<bar>}))
|
\<and> (\<forall> t. (Speed df) t \<in> {Speed\<^sub>M\<^sub>i\<^sub>n .. Speed\<^sub>M\<^sub>a\<^sub>x})
|
||||||
\end{isar}
|
\<and> (\<forall> t. (Accel df) t \<in> {Accel\<^sub>M\<^sub>i\<^sub>n .. Accel\<^sub>M\<^sub>a\<^sub>x}))
|
||||||
|
\<close>}
|
||||||
|
|
||||||
which constrains the distance functions in the bounds described of the informal descriptions and
|
which constrains the distance functions in the bounds described of the informal descriptions and
|
||||||
states them as three-fold differentiable function in certain bounds concerning speed and acceleration.
|
states them as three-fold differentiable function in certain bounds concerning speed and
|
||||||
Note that violations, in particular of the constraints on speed and acceleration, \<^emph>\<open>do\<close> occur in practice.
|
acceleration. Note that violations, in particular of the constraints on speed and acceleration,
|
||||||
In such cases, the global system adapts recovery strategies that are out of the scope of our model.
|
\<^emph>\<open>do\<close> occur in practice. In such cases, the global system adapts recovery strategies that are out
|
||||||
Concepts like \inlineisar+shaft_encoder_state+ (a triple with the sensor values
|
of the scope of our model. Concepts like \<open>shaft_encoder_state\<close> (a triple with the sensor values
|
||||||
\inlineisar{C1}, \inlineisar{C2}, \inlineisar{C3}) were formalized as types, while tables were
|
\<open>C1\<close>, \<open>C2\<close>, \<open>C3\<close>) were formalized as types, while tables were
|
||||||
defined as recursive functions:
|
defined as recursive functions:
|
||||||
\enlargethispage{2\baselineskip}\begin{isar}
|
|
||||||
fun phase$_0$ :: "nat \<Rightarrow> shaft_encoder_state" where
|
@{theory_text [display]\<open>
|
||||||
"phase$_0$ (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|
fun phase\<^sub>0 :: "nat \<Rightarrow> shaft_encoder_state" where
|
||||||
|"phase$_0$ (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|
"phase\<^sub>0 (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|
||||||
|"phase$_0$ (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|
|"phase\<^sub>0 (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|
||||||
|"phase$_0$ (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|
|"phase\<^sub>0 (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|
||||||
|"phase$_0$ (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|
|"phase\<^sub>0 (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|
||||||
|"phase$_0$ (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|
|"phase\<^sub>0 (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|
||||||
|"phase$_0$ x = phase$_0$(x - 6)"
|
|"phase\<^sub>0 (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|
||||||
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase$_0$(x-1)
|
|"phase\<^sub>0 x = phase\<^sub>0(x - 6)"
|
||||||
\end{isar}
|
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase\<^sub>0(x-1)
|
||||||
We now define shaft encoder sequences as
|
\<close>}
|
||||||
translations of distance functions:
|
|
||||||
\begin{isar}
|
We now define shaft encoder sequences as translations of distance functions:
|
||||||
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
|
|
||||||
where "encoding df init$_{\text{pos}}$ \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s$_{\text{res}}$\<rfloor> + init$_{\text{pos}}$)"
|
@{theory_text [display]\<open>
|
||||||
\end{isar}
|
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
|
||||||
where \inlineisar+init$_{\text{pos}}$+ is the initial position of the wheel.
|
where "encoding df init\<^sub>p\<^sub>o\<^sub>s \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s\<^sub>r\<^sub>e\<^sub>s\<rfloor> + init\<^sub>p\<^sub>o\<^sub>s)"
|
||||||
\inlineisar+sampling+'s were constructed from encoding sequences over discretized time points:
|
\<close>}
|
||||||
\begin{isar}
|
|
||||||
definition $\!\!$sampling::"distance$\!$_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft$\!$_encoder$\!$_state"
|
where \<open>init\<^sub>p\<^sub>o\<^sub>s\<close> is the initial position of the wheel.
|
||||||
where "sampling df init$_{\text{pos}}$ \<delta>t \<equiv> \<lambda>n::nat. encoding df init$_{\text{pos}}$ (n * \<delta>t)"
|
\<open>sampling\<close>'s were constructed from encoding sequences over discretized time points:
|
||||||
\end{isar}
|
|
||||||
The sampling interval \inlineisar+\<delta>t+ (the inverse of the sampling frequency) is a critical
|
@{theory_text [display]\<open>
|
||||||
|
definition sampling::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft_encoder_state"
|
||||||
|
where "sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t \<equiv> \<lambda>n::nat. encoding df init\<^sub>p\<^sub>o\<^sub>s (n * \<delta>t)"
|
||||||
|
\<close>}
|
||||||
|
|
||||||
parameter of the configuration of a system.
|
parameter of the configuration of a system.
|
||||||
|
|
||||||
Finally, we can formally define the required performances. From the interface description
|
Finally, we can formally define the required performances. From the interface description
|
||||||
and the global model parameters such as wheel diameter, the number of teeth per wheel, the sampling
|
and the global model parameters such as wheel diameter, the number of teeth per wheel, the
|
||||||
frequency etc., we can infer the maximal time of service as well as the maximum distance the
|
sampling frequency etc., we can infer the maximal time of service as well as the maximum distance
|
||||||
device can measure.
|
the device can measure. As an example configuration, choosing:
|
||||||
As an example configuration, choosing 1m for
|
|
||||||
\inlineisar+w$_d$+, 100 for \inlineisar+tpw+, 80km/h \inlineisar+Speed$_{\text{Max}}$+,
|
\<^item> \<^term>\<open>(1 *\<^sub>Q metre):: real[m]\<close> for \<^term>\<open>w\<^sub>d\<close> (wheel-diameter),
|
||||||
and 14400Hz for the sampling frequency, results in an odometer resolution of 2.3mm,
|
\<^item> \<^term>\<open>100 :: real\<close> for \<^term>\<open>tpw\<close> (teeth per wheel),
|
||||||
a maximum distance of 9878km, and a maximal system up-time of 123.4 hours.
|
\<^item> \<^term>\<open>80 *\<^sub>Q kmh :: real[m\<cdot>s\<^sup>-\<^sup>1]\<close> for \<^term>\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close>,
|
||||||
|
\<^item> \<^term>\<open>14.4 *\<^sub>Q kHz :: real[s\<^sup>-\<^sup>1]\<close> for the sampling frequency,
|
||||||
|
|
||||||
|
results in an odometer resolution of \<^term>\<open>2.3 *\<^sub>Q milli *\<^sub>Q metre\<close>, a maximum distance of
|
||||||
|
\<^term>\<open>9878 *\<^sub>Q kilo *\<^sub>Q metre\<close>, and a maximal system up-time of \<^term>\<open>123.4 *\<^sub>Q hour\<close>s.
|
||||||
The required precision of an odometer can be defined by a constant describing
|
The required precision of an odometer can be defined by a constant describing
|
||||||
the maximally allowed difference between \inlineisar+df(n*\<delta>t)+ and
|
the maximally allowed difference between \<open>df(n*\<delta>t)\<close> and
|
||||||
\inlineisar+sampling df init$_{\text{pos}}$ \<delta>t n+ for all \inlineisar+init$_{\text{pos}}$ \<in>{0..5}+.
|
\<open>sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t n\<close> for all \<open>init\<^sub>p\<^sub>o\<^sub>s \<in>{0..5}\<close>.
|
||||||
\<close>
|
\<close>
|
||||||
(*<*)
|
(*<*)
|
||||||
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
|
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
|
||||||
|
@ -253,41 +315,50 @@ ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
|
||||||
|
|
||||||
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
|
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
|
||||||
text\<open>The original documents contained already various statements that motivate certain safety
|
text\<open>The original documents contained already various statements that motivate certain safety
|
||||||
properties of the device. For example, the \inlineisar+Phase+-table excludes situations in which
|
properties of the device. For example, the \<open>Phase\<close>-table excludes situations in which
|
||||||
all sensors \inlineisar{C1}, \inlineisar{C2}, and \inlineisar{C3} are all ``off'' or situations in
|
all sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close> are all ``off'' or situations in
|
||||||
which sensors are ``on,'' reflecting a physical or electrical error in the odometer. It can be
|
which sensors are ``on,'' reflecting a physical or electrical error in the odometer. It can be
|
||||||
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed from the
|
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed
|
||||||
above definitions:
|
from the above definitions:
|
||||||
\begin{isar}
|
|
||||||
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
|
|
||||||
proof (cases x)
|
|
||||||
case 0 then show ?thesis by (simp add: Phase_def)
|
|
||||||
next
|
|
||||||
case (Suc n) then show ?thesis
|
|
||||||
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
|
|
||||||
qed
|
|
||||||
\end{isar}
|
|
||||||
for all positions \inlineisar+x+. Similarly, it is proved that the table is indeed
|
|
||||||
cyclic: \inlineisar+ phase$_0$ x = phase$_0$(x mod 6)+ and locally injective:
|
|
||||||
\inlineisar+\<forall>x<6. \<forall>y<6. phase$_0$ x = phase$_0$ y \<longrightarrow> x = y+.
|
|
||||||
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
|
|
||||||
that we would like to present in more detail.
|
|
||||||
\begin{isar}
|
|
||||||
theorem minimal_sampling :
|
|
||||||
assumes * : normally_behaved_distance_function df
|
|
||||||
and ** : \<delta>t * Speed$_{\text{Max}}$ < \<delta>s$_{\text{res}}$
|
|
||||||
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
|
|
||||||
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
|
|
||||||
sampling df init$_{\text{pos}}$ \<delta>X = (sampling df init$_{\text{pos}}$ \<delta>t) o f
|
|
||||||
|
|
||||||
\end{isar}
|
@{theory_text [display]\<open>
|
||||||
This theorem states for \inlineisar+normally_behaved_distance_function+s that there is
|
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
|
||||||
|
proof (cases x)
|
||||||
|
case 0 then show ?thesis by (simp add: Phase_def)
|
||||||
|
next
|
||||||
|
case (Suc n) then show ?thesis
|
||||||
|
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
|
||||||
|
qed
|
||||||
|
\<close>}
|
||||||
|
|
||||||
|
for all positions \<open>x\<close>. Similarly, it is proved that the table is indeed cyclic:
|
||||||
|
|
||||||
|
\<open>phase\<^sub>0 x = phase\<^sub>0(x mod 6)\<close>
|
||||||
|
|
||||||
|
and locally injective:
|
||||||
|
|
||||||
|
\<open>\<forall>x<6. \<forall>y<6. phase\<^sub>0 x = phase\<^sub>0 y \<longrightarrow> x = y\<close>
|
||||||
|
|
||||||
|
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
|
||||||
|
that we would like to present in more detail.
|
||||||
|
|
||||||
|
@{theory_text [display]\<open>
|
||||||
|
theorem minimal_sampling :
|
||||||
|
assumes * : normally_behaved_distance_function df
|
||||||
|
and ** : \<delta>t * Speed\<^sub>M\<^sub>a\<^sub>x < \<delta>s\<^sub>r\<^sub>e\<^sub>s
|
||||||
|
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
|
||||||
|
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
|
||||||
|
sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>X = (sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t) o f
|
||||||
|
|
||||||
|
\<close>}
|
||||||
|
|
||||||
|
This theorem states for \<open>normally_behaved_distance_function\<close>s that there is
|
||||||
a minimal sampling frequency assuring the safety of the measurements; samplings on
|
a minimal sampling frequency assuring the safety of the measurements; samplings on
|
||||||
some \inlineisar$df$ gained from this minimal sampling frequency can be ``pumped up''
|
some \<open>df\<close> gained from this minimal sampling frequency can be ``pumped up''
|
||||||
to samplings of these higher sampling frequencies; they do not contain more information.
|
to samplings of these higher sampling frequencies; they do not contain more information.
|
||||||
Of particular interest is the second assumption, labelled ``\inlineisar|**|,'' which
|
Of particular interest is the second assumption, labelled ``\<open>**\<close>'' which
|
||||||
establishes a lower bound from \inlineisar+w$_{\text{circ}}$+, \inlineisar+tpw+,
|
establishes a lower bound from \<open>w\<^sub>0\<close>, \<open>tpw\<close>,
|
||||||
\inlineisar+Speed$_{\text{Max}}$+ for the sampling frequency. Methodologically, this represents
|
\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close> for the sampling frequency. Methodologically, this represents
|
||||||
an exported constraint that can not be represented \<^emph>\<open>inside\<close> the design model: it means that the
|
an exported constraint that can not be represented \<^emph>\<open>inside\<close> the design model: it means that the
|
||||||
computations have to be fast enough on the computing platform in order to assure that the
|
computations have to be fast enough on the computing platform in order to assure that the
|
||||||
calculations are valid. It was in particular this exported constraint that forced us to give up
|
calculations are valid. It was in particular this exported constraint that forced us to give up
|
||||||
|
@ -301,6 +372,7 @@ standard~@{cite "bsi:50128:2014"}, 7.2.4.22 and are usually addressed in an own
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
|
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
|
||||||
|
|
||||||
text\<open>
|
text\<open>
|
||||||
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
|
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
|
||||||
like any other modeling activity, not a simple one-to-one translation of some
|
like any other modeling activity, not a simple one-to-one translation of some
|
||||||
|
@ -312,125 +384,144 @@ text\<open>
|
||||||
|
|
||||||
section*[lhf::text_section]
|
section*[lhf::text_section]
|
||||||
\<open>Tracking Concepts and Definitions\<close>
|
\<open>Tracking Concepts and Definitions\<close>
|
||||||
|
|
||||||
text\<open>
|
text\<open>
|
||||||
\isadof is designed to annotate text elements with structured meta-information and to reference
|
\<^isadof> is designed to annotate text elements with structured meta-information and to reference
|
||||||
these text elements throughout the integrated source. A classical application of this capability
|
these text elements throughout the integrated source. A classical application of this capability
|
||||||
is the annotation of concepts and terms definitions---be them informal, semi-formal or formal---and
|
is the annotation of concepts and terms definitions---be them informal, semi-formal or formal---and
|
||||||
their consistent referencing. In the context of our CENELEC ontology, \eg, we can translate the
|
their consistent referencing. In the context of our CENELEC ontology, \<^eg>, we can translate the
|
||||||
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
|
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
|
||||||
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
|
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
|
||||||
of the concept ``traceability'' in paragraphs 3.1.46 (a notion referenced 31 times in the standard),
|
of the concept \<^cenelec_term>\<open>traceability\<close> in paragraphs 3.1.46 (a notion referenced 31 times in
|
||||||
which we translated directly into:
|
the standard), which we translated directly into:
|
||||||
\begin{isar}
|
|
||||||
Definition*[traceability::concept]<open> degree to which relationship
|
@{theory_text [display]\<open>
|
||||||
can be established between two or more products of a development
|
Definition*[traceability, short_name="''traceability''"]
|
||||||
process, especially those having a predecessor/successor or
|
\<open>degree to which relationship can be established between two or more products of a
|
||||||
master/subordinate relationship to one another. <close>
|
development process, especially those having a predecessor/successor or
|
||||||
\end{isar}
|
master/subordinate relationship to one another.\<close>
|
||||||
In the integrated source of the odometry study, we can reference in a text element to this
|
\<close>}
|
||||||
|
|
||||||
|
In the integrated source of the odometry study, we can reference in a text element to this
|
||||||
concept as follows:
|
concept as follows:
|
||||||
\begin{isar}
|
|
||||||
text*[...]<open> ... to assure <@>{concept traceability} for
|
@{theory_text [display]\<open>
|
||||||
<@>{requirement bitwiseAND}, we prove ... <close>
|
text*[...]\<open> ... to assure <@>{cenelec_term traceability} for
|
||||||
\end{isar}
|
<@>{requirement bitwiseAND}, we prove ... \<close>
|
||||||
The presentation of this document element inside \isadof is immediately hyperlinked against the
|
\<close>}
|
||||||
\inlineisar+Definition*+ element shown above; this serves as documentation of
|
|
||||||
|
|
||||||
|
\<^isadof> also uses the underlying ontology to generate the navigation markup inside the IDE, \<^ie>
|
||||||
|
the presentation of this document element inside \<^isadof> is immediately hyperlinked against the
|
||||||
|
@{theory_text \<open> Definition* \<close>}-element shown above; this serves as documentation of
|
||||||
the standard for the development team working on the integrated source. The PDF presentation
|
the standard for the development team working on the integrated source. The PDF presentation
|
||||||
of such links depends on the actual configurations for the document generation; We will explain
|
of such links depends on the actual configurations for the document generation; We will explain
|
||||||
this later.
|
this later.
|
||||||
CENELEC foresees also a number of roles, phases, safety integration levels, etc., which were
|
CENELEC foresees also a number of roles, phases, safety integration levels, etc., which were
|
||||||
directly translated into HOL enumeration types usable in ontological concepts of ODL.
|
directly translated into HOL enumeration types usable in ontological concepts of ODL.
|
||||||
\begin{isar}
|
|
||||||
datatype role =
|
@{theory_text [display]\<open>
|
||||||
PM (* Program Manager *) | RQM (* Requirements Manager *)
|
datatype role =
|
||||||
| DES (* Designer *) | IMP (* Implementer *) |
|
PM (* Program Manager *) | RQM (* Requirements Manager *)
|
||||||
| VER (* Verifier *) | VAL (* Validator *) | ...
|
| DES (* Designer *) | IMP (* Implementer *) |
|
||||||
datatype phase =
|
| VER (* Verifier *) | VAL (* Validator *) | ...
|
||||||
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
|
datatype phase =
|
||||||
| SR (* Software Requirement *) | SA (* Software Architecture *)
|
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
|
||||||
| SDES (* Software Design *) | ...
|
| SR (* Software Requirement *) | SA (* Software Architecture *)
|
||||||
\end{isar}
|
| SDES (* Software Design *) | ...
|
||||||
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
|
\<close>}
|
||||||
a classification of \<^emph>\<open>verification and testing techniques\<close>:
|
|
||||||
\begin{isar}
|
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
|
||||||
datatype vnt_technique =
|
a classification of \<^emph>\<open>verification and testing techniques\<close>:
|
||||||
|
|
||||||
|
@{theory_text [display]\<open>
|
||||||
|
datatype vnt_technique =
|
||||||
formal_proof "thm list" | stat_analysis
|
formal_proof "thm list" | stat_analysis
|
||||||
| dyn_analysis dyn_ana_kind | ...
|
| dyn_analysis dyn_ana_kind | ...
|
||||||
\end{isar}
|
\<close>}
|
||||||
In contrast to the standard, we can parameterize \inlineisar+formal_proof+ with a list of
|
|
||||||
theorems, an entity known in the Isabelle kernel. Here, \isadof assures for text elements
|
In contrast to the standard, we can parameterize \<open>formal_proof\<close> with a list of
|
||||||
|
theorems, an entity known in the Isabelle kernel. Here, \<^isadof> assures for text elements
|
||||||
annotated with theorem names, that they refer indeed to established theorems in the Isabelle
|
annotated with theorem names, that they refer indeed to established theorems in the Isabelle
|
||||||
environment. Additional checks could be added to make sure that these theorems have a particular
|
environment. Additional checks could be added to make sure that these theorems have a particular
|
||||||
form.
|
form.
|
||||||
|
|
||||||
While we claim that this possibility to link to theorems (and test-results) is unique in the
|
While we claim that this possibility to link to theorems (and test-results) is unique in the
|
||||||
world of systems attempting to assure traceability, referencing a particular (proven) theorem is
|
world of systems attempting to assure \<^cenelec_term>\<open>traceability\<close>, referencing a particular
|
||||||
definitively not sufficient to satisfy the claimed requirement. Human evaluators will always have
|
(proven) theorem is definitively not sufficient to satisfy the claimed requirement. Human
|
||||||
to check that the provided theorem \<open>adequately\<close> represents the claim; we do not in the slightest
|
evaluators will always have to check that the provided theorem \<open>adequately\<close> represents the claim;
|
||||||
suggest that their work is superfluous. Our framework allows to statically check that tests or proofs
|
we do not in the slightest suggest that their work is superfluous. Our framework allows to
|
||||||
have been provided, at places where the ontology requires them to be, and both assessors and developers
|
statically check that tests or proofs have been provided, at places where the ontology requires
|
||||||
can rely on this check and navigate through related information easily. It does not guarantee that
|
them to be, and both assessors and developers can rely on this check and navigate through
|
||||||
intended concepts for, \eg, safety or security have been adequately modeled.
|
related information easily. It does not guarantee that intended concepts for, \<^eg>, safety
|
||||||
|
or security have been adequately modeled.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
section*[moe::text_section]
|
section*[moe::text_section]
|
||||||
\<open>Major Ontological Entities: Requirements and Evidence\<close>
|
\<open>Major Ontological Entities: Requirements and Evidence\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \inlineisar*doc_class*
|
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \<^theory_text>\<open>doc_class\<close>
|
||||||
based on some generic basic library \inlineisar*text_element* providing basic layout attributes.
|
based on the generic basic library \<^doc_class>\<open>text_element\<close> providing basic layout attributes.
|
||||||
\begin{isar}
|
|
||||||
doc_class requirement = text_element +
|
@{theory_text [display]\<open>
|
||||||
long_name :: "string option"
|
doc_class requirement = text_element +
|
||||||
is_concerned :: "role set"
|
long_name :: "string option"
|
||||||
\end{isar}
|
is_concerned :: "role set"
|
||||||
where the \inlineisar*roles* are exactly the ones defined in the previous section and represent
|
\<close>}
|
||||||
the groups of stakeholders in the CENELEC process. Therefore, the \inlineisar+is_concerned+-attribute
|
|
||||||
allows expressing who ``owns'' this text-element. \isadof supports a role-based
|
the groups of stakeholders in the CENELEC process. Therefore, the \<open>is_concerned\<close>-attribute
|
||||||
presentation, \eg, different presentation styles of the
|
allows expressing who ``owns'' this text-element. \<^isadof> supports a role-based
|
||||||
integrated source may decide to highlight, to omit, to defer into an annex, text entities
|
presentation, \<^eg>, different presentation styles of the integrated source may decide to highlight,
|
||||||
according to the role-set.
|
to omit, to defer into an annex, text entities according to the role-set.
|
||||||
|
|
||||||
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
|
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
|
||||||
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
|
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
|
||||||
\begin{isar}
|
|
||||||
doc_class sub_requirement =
|
@{theory_text [display]\<open>
|
||||||
decomposes :: "requirement"
|
doc_class sub_requirement =
|
||||||
relates_to :: "requirement set"
|
decomposes :: "requirement"
|
||||||
\end{isar}\<close>
|
relates_to :: "requirement set"
|
||||||
|
\<close>}
|
||||||
|
\<close>
|
||||||
|
|
||||||
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
|
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
|
||||||
text\<open>An example for making explicit implicit principles,
|
text\<open>An example for making explicit implicit principles,
|
||||||
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.:\vspace{-1.5mm}
|
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.: \<^vs>\<open>-0.15cm\<close>
|
||||||
|
|
||||||
\begin{quote}\small
|
\begin{quote}\small
|
||||||
The objective of software verification is to examine and arrive at a judgment based on
|
The objective of software verification is to examine and arrive at a judgment based on
|
||||||
evidence that output items (process, documentation, software or application) of a specific
|
evidence that output items (process, documentation, software or application) of a specific
|
||||||
development phase fulfill the requirements and plans with respect to completeness, correctness
|
development phase fulfill the requirements and plans with respect to completeness, correctness
|
||||||
and consistency.
|
and consistency.
|
||||||
\end{quote}\vspace{-1.5mm}
|
\end{quote} \<^vs>\<open>-0.15cm\<close>
|
||||||
The terms \<^emph>\<open>judgment\<close> and \<^emph>\<open>evidence\<close> are used as a kind of leitmotif throughout the CENELEC
|
|
||||||
standard, but they are neither explained nor even listed in the general glossary. However, the
|
The terms \<^onto_class>\<open>judgement\<close> and \<^term>\<open>evidence\<close> are used as a kind of leitmotif throughout
|
||||||
standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that different stakeholders
|
the CENELEC standard, but they are neither explained nor even listed in the general glossary.
|
||||||
should have in the process. Our version to express this key concept of judgment, \eg, by
|
However, the standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that
|
||||||
the following concept:
|
different stakeholders should have in the process. Our version to express this key concept of
|
||||||
\begin{isar}
|
\<^onto_class>\<open>judgement\<close>, \<^eg>, by the following concept:
|
||||||
doc_class judgement =
|
|
||||||
refers_to :: requirement
|
@{theory_text [display]\<open>
|
||||||
evidence :: "vnt_technique list"
|
doc_class judgement =
|
||||||
status :: status
|
refers_to :: requirement
|
||||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
evidence :: "vnt_technique list"
|
||||||
\end{isar}
|
status :: status
|
||||||
|
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||||
|
\<close>}
|
||||||
|
|
||||||
As one can see, the role set is per default set to the verification team, the assessors and the
|
As one can see, the role set is per default set to the verification team, the assessors and the
|
||||||
validation team.
|
validation team.
|
||||||
|
|
||||||
There are different views possible here: an alternative would be to define \inlineisar+evidence+
|
There are different views possible here: an alternative would be to define \<^term>\<open>evidence\<close>
|
||||||
as ontological concept with \inlineisar+vnt_technique+'s (rather than an attribute of judgement)
|
as ontological concept with \<^typ>\<open>vnt_technique\<close>'s (rather than an attribute of judgement)
|
||||||
and consider the basis of judgments as a relation between requirements and evidence:
|
and consider the basis of a summary containing the relation between requirements and evidence:
|
||||||
\begin{isar}
|
|
||||||
doc_class judgement =
|
@{theory_text [display]\<open>
|
||||||
based_on :: "(requirement \<times> evidence) set"
|
doc_class summary =
|
||||||
status :: status
|
based_on :: "(requirement \<times> evidence) set"
|
||||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
status :: status
|
||||||
\end{isar}
|
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||||
|
\<close>}
|
||||||
|
|
||||||
More experimentation will be needed to find out what kind of ontological modeling is most
|
More experimentation will be needed to find out what kind of ontological modeling is most
|
||||||
adequate for developers in the context of \isadof.
|
adequate for developers in the context of \isadof.
|
||||||
|
@ -442,60 +533,66 @@ text\<open>From the variety of different possibilities for adding CENELEC annota
|
||||||
integrated source, we will, in the following, point out three scenarios.\<close>
|
integrated source, we will, in the following, point out three scenarios.\<close>
|
||||||
|
|
||||||
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
|
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
|
||||||
text\<open>In our case, the SR-team early on detected a property necessary
|
text\<open>In our case, the \<^term>\<open>SR\<close>-team early on detected a property necessary
|
||||||
for error-detection of the device (c.f. @{technical verific}):
|
for error-detection of the device (c.f. @{technical verific}):
|
||||||
\enlargethispage{2\baselineskip}\begin{isar}
|
|
||||||
text*[encoder_props::requirement]<open> The requirement specification team ...
|
@{theory_text [display]\<open>
|
||||||
C1 & C2 & C3 = 0 (bitwise logical AND operation)
|
text*[encoder_props::requirement]\<open> The requirement specification team identifies the property:
|
||||||
C1 | C2 | C3 = 1 (bitwise logical OR operation) <close>
|
C1 & C2 & C3 = 0 (bitwise logical AND operation)
|
||||||
\end{isar}
|
C1 | C2 | C3 = 1 (bitwise logical OR operation) \<close>
|
||||||
|
\<close>}
|
||||||
|
|
||||||
After the Isabelle proofs shown in @{technical verific}, we can either register the theorems
|
After the Isabelle proofs shown in @{technical verific}, we can either register the theorems
|
||||||
directly in an evidence statement:
|
directly in an evidence statement:
|
||||||
|
|
||||||
\begin{isar}
|
@{theory_text [display]\<open>
|
||||||
text*[J1::judgement, refers_to="<@>{docitem <open>encoder_props<close>}",
|
text*[J1::judgement, refers_to="@{docitem <open>encoder_props<close>}",
|
||||||
evidence="[formal_proof[<@>{thm <open>Encoder_Property_1<close>},
|
evidence="[formal_proof[@{thm <open>Encoder_Property_1<close>},
|
||||||
<@>{thm <open>Encoder_Property_2<close>}]]"]
|
@{thm <open>Encoder_Property_2<close>}]]"]
|
||||||
<open>The required encoder properties are in fact verified to be consistent
|
\<open>The required encoder properties are in fact verified to be consistent
|
||||||
with the formalization of <@>{term "phase$_0$"}.<close>
|
with the formalization of @{term "phase\<^sub>0"}.\<close>
|
||||||
\end{isar}
|
\<close>}
|
||||||
The references \inlineisar|<@>{...}|, called antiquotation, allow us not only to reference to
|
|
||||||
|
The references \<open>@{...}\<close>, called antiquotation, allow us not only to reference to
|
||||||
formal concepts, they are checked for consistency and there are also antiquotations that
|
formal concepts, they are checked for consistency and there are also antiquotations that
|
||||||
print the formally checked content (\eg, the statement of a theorem).
|
print the formally checked content (\<^eg>, the statement of a theorem).
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
|
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
|
||||||
text\<open>By definition, the main purpose of the requirement specification is the
|
|
||||||
identification of the safety requirements. As an example, we state the required precision of an
|
|
||||||
odometric function: for any normally behaved distance function \inlineisar+df+, and any representable
|
|
||||||
and valid sampling sequence that can be constructed for \inlineisar+df+, we require that the
|
|
||||||
difference between the physical distance and distance calculable from the
|
|
||||||
@{term Odometric_Position_Count} is bound by the minimal resolution of the odometer.
|
|
||||||
\begin{isar}
|
|
||||||
text*[R5::safety_requirement]<open>We can now state ... <close>
|
|
||||||
definition
|
|
||||||
Odometric_Position_Count_precise::(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool
|
|
||||||
where Odometric_Position_Count_precise odofunction \<equiv>
|
|
||||||
(\<forall> df. \<forall>S. normally_behaved_distance_function df
|
|
||||||
\<longrightarrow> representable S
|
|
||||||
\<longrightarrow> valid_sampling S df
|
|
||||||
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
|
|
||||||
in \<bar>df((length S - 1)*\<delta>t$_{\text{odo}}$) - (\<delta>s$_{\text{res}}$ * pos)\<bar> \<le> \<delta>s$_{\text{res}}$))
|
|
||||||
|
|
||||||
update_instance*[R5::safety_requirement,
|
text\<open>By definition, the main purpose of the requirement specification is the identification of
|
||||||
formal_definition:="[<@>{thm <open>Odometric_Position_Count_precise_def<close>}]"]
|
the safety requirements. As an example, we state the required precision of an odometric function:
|
||||||
\end{isar}
|
for any normally behaved distance function \<open>df\<close>, and any representable and valid
|
||||||
By \inlineisar+update_instance*+, we book the property \inlineisar+Position_Count_precise_def+ as
|
sampling sequence that can be constructed for \<open>df\<close>, we require that the difference
|
||||||
\inlineisar+safety_requirement+, a specific sub-class of \inlineisar+requirement+s
|
between the physical distance and distance calculable from the @{term Odometric_Position_Count}
|
||||||
|
is bound by the minimal resolution of the odometer.
|
||||||
|
|
||||||
|
@{theory_text [display]\<open>
|
||||||
|
text*[R5::safety_requirement]\<open>We can now state ... \<close>
|
||||||
|
definition Odometric_Position_Count_precise :: "(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool"
|
||||||
|
where "Odometric_Position_Count_precise odofunction \<equiv>
|
||||||
|
(\<forall> df. \<forall>S. normally_behaved_distance_function df
|
||||||
|
\<longrightarrow> representable S
|
||||||
|
\<longrightarrow> valid_sampling S df
|
||||||
|
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
|
||||||
|
in \<bar>df((length S - 1)*\<delta>t\<^sub>o\<^sub>d\<^sub>o) - (\<delta>s\<^sub>r\<^sub>e\<^sub>s * pos)\<bar> \<le> \<delta>s\<^sub>r\<^sub>e\<^sub>s))"
|
||||||
|
|
||||||
|
update_instance*[R5::safety_requirement,
|
||||||
|
formal_definition:="[@{thm \<open>Odometric_Position_Count_precise_def\<close>}]"]
|
||||||
|
\<close>}
|
||||||
|
|
||||||
|
By \<^theory_text>\<open>update_instance*\<close>, we book the property \<open>Position_Count_precise_def\<close> as
|
||||||
|
\<^onto_class>\<open>safety_requirement\<close>, a specific sub-class of \<^onto_class>\<open>requirement\<close>s
|
||||||
requesting a formal definition in Isabelle.\<close>
|
requesting a formal definition in Isabelle.\<close>
|
||||||
|
|
||||||
subsection\<open>Exporting Derived Requirements.\<close>
|
subsection\<open>Exporting Derived Requirements.\<close>
|
||||||
|
|
||||||
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
|
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
|
||||||
for a major theorem necessary for the safety requirements; this was in our development the case for
|
for a major theorem necessary for the safety requirements; this was in our development the case for
|
||||||
the condition labelled ``\inlineisar|**|'' in @{docitem verific}. The current CENELEC standard clearly separates
|
the condition labelled ``\<open>**\<close>'' in @{docitem verific}. The current CENELEC standard clearly separates
|
||||||
``requirement specifications'' from ``verification reports,'' which is probably motivated
|
``requirement specifications'' from ``verification reports,'' which is probably motivated
|
||||||
by the overall concern of organizational separation and of document consistency. While this
|
by the overall concern of organizational separation and of document consistency. While this
|
||||||
document organization is possible in \isadof, it is in our experience often counter-productive
|
document organization is possible in \<^isadof>, it is in our experience often counter-productive
|
||||||
in practice: organizations tend to defend their documents because the impact of changes is more and more
|
in practice: organizations tend to defend their documents because the impact of changes is more and more
|
||||||
difficult to oversee. This effect results in a dramatic development slow-down and an increase of
|
difficult to oversee. This effect results in a dramatic development slow-down and an increase of
|
||||||
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
|
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
|
||||||
|
@ -510,58 +607,58 @@ different PDF versions and for each version, document specific consistency guara
|
||||||
automatically enforced.
|
automatically enforced.
|
||||||
|
|
||||||
In our case study, we define this condition as predicate, declare an explanation of it as
|
In our case study, we define this condition as predicate, declare an explanation of it as
|
||||||
\inlineisar+SRAC+ (CENELEC for: safety-related application condition; ontologically, this is a
|
\<^onto_class>\<open>SRAC\<close> (CENELEC for: safety-related application condition; ontologically, this is a
|
||||||
derived class from \inlineisar+requirement+.) and add the definition of the predicate into the
|
derived class from \<^onto_class>\<open>requirement\<close>.) and add the definition of the predicate into the
|
||||||
document instance as described in the previous section.\<close>
|
document instance as described in the previous section.\<close>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
text\<open>\appendix\<close>
|
|
||||||
chapter\<open>Appendix\<close>
|
chapter\<open>Appendix\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
\<^item> \inlineisar|<@>{thm refl}|: @{thm refl}
|
\<^item> \<open>@{thm refl}\<close> : @{thm refl}
|
||||||
\<^item> \inlineisar|<@>{thm [source] refl}|: @{thm [source] refl}
|
\<^item> \<open>@{thm [source] refl}\<close> : @{thm [source] refl}
|
||||||
\<^item> \inlineisar|<@>{thm[mode=Rule] conjI}|: @{thm[mode=Rule] conjI}
|
\<^item> \<open>@{thm[mode=Rule] conjI}\<close> : @{thm[mode=Rule] conjI}
|
||||||
\<^item> \inlineisar|<@>{file "mini_odo.thy"}|: @{file "mini_odo.thy"}
|
\<^item> \<open>@{file "mini_odo.thy"}\<close> : @{file "mini_odo.thy"}
|
||||||
\<^item> \inlineisar|<@>{value "3+4::int"}|: @{value "3+4::int"}
|
\<^item> \<open>@{value "3+4::int"}}\<close> : @{value "3+4::int"}
|
||||||
\<^item> \inlineisar|<@>{const hd}|: @{const hd}
|
\<^item> \<open>@{const hd}\<close> : @{const hd}
|
||||||
\<^item> \inlineisar|<@>{theory HOL.List}|: @{theory HOL.List}
|
\<^item> \<open>@{theory HOL.List}\<close> : @{theory HOL.List}s
|
||||||
\<^item> \inlineisar|<@>{term "3"}|: @{term "3"}
|
\<^item> \<open>@{tserm "3"}\<close> : @{term "3"}
|
||||||
\<^item> \inlineisar|<@>{type bool}|: @{type bool}
|
\<^item> \<open>@{type bool}\<close> : @{type bool}
|
||||||
\<^item> \inlineisar|<@>{term [show_types] "f x = a + x"}|: @{term [show_types] "f x = a + x"}
|
\<^item> \<open>@{thm term [show_types] "f x = a + x"}\<close> : @{term [show_types] "f x = a + x"}
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
|
text\<open>Examples for declaration of typed doc-classes "assumption" (sic!) and "hypothesis" (sic!!),
|
||||||
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
|
concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
|
||||||
text*[ass2::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
|
text*[ass2::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
|
||||||
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
|
text*[hyp1::hypothesis] \<open> \<open>P \<noteq> NP\<close> \<close>
|
||||||
|
|
||||||
text\<open>A real example fragment from a larger project, declaring a text-element as a
|
text\<open>
|
||||||
"safety-related application condition", a concept defined in the
|
A real example fragment fsrom a larger project, declaring a text-element as a
|
||||||
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
|
"safety-related application condition", a concept defined in the
|
||||||
|
@{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
|
||||||
|
|
||||||
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass2\<close>} we establish the following: ... \<close>
|
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass2\<close>} we establish the following: ... \<close>
|
||||||
|
|
||||||
text*[ass122::SRAC, long_name="Some ''ass122''"] \<open> The overall sampling frequence of the odometer
|
text*[ass122::SRAC, long_name="Some ''ass122''"]
|
||||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
\<open> The overall sampling frequence of the odometer subsystem is therefore 14 khz,
|
||||||
result communication times... \<close>
|
which includes sampling, computing and result communication times... \<close>
|
||||||
|
|
||||||
text*[ass123::SRAC] \<open> The overall sampling frequence of the odometer
|
text*[ass123::SRAC]
|
||||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
\<open> The overall sampling frequence of the odometer subsystem is therefore 14 khz,
|
||||||
result communication times... \<close>
|
which includes sampling, computing and result communication times... \<close>
|
||||||
|
|
||||||
text*[ass124::EC, long_name="Some ''ass124''"] \<open> The overall sampling frequence of the odometer
|
text*[ass124::EC, long_name="Some ''ass124''"]
|
||||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
\<open> The overall sampling frequence of the odometer subsystem is therefore 14 khz,
|
||||||
result communication times... \<close>
|
which includes sampling, computing and result communication times... \<close>
|
||||||
|
|
||||||
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
|
text*[t10::test_result]
|
||||||
that governs the external test-execution via, eg., a makefile or specific calls
|
\<open> This is a meta-test. This could be an ML-command that governs the external
|
||||||
to a test-environment or test-engine. \<close>
|
test-execution via, \<^eg>, a makefile or specific calls to a test-environment or test-engine. \<close>
|
||||||
|
|
||||||
|
|
||||||
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
|
text \<open> Finally some examples of references to doc-items, i.e. text-elements
|
||||||
meta-information and status. \<close>
|
with declared meta-information and status. \<close>
|
||||||
|
|
||||||
text \<open> As established by @{test_result (unchecked) \<open>t10\<close>},
|
text \<open> As established by @{test_result (unchecked) \<open>t10\<close>},
|
||||||
@{test_result (define) \<open>t10\<close>} \<close>
|
@{test_result (define) \<open>t10\<close>} \<close>
|
||||||
text \<open> the @{test_result \<open>t10\<close>}
|
text \<open> the @{test_result \<open>t10\<close>}
|
||||||
|
@ -569,10 +666,10 @@ text \<open> the @{test_result \<open>t10\<close>}
|
||||||
text \<open> represent a justification of the safety related applicability
|
text \<open> represent a justification of the safety related applicability
|
||||||
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
|
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
|
||||||
|
|
||||||
(* due to notational conventions for antiquotations, one may even write: *)
|
text \<open> due to notational conventions for antiquotations, one may even write:
|
||||||
(* CRASHES WITH LaTeX backend error. *)
|
|
||||||
text \<open> represent a justification of the safety related applicability
|
"represent a justification of the safety related applicability
|
||||||
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>.\<close>
|
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>."\<close>
|
||||||
|
|
||||||
|
|
||||||
(*<*)
|
(*<*)
|
||||||
|
|
15
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/IsaDofApplications.thy
Executable file → Normal file
|
@ -16,6 +16,9 @@ theory IsaDofApplications
|
||||||
imports "Isabelle_DOF.scholarly_paper"
|
imports "Isabelle_DOF.scholarly_paper"
|
||||||
begin
|
begin
|
||||||
|
|
||||||
|
use_template "lncs"
|
||||||
|
use_ontology "scholarly_paper"
|
||||||
|
|
||||||
open_monitor*[this::article]
|
open_monitor*[this::article]
|
||||||
declare[[strict_monitor_checking=false]]
|
declare[[strict_monitor_checking=false]]
|
||||||
|
|
||||||
|
@ -71,7 +74,7 @@ abstract*[abs::abstract, keywordlist="[''Ontology'',''Ontological Modeling'',''I
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
section*[intro::introduction]\<open> Introduction \<close>
|
section*[intro::introduction]\<open> Introduction \<close>
|
||||||
text*[introtext::introduction]\<open>
|
text*[introtext::introduction, level = "Some 1"]\<open>
|
||||||
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the
|
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the
|
||||||
most pervasive challenge in the digitization of knowledge and its
|
most pervasive challenge in the digitization of knowledge and its
|
||||||
propagation. This challenge incites numerous research efforts
|
propagation. This challenge incites numerous research efforts
|
||||||
|
@ -120,7 +123,7 @@ declare_reference*[ontomod::text_section]
|
||||||
declare_reference*[ontopide::text_section]
|
declare_reference*[ontopide::text_section]
|
||||||
declare_reference*[conclusion::text_section]
|
declare_reference*[conclusion::text_section]
|
||||||
(*>*)
|
(*>*)
|
||||||
text*[plan::introduction]\<open> The plan of the paper is follows: we start by introducing the underlying
|
text*[plan::introduction, level="Some 1"]\<open> The plan of the paper is follows: we start by introducing the underlying
|
||||||
Isabelle system (@{text_section (unchecked) \<open>bgrnd\<close>}) followed by presenting the
|
Isabelle system (@{text_section (unchecked) \<open>bgrnd\<close>}) followed by presenting the
|
||||||
essentials of \<^isadof> and its ontology language (@{text_section (unchecked) \<open>isadof\<close>}).
|
essentials of \<^isadof> and its ontology language (@{text_section (unchecked) \<open>isadof\<close>}).
|
||||||
It follows @{text_section (unchecked) \<open>ontomod\<close>}, where we present three application
|
It follows @{text_section (unchecked) \<open>ontomod\<close>}, where we present three application
|
||||||
|
@ -130,7 +133,7 @@ conclusions and discuss related work in @{text_section (unchecked) \<open>conclu
|
||||||
|
|
||||||
section*[bgrnd::text_section,main_author="Some(@{docitem ''bu''}::author)"]
|
section*[bgrnd::text_section,main_author="Some(@{docitem ''bu''}::author)"]
|
||||||
\<open> Background: The Isabelle System \<close>
|
\<open> Background: The Isabelle System \<close>
|
||||||
text*[background::introduction]\<open>
|
text*[background::introduction, level="Some 1"]\<open>
|
||||||
While Isabelle is widely perceived as an interactive theorem prover
|
While Isabelle is widely perceived as an interactive theorem prover
|
||||||
for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we
|
for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we
|
||||||
would like to emphasize the view that Isabelle is far more than that:
|
would like to emphasize the view that Isabelle is far more than that:
|
||||||
|
@ -159,7 +162,7 @@ figure*[architecture::figure,relative_width="100",src="''figures/isabelle-archit
|
||||||
asynchronous communication between the Isabelle system and
|
asynchronous communication between the Isabelle system and
|
||||||
the IDE (right-hand side). \<close>
|
the IDE (right-hand side). \<close>
|
||||||
|
|
||||||
text*[blug::introduction]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
|
text*[blug::introduction, level="Some 1"]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
|
||||||
comes with many layers, with Standard ML (SML) at the bottom layer as implementation
|
comes with many layers, with Standard ML (SML) at the bottom layer as implementation
|
||||||
language. The architecture actually foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which
|
language. The architecture actually foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which
|
||||||
resides in the SML structure \<^ML_structure>\<open>Context\<close>. This structure provides a kind of container called
|
resides in the SML structure \<^ML_structure>\<open>Context\<close>. This structure provides a kind of container called
|
||||||
|
@ -191,7 +194,7 @@ For the antiquotation \inlineisar+\at{value "fac 5"}+ we assume the usual defin
|
||||||
\inlineisar+fac+ in HOL.
|
\inlineisar+fac+ in HOL.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
text*[anti]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
|
text*[anti::introduction, level = "Some 1"]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
|
||||||
displayed and can be used for calculations before actually being typeset. When editing,
|
displayed and can be used for calculations before actually being typeset. When editing,
|
||||||
Isabelle's PIDE offers auto-completion and error-messages while typing the above
|
Isabelle's PIDE offers auto-completion and error-messages while typing the above
|
||||||
\<^emph>\<open>semi-formal\<close> content. \<close>
|
\<^emph>\<open>semi-formal\<close> content. \<close>
|
||||||
|
@ -256,7 +259,7 @@ enforcing a sequence of text-elements that must belong to the corresponding clas
|
||||||
To start using \<^isadof>, one creates an Isabelle project (with the name
|
To start using \<^isadof>, one creates an Isabelle project (with the name
|
||||||
\inlinebash{IsaDofApplications}):
|
\inlinebash{IsaDofApplications}):
|
||||||
\begin{bash}
|
\begin{bash}
|
||||||
isabelle mkroot_DOF -o scholarly_paper -t lncs -d IsaDofApplications
|
isabelle dof_mkroot -o scholarly_paper -t lncs IsaDofApplications
|
||||||
\end{bash}
|
\end{bash}
|
||||||
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
|
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
|
||||||
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
|
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
|
||||||
|
|
|
@ -1,13 +1,13 @@
|
||||||
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
|
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
|
||||||
options [document = pdf, document_output = "output", document_build = dof,
|
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
|
||||||
quick_and_dirty = true]
|
|
||||||
theories
|
theories
|
||||||
IsaDofApplications
|
IsaDofApplications
|
||||||
document_files
|
document_files
|
||||||
"isadof.cfg"
|
|
||||||
"root.bib"
|
"root.bib"
|
||||||
|
"authorarchive.sty"
|
||||||
"preamble.tex"
|
"preamble.tex"
|
||||||
"lstisadof.sty"
|
"lstisadof.sty"
|
||||||
|
"vector_iD_icon.pdf"
|
||||||
"figures/isabelle-architecture.pdf"
|
"figures/isabelle-architecture.pdf"
|
||||||
"figures/Dogfood-Intro.png"
|
"figures/Dogfood-Intro.png"
|
||||||
"figures/InteractiveMathSheet.png"
|
"figures/InteractiveMathSheet.png"
|
||||||
|
|
|
@ -0,0 +1,339 @@
|
||||||
|
%% Copyright (C) 2008-2019 Achim D. Brucker, https://www.brucker.ch
|
||||||
|
%%
|
||||||
|
%% License:
|
||||||
|
%% This program can be redistributed and/or modified under the terms
|
||||||
|
%% of the LaTeX Project Public License Distributed from CTAN
|
||||||
|
%% archives in directory macros/latex/base/lppl.txt; either
|
||||||
|
%% version 1.3c of the License, or (at your option) any later version.
|
||||||
|
%% OR
|
||||||
|
%% The 2-clause BSD-style license.
|
||||||
|
%%
|
||||||
|
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||||
|
\NeedsTeXFormat{LaTeX2e}\relax
|
||||||
|
\ProvidesPackage{authorarchive}
|
||||||
|
[0000/00/00 Unreleased v1.1.1+%
|
||||||
|
Self-archiving information for scientific publications.]
|
||||||
|
%
|
||||||
|
\PassOptionsToPackage{hyphens}{url}
|
||||||
|
%
|
||||||
|
\RequirePackage{ifthen}
|
||||||
|
\RequirePackage[inline]{enumitem}
|
||||||
|
\RequirePackage{graphicx}
|
||||||
|
\RequirePackage{eso-pic}
|
||||||
|
\RequirePackage{intopdf}
|
||||||
|
\RequirePackage{kvoptions}
|
||||||
|
\RequirePackage{hyperref}
|
||||||
|
\RequirePackage{calc}
|
||||||
|
\RequirePackage{qrcode}
|
||||||
|
\RequirePackage{hvlogos}
|
||||||
|
%
|
||||||
|
%Better url breaking
|
||||||
|
\g@addto@macro{\UrlBreaks}{\UrlOrds}
|
||||||
|
%
|
||||||
|
% Option declarations
|
||||||
|
% -------------------
|
||||||
|
\SetupKeyvalOptions{
|
||||||
|
family=AA,
|
||||||
|
prefix=AA@
|
||||||
|
}
|
||||||
|
%
|
||||||
|
\DeclareStringOption[.]{bibtexdir}
|
||||||
|
\DeclareStringOption[https://duckduckgo.com/?q=]{baseurl}
|
||||||
|
\DeclareStringOption[.pdf]{suffix}
|
||||||
|
\DeclareStringOption[UNKNOWN PUBLISHER]{publisher}[]
|
||||||
|
\DeclareStringOption[UNKNOWN YEAR]{year}[]
|
||||||
|
\DeclareStringOption[]{key}[]
|
||||||
|
\DeclareStringOption[]{doi}[]
|
||||||
|
\DeclareStringOption[]{doiText}[]
|
||||||
|
\DeclareStringOption[]{publisherurl}[]
|
||||||
|
\DeclareStringOption[UNKNOWN START PAGE]{startpage}[]
|
||||||
|
\DeclareStringOption[UNKNOWN PUBLICATION]{publication}[]
|
||||||
|
|
||||||
|
\DeclareBoolOption{ACM}
|
||||||
|
\DeclareBoolOption{acmart}
|
||||||
|
\DeclareBoolOption{ENTCS}
|
||||||
|
\DeclareBoolOption{IEEE}
|
||||||
|
\DeclareBoolOption{LNCS}
|
||||||
|
\DeclareBoolOption{LNI}
|
||||||
|
\DeclareBoolOption{nocopyright}
|
||||||
|
\DeclareBoolOption{nourl}
|
||||||
|
\DeclareBoolOption{nobib}
|
||||||
|
\DeclareBoolOption{orcidicon}
|
||||||
|
%\ProcessOptions\relax
|
||||||
|
|
||||||
|
|
||||||
|
% Default option rule
|
||||||
|
\DeclareDefaultOption{%
|
||||||
|
\ifx\CurrentOptionValue\relax
|
||||||
|
\PackageWarningNoLine{\@currname}{%
|
||||||
|
Unknown option `\CurrentOption'\MessageBreak
|
||||||
|
is passed to package `authorarchive'%
|
||||||
|
}%
|
||||||
|
% Pass the option to package color.
|
||||||
|
% Again it is better to expand \CurrentOption.
|
||||||
|
\expandafter\PassOptionsToPackage\expandafter{\CurrentOption}{color}%
|
||||||
|
\else
|
||||||
|
% Package color does not take options with values.
|
||||||
|
% We provide the standard LaTeX error.
|
||||||
|
\@unknownoptionerror
|
||||||
|
\fi
|
||||||
|
}
|
||||||
|
\ProcessKeyvalOptions*
|
||||||
|
|
||||||
|
% Provide command for dynamic configuration seutp
|
||||||
|
\def\authorsetup{\kvsetkeys{AA}}
|
||||||
|
|
||||||
|
% Load local configuration
|
||||||
|
\InputIfFileExists{authorarchive.config}{}{}
|
||||||
|
|
||||||
|
|
||||||
|
\newlength\AA@x
|
||||||
|
\newlength\AA@y
|
||||||
|
\newlength\AA@width
|
||||||
|
|
||||||
|
\def\AA@bibBibTeX{\AA@bibtexdir/\AA@key.bib}
|
||||||
|
\def\AA@bibBibTeXLong{\AA@bibtexdir/\AA@key.bibtex}
|
||||||
|
\def\AA@bibWord{\AA@bibtexdir/\AA@key.word.xml}
|
||||||
|
\def\AA@bibEndnote{\AA@bibtexdir/\AA@key.enw}
|
||||||
|
\def\AA@bibRIS{\AA@bibtexdir/\AA@key.ris}
|
||||||
|
|
||||||
|
\newboolean{AA@bibExists}
|
||||||
|
\setboolean{AA@bibExists}{false}
|
||||||
|
\IfFileExists{\AA@bibBibTeX}{\setboolean{AA@bibExists}{true}}{}
|
||||||
|
\IfFileExists{\AA@bibBibTeXLong}{\setboolean{AA@bibExists}{true}}{}
|
||||||
|
\IfFileExists{\AA@bibWord}{\setboolean{AA@bibExists}{true}}{}
|
||||||
|
\IfFileExists{\AA@bibEndnote}{\setboolean{AA@bibExists}{true}}{}
|
||||||
|
\IfFileExists{\AA@bibRIS}{\setboolean{AA@bibExists}{true}}{}
|
||||||
|
|
||||||
|
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
|
||||||
|
|
||||||
|
\newcommand{\authorcrfont}{\footnotesize}
|
||||||
|
\newcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{.2cm-\paperheight}){#1}}}
|
||||||
|
\newcommand{\authorwidth}[1]{\setlength{\AA@width}{#1}}
|
||||||
|
\setlength{\AA@width}{\textwidth}
|
||||||
|
|
||||||
|
\def\AA@pageinfo{}
|
||||||
|
\ifthenelse{\equal{\AA@startpage}{UNKNOWN START PAGE}}{%
|
||||||
|
}{%
|
||||||
|
\setcounter{page}{\AA@startpage}%
|
||||||
|
\def\AA@pageinfo{pp. \thepage--\pageref{\aa@lastpage}, }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
%%%% sig-alternate.cls
|
||||||
|
\ifAA@ACM%
|
||||||
|
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||||
|
\setkeys{AA}{publisher=ACM}
|
||||||
|
}{}
|
||||||
|
\global\boilerplate={}
|
||||||
|
\global\copyrightetc={}
|
||||||
|
\renewcommand{\conferenceinfo}[2]{}
|
||||||
|
\renewcommand{\authorcrfont}{\scriptsize}
|
||||||
|
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
|
||||||
|
\setlength\AA@y{-\textheight+\topmargin+\headheight-\footskip} % -\voffset-\topmargin-\headheight-\footskip}
|
||||||
|
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{\AA@y}){#1}}
|
||||||
|
\setlength{\AA@width}{\columnwidth}
|
||||||
|
\fi
|
||||||
|
%
|
||||||
|
%%%% acmart.cls
|
||||||
|
\ifAA@acmart%
|
||||||
|
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||||
|
\setkeys{AA}{publisher=ACM}
|
||||||
|
}{}
|
||||||
|
\renewcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{0.2cm-\paperheight}){#1}}}
|
||||||
|
\setlength{\AA@width}{\textwidth}
|
||||||
|
\fi
|
||||||
|
%
|
||||||
|
%%%% LNCS
|
||||||
|
\ifAA@LNCS%
|
||||||
|
\ifAA@orcidicon%
|
||||||
|
\renewcommand{\orcidID}[1]{\href{https://orcid.org/#1}{%
|
||||||
|
\textsuperscript{\,\includegraphics[height=2\fontcharht\font`A]{vector_iD_icon}}}}
|
||||||
|
\else\relax\fi%
|
||||||
|
%
|
||||||
|
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||||
|
\setkeys{AA}{publisher=Springer-Verlag}
|
||||||
|
}{}
|
||||||
|
\renewcommand{\authorcrfont}{\scriptsize}
|
||||||
|
\@ifclasswith{llncs}{a4paper}{%
|
||||||
|
\ExplSyntaxOn
|
||||||
|
\@ifundefined{pdfmanagement_add:nnn}{%
|
||||||
|
\pdfpagesattr{/CropBox [92 114 523 780]}%
|
||||||
|
}{%
|
||||||
|
\pdfmanagement_add:nnn {Pages}{CropBox}{[92~114~523~780]}
|
||||||
|
}%
|
||||||
|
\ExplSyntaxOff
|
||||||
|
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},40){#1}}%
|
||||||
|
}{%
|
||||||
|
\ExplSyntaxOn
|
||||||
|
\@ifundefined{pdfmanagement_add:nnn}{%
|
||||||
|
\pdfpagesattr{/CropBox [92 65 523 731]}% LNCS page: 152x235 mm
|
||||||
|
}{%
|
||||||
|
\pdfmanagement_add:nnn {Pages}{CropBox}{[92~62~523~731]}
|
||||||
|
}%
|
||||||
|
\ExplSyntaxOff
|
||||||
|
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},23){#1}}
|
||||||
|
}
|
||||||
|
\setlength{\AA@width}{\textwidth}
|
||||||
|
\setcounter{tocdepth}{2}
|
||||||
|
\fi
|
||||||
|
%
|
||||||
|
%%%% LNI
|
||||||
|
\ifAA@LNI%
|
||||||
|
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||||
|
\setkeys{AA}{publisher=GI}
|
||||||
|
}{}
|
||||||
|
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},35){#1}}
|
||||||
|
\renewcommand{\authorcrfont}{\scriptsize}
|
||||||
|
\pdfpagesattr{/CropBox [70 65 526.378 748.15]} % TODO
|
||||||
|
\setlength{\AA@width}{\textwidth}
|
||||||
|
\setcounter{tocdepth}{2}
|
||||||
|
\fi
|
||||||
|
%
|
||||||
|
%%%% ENTCS
|
||||||
|
\ifAA@ENTCS%
|
||||||
|
\addtolength{\voffset}{1cm}
|
||||||
|
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||||
|
\setkeys{AA}{publisher=Elsevier Science B.~V.}
|
||||||
|
}{}
|
||||||
|
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{-.5cm-\the\ht\AA@authoratBox}){#1}}
|
||||||
|
\renewcommand{\authorcrfont}{\scriptsize}
|
||||||
|
\setlength{\AA@width}{\textwidth}
|
||||||
|
\fi
|
||||||
|
%
|
||||||
|
%%%% IEEE
|
||||||
|
\ifAA@IEEE%
|
||||||
|
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||||
|
\setkeys{AA}{publisher=IEEE}
|
||||||
|
}{}
|
||||||
|
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},6){#1}}
|
||||||
|
\renewcommand{\authorcrfont}{\scriptsize}
|
||||||
|
\setlength{\AA@width}{\textwidth}
|
||||||
|
\setcounter{tocdepth}{2}
|
||||||
|
\fi
|
||||||
|
%
|
||||||
|
|
||||||
|
\hypersetup{%
|
||||||
|
draft = false,
|
||||||
|
bookmarksopen = true,
|
||||||
|
bookmarksnumbered= true,
|
||||||
|
pdfauthor = {\@author},
|
||||||
|
pdftitle = {\@title},
|
||||||
|
}
|
||||||
|
|
||||||
|
\@ifpackageloaded{totpages}{%
|
||||||
|
\def\aa@lastpage{TotPages}
|
||||||
|
}{%
|
||||||
|
\RequirePackage{lastpage}
|
||||||
|
\def\aa@lastpage{LastPage}
|
||||||
|
}
|
||||||
|
\newsavebox{\AA@authoratBox}
|
||||||
|
|
||||||
|
\AddToShipoutPicture*{%
|
||||||
|
\setlength{\unitlength}{1mm}%
|
||||||
|
\savebox{\AA@authoratBox}{%
|
||||||
|
\parbox{1.4cm}{%
|
||||||
|
\bgroup%
|
||||||
|
\normallineskiplimit=0pt%
|
||||||
|
\ifAA@nourl%
|
||||||
|
\ifx\AA@doi\@empty\relax%
|
||||||
|
\else%
|
||||||
|
\qrcode[hyperlink,height=1.17cm,padding]{https://doi.org/\AA@doi}%
|
||||||
|
\fi%
|
||||||
|
\else%
|
||||||
|
\qrcode[hyperlink,height=1.17cm,padding]{\AA@baseurl/\AA@key\AA@suffix}%
|
||||||
|
\fi%
|
||||||
|
\egroup%
|
||||||
|
}%
|
||||||
|
\ifAA@nourl\ifx\AA@doi\@empty\addtolength{\AA@width}{1.4cm}\fi\fi
|
||||||
|
\parbox{\AA@width-1.4cm}{\authorcrfont%
|
||||||
|
\ifAA@LNCS%
|
||||||
|
\AA@publication, \AA@pageinfo \AA@year. %
|
||||||
|
\ifAA@nocopyright\else
|
||||||
|
\textcopyright~\AA@year~\AA@publisher.
|
||||||
|
\fi
|
||||||
|
This is the author's
|
||||||
|
version of the work. It is posted
|
||||||
|
\ifAA@nourl\relax\else%
|
||||||
|
at \url{\AA@baseurl/\AA@key\AA@suffix} %
|
||||||
|
\fi
|
||||||
|
\ifAA@nocopyright\relax\else
|
||||||
|
by permission of \AA@publisher{}
|
||||||
|
\fi
|
||||||
|
for your personal use.
|
||||||
|
\ifx\AA@doi\@empty%
|
||||||
|
\relax
|
||||||
|
\else
|
||||||
|
The final publication is available at Springer via
|
||||||
|
\ifx\AA@doiText\@empty%
|
||||||
|
\url{https://doi.org/\AA@doi}.
|
||||||
|
\else
|
||||||
|
\href{https://doi.org/\AA@doi}{\AA@doiText}.
|
||||||
|
\fi
|
||||||
|
\fi
|
||||||
|
\else
|
||||||
|
\ifAA@nocopyright\relax\else
|
||||||
|
\textcopyright~\AA@year~\AA@publisher. %
|
||||||
|
\fi%
|
||||||
|
This is the author's
|
||||||
|
version of the work. It is posted
|
||||||
|
\ifAA@nourl\relax\else%
|
||||||
|
at \url{\AA@baseurl/\AA@key\AA@suffix} %
|
||||||
|
\fi
|
||||||
|
\ifAA@nocopyright\relax\else
|
||||||
|
by permission of \AA@publisher{} %
|
||||||
|
\fi
|
||||||
|
for your personal use. Not for redistribution. The definitive
|
||||||
|
version was published in \emph{\AA@publication}, \AA@pageinfo \AA@year%
|
||||||
|
\ifx\AA@doi\@empty%
|
||||||
|
\ifx\AA@publisherurl\@empty%
|
||||||
|
.%
|
||||||
|
\else
|
||||||
|
\url{\AA@publisherurl}.%
|
||||||
|
\fi
|
||||||
|
\else
|
||||||
|
\ifx\AA@doiText\@empty%
|
||||||
|
, doi: \href{https://doi.org/\AA@doi}{\AA@doi}.%
|
||||||
|
\else
|
||||||
|
, doi: \href{https://doi.org/\AA@doi}{\AA@doiText}.%
|
||||||
|
\fi
|
||||||
|
\fi
|
||||||
|
\fi
|
||||||
|
\ifAA@nobib\relax\else%
|
||||||
|
\ifthenelse{\boolean{AA@bibExists}}{%
|
||||||
|
\hfill
|
||||||
|
\begin{itemize*}[label={}, itemjoin={,}]
|
||||||
|
\IfFileExists{\AA@bibBibTeX}{%
|
||||||
|
\item \attachandlink{\AA@bibBibTeX}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}%
|
||||||
|
}{%
|
||||||
|
\IfFileExists{\AA@bibBibTeXLong}{%
|
||||||
|
\item \attachandlink[\AA@key.bib]{\AA@bibBibTeXLong}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}%
|
||||||
|
}{%
|
||||||
|
\typeout{No file \AA@bibBibTeX{} (and no \AA@bibBibTeXLong) found. Not embedded reference in BibTeX format.}%
|
||||||
|
}%
|
||||||
|
}%
|
||||||
|
\IfFileExists{\AA@bibWord}{%
|
||||||
|
\item \attachandlink{\AA@bibWord}[application/xml]{XML entry of this paper (e.g., for Word 2007 and later)}{Word}%
|
||||||
|
}{%
|
||||||
|
\typeout{No file \AA@bibWord{} found. Not embedded reference for Word 2007 and later.}%
|
||||||
|
}%
|
||||||
|
\IfFileExists{\AA@bibEndnote}{%
|
||||||
|
\item \attachandlink{\AA@bibEndnote}[application/x-endnote-refer]{Endnote entry of this paper}{EndNote}%
|
||||||
|
}{%
|
||||||
|
\typeout{No file \AA@bibEndnote{} found. Not embedded reference in Endnote format.}%
|
||||||
|
}%
|
||||||
|
\IfFileExists{\AA@bibRIS}{%
|
||||||
|
\item \attachandlink{\AA@bibRIS}[application/x-research-info-systems]{RIS entry of this paper}{RIS}%
|
||||||
|
}{%
|
||||||
|
\typeout{No file \AA@bibRIS{} found. Not embedded reference in RIS format.}%
|
||||||
|
}%
|
||||||
|
\end{itemize*}\\
|
||||||
|
}{%
|
||||||
|
\PackageError{authorarchive}{No bibliographic files found. Specify option 'nobib' if this is intended.}
|
||||||
|
}
|
||||||
|
\fi
|
||||||
|
}
|
||||||
|
}
|
||||||
|
\authorat{\raisebox{\the\ht\AA@authoratBox}{\usebox{\AA@authoratBox}}}
|
||||||
|
}
|
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-II-bgnd1.png
Executable file → Normal file
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 14 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-III-bgnd-text_section.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-IV-jumpInDocCLass.png
Executable file → Normal file
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-Intro.png
Executable file → Normal file
Before Width: | Height: | Size: 85 KiB After Width: | Height: | Size: 85 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-V-attribute.png
Executable file → Normal file
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/Dogfood-figures.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/InteractiveMathSheet.png
Executable file → Normal file
Before Width: | Height: | Size: 75 KiB After Width: | Height: | Size: 75 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/antiquotations-PIDE.png
Executable file → Normal file
Before Width: | Height: | Size: 96 KiB After Width: | Height: | Size: 96 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/isabelle-architecture.pdf
Executable file → Normal file
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/isabelle-architecture.svg
Executable file → Normal file
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 57 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/srac-as-es-application.png
Executable file → Normal file
Before Width: | Height: | Size: 67 KiB After Width: | Height: | Size: 67 KiB |
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/figures/srac-definition.png
Executable file → Normal file
Before Width: | Height: | Size: 50 KiB After Width: | Height: | Size: 50 KiB |
|
@ -1,2 +0,0 @@
|
||||||
Template: scrartcl
|
|
||||||
Ontology: scholarly_paper
|
|
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/lstisadof.sty
Executable file → Normal file
29
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/preamble.tex
Executable file → Normal file
|
@ -53,23 +53,18 @@
|
||||||
\usepackage[size=footnotesize]{caption}
|
\usepackage[size=footnotesize]{caption}
|
||||||
|
|
||||||
|
|
||||||
\subject{Example of an Academic Paper\footnote{%
|
\usepackage[LNCS,
|
||||||
This document is an example setup for writing an academic paper. While
|
orcidicon,
|
||||||
it is optimized for the Springer's LNCS class, it uses a Koma Script
|
key=brucker.ea-isabelle-ontologies-2018,
|
||||||
LaTeX class to avoid the re-distribution of \texttt{llncs.cls},
|
year=2018,
|
||||||
which would violate Springer's copyright. This example has been
|
publication={F. Rabe et al. (Eds.): CICM 2018, LNAI 11006},
|
||||||
published at CICM 2018:
|
nobib,
|
||||||
\protect\begin{quote}
|
startpage={1},
|
||||||
Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and
|
doi={10.1007/978-3-319-96812-4_3},
|
||||||
Burkhart Wolff. Using The Isabelle Ontology Framework: Linking
|
doiText={10.1007/978-3-319-96812-4\_3},
|
||||||
the Formal with the Informal. In Conference on Intelligent
|
]{authorarchive}
|
||||||
Computer Mathematics (CICM). Lecture Notes in Computer Science
|
\authorrunning{A. D. Brucker et al.}
|
||||||
(11006), Springer-Verlag, 2018.
|
\pagestyle{headings}
|
||||||
\protect\end{quote}
|
|
||||||
Note that the content of this example is not updated and, hence,
|
|
||||||
might not be correct with respect to the latest version of
|
|
||||||
\isadof{}.
|
|
||||||
}}
|
|
||||||
|
|
||||||
|
|
||||||
\title{<TITLE>}
|
\title{<TITLE>}
|
||||||
|
|
0
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/root.bib
Executable file → Normal file
|
@ -3,6 +3,5 @@ session "2020-iFM-csp" = "Isabelle_DOF" +
|
||||||
theories
|
theories
|
||||||
"paper"
|
"paper"
|
||||||
document_files
|
document_files
|
||||||
"isadof.cfg"
|
|
||||||
"root.bib"
|
"root.bib"
|
||||||
"preamble.tex"
|
"preamble.tex"
|
||||||
|
|
|
@ -1,2 +0,0 @@
|
||||||
Template: scrartcl
|
|
||||||
Ontology: scholarly_paper
|
|
|
@ -6870,7 +6870,7 @@ isbn="978-3-540-48509-4"
|
||||||
title = {{Isabelle's} Logic: {HOL}},
|
title = {{Isabelle's} Logic: {HOL}},
|
||||||
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
|
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
|
||||||
year = 2009,
|
year = 2009,
|
||||||
misc = {\url{http://isabelle.in.tum.de/library/HOL/}}
|
misc = {\url{https://isabelle.in.tum.de/library/HOL/}}
|
||||||
}
|
}
|
||||||
|
|
||||||
@InProceedings{ garson.ea:security:2008,
|
@InProceedings{ garson.ea:security:2008,
|
||||||
|
@ -11000,7 +11000,7 @@ isbn="978-1-4471-3182-3"
|
||||||
journal = {Archive of Formal Proofs},
|
journal = {Archive of Formal Proofs},
|
||||||
month = apr,
|
month = apr,
|
||||||
year = 2019,
|
year = 2019,
|
||||||
note = {\url{http://isa-afp.org/entries/HOL-CSP.html}},
|
note = {\url{https://isa-afp.org/entries/HOL-CSP.html}},
|
||||||
ISSN = {2150-914x},
|
ISSN = {2150-914x},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -3,6 +3,8 @@ theory "paper"
|
||||||
imports "Isabelle_DOF.scholarly_paper"
|
imports "Isabelle_DOF.scholarly_paper"
|
||||||
begin
|
begin
|
||||||
|
|
||||||
|
use_template "scrartcl"
|
||||||
|
use_ontology "scholarly_paper"
|
||||||
|
|
||||||
open_monitor*[this::article]
|
open_monitor*[this::article]
|
||||||
|
|
||||||
|
@ -50,7 +52,7 @@ abstract*[abs, keywordlist="[\<open>Shallow Embedding\<close>,\<open>Process-Alg
|
||||||
\<close>
|
\<close>
|
||||||
text\<open>\<close>
|
text\<open>\<close>
|
||||||
section*[introheader::introduction,main_author="Some(@{docitem ''bu''}::author)"]\<open> Introduction \<close>
|
section*[introheader::introduction,main_author="Some(@{docitem ''bu''}::author)"]\<open> Introduction \<close>
|
||||||
text*[introtext::introduction]\<open>
|
text*[introtext::introduction, level="Some 1"]\<open>
|
||||||
Communicating Sequential Processes (\<^csp>) is a language
|
Communicating Sequential Processes (\<^csp>) is a language
|
||||||
to specify and verify patterns of interaction of concurrent systems.
|
to specify and verify patterns of interaction of concurrent systems.
|
||||||
Together with CCS and LOTOS, it belongs to the family of \<^emph>\<open>process algebras\<close>.
|
Together with CCS and LOTOS, it belongs to the family of \<^emph>\<open>process algebras\<close>.
|
||||||
|
@ -152,7 +154,7 @@ processes \<open>Skip\<close> (successful termination) and \<open>Stop\<close> (
|
||||||
\<open>\<T>(Skip) = \<T>(Stop) = {[]}\<close>.
|
\<open>\<T>(Skip) = \<T>(Stop) = {[]}\<close>.
|
||||||
Note that the trace sets, representing all \<^emph>\<open>partial\<close> history, is in general prefix closed.\<close>
|
Note that the trace sets, representing all \<^emph>\<open>partial\<close> history, is in general prefix closed.\<close>
|
||||||
|
|
||||||
text*[ex1::math_example, status=semiformal] \<open>
|
text*[ex1::math_example, status=semiformal, level="Some 1"] \<open>
|
||||||
Let two processes be defined as follows:
|
Let two processes be defined as follows:
|
||||||
|
|
||||||
\<^enum> \<open>P\<^sub>d\<^sub>e\<^sub>t = (a \<rightarrow> Stop) \<box> (b \<rightarrow> Stop)\<close>
|
\<^enum> \<open>P\<^sub>d\<^sub>e\<^sub>t = (a \<rightarrow> Stop) \<box> (b \<rightarrow> Stop)\<close>
|
||||||
|
@ -352,7 +354,7 @@ Roscoe and Brooks @{cite "Roscoe1992AnAO"} finally proposed another ordering, ca
|
||||||
that completeness could at least be assured for read-operations. This more complex ordering
|
that completeness could at least be assured for read-operations. This more complex ordering
|
||||||
is based on the concept \<^emph>\<open>refusals after\<close> a trace \<open>s\<close> and defined by \<open>\<R> P s \<equiv> {X | (s, X) \<in> \<F> P}\<close>.\<close>
|
is based on the concept \<^emph>\<open>refusals after\<close> a trace \<open>s\<close> and defined by \<open>\<R> P s \<equiv> {X | (s, X) \<in> \<F> P}\<close>.\<close>
|
||||||
|
|
||||||
Definition*[process_ordering, short_name="''process ordering''"]\<open>
|
Definition*[process_ordering, level= "Some 2", short_name="''process ordering''"]\<open>
|
||||||
We define \<open>P \<sqsubseteq> Q \<equiv> \<psi>\<^sub>\<D> \<and> \<psi>\<^sub>\<R> \<and> \<psi>\<^sub>\<M> \<close>, where
|
We define \<open>P \<sqsubseteq> Q \<equiv> \<psi>\<^sub>\<D> \<and> \<psi>\<^sub>\<R> \<and> \<psi>\<^sub>\<M> \<close>, where
|
||||||
\<^enum> \<open>\<psi>\<^sub>\<D> = \<D> P \<supseteq> \<D> Q \<close>
|
\<^enum> \<open>\<psi>\<^sub>\<D> = \<D> P \<supseteq> \<D> Q \<close>
|
||||||
\<^enum> \<open>\<psi>\<^sub>\<R> = s \<notin> \<D> P \<Rightarrow> \<R> P s = \<R> Q s\<close>
|
\<^enum> \<open>\<psi>\<^sub>\<R> = s \<notin> \<D> P \<Rightarrow> \<R> P s = \<R> Q s\<close>
|
||||||
|
@ -528,10 +530,10 @@ To handle termination better, we added two new processes \<open>CHAOS\<^sub>S\<^
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
(*<*) (* a test ...*)
|
(*<*) (* a test ...*)
|
||||||
text*[X22 ::math_content ]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
text*[X22 ::math_content, level="Some 2" ]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||||
text*[X32::"definition", mcc=defn]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
text*[X32::"definition", level="Some 2", mcc=defn]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||||
Definition*[X42]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
Definition*[X42, level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||||
Definition*[X52::"definition"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
Definition*[X52::"definition", level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||||
|
|
||||||
text\<open> The \<open>RUN\<close>-process defined @{math_content X22} represents the process that accepts all
|
text\<open> The \<open>RUN\<close>-process defined @{math_content X22} represents the process that accepts all
|
||||||
events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in two variants shown in
|
events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in two variants shown in
|
||||||
|
@ -539,11 +541,11 @@ events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in
|
||||||
stops or accepts any offered event, whereas \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P\<close> can additionally terminate.\<close>
|
stops or accepts any offered event, whereas \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P\<close> can additionally terminate.\<close>
|
||||||
(*>*)
|
(*>*)
|
||||||
|
|
||||||
Definition*[X2]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
Definition*[X2, level="Some 2"]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||||
Definition*[X3]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
Definition*[X3, level="Some 2"]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||||
Definition*[X4]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close>\<close>
|
Definition*[X4, level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close>\<close>
|
||||||
Definition*[X5]\<open>\<open>DF A \<equiv> \<mu> X. (\<sqinter> x \<in> A \<rightarrow> X)\<close> \<close>
|
Definition*[X5, level="Some 2"]\<open>\<open>DF A \<equiv> \<mu> X. (\<sqinter> x \<in> A \<rightarrow> X)\<close> \<close>
|
||||||
Definition*[X6]\<open>\<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. ((\<sqinter> x \<in> A \<rightarrow> X) \<sqinter> SKIP)\<close> \<close>
|
Definition*[X6, level="Some 2"]\<open>\<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. ((\<sqinter> x \<in> A \<rightarrow> X) \<sqinter> SKIP)\<close> \<close>
|
||||||
|
|
||||||
text\<open>In the following, we denote \<open> \<R>\<P> = {DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P, DF, RUN, CHAOS, CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P}\<close>.
|
text\<open>In the following, we denote \<open> \<R>\<P> = {DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P, DF, RUN, CHAOS, CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P}\<close>.
|
||||||
All five reference processes are divergence-free.
|
All five reference processes are divergence-free.
|
||||||
|
@ -605,16 +607,16 @@ handled separately. One contribution of our work is establish their precise rela
|
||||||
the Failure/Divergence Semantics of \<^csp>.\<close>
|
the Failure/Divergence Semantics of \<^csp>.\<close>
|
||||||
|
|
||||||
(* bizarre: Definition* does not work for this single case *)
|
(* bizarre: Definition* does not work for this single case *)
|
||||||
text*[X10::"definition"]\<open> \<open>deadlock\<^sub>-free P \<equiv> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close> \<close>
|
text*[X10::"definition", level="Some 2"]\<open> \<open>deadlock\<^sub>-free P \<equiv> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close> \<close>
|
||||||
|
|
||||||
text\<open>\<^noindent> A process \<open>P\<close> is deadlock-free if and only if after any trace \<open>s\<close> without \<open>\<surd>\<close>, the union of \<open>\<surd>\<close>
|
text\<open>\<^noindent> A process \<open>P\<close> is deadlock-free if and only if after any trace \<open>s\<close> without \<open>\<surd>\<close>, the union of \<open>\<surd>\<close>
|
||||||
and all events of \<open>P\<close> can never be a refusal set associated to \<open>s\<close>, which means that \<open>P\<close> cannot
|
and all events of \<open>P\<close> can never be a refusal set associated to \<open>s\<close>, which means that \<open>P\<close> cannot
|
||||||
be deadlocked after any non-terminating trace.
|
be deadlocked after any non-terminating trace.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
Theorem*[T1, short_name="\<open>DF definition captures deadlock-freeness\<close>"]
|
Theorem*[T1, short_name="\<open>DF definition captures deadlock-freeness\<close>", level="Some 2"]
|
||||||
\<open> \hfill \break \<open>deadlock_free P \<longleftrightarrow> (\<forall>s\<in>\<T> P. tickFree s \<longrightarrow> (s, {\<surd>}\<union>events_of P) \<notin> \<F> P)\<close> \<close>
|
\<open> \hfill \break \<open>deadlock_free P \<longleftrightarrow> (\<forall>s\<in>\<T> P. tickFree s \<longrightarrow> (s, {\<surd>}\<union>events_of P) \<notin> \<F> P)\<close> \<close>
|
||||||
Definition*[X11]\<open> \<open>livelock\<^sub>-free P \<equiv> \<D> P = {} \<close> \<close>
|
Definition*[X11, level="Some 2"]\<open> \<open>livelock\<^sub>-free P \<equiv> \<D> P = {} \<close> \<close>
|
||||||
|
|
||||||
text\<open> Recall that all five reference processes are livelock-free.
|
text\<open> Recall that all five reference processes are livelock-free.
|
||||||
We also have the following lemmas about the
|
We also have the following lemmas about the
|
||||||
|
@ -628,7 +630,7 @@ text\<open>
|
||||||
Finally, we proved the following theorem that confirms the relationship between the two vital
|
Finally, we proved the following theorem that confirms the relationship between the two vital
|
||||||
properties:
|
properties:
|
||||||
\<close>
|
\<close>
|
||||||
Theorem*[T2, short_name="''DF implies LF''"]
|
Theorem*[T2, short_name="''DF implies LF''", level="Some 2"]
|
||||||
\<open> \<open>deadlock_free P \<longrightarrow> livelock_free P\<close> \<close>
|
\<open> \<open>deadlock_free P \<longrightarrow> livelock_free P\<close> \<close>
|
||||||
|
|
||||||
text\<open>
|
text\<open>
|
||||||
|
@ -795,7 +797,7 @@ This normal form is closed under deterministic and communication operators.
|
||||||
The advantage of this format is that we can mimick the well-known product automata construction
|
The advantage of this format is that we can mimick the well-known product automata construction
|
||||||
for an arbitrary number of synchronized processes under normal form.
|
for an arbitrary number of synchronized processes under normal form.
|
||||||
We only show the case of the synchronous product of two processes: \<close>
|
We only show the case of the synchronous product of two processes: \<close>
|
||||||
text*[T3::"theorem", short_name="\<open>Product Construction\<close>"]\<open>
|
text*[T3::"theorem", short_name="\<open>Product Construction\<close>", level="Some 2"]\<open>
|
||||||
Parallel composition translates to normal form:
|
Parallel composition translates to normal form:
|
||||||
@{cartouche [display,indent=5]\<open>(P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>1,\<upsilon>\<^sub>1\<rbrakk> \<sigma>\<^sub>1) || (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>2,\<upsilon>\<^sub>2\<rbrakk> \<sigma>\<^sub>2) =
|
@{cartouche [display,indent=5]\<open>(P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>1,\<upsilon>\<^sub>1\<rbrakk> \<sigma>\<^sub>1) || (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>2,\<upsilon>\<^sub>2\<rbrakk> \<sigma>\<^sub>2) =
|
||||||
P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2). \<tau>\<^sub>1 \<sigma>\<^sub>1 \<inter> \<tau>\<^sub>2 \<sigma>\<^sub>2 , \<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2).\<lambda>e.(\<upsilon>\<^sub>1 \<sigma>\<^sub>1 e, \<upsilon>\<^sub>2 \<sigma>\<^sub>2 e)\<rbrakk> (\<sigma>\<^sub>1,\<sigma>\<^sub>2)\<close>}
|
P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2). \<tau>\<^sub>1 \<sigma>\<^sub>1 \<inter> \<tau>\<^sub>2 \<sigma>\<^sub>2 , \<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2).\<lambda>e.(\<upsilon>\<^sub>1 \<sigma>\<^sub>1 e, \<upsilon>\<^sub>2 \<sigma>\<^sub>2 e)\<rbrakk> (\<sigma>\<^sub>1,\<sigma>\<^sub>2)\<close>}
|
||||||
|
@ -815,7 +817,7 @@ states via the closure \<open>\<RR>\<close>, which is defined inductively over:
|
||||||
Thus, normalization leads to a new characterization of deadlock-freeness inspired
|
Thus, normalization leads to a new characterization of deadlock-freeness inspired
|
||||||
from automata theory. We formally proved the following theorem:\<close>
|
from automata theory. We formally proved the following theorem:\<close>
|
||||||
|
|
||||||
text*[T4::"theorem", short_name="\<open>DF vs. Reacheability\<close>"]
|
text*[T4::"theorem", short_name="\<open>DF vs. Reacheability\<close>", level="Some 2"]
|
||||||
\<open> If each reachable state \<open>s \<in> (\<RR> \<tau> \<upsilon>)\<close> has outgoing transitions,
|
\<open> If each reachable state \<open>s \<in> (\<RR> \<tau> \<upsilon>)\<close> has outgoing transitions,
|
||||||
the \<^csp> process is deadlock-free:
|
the \<^csp> process is deadlock-free:
|
||||||
@{cartouche [display,indent=10] \<open>\<forall>\<sigma> \<in> (\<RR> \<tau> \<upsilon> \<sigma>\<^sub>0). \<tau> \<sigma> \<noteq> {} \<Longrightarrow> deadlock_free (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>,\<upsilon>\<rbrakk> \<sigma>\<^sub>0)\<close>}
|
@{cartouche [display,indent=10] \<open>\<forall>\<sigma> \<in> (\<RR> \<tau> \<upsilon> \<sigma>\<^sub>0). \<tau> \<sigma> \<noteq> {} \<Longrightarrow> deadlock_free (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>,\<upsilon>\<rbrakk> \<sigma>\<^sub>0)\<close>}
|
||||||
|
|
|
@ -54,7 +54,7 @@ text\<open>
|
||||||
Depending on your operating system and depending if you put Isabelle's \<^boxed_bash>\<open>bin\<close> directory
|
Depending on your operating system and depending if you put Isabelle's \<^boxed_bash>\<open>bin\<close> directory
|
||||||
in your \<^boxed_bash>\<open>PATH\<close>, you will need to invoke \<^boxed_bash>\<open>isabelle\<close> using its
|
in your \<^boxed_bash>\<open>PATH\<close>, you will need to invoke \<^boxed_bash>\<open>isabelle\<close> using its
|
||||||
full qualified path, \<^eg>:
|
full qualified path, \<^eg>:
|
||||||
@{boxed_bash [display]\<open>ë\prompt{}ë /usr/local/Isabelleë\isabelleversionë/bin/isabelle version
|
@{boxed_bash [display]\<open>ë\prompt{}ë /usr/local/Isabelleë\isabelleversion/bin/isabelleë version
|
||||||
ë\isabellefullversionë\<close>}
|
ë\isabellefullversionë\<close>}
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
|
@ -69,22 +69,25 @@ text\<open>
|
||||||
subsubsection*[isadof::technical]\<open>Installing \<^isadof>\<close>
|
subsubsection*[isadof::technical]\<open>Installing \<^isadof>\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
In the following, we assume that you already downloaded the \<^isadof> distribution
|
In the following, we assume that you already downloaded the \<^isadof> distribution
|
||||||
(\href{\isadofarchiveurl}{\isadofarchiven}) from the \<^isadof> web site. The main steps for
|
(\href{\isadofarchiveurl}{\isadofarchiven}) from the \<^isadof> web site.
|
||||||
installing are extracting the \<^isadof> distribution and calling its \<^boxed_bash>\<open>install-afp\<close> script.
|
|
||||||
We start by extracting the \<^isadof> archive:
|
We start by extracting the \<^isadof> archive:
|
||||||
@{boxed_bash [display]\<open>ë\prompt{}ë tar xf ë\href{\isadofarchiveurl}{\isadofarchiven}ë\<close>}
|
@{boxed_bash [display]\<open>ë\prompt{}ë tar xf ë\href{\isadofarchiveurl}{\isadofarchiven}ë\<close>}
|
||||||
This will create a directory \texttt{\isadofdirn} containing \<^isadof> distribution.
|
This will create a directory \texttt{\isadofdirn} containing \<^isadof> distribution.
|
||||||
|
|
||||||
\<^isadof> depends on the the AFP (\<^url>\<open>https://www.isa-afp.org\<close>), namely the AFP
|
Next, we need to register \<^isadof> as an Isabelle component:
|
||||||
|
|
||||||
|
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle components -u ë\mbox{\isadofdirn}ë\<close>}
|
||||||
|
|
||||||
|
Moreover, \<^isadof> depends on the the AFP (\<^url>\<open>https://www.isa-afp.org\<close>), namely the AFP
|
||||||
entries ``Functional Automata''~@{cite "nipkow.ea:functional-Automata-afp:2004"} and
|
entries ``Functional Automata''~@{cite "nipkow.ea:functional-Automata-afp:2004"} and
|
||||||
``Regular Sets and Expressions''~@{cite "kraus.ea:regular-sets-afp:2010"}. You can either
|
``Regular Sets and Expressions''~@{cite "kraus.ea:regular-sets-afp:2010"}. You can either
|
||||||
install the complete AFP, following the instructions given at \<^url>\<open>https://www.isa-afp.org/using.html\<close>),
|
install the complete AFP, following the instructions given at \<^url>\<open>https://www.isa-afp.org/using.html\<close>),
|
||||||
or use the provided \<^boxed_bash>\<open>install\<close> script to install a minimal AFP
|
or use the provided \<^boxed_bash>\<open>install\<close> script to install a minimal AFP
|
||||||
setup into the local \<^isadof> directory. The script needs to be prefixed with the
|
setup into the local \<^isadof> directory. The script needs to be prefixed with the
|
||||||
\<^boxed_bash>\<open>isabelle env\<close> command:
|
\<^boxed_bash>\<open>isabelle eëënv\<close> command:
|
||||||
|
|
||||||
@{boxed_bash [display]\<open>ë\prompt{}ë cd ë\isadofdirnë
|
@{boxed_bash [display]\<open>ë\prompt{}ë cd ë\isadofdirnë
|
||||||
ë\prompt{\isadofdirn}ë isabelle env ./install-afp
|
ë\prompt{\isadofdirn}ë isabelle eëënv ë\mbox{./install-afp}ë
|
||||||
|
|
||||||
Isabelle/DOF Installer
|
Isabelle/DOF Installer
|
||||||
======================
|
======================
|
||||||
|
@ -115,15 +118,17 @@ session and all example documents, execute:
|
||||||
subsection*[first_project::technical]\<open>Creating an \<^isadof> Project\<close>
|
subsection*[first_project::technical]\<open>Creating an \<^isadof> Project\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
\<^isadof> provides its own variant of Isabelle's
|
\<^isadof> provides its own variant of Isabelle's
|
||||||
\<^boxed_bash>\<open>mkroot\<close> tool, called \<^boxed_bash>\<open>mkroot_DOF\<close>\index{mkroot\_DOF}:
|
\<^boxed_bash>\<open>mkroot\<close> tool, called \<^boxed_bash>\<open>dof_mkroot\<close>\index{mkroot\_DOF}:
|
||||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle mkroot_DOF myproject
|
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle dof_mkroot myproject
|
||||||
|
|
||||||
Preparing session "myproject" iëën "myproject"
|
Preparing session "myproject" iëën "myproject"
|
||||||
creating "myproject/ROOT"
|
creating "myproject/ROOT"
|
||||||
creating "myproject/document/root.tex"
|
creating "myproject/myproject.thy"
|
||||||
|
creating "myproject/document/preamble.tex"
|
||||||
|
|
||||||
Now use the following coëëmmand line to build the session:
|
Now use the following commanëëd line to build the session:
|
||||||
isabelle build -D myproject \<close>}
|
|
||||||
|
isabelle build -D myproject\<close>}
|
||||||
The created project uses the default configuration (the ontology for writing academic papers
|
The created project uses the default configuration (the ontology for writing academic papers
|
||||||
(scholarly\_paper) using a report layout based on the article class (\<^boxed_latex>\<open>scrartcl\<close>) of
|
(scholarly\_paper) using a report layout based on the article class (\<^boxed_latex>\<open>scrartcl\<close>) of
|
||||||
the KOMA-Script bundle~@{cite "kohm:koma-script:2019"}). The directory \<^boxed_bash>\<open>myproject\<close>
|
the KOMA-Script bundle~@{cite "kohm:koma-script:2019"}). The directory \<^boxed_bash>\<open>myproject\<close>
|
||||||
|
@ -139,21 +144,23 @@ The directory \<^boxed_bash>\<open>myproject\<close> contains the following fil
|
||||||
.1 .
|
.1 .
|
||||||
.2 myproject.
|
.2 myproject.
|
||||||
.3 document.
|
.3 document.
|
||||||
.4 isadof.cfg\DTcomment{\<^isadof> configuration}.
|
|
||||||
.4 preamble.tex\DTcomment{Manual \<^LaTeX>-configuration}.
|
.4 preamble.tex\DTcomment{Manual \<^LaTeX>-configuration}.
|
||||||
|
.3 myproject.thy\DTcomment{Example theory file}.
|
||||||
.3 ROOT\DTcomment{Isabelle build-configuration}.
|
.3 ROOT\DTcomment{Isabelle build-configuration}.
|
||||||
}
|
}
|
||||||
\end{minipage}
|
\end{minipage}
|
||||||
\end{center}
|
\end{center}
|
||||||
The \<^isadof> configuration (\<^boxed_bash>\<open>isadof.cfg\<close>) specifies the required
|
|
||||||
ontologies and the document template using a YAML syntax.\<^footnote>\<open>Isabelle power users will recognize that
|
The main two configuration files\<^footnote>\<open>Isabelle power users will recognize that
|
||||||
\<^isadof>'s document setup does not make use of a file \<^boxed_bash>\<open>root.tex\<close>: this file is
|
\<^isadof>'s document setup does not make use of a file \<^boxed_bash>\<open>root.tex\<close>: this file is
|
||||||
replaced by built-in document templates.\<close> The main two configuration files for
|
replaced by built-in document templates.\<close> for users are:
|
||||||
users are:
|
|
||||||
\<^item> The file \<^boxed_bash>\<open>ROOT\<close>\<^index>\<open>ROOT\<close>, which defines the Isabelle session. New theory files as well as new
|
\<^item> The file \<^boxed_bash>\<open>ROOT\<close>\<^index>\<open>ROOT\<close>, which defines the Isabelle session. New theory files as well as new
|
||||||
files required by the document generation (\<^eg>, images, bibliography database using \<^BibTeX>, local
|
files required by the document generation (\<^eg>, images, bibliography database using \<^BibTeX>, local
|
||||||
\<^LaTeX>-styles) need to be registered in this file. For details of Isabelle's build system, please
|
\<^LaTeX>-styles) need to be registered in this file. For details of Isabelle's build system, please
|
||||||
consult the Isabelle System Manual~@{cite "wenzel:system-manual:2020"}.
|
consult the Isabelle System Manual~@{cite "wenzel:system-manual:2020"}. In addition to the
|
||||||
|
standard features of, this file also contains \<^isadof> specific configurations:
|
||||||
|
\<^item> \<^boxed_bash>\<open>document_build=dof\<close> needs to be present, to tell Isabelle, to use the
|
||||||
|
Isabelle/DOF backend for the document generation.
|
||||||
\<^item> The file \<^boxed_bash>\<open>preamble.tex\<close>\<^index>\<open>preamble.tex\<close>, which allows users to add additional
|
\<^item> The file \<^boxed_bash>\<open>preamble.tex\<close>\<^index>\<open>preamble.tex\<close>, which allows users to add additional
|
||||||
\<^LaTeX>-packages or to add/modify \<^LaTeX>-commands.
|
\<^LaTeX>-packages or to add/modify \<^LaTeX>-commands.
|
||||||
\<close>
|
\<close>
|
||||||
|
@ -166,33 +173,24 @@ text\<open>
|
||||||
obtains and adds the necessary \<^LaTeX> class file.
|
obtains and adds the necessary \<^LaTeX> class file.
|
||||||
This is due to licensing restrictions).\<close>
|
This is due to licensing restrictions).\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
This can be configured by using the command-line options of \<^boxed_bash>\<open>mkroot_DOF\<close>. In
|
This can be configured by using the command-line options of \<^boxed_bash>\<open>dof_mkroot\<close>. In
|
||||||
Particular, \<^boxed_bash>\<open>-o\<close> allows selecting the ontology and \<^boxed_bash>\<open>-t\<close> allows selecting
|
Particular, \<^boxed_bash>\<open>-o\<close> allows selecting the ontology and \<^boxed_bash>\<open>-t\<close> allows selecting
|
||||||
the document template. The built-in help (using \<^boxed_bash>\<open>-h\<close>) shows all available options
|
the document template. The built-in help (using \<^boxed_bash>\<open>-h\<close>) shows all available options:
|
||||||
as well as a complete list of the available document templates and ontologies:
|
|
||||||
|
|
||||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle mkroot_DOF -h
|
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle dof_mkroot -h
|
||||||
|
|
||||||
Usage: isabelle mkroot_DOF [OPTIONS] [DIR]
|
Usage: isabelle dof_mkroot [OPTIONS] [DIRECTORY]
|
||||||
|
|
||||||
Options are:
|
Options are:
|
||||||
-h print this hëëelp text and exëëit
|
-I init Mercurial repository and add generated files
|
||||||
-n NAME alternative session name (default: DIR base name)
|
-h print help
|
||||||
-o ONTOLOGY (default: scholarly_paper)
|
-n NAME alternative session name (default: directory base name)
|
||||||
Available ontologies:
|
-o NAMES list of ontologies, separated by blanks
|
||||||
* CENELEC_50128
|
(default: "technical_report scholarly_paper")
|
||||||
* math_exam
|
-q quiet mode: less verbosity
|
||||||
* scholarly_paper
|
-t NAME template (default: "scrreprt-modern")
|
||||||
* technical_report
|
|
||||||
-t TEMPLATE (default: scrartcl)
|
|
||||||
Available document templates:
|
|
||||||
* lncs
|
|
||||||
* scrartcl
|
|
||||||
* scrreprt-modern
|
|
||||||
* scrreprt
|
|
||||||
|
|
||||||
Prepare session root DIR (default: current directory). \<close>}
|
|
||||||
|
|
||||||
|
Create session root directory for Isabelle/DOF (default: current directory).\<close>}
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
section*[writing_doc::technical]\<open>Writing Documents: General Remarks\<close>
|
section*[writing_doc::technical]\<open>Writing Documents: General Remarks\<close>
|
||||||
|
@ -327,15 +325,16 @@ doc_class text_section = text_element +
|
||||||
\<close>}
|
\<close>}
|
||||||
|
|
||||||
The \<^const>\<open>Isa_COL.text_element.level\<close>-attibute \<^index>\<open>level\<close> enables doc-notation support for
|
The \<^const>\<open>Isa_COL.text_element.level\<close>-attibute \<^index>\<open>level\<close> enables doc-notation support for
|
||||||
headers, chapters, sections, and subsections; we follow here the \<^LaTeX> terminology on levels to which \<^isadof> is currently targeting at.
|
headers, chapters, sections, and subsections; we follow here the \<^LaTeX> terminology on levels to
|
||||||
The values are interpreted accordingly to the \<^LaTeX> standard. The correspondence between the levels
|
which \<^isadof> is currently targeting at. The values are interpreted accordingly to the \<^LaTeX>
|
||||||
and the structural entities is summarized as follows:
|
standard. The correspondence between the levels and the structural entities is summarized
|
||||||
|
as follows:
|
||||||
|
|
||||||
\<^item> part \<^index>\<open>part\<close> \<open>Some -1\<close> \vspace{-0.3cm}
|
\<^item> part \<^index>\<open>part\<close> \<open>Some -1\<close> \<^vs>\<open>-0.3cm\<close>
|
||||||
\<^item> chapter \<^index>\<open>chapter\<close> \<open>Some 0\<close> \vspace{-0.3cm}
|
\<^item> chapter \<^index>\<open>chapter\<close> \<open>Some 0\<close> \<^vs>\<open>-0.3cm\<close>
|
||||||
\<^item> section \<^index>\<open>section\<close> \<open>Some 1\<close> \vspace{-0.3cm}
|
\<^item> section \<^index>\<open>section\<close> \<open>Some 1\<close> \<^vs>\<open>-0.3cm\<close>
|
||||||
\<^item> subsection \<^index>\<open>subsection\<close> \<open>Some 2\<close> \vspace{-0.3cm}
|
\<^item> subsection \<^index>\<open>subsection\<close> \<open>Some 2\<close> \<^vs>\<open>-0.3cm\<close>
|
||||||
\<^item> subsubsection \<^index>\<open>subsubsection\<close> \<open>Some 3\<close> \vspace{-0.1cm}
|
\<^item> subsubsection \<^index>\<open>subsubsection\<close> \<open>Some 3\<close> \<^vs>\<open>-0.3cm\<close>
|
||||||
|
|
||||||
Additional means assure that the following invariant is maintained in a document
|
Additional means assure that the following invariant is maintained in a document
|
||||||
conforming to \<^verbatim>\<open>scholarly_paper\<close>:
|
conforming to \<^verbatim>\<open>scholarly_paper\<close>:
|
||||||
|
@ -343,6 +342,8 @@ conforming to \<^verbatim>\<open>scholarly_paper\<close>:
|
||||||
\center {\<open>level > 0\<close>}
|
\center {\<open>level > 0\<close>}
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
|
text\<open>\<^vs>\<open>1.0cm\<close>\<close>
|
||||||
|
|
||||||
text\<open> The rest of the ontology introduces concepts for \<^typ>\<open>introduction\<close>, \<^typ>\<open>conclusion\<close>,
|
text\<open> The rest of the ontology introduces concepts for \<^typ>\<open>introduction\<close>, \<^typ>\<open>conclusion\<close>,
|
||||||
\<^typ>\<open>related_work\<close>, \<^typ>\<open>bibliography\<close> etc. More details can be found in \<^verbatim>\<open>scholarly_paper\<close>
|
\<^typ>\<open>related_work\<close>, \<^typ>\<open>bibliography\<close> etc. More details can be found in \<^verbatim>\<open>scholarly_paper\<close>
|
||||||
contained in the theory \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close>. \<close>
|
contained in the theory \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close>. \<close>
|
||||||
|
@ -505,9 +506,6 @@ side_by_side_figure*["hyperlinks"::side_by_side_figure,anchor="''fig:Dogfood-IV-
|
||||||
caption2="''Exploring an attribute (hyperlinked to the class).''",
|
caption2="''Exploring an attribute (hyperlinked to the class).''",
|
||||||
relative_width2="47",
|
relative_width2="47",
|
||||||
src2="''figures/Dogfood-V-attribute''"]\<open>Navigation via generated hyperlinks.\<close>
|
src2="''figures/Dogfood-V-attribute''"]\<open>Navigation via generated hyperlinks.\<close>
|
||||||
(* TODO: Update the images in fig:Dogfood-IV-jumpInDocCLass side_by_side_figure
|
|
||||||
to align them. This has to wait that the exploration of an attribute is again possible.
|
|
||||||
See: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/issues/10 *)
|
|
||||||
|
|
||||||
text\<open>
|
text\<open>
|
||||||
From these class definitions, \<^isadof> also automatically generated editing
|
From these class definitions, \<^isadof> also automatically generated editing
|
||||||
|
@ -694,8 +692,29 @@ text\<open>There are options to display sub-parts of formulas etc., but it is a
|
||||||
of tight-checking that the information must be given complete and exactly in the syntax of
|
of tight-checking that the information must be given complete and exactly in the syntax of
|
||||||
Isabelle. This may be over-precise and a burden to readers not familiar with Isabelle, which may
|
Isabelle. This may be over-precise and a burden to readers not familiar with Isabelle, which may
|
||||||
motivate authors to choose the aforementioned freeform-style.
|
motivate authors to choose the aforementioned freeform-style.
|
||||||
|
|
||||||
|
Additionally, documents antiquotations were added to check and evaluate terms with
|
||||||
|
term antiquotations:
|
||||||
|
\<^item> \<^theory_text>\<open>@{term_ \<open>term\<close> }\<close> parses and type-checks \<open>term\<close> with term antiquotations,
|
||||||
|
for instance \<^theory_text>\<open>@{term_ \<open>@{cenelec-term \<open>FT\<close>}\<close>}\<close> will parse and check
|
||||||
|
that \<open>FT\<close> is indeed an instance of the class \<^typ>\<open>cenelec_term\<close>,
|
||||||
|
\<^item> \<^theory_text>\<open>@{value_ \<open>term\<close> }\<close> performs the evaluation of \<open>term\<close> with term antiquotations,
|
||||||
|
for instance \<^theory_text>\<open>@{value_ \<open>mcc @{cenelec-term \<open>FT\<close>}\<close>}\<close>
|
||||||
|
will print the value of the \<^const>\<open>mcc\<close> attribute of the instance \<open>FT\<close>.
|
||||||
|
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator but this
|
||||||
|
argument must be specified after a default optional argument already defined
|
||||||
|
by the text antiquotation implementation.
|
||||||
|
So one must use the following syntax if he does not want to specify the first optional argument:
|
||||||
|
\<open>@{value_ [] [nbe] \<open>r @{F \<open>xcv4\<close>}\<close>}\<close>. Note the empty brackets.
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
|
(*<*)
|
||||||
|
declare_reference*["subsec:onto-term-ctxt"::technical]
|
||||||
|
(*>*)
|
||||||
|
|
||||||
|
text\<open>They are text-contexts equivalents to the \<^theory_text>\<open>term*\<close> and \<^theory_text>\<open>value*\<close> commands
|
||||||
|
for term-contexts introduced in @{technical (unchecked) \<open>subsec:onto-term-ctxt\<close>}\<close>
|
||||||
|
|
||||||
subsection\<open>A Technical Report with Tight Checking\<close>
|
subsection\<open>A Technical Report with Tight Checking\<close>
|
||||||
text\<open>An example of tight checking is a small programming manual developed by the
|
text\<open>An example of tight checking is a small programming manual developed by the
|
||||||
second author in order to document programming trick discoveries while implementing in Isabelle.
|
second author in order to document programming trick discoveries while implementing in Isabelle.
|
||||||
|
|
|
@ -235,8 +235,7 @@ A document class\<^bindex>\<open>document class\<close> can be defined using the
|
||||||
We call document classes with an \<open>accepts_clause\<close>
|
We call document classes with an \<open>accepts_clause\<close>
|
||||||
\<^emph>\<open>monitor classes\<close> or \<^emph>\<open>monitors\<close> for short.
|
\<^emph>\<open>monitor classes\<close> or \<^emph>\<open>monitors\<close> for short.
|
||||||
\<^rail>\<open> (@@{command "doc_class"}| @@{command "onto_class"}) class_id '=' (class_id '+')? (attribute_decl+) \<newline>
|
\<^rail>\<open> (@@{command "doc_class"}| @@{command "onto_class"}) class_id '=' (class_id '+')? (attribute_decl+) \<newline>
|
||||||
(invariant_decl *)
|
(invariant_decl *) (rejects_clause accepts_clause)? \<newline> (accepts_clause *)\<close>
|
||||||
(accepts_clause rejects_clause?)?\<close>
|
|
||||||
\<^item> \<open>attribute_decl\<close>:\<^index>\<open>attribute\_decl@\<open>attribute_decl\<close>\<close>
|
\<^item> \<open>attribute_decl\<close>:\<^index>\<open>attribute\_decl@\<open>attribute_decl\<close>\<close>
|
||||||
\<^rail>\<open> name '::' '"' type '"' default_clause? \<close>
|
\<^rail>\<open> name '::' '"' type '"' default_clause? \<close>
|
||||||
\<^item> \<open>invariant_decl\<close>:\<^index>\<open>invariant\_decl@\<open>invariant_decl\<close>\<close>
|
\<^item> \<open>invariant_decl\<close>:\<^index>\<open>invariant\_decl@\<open>invariant_decl\<close>\<close>
|
||||||
|
@ -245,10 +244,10 @@ A document class\<^bindex>\<open>document class\<close> can be defined using the
|
||||||
disambiguate the argument of the expression
|
disambiguate the argument of the expression
|
||||||
and the \<^boxed_text>\<open>\<sigma>\<close> symbol is reserved to reference the instance of the class itself.
|
and the \<^boxed_text>\<open>\<sigma>\<close> symbol is reserved to reference the instance of the class itself.
|
||||||
\<^rail>\<open> 'invariant' (name '::' '"' term '"' + 'and') \<close>
|
\<^rail>\<open> 'invariant' (name '::' '"' term '"' + 'and') \<close>
|
||||||
\<^item> \<open>accepts_clause\<close>:\<^index>\<open>accepts\_clause@\<open>accepts_clause\<close>\<close>
|
|
||||||
\<^rail>\<open> 'accepts' '"' regexpr '"'\<close>
|
|
||||||
\<^item> \<open>rejects_clause\<close>:\<^index>\<open>rejects\_clause@\<open>rejects_clause\<close>\<close>
|
\<^item> \<open>rejects_clause\<close>:\<^index>\<open>rejects\_clause@\<open>rejects_clause\<close>\<close>
|
||||||
\<^rail>\<open> 'rejects' (class_id * ',') \<close>
|
\<^rail>\<open> 'rejects' (class_id * ',') \<close>
|
||||||
|
\<^item> \<open>accepts_clause\<close>:\<^index>\<open>accepts\_clause@\<open>accepts_clause\<close>\<close>
|
||||||
|
\<^rail>\<open> 'accepts' ('"' regexpr '"' + 'and')\<close>
|
||||||
\<^item> \<open>default_clause\<close>:\<^index>\<open>default\_clause@\<open>default_clause\<close>\<close>
|
\<^item> \<open>default_clause\<close>:\<^index>\<open>default\_clause@\<open>default_clause\<close>\<close>
|
||||||
\<^rail>\<open> '<=' '"' expr '"' \<close>
|
\<^rail>\<open> '<=' '"' expr '"' \<close>
|
||||||
\<^item> \<open>regexpr\<close>:\<^index>\<open>regexpr@\<open>regexpr\<close>\<close>
|
\<^item> \<open>regexpr\<close>:\<^index>\<open>regexpr@\<open>regexpr\<close>\<close>
|
||||||
|
@ -331,14 +330,14 @@ is currently only available in the SML API's of the kernel.
|
||||||
symbolic evaluation using the simplifier, \<open>nbe\<close> for \<^emph>\<open>normalization by
|
symbolic evaluation using the simplifier, \<open>nbe\<close> for \<^emph>\<open>normalization by
|
||||||
evaluation\<close> and \<^emph>\<open>code\<close> for code generation in SML.
|
evaluation\<close> and \<^emph>\<open>code\<close> for code generation in SML.
|
||||||
\<^item> \<open>upd_meta_args\<close> :
|
\<^item> \<open>upd_meta_args\<close> :
|
||||||
\<^rail>\<open> (obj_id ('::' class_id) ((',' attribute ('=' | '+=') HOL_term) * ))\<close>
|
\<^rail>\<open> (obj_id ('::' class_id) ((',' attribute (':=' | '+=') HOL_term) * ))\<close>
|
||||||
\<^item> \<open>annotated_text_element\<close> :
|
\<^item> \<open>annotated_text_element\<close> :
|
||||||
\<^rail>\<open>
|
\<^rail>\<open>
|
||||||
( @@{command "text*"} '[' meta_args ']' '\<open>' formal_text '\<close>'
|
( @@{command "text*"} '[' meta_args ']' '\<open>' formal_text '\<close>'
|
||||||
| @@{command "ML*"} ('[' meta_args ']')? '\<open>' SML_code '\<close>'
|
| @@{command "ML*"} ('[' meta_args ']')? '\<open>' SML_code '\<close>'
|
||||||
| @@{command "term*"} ('[' meta_args ']')? '\<open>' HOL_term '\<close>'
|
| @@{command "term*"} ('[' meta_args ']')? '\<open>' HOL_term '\<close>'
|
||||||
| @@{command "value*"} \<newline> ('[' meta_args ']' ('[' evaluator ']')?)? '\<open>' HOL_term '\<close>'
|
| (@@{command "value*"}
|
||||||
| @@{command "assert*"} \<newline> ('[' meta_args ']' ('[' evaluator ']')?)? '\<open>' HOL_term '\<close>'
|
| @@{command "assert*"}) \<newline> ('[' meta_args ']')? ('[' evaluator ']')? '\<open>' HOL_term '\<close>'
|
||||||
)
|
)
|
||||||
\<close>
|
\<close>
|
||||||
\<^rail>\<open>
|
\<^rail>\<open>
|
||||||
|
@ -404,13 +403,24 @@ a referentiable meta-object of class \<^theory_text>\<open>cid\<close> is create
|
||||||
The SML-code is type-checked and executed
|
The SML-code is type-checked and executed
|
||||||
in the context of the SML toplevel of the Isabelle system as in the corresponding
|
in the context of the SML toplevel of the Isabelle system as in the corresponding
|
||||||
\<^theory_text>\<open>ML\<open> \<dots> SML-code \<dots> \<close>\<close>-command.
|
\<^theory_text>\<open>ML\<open> \<dots> SML-code \<dots> \<close>\<close>-command.
|
||||||
|
Additionally, ML antiquotations were added to check and evaluate terms with
|
||||||
|
term antiquotations:
|
||||||
|
\<^item> \<^theory_text>\<open>@{term_ \<open>term\<close> }\<close> parses and type-checks \<open>term\<close> with term antiquotations,
|
||||||
|
for instance \<^theory_text>\<open>@{term_ \<open>@{cenelec-term \<open>FT\<close>}\<close>}\<close> will parse and check
|
||||||
|
that \<open>FT\<close> is indeed an instance of the class \<^typ>\<open>cenelec_term\<close>,
|
||||||
|
\<^item> \<^theory_text>\<open>@{value_ \<open>term\<close> }\<close> performs the evaluation of \<open>term\<close> with term antiquotations,
|
||||||
|
for instance \<^theory_text>\<open>@{value_ \<open>mcc @{cenelec-term \<open>FT\<close>}\<close>}\<close>
|
||||||
|
will print the value of the \<^const>\<open>mcc\<close> attribute of the instance \<open>FT\<close>.
|
||||||
|
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator:
|
||||||
|
\<open>@{value_ [nbe] \<open>mcc @{cenelec-term \<open>FT\<close>}\<close>}\<close> forces \<open>value_\<close> to evaluate
|
||||||
|
the term using normalization by evaluation (see @{cite "wenzel:isabelle-isar:2020"}).
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
(*<*)
|
(*<*)
|
||||||
declare_reference*["text-elements-expls"::technical]
|
declare_reference*["text-elements-expls"::technical]
|
||||||
(*>*)
|
(*>*)
|
||||||
|
|
||||||
subsection\<open>Ontological Term-Contexts and their Management\<close>
|
subsection*["subsec:onto-term-ctxt"::technical]\<open>Ontological Term-Contexts and their Management\<close>
|
||||||
text\<open>The major commands providing term-contexts are
|
text\<open>The major commands providing term-contexts are
|
||||||
\<^theory_text>\<open>term*[oid::cid, ...] \<open> \<dots> HOL-term \<dots> \<close>\<close>,
|
\<^theory_text>\<open>term*[oid::cid, ...] \<open> \<dots> HOL-term \<dots> \<close>\<close>,
|
||||||
\<^theory_text>\<open>value*[oid::cid, ...] \<open> \<dots> HOL-term \<dots> \<close>\<close> and
|
\<^theory_text>\<open>value*[oid::cid, ...] \<open> \<dots> HOL-term \<dots> \<close>\<close> and
|
||||||
|
@ -418,8 +428,9 @@ text\<open>The major commands providing term-contexts are
|
||||||
Wrt. creation, track-ability and checking they are analogous to the ontological text and
|
Wrt. creation, track-ability and checking they are analogous to the ontological text and
|
||||||
code-commands. However the argument terms may contain term-antiquotations stemming from an
|
code-commands. However the argument terms may contain term-antiquotations stemming from an
|
||||||
ontology definition. Both term-contexts were type-checked and \<^emph>\<open>validated\<close> against
|
ontology definition. Both term-contexts were type-checked and \<^emph>\<open>validated\<close> against
|
||||||
the global context (so: in the term \<open>@{A \<open>oid\<close>}\<close>, \<open>oid\<close> is indeed a string which refers
|
the global context (so: in the term @{term_ \<open>@{author \<open>bu\<close>}\<close>}, \<open>bu\<close>
|
||||||
to a meta-object belonging to the document class \<open>A\<close>, for example).
|
is indeed a string which refers to a meta-object belonging
|
||||||
|
to the document class \<^typ>\<open>author\<close>, for example).
|
||||||
The term-context in the \<open>value*\<close>-command and \<^emph>\<open>assert*\<close>-command is additionally expanded
|
The term-context in the \<open>value*\<close>-command and \<^emph>\<open>assert*\<close>-command is additionally expanded
|
||||||
(\<^eg> replaced) by a term denoting the meta-object.
|
(\<^eg> replaced) by a term denoting the meta-object.
|
||||||
This expansion happens \<^emph>\<open>before\<close> evaluation of the term, thus permitting
|
This expansion happens \<^emph>\<open>before\<close> evaluation of the term, thus permitting
|
||||||
|
@ -434,7 +445,6 @@ Note unspecified attribute values were represented by free fresh variables which
|
||||||
to choose either the normalization-by-evaluation strategy \<^theory_text>\<open>nbe\<close> or a proof attempt via
|
to choose either the normalization-by-evaluation strategy \<^theory_text>\<open>nbe\<close> or a proof attempt via
|
||||||
the \<^theory_text>\<open>auto\<close> method. A failure of these strategies will be reported and regarded as non-validation
|
the \<^theory_text>\<open>auto\<close> method. A failure of these strategies will be reported and regarded as non-validation
|
||||||
of this meta-object. The latter leads to a failure of the entire command.
|
of this meta-object. The latter leads to a failure of the entire command.
|
||||||
|
|
||||||
\<close>
|
\<close>
|
||||||
|
|
||||||
(*<*)
|
(*<*)
|
||||||
|
@ -1009,9 +1019,6 @@ text\<open>
|
||||||
these types. They are just types declared in HOL,
|
these types. They are just types declared in HOL,
|
||||||
which are ``inhabited'' by special constant symbols carrying strings, for
|
which are ``inhabited'' by special constant symbols carrying strings, for
|
||||||
example of the format \<^boxed_theory_text>\<open>@{thm <string>}\<close>.
|
example of the format \<^boxed_theory_text>\<open>@{thm <string>}\<close>.
|
||||||
% TODO:
|
|
||||||
% Update meta-types implementation explanation to the new implementation
|
|
||||||
% in repository commit 08c101c5440eee2a087068581952026e88c39f6a
|
|
||||||
When HOL
|
When HOL
|
||||||
expressions were used to denote values of \<^boxed_theory_text>\<open>doc_class\<close>
|
expressions were used to denote values of \<^boxed_theory_text>\<open>doc_class\<close>
|
||||||
instance attributes, this requires additional checks after
|
instance attributes, this requires additional checks after
|
||||||
|
@ -1043,11 +1050,19 @@ subsection*["sec:class_inv"::technical]\<open>ODL Class Invariants\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
Ontological classes as described so far are too liberal in many situations.
|
Ontological classes as described so far are too liberal in many situations.
|
||||||
There is a first high-level syntax implementation for class invariants.
|
There is a first high-level syntax implementation for class invariants.
|
||||||
These invariants can be checked when an instance of the class is defined.
|
These invariants are checked when an instance of the class is defined,
|
||||||
To enable the checking of the invariants, the \<^boxed_theory_text>\<open>invariants_checking\<close>
|
and trigger warnings.
|
||||||
|
The checking is enabled by default but can be disabled with the
|
||||||
|
\<^boxed_theory_text>\<open>invariants_checking\<close> theory attribute:
|
||||||
|
@{boxed_theory_text [display]\<open>
|
||||||
|
declare[[invariants_checking = false]]\<close>}
|
||||||
|
|
||||||
|
To enable the strict checking of the invariants,
|
||||||
|
that is to trigger errors instead of warnings,
|
||||||
|
the \<^boxed_theory_text>\<open>invariants_strict_checking\<close>
|
||||||
theory attribute must be set:
|
theory attribute must be set:
|
||||||
@{boxed_theory_text [display]\<open>
|
@{boxed_theory_text [display]\<open>
|
||||||
declare[[invariants_checking = true]]\<close>}
|
declare[[invariants_strict_checking = true]]\<close>}
|
||||||
|
|
||||||
For example, let's define the following two classes:
|
For example, let's define the following two classes:
|
||||||
@{boxed_theory_text [display]\<open>
|
@{boxed_theory_text [display]\<open>
|
||||||
|
@ -1107,8 +1122,15 @@ text\<open>
|
||||||
All specified constraints are already checked in the IDE of \<^dof> while editing.
|
All specified constraints are already checked in the IDE of \<^dof> while editing.
|
||||||
The invariant \<^boxed_theory_text>\<open>author_finite\<close> enforces that the user sets the
|
The invariant \<^boxed_theory_text>\<open>author_finite\<close> enforces that the user sets the
|
||||||
\<^boxed_theory_text>\<open>authored_by\<close> set.
|
\<^boxed_theory_text>\<open>authored_by\<close> set.
|
||||||
|
The invariants \<^theory_text>\<open>author_finite\<close> and \<^theory_text>\<open>establish_defined\<close> can not be checked directly
|
||||||
|
and need a little help.
|
||||||
|
We can set the \<open>invariants_checking_with_tactics\<close> theory attribute to help the checking.
|
||||||
|
It will enable a basic tactic, using unfold and auto:
|
||||||
|
@{boxed_theory_text [display]\<open>
|
||||||
|
declare[[invariants_checking_with_tactics = true]]\<close>}
|
||||||
There are still some limitations with this high-level syntax.
|
There are still some limitations with this high-level syntax.
|
||||||
For now, the high-level syntax does not support monitors (see \<^technical>\<open>sec:monitors\<close>).
|
For now, the high-level syntax does not support the checking of
|
||||||
|
specific monitor behaviors (see \<^technical>\<open>sec:monitors\<close>).
|
||||||
For example, one would like to delay a final error message till the
|
For example, one would like to delay a final error message till the
|
||||||
closing of a monitor.
|
closing of a monitor.
|
||||||
For this use-case you can use low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>).
|
For this use-case you can use low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>).
|
||||||
|
@ -1150,6 +1172,20 @@ text\<open>
|
||||||
instances of \<open>S\<close>.
|
instances of \<open>S\<close>.
|
||||||
\<close>
|
\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
|
Should the specified ranges of admissible instances not be observed, warnings will be triggered.
|
||||||
|
To forbid the violation of the specified ranges,
|
||||||
|
one can enable the \<^boxed_theory_text>\<open>strict_monitor_checking\<close> theory attribute:
|
||||||
|
@{boxed_theory_text [display]\<open>declare[[strict_monitor_checking = true]]\<close>}
|
||||||
|
It is possible to enable the tracing of free classes occurring inside the scope of a monitor by
|
||||||
|
enabling the \<^boxed_theory_text>\<open>free_class_in_monitor_checking\<close>
|
||||||
|
theory attribute:
|
||||||
|
@{boxed_theory_text [display]\<open>declare[[free_class_in_monitor_checking = true]]\<close>}
|
||||||
|
Then a warning will be triggered when defining an instance of a free class
|
||||||
|
inside the scope of a monitor.
|
||||||
|
To forbid free classes inside the scope of a monitor, one can enable the
|
||||||
|
\<^boxed_theory_text>\<open>free_class_in_monitor_strict_checking\<close> theory attribute:
|
||||||
|
@{boxed_theory_text [display]\<open>declare[[free_class_in_monitor_strict_checking = true]]\<close>}
|
||||||
|
|
||||||
Monitored document sections can be nested and overlap; thus, it is
|
Monitored document sections can be nested and overlap; thus, it is
|
||||||
possible to combine the effect of different monitors. For example, it
|
possible to combine the effect of different monitors. For example, it
|
||||||
would be possible to refine the \<^boxed_theory_text>\<open>example\<close> section by its own
|
would be possible to refine the \<^boxed_theory_text>\<open>example\<close> section by its own
|
||||||
|
@ -1167,8 +1203,17 @@ text\<open>
|
||||||
header delimiting the borders of its representation. Class invariants
|
header delimiting the borders of its representation. Class invariants
|
||||||
on monitors allow for specifying structural properties on document
|
on monitors allow for specifying structural properties on document
|
||||||
sections.
|
sections.
|
||||||
For now, the high-level syntax of invariants is not supported for monitors and you must use
|
For now, the high-level syntax of invariants does not support the checking of
|
||||||
the low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>.\<close>
|
specific monitor behaviors like the one just described and you must use
|
||||||
|
the low-level class invariants (see \<^technical>\<open>sec:low_level_inv\<close>).
|
||||||
|
|
||||||
|
Low-level invariants checking can be set up to be triggered
|
||||||
|
when opening a monitor, when closing a monitor, or both
|
||||||
|
by using the \<^ML>\<open>DOF_core.update_class_eager_invariant\<close>,
|
||||||
|
\<^ML>\<open>DOF_core.update_class_lazy_invariant\<close>, or \<^ML>\<open>DOF_core.update_class_invariant\<close> commands
|
||||||
|
respectively, to add the invariants to the theory context
|
||||||
|
(See \<^technical>\<open>sec:low_level_inv\<close> for an example).
|
||||||
|
\<close>
|
||||||
|
|
||||||
|
|
||||||
subsection*["sec:low_level_inv"::technical]\<open>ODL Low-level Class Invariants\<close>
|
subsection*["sec:low_level_inv"::technical]\<open>ODL Low-level Class Invariants\<close>
|
||||||
|
@ -1185,9 +1230,9 @@ text\<open>
|
||||||
fun check_result_inv oid {is_monitor:bool} ctxt =
|
fun check_result_inv oid {is_monitor:bool} ctxt =
|
||||||
let
|
let
|
||||||
val kind =
|
val kind =
|
||||||
AttributeAccess.compute_attr_access ctxt "evidence" oid <@>{here} <@>{here}
|
ISA_core.compute_attr_access ctxt "evidence" oid NONE <@>{here}
|
||||||
val prop =
|
val prop =
|
||||||
AttributeAccess.compute_attr_access ctxt "property" oid <@>{here} <@>{here}
|
ISA_core.compute_attr_access ctxt "property" oid NONE <@>{here}
|
||||||
val tS = HOLogic.dest_list prop
|
val tS = HOLogic.dest_list prop
|
||||||
in case kind of
|
in case kind of
|
||||||
<@>{term "proof"} => if not(null tS) then true
|
<@>{term "proof"} => if not(null tS) then true
|
||||||
|
@ -1236,10 +1281,11 @@ value*\<open>filter (\<lambda>\<sigma>. the (text_section.level \<sigma>) > 1)
|
||||||
|
|
||||||
section*[infrastructure::technical]\<open>Technical Infrastructure\<close>
|
section*[infrastructure::technical]\<open>Technical Infrastructure\<close>
|
||||||
|
|
||||||
text\<open>
|
text\<open>
|
||||||
|
%FIXME update story concerning "list"
|
||||||
The list of fully supported (\<^ie>, supporting both interactive ontological modeling and
|
The list of fully supported (\<^ie>, supporting both interactive ontological modeling and
|
||||||
document generation) ontologies and the list of supported document templates can be
|
document generation) ontologies and the list of supported document templates can be
|
||||||
obtained by calling \<^boxed_bash>\<open>isabelle mkroot_DOF -h\<close> (see \<^technical>\<open>first_project\<close>).
|
obtained by calling \<^boxed_bash>\<open>isabelle dof_mkroot -h\<close> (see \<^technical>\<open>first_project\<close>).
|
||||||
Note that the postfix \<^boxed_bash>\<open>-UNSUPPORTED\<close> denotes experimental ontologies or templates
|
Note that the postfix \<^boxed_bash>\<open>-UNSUPPORTED\<close> denotes experimental ontologies or templates
|
||||||
for which further manual setup steps might be required or that are not fully tested. Also note
|
for which further manual setup steps might be required or that are not fully tested. Also note
|
||||||
that the \<^LaTeX>-class files required by the templates need to be already installed on your
|
that the \<^LaTeX>-class files required by the templates need to be already installed on your
|
||||||
|
@ -1323,6 +1369,7 @@ text\<open>
|
||||||
text\<open>
|
text\<open>
|
||||||
Developing a new document template ``\<^boxed_bash>\<open>bar\<close>'' requires the following steps:
|
Developing a new document template ``\<^boxed_bash>\<open>bar\<close>'' requires the following steps:
|
||||||
\<^item> develop a new \<^LaTeX>-template \<^boxed_bash>\<open>src/document-templates/root-bar.tex\<close>
|
\<^item> develop a new \<^LaTeX>-template \<^boxed_bash>\<open>src/document-templates/root-bar.tex\<close>
|
||||||
|
\<^item> add a suitable \<^theory_text>\<open>define_template\<close> command to theory \<^theory>\<open>Isabelle_DOF.Isa_DOF\<close>.
|
||||||
\<^item> activation of the new document template by executing the \<^boxed_bash>\<open>install\<close> script. You can skip the lengthy
|
\<^item> activation of the new document template by executing the \<^boxed_bash>\<open>install\<close> script. You can skip the lengthy
|
||||||
checks for the AFP entries and the installation of the Isabelle patch by using the
|
checks for the AFP entries and the installation of the Isabelle patch by using the
|
||||||
\<^boxed_bash>\<open>--skip-afp\<close> option:
|
\<^boxed_bash>\<open>--skip-afp\<close> option:
|
||||||
|
@ -1363,14 +1410,7 @@ text\<open>
|
||||||
\usepackage{isabelle,isabellesym,amssymb} % Required (by Isabelle)
|
\usepackage{isabelle,isabellesym,amssymb} % Required (by Isabelle)
|
||||||
\usepackage{amsmath} % Used by some ontologies
|
\usepackage{amsmath} % Used by some ontologies
|
||||||
\bibliographystyle{abbrv}
|
\bibliographystyle{abbrv}
|
||||||
\IfFileExists{DOF-core.sty}{}{ % Required by Isabelle/DOF
|
\input{dof-common} % setup shared between all Isabelle/DOF templates
|
||||||
\PackageError{DOF-core}{The doëëcument preparation
|
|
||||||
requires the Isabelle/DOF framework.}{For further help, see
|
|
||||||
ë\url{\dofurl}ë
|
|
||||||
}
|
|
||||||
\input{ontologies} % This will include the document specific
|
|
||||||
% ontologies from isadof.cfg
|
|
||||||
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}
|
|
||||||
\usepackage{graphicx} % Required for images.
|
\usepackage{graphicx} % Required for images.
|
||||||
\usepackage[caption]{subfig}
|
\usepackage[caption]{subfig}
|
||||||
\usepackage[size=footnotesize]{caption}
|
\usepackage[size=footnotesize]{caption}
|
||||||
|
@ -1396,7 +1436,7 @@ text\<open>
|
||||||
|
|
||||||
subsubsection\<open>Getting Started\<close>
|
subsubsection\<open>Getting Started\<close>
|
||||||
text\<open>
|
text\<open>
|
||||||
In general, we recommend creating a test project (\<^eg>, using \<^boxed_bash>\<open>isabelle mkroot_DOF\<close>)
|
In general, we recommend creating a test project (\<^eg>, using \<^boxed_bash>\<open>isabelle dof_mkroot\<close>)
|
||||||
to develop new document templates or ontology representations. The default setup of the \<^isadof>
|
to develop new document templates or ontology representations. The default setup of the \<^isadof>
|
||||||
build system generated a \<^path>\<open>output/document\<close> directory with a self-contained \<^LaTeX>-setup. In
|
build system generated a \<^path>\<open>output/document\<close> directory with a self-contained \<^LaTeX>-setup. In
|
||||||
this directory, you can directly use \<^LaTeX> on the main file, called \<^path>\<open>root.tex\<close>:
|
this directory, you can directly use \<^LaTeX> on the main file, called \<^path>\<open>root.tex\<close>:
|
||||||
|
|
|
@ -53,7 +53,6 @@ text\<open>
|
||||||
\<open>structure Data = Generic_Data
|
\<open>structure Data = Generic_Data
|
||||||
( type T = docobj_tab * docclass_tab * ...
|
( type T = docobj_tab * docclass_tab * ...
|
||||||
val empty = (initial_docobj_tab, initial_docclass_tab, ...)
|
val empty = (initial_docobj_tab, initial_docclass_tab, ...)
|
||||||
val extend = I
|
|
||||||
fun merge((d1,c1,...),(d2,c2,...)) = (merge_docobj_tab (d1,d2,...),
|
fun merge((d1,c1,...),(d2,c2,...)) = (merge_docobj_tab (d1,d2,...),
|
||||||
merge_docclass_tab(c1,c2,...))
|
merge_docclass_tab(c1,c2,...))
|
||||||
);\<close>}
|
);\<close>}
|
||||||
|
|
2
examples/technical_report/Isabelle_DOF-Manual/Isabelle_DOF-Manual.thy
Executable file → Normal file
|
@ -15,6 +15,8 @@
|
||||||
theory "Isabelle_DOF-Manual"
|
theory "Isabelle_DOF-Manual"
|
||||||
imports "05_Implementation"
|
imports "05_Implementation"
|
||||||
begin
|
begin
|
||||||
|
use_template "scrreprt-modern"
|
||||||
|
use_ontology "technical_report" and "CENELEC_50128"
|
||||||
close_monitor*[this]
|
close_monitor*[this]
|
||||||
check_doc_global
|
check_doc_global
|
||||||
text\<open>Resulting trace in \<^verbatim>\<open>doc_item\<close> ''this'': \<close>
|
text\<open>Resulting trace in \<^verbatim>\<open>doc_item\<close> ''this'': \<close>
|
||||||
|
|
|
@ -1,10 +1,8 @@
|
||||||
session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
|
session "Isabelle_DOF-Manual" = "Isabelle_DOF" +
|
||||||
options [document = pdf, document_output = "output", document_build = dof,
|
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
|
||||||
quick_and_dirty = true]
|
|
||||||
theories
|
theories
|
||||||
"Isabelle_DOF-Manual"
|
"Isabelle_DOF-Manual"
|
||||||
document_files
|
document_files
|
||||||
"isadof.cfg"
|
|
||||||
"root.bib"
|
"root.bib"
|
||||||
"root.mst"
|
"root.mst"
|
||||||
"preamble.tex"
|
"preamble.tex"
|
||||||
|
|
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-II-bgnd1.png
Executable file → Normal file
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 10 KiB |
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-III-bgnd-text_section.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 10 KiB |
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-IV-jumpInDocCLass.png
Executable file → Normal file
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 17 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-Intro.png
Executable file → Normal file
Before Width: | Height: | Size: 85 KiB After Width: | Height: | Size: 85 KiB |
BIN
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-V-attribute.png
Executable file → Normal file
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 13 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/Dogfood-figures.png
Executable file → Normal file
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/Isabelle_DOF-logo.pdf
Executable file → Normal file
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/MyCommentedIsabelle.png
Executable file → Normal file
Before Width: | Height: | Size: 162 KiB After Width: | Height: | Size: 162 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/antiquotations-PIDE.png
Executable file → Normal file
Before Width: | Height: | Size: 43 KiB After Width: | Height: | Size: 43 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/cicm2018-combined.png
Executable file → Normal file
Before Width: | Height: | Size: 214 KiB After Width: | Height: | Size: 214 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/cicm2018-dof.png
Executable file → Normal file
Before Width: | Height: | Size: 135 KiB After Width: | Height: | Size: 135 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/cicm2018-pdf.png
Executable file → Normal file
Before Width: | Height: | Size: 73 KiB After Width: | Height: | Size: 73 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/definition-use-CSP-pdf.png
Executable file → Normal file
Before Width: | Height: | Size: 70 KiB After Width: | Height: | Size: 70 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/document-hierarchy.pdf
Executable file → Normal file
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/document-hierarchy.svg
Executable file → Normal file
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 36 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/header_CSP_pdf.png
Executable file → Normal file
Before Width: | Height: | Size: 203 KiB After Width: | Height: | Size: 203 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/isabelle-architecture.pdf
Executable file → Normal file
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/isabelle-architecture.svg
Executable file → Normal file
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 57 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/isadof.png
Executable file → Normal file
Before Width: | Height: | Size: 81 KiB After Width: | Height: | Size: 81 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/srac-as-es-application.png
Executable file → Normal file
Before Width: | Height: | Size: 37 KiB After Width: | Height: | Size: 37 KiB |
0
examples/technical_report/Isabelle_DOF-Manual/document/figures/srac-definition.png
Executable file → Normal file
Before Width: | Height: | Size: 26 KiB After Width: | Height: | Size: 26 KiB |
|
@ -1,4 +0,0 @@
|
||||||
Template: scrreprt-modern
|
|
||||||
Ontology: technical_report
|
|
||||||
Ontology: cenelec_50128
|
|
||||||
|
|
0
examples/technical_report/Isabelle_DOF-Manual/document/lstisadof-manual.sty
Executable file → Normal file
|
@ -451,7 +451,7 @@
|
||||||
journal = {Archive of Formal Proofs},
|
journal = {Archive of Formal Proofs},
|
||||||
month = may,
|
month = may,
|
||||||
year = 2010,
|
year = 2010,
|
||||||
note = {\url{http://isa-afp.org/entries/Regular-Sets.html}, Formal
|
note = {\url{https://isa-afp.org/entries/Regular-Sets.html}, Formal
|
||||||
proof development},
|
proof development},
|
||||||
issn = {2150-914x}
|
issn = {2150-914x}
|
||||||
}
|
}
|
||||||
|
@ -462,7 +462,7 @@
|
||||||
journal = {Archive of Formal Proofs},
|
journal = {Archive of Formal Proofs},
|
||||||
month = mar,
|
month = mar,
|
||||||
year = 2004,
|
year = 2004,
|
||||||
note = {\url{http://isa-afp.org/entries/Functional-Automata.html},
|
note = {\url{https://isa-afp.org/entries/Functional-Automata.html},
|
||||||
Formal proof development},
|
Formal proof development},
|
||||||
issn = {2150-914x}
|
issn = {2150-914x}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,11 +1,9 @@
|
||||||
session "TR_MyCommentedIsabelle" = "Isabelle_DOF" +
|
session "TR_MyCommentedIsabelle" = "Isabelle_DOF" +
|
||||||
options [document = pdf, document_output = "output", document_build = dof,
|
options [document = pdf, document_output = "output", document_build = dof, quick_and_dirty = true]
|
||||||
quick_and_dirty = true]
|
|
||||||
theories
|
theories
|
||||||
"TR_MyCommentedIsabelle"
|
"TR_MyCommentedIsabelle"
|
||||||
document_files
|
document_files
|
||||||
"root.bib"
|
"root.bib"
|
||||||
"isadof.cfg"
|
|
||||||
"preamble.tex"
|
"preamble.tex"
|
||||||
"prooftree.sty"
|
"prooftree.sty"
|
||||||
"figures/markup-demo.png"
|
"figures/markup-demo.png"
|
||||||
|
|
30
examples/technical_report/TR_my_commented_isabelle/TR_MyCommentedIsabelle.thy
Executable file → Normal file
|
@ -1,7 +1,7 @@
|
||||||
(*************************************************************************
|
(*************************************************************************
|
||||||
* Copyright (C)
|
* Copyright (C)
|
||||||
* 2019-2021 The University of Exeter
|
* 2019-2022 The University of Exeter
|
||||||
* 2018-2021 The University of Paris-Saclay
|
* 2018-2022 The University of Paris-Saclay
|
||||||
* 2018 The University of Sheffield
|
* 2018 The University of Sheffield
|
||||||
*
|
*
|
||||||
* License:
|
* License:
|
||||||
|
@ -14,16 +14,18 @@
|
||||||
(*<*)
|
(*<*)
|
||||||
theory TR_MyCommentedIsabelle
|
theory TR_MyCommentedIsabelle
|
||||||
imports "Isabelle_DOF.technical_report"
|
imports "Isabelle_DOF.technical_report"
|
||||||
|
|
||||||
begin
|
begin
|
||||||
|
|
||||||
|
use_template "scrreprt"
|
||||||
|
use_ontology "technical_report"
|
||||||
|
|
||||||
define_shortcut* isabelle \<rightleftharpoons> \<open>Isabelle/HOL\<close>
|
define_shortcut* isabelle \<rightleftharpoons> \<open>Isabelle/HOL\<close>
|
||||||
|
|
||||||
open_monitor*[this::report]
|
open_monitor*[this::report]
|
||||||
(*>*)
|
(*>*)
|
||||||
|
|
||||||
title*[tit::title]\<open>My Personal, Eclectic Isabelle Programming Manual\<close>
|
title*[tit::title]\<open>My Personal, Eclectic Isabelle Programming Manual\<close>
|
||||||
subtitle*[stit::subtitle]\<open>Version : Isabelle 2020\<close>
|
subtitle*[stit::subtitle]\<open>Version : Isabelle 2022\<close>
|
||||||
text*[bu::author,
|
text*[bu::author,
|
||||||
email = "''wolff@lri.fr''",
|
email = "''wolff@lri.fr''",
|
||||||
affiliation = "\<open>Université Paris-Saclay, LRI, France\<close>"]\<open>Burkhart Wolff\<close>
|
affiliation = "\<open>Université Paris-Saclay, LRI, France\<close>"]\<open>Burkhart Wolff\<close>
|
||||||
|
@ -62,7 +64,7 @@ text\<open> \<^vs>\<open>-0.5cm\<close>
|
||||||
Isabelle and Isabelle/HOL, a complementary text to the unfortunately somewhat outdated
|
Isabelle and Isabelle/HOL, a complementary text to the unfortunately somewhat outdated
|
||||||
"The Isabelle Cookbook" in \<^url>\<open>https://nms.kcl.ac.uk/christian.urban/Cookbook/\<close>.
|
"The Isabelle Cookbook" in \<^url>\<open>https://nms.kcl.ac.uk/christian.urban/Cookbook/\<close>.
|
||||||
The present text is also complementary to the current version of
|
The present text is also complementary to the current version of
|
||||||
\<^url>\<open>https://isabelle.in.tum.de/dist/Isabelle2021/doc/isar-ref.pdf\<close>
|
\<^url>\<open>https://isabelle.in.tum.de/doc/isar-ref.pdf\<close>
|
||||||
"The Isabelle/Isar Implementation" by Makarius Wenzel in that it focusses on subjects
|
"The Isabelle/Isar Implementation" by Makarius Wenzel in that it focusses on subjects
|
||||||
not covered there, or presents alternative explanations for which I believe, based on my
|
not covered there, or presents alternative explanations for which I believe, based on my
|
||||||
experiences with students and Phds, that they are helpful.
|
experiences with students and Phds, that they are helpful.
|
||||||
|
@ -364,7 +366,7 @@ subsection*[t213::example]\<open>Mechanism 2 : Extending the Global Context \<op
|
||||||
|
|
||||||
text\<open>A central mechanism for constructing user-defined data is by the \<^ML_functor>\<open>Generic_Data\<close>-functor.
|
text\<open>A central mechanism for constructing user-defined data is by the \<^ML_functor>\<open>Generic_Data\<close>-functor.
|
||||||
A plugin needing some data \<^verbatim>\<open>T\<close> and providing it with implementations for an
|
A plugin needing some data \<^verbatim>\<open>T\<close> and providing it with implementations for an
|
||||||
\<^verbatim>\<open>empty\<close>, and operations \<^verbatim>\<open>merge\<close> and \<^verbatim>\<open>extend\<close>, can construct a lense with operations
|
\<^verbatim>\<open>empty\<close>, and operation \<^verbatim>\<open>merge\<close>, can construct a lense with operations
|
||||||
\<^verbatim>\<open>get\<close> and \<^verbatim>\<open>put\<close> that attach this data into the generic system context. Rather than using
|
\<^verbatim>\<open>get\<close> and \<^verbatim>\<open>put\<close> that attach this data into the generic system context. Rather than using
|
||||||
unsynchronized SML mutable variables, this is the mechanism to introduce component local
|
unsynchronized SML mutable variables, this is the mechanism to introduce component local
|
||||||
data in Isabelle, which allows to manage this data for the necessary backtrack and synchronization
|
data in Isabelle, which allows to manage this data for the necessary backtrack and synchronization
|
||||||
|
@ -373,14 +375,12 @@ text\<open>A central mechanism for constructing user-defined data is by the \<^M
|
||||||
ML \<open>
|
ML \<open>
|
||||||
datatype X = mt
|
datatype X = mt
|
||||||
val init = mt;
|
val init = mt;
|
||||||
val ext = I
|
|
||||||
fun merge (X,Y) = mt
|
fun merge (X,Y) = mt
|
||||||
|
|
||||||
structure Data = Generic_Data
|
structure Data = Generic_Data
|
||||||
(
|
(
|
||||||
type T = X
|
type T = X
|
||||||
val empty = init
|
val empty = init
|
||||||
val extend = ext
|
|
||||||
val merge = merge
|
val merge = merge
|
||||||
);
|
);
|
||||||
\<close>
|
\<close>
|
||||||
|
@ -1183,7 +1183,7 @@ text\<open> The extensibility of Isabelle as a system framework depends on a num
|
||||||
\<^item> \<^ML>\<open>Toplevel.theory: (theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
\<^item> \<^ML>\<open>Toplevel.theory: (theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||||
adjoins a theory transformer.
|
adjoins a theory transformer.
|
||||||
\<^item> \<^ML>\<open>Toplevel.generic_theory: (generic_theory -> generic_theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
\<^item> \<^ML>\<open>Toplevel.generic_theory: (generic_theory -> generic_theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||||
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.presentation -> Toplevel.transition -> Toplevel.transition\<close>
|
||||||
\<^item> \<^ML>\<open>Toplevel.exit: Toplevel.transition -> Toplevel.transition\<close>
|
\<^item> \<^ML>\<open>Toplevel.exit: Toplevel.transition -> Toplevel.transition\<close>
|
||||||
\<^item> \<^ML>\<open>Toplevel.ignored: Position.T -> Toplevel.transition\<close>
|
\<^item> \<^ML>\<open>Toplevel.ignored: Position.T -> Toplevel.transition\<close>
|
||||||
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
|
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
|
||||||
|
@ -1845,6 +1845,7 @@ text\<open>The second part is much more high-level, and can be found under \<^ML
|
||||||
This is perhaps meant with the fairly cryptic comment:
|
This is perhaps meant with the fairly cryptic comment:
|
||||||
"Quasi-inner syntax based on outer tokens: concrete argument syntax of
|
"Quasi-inner syntax based on outer tokens: concrete argument syntax of
|
||||||
attributes, methods etc." at the beginning of this structure.\<close>
|
attributes, methods etc." at the beginning of this structure.\<close>
|
||||||
|
ML\<open>open Args\<close>
|
||||||
|
|
||||||
text\<open> Some more combinators
|
text\<open> Some more combinators
|
||||||
\<^item>\<^ML>\<open>Args.symbolic : Token.T parser\<close>
|
\<^item>\<^ML>\<open>Args.symbolic : Token.T parser\<close>
|
||||||
|
@ -1865,12 +1866,11 @@ Common Isar Syntax
|
||||||
\<^item>\<^ML>\<open>Args.name_position: (string * Position.T) parser\<close>
|
\<^item>\<^ML>\<open>Args.name_position: (string * Position.T) parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.cartouche_inner_syntax: string parser\<close>
|
\<^item>\<^ML>\<open>Args.cartouche_inner_syntax: string parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.cartouche_input: Input.source parser\<close>
|
\<^item>\<^ML>\<open>Args.cartouche_input: Input.source parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.text_token: Token.T parser \<close>
|
|
||||||
|
|
||||||
|
|
||||||
Common Isar Syntax
|
Common Isar Syntax
|
||||||
\<^item>\<^ML>\<open>Args.text_input: Input.source parser\<close>
|
\<^item>\<^ML>\<open>Parse.embedded_input: Input.source parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.text : string parser\<close>
|
\<^item>\<^ML>\<open>Parse.embedded : string parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.binding : Binding.binding parser\<close>
|
\<^item>\<^ML>\<open>Args.binding : Binding.binding parser\<close>
|
||||||
|
|
||||||
Common Stuff related to Inner Syntax Parsing
|
Common Stuff related to Inner Syntax Parsing
|
||||||
|
@ -1893,8 +1893,7 @@ Common Isar Syntax
|
||||||
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.named_term : (string -> term) -> term parser\<close>
|
\<^item>\<^ML>\<open>Args.named_term : (string -> term) -> term parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.text_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
\<^item>\<^ML>\<open>Args.embedded_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.cartouche_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
|
||||||
\<^item>\<^ML>\<open>Args.typ_abbrev : typ context_parser\<close>
|
\<^item>\<^ML>\<open>Args.typ_abbrev : typ context_parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.typ: typ context_parser\<close>
|
\<^item>\<^ML>\<open>Args.typ: typ context_parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.term: term context_parser\<close>
|
\<^item>\<^ML>\<open>Args.term: term context_parser\<close>
|
||||||
|
@ -1903,8 +1902,6 @@ Common Isar Syntax
|
||||||
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.named_term: (string -> term) -> term parser\<close>
|
\<^item>\<^ML>\<open>Args.named_term: (string -> term) -> term parser\<close>
|
||||||
\<^item>\<^ML>\<open>Args.text_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
|
||||||
\<^item>\<^ML>\<open>Args.cartouche_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
|
||||||
|
|
||||||
Syntax for some major Pure commands in Isar
|
Syntax for some major Pure commands in Isar
|
||||||
\<^item>\<^ML>\<open>Args.prop: term context_parser\<close>
|
\<^item>\<^ML>\<open>Args.prop: term context_parser\<close>
|
||||||
|
@ -2132,7 +2129,6 @@ Since Isabelle2018, an own AST is provided for the LaTeX syntax, analogously to
|
||||||
\<^item>\<^ML>\<open>Latex.string: string -> Latex.text\<close>
|
\<^item>\<^ML>\<open>Latex.string: string -> Latex.text\<close>
|
||||||
\<^item>\<^ML>\<open>Latex.text: string * Position.T -> Latex.text\<close>
|
\<^item>\<^ML>\<open>Latex.text: string * Position.T -> Latex.text\<close>
|
||||||
|
|
||||||
\<^item>\<^ML>\<open>Latex.output_name: string -> string\<close>
|
|
||||||
\<^item>\<^ML>\<open>Latex.output_ascii: string -> string\<close>
|
\<^item>\<^ML>\<open>Latex.output_ascii: string -> string\<close>
|
||||||
\<^item>\<^ML>\<open>Latex.output_symbols: Symbol.symbol list -> string\<close>
|
\<^item>\<^ML>\<open>Latex.output_symbols: Symbol.symbol list -> string\<close>
|
||||||
|
|
||||||
|
|
0
examples/technical_report/TR_my_commented_isabelle/document/figures/document-model.pdf
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/figures/isabelle-architecture.pdf
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/figures/markup-demo.png
Executable file → Normal file
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 13 KiB |
0
examples/technical_report/TR_my_commented_isabelle/document/figures/pure-inferences-I.pdf
Executable file → Normal file
Before Width: | Height: | Size: 91 KiB After Width: | Height: | Size: 91 KiB |
0
examples/technical_report/TR_my_commented_isabelle/document/figures/pure-inferences-II.pdf
Executable file → Normal file
Before Width: | Height: | Size: 31 KiB After Width: | Height: | Size: 31 KiB |
0
examples/technical_report/TR_my_commented_isabelle/document/figures/text-element.pdf
Executable file → Normal file
|
@ -1,2 +0,0 @@
|
||||||
Template: scrreprt
|
|
||||||
Ontology: technical_report
|
|
0
examples/technical_report/TR_my_commented_isabelle/document/preamble.tex
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/prooftree.sty
Executable file → Normal file
0
examples/technical_report/TR_my_commented_isabelle/document/root.bib
Executable file → Normal file
67
install-afp
|
@ -30,10 +30,13 @@
|
||||||
#set -e
|
#set -e
|
||||||
shopt -s nocasematch
|
shopt -s nocasematch
|
||||||
|
|
||||||
|
|
||||||
print_help()
|
print_help()
|
||||||
{
|
{
|
||||||
echo "Usage: isabelle env ./install-afp [OPTION] "
|
echo "Usage: isabelle env ./install-afp [OPTION] "
|
||||||
echo ""
|
echo ""
|
||||||
|
echo "Warning: This tools is deprecated."
|
||||||
|
echo ""
|
||||||
echo "Run ..."
|
echo "Run ..."
|
||||||
echo ""
|
echo ""
|
||||||
echo " --help, -h display this help message"
|
echo " --help, -h display this help message"
|
||||||
|
@ -43,11 +46,29 @@ print_help()
|
||||||
|
|
||||||
exit_error() {
|
exit_error() {
|
||||||
echo ""
|
echo ""
|
||||||
echo " *** Isabelle/DOF installation FAILED, please check the README.md for help ***"
|
echo " *** Local AFP installation FAILED, please check the README.md for help ***"
|
||||||
echo ""
|
echo ""
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
|
confirm_usage() {
|
||||||
|
echo "* From Isabelle2021-1 on, the recommended method for making the whole AFP "
|
||||||
|
echo " available to Isabelle is the isabelle components -u command."
|
||||||
|
echo " For doing so, please follow the instructions at: "
|
||||||
|
echo " https://www.isa-afp.org/help/"
|
||||||
|
echo ""
|
||||||
|
echo " Alternatively, you can continue, on your own risk, to install only"
|
||||||
|
echo " the AFP entries required to run Isabelle/DOF."
|
||||||
|
echo ""
|
||||||
|
read -p " Still continue (y/N)? " -n 1 -r
|
||||||
|
echo
|
||||||
|
if [[ $REPLY =~ ^[Yy]$ ]];
|
||||||
|
then
|
||||||
|
echo " Continuing installation on your OWN risk."
|
||||||
|
else
|
||||||
|
exit_error
|
||||||
|
fi
|
||||||
|
}
|
||||||
check_isabelle_version() {
|
check_isabelle_version() {
|
||||||
echo "* Checking Isabelle version:"
|
echo "* Checking Isabelle version:"
|
||||||
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
|
if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
|
||||||
|
@ -58,8 +79,8 @@ check_isabelle_version() {
|
||||||
echo " version of Isabelle and rerun the install script, providing the"
|
echo " version of Isabelle and rerun the install script, providing the"
|
||||||
echo " the \"isabelle\" command as argument."
|
echo " the \"isabelle\" command as argument."
|
||||||
echo " Isabelle ($ISABELLE_VERSION) can be obtained from:"
|
echo " Isabelle ($ISABELLE_VERSION) can be obtained from:"
|
||||||
echo " $ISABELLE_URL"
|
echo " https://isabelle.in.tum.de/website-$ISABELLE_VERSION/"
|
||||||
echo
|
echo ""
|
||||||
read -p " Still continue (y/N)? " -n 1 -r
|
read -p " Still continue (y/N)? " -n 1 -r
|
||||||
echo
|
echo
|
||||||
if [[ $REPLY =~ ^[Yy]$ ]];
|
if [[ $REPLY =~ ^[Yy]$ ]];
|
||||||
|
@ -76,7 +97,7 @@ check_isabelle_version() {
|
||||||
check_afp_entries() {
|
check_afp_entries() {
|
||||||
echo "* Checking availability of AFP entries:"
|
echo "* Checking availability of AFP entries:"
|
||||||
missing=""
|
missing=""
|
||||||
required="Regular-Sets Functional-Automata"
|
required="Regular-Sets Functional-Automata Physical_Quantities"
|
||||||
for afp in $required; do
|
for afp in $required; do
|
||||||
res=`$ISABELLE_TOOL build -n $afp 2>/dev/null || true`
|
res=`$ISABELLE_TOOL build -n $afp 2>/dev/null || true`
|
||||||
if [ "$res" != "" ]; then
|
if [ "$res" != "" ]; then
|
||||||
|
@ -110,12 +131,6 @@ check_afp_entries() {
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
register(){
|
|
||||||
echo " - Registering Isabelle/DOF"
|
|
||||||
$ISABELLE_TOOL components -u "$PWD"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
while [ $# -gt 0 ]
|
while [ $# -gt 0 ]
|
||||||
do
|
do
|
||||||
case "$1" in
|
case "$1" in
|
||||||
|
@ -128,15 +143,37 @@ do
|
||||||
shift
|
shift
|
||||||
done
|
done
|
||||||
|
|
||||||
|
if [ -z ${ISABELLE_TOOL+x} ];
|
||||||
|
then
|
||||||
|
print_help
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
ACTUAL_ISABELLE_VERSION=`$ISABELLE_TOOL version`
|
ACTUAL_ISABELLE_VERSION=`$ISABELLE_TOOL version`
|
||||||
|
|
||||||
|
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
|
||||||
|
|
||||||
|
if [ ${ISABELLE_VERSION} = "Isabelle" ];
|
||||||
|
then
|
||||||
|
echo "Error: cannot find Isabelle/DOF configuration, please check that you"
|
||||||
|
echo " registered Isabelle/DOF as an Isabelle component, e.g., using"
|
||||||
|
echo " isabelle components -u ."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
AFP_DATE="$($ISABELLE_TOOL dof_param -b afp_version)"
|
||||||
|
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "Isabelle/DOF Installer"
|
echo "Isabelle/DOF AFP Installation Utility"
|
||||||
echo "======================"
|
echo "====================================="
|
||||||
|
confirm_usage
|
||||||
check_isabelle_version
|
check_isabelle_version
|
||||||
check_afp_entries
|
check_afp_entries
|
||||||
register
|
echo "* AFP Installation successful."
|
||||||
echo "* Installation successful. Enjoy Isabelle/DOF, you can build the session"
|
echo " You should now be able to enjoy Isabelle/DOF by building its session"
|
||||||
echo " Isabelle/DOF and all example documents by executing:"
|
echo " and all example documents by executing:"
|
||||||
echo " $ISABELLE_TOOL build -D ."
|
echo " $ISABELLE_TOOL build -D ."
|
||||||
exit 0
|
exit 0
|
||||||
|
|