Compare commits
173 Commits
main
...
ICFEM-2022
Author | SHA1 | Date |
---|---|---|
Burkhart Wolff | 86a70ffbba | |
Burkhart Wolff | 324066afa2 | |
Nicolas Méric | ee647d2822 | |
Nicolas Méric | 92b515730d | |
Nicolas Méric | fb55ed8ded | |
Nicolas Méric | 8b1b7736ee | |
Burkhart Wolff | fbf34f9a35 | |
Idir AIT SADOUNE | 1714703272 | |
Nicolas Méric | 7b0d446724 | |
Nicolas Méric | 0cc17a136e | |
Nicolas Méric | e69808750c | |
Nicolas Méric | d5b195d873 | |
Nicolas Méric | a5796277fd | |
Nicolas Méric | 4f5f4996af | |
Nicolas Méric | 763c0a515b | |
Idir AIT SADOUNE | a803d999c0 | |
Nicolas Méric | e4e8199e33 | |
Nicolas Méric | 7618f8b2c4 | |
Nicolas Méric | 30fc7a306f | |
Nicolas Méric | 3260ae3406 | |
Nicolas Méric | 108be68f19 | |
Nicolas Méric | 0ee58a65e6 | |
Nicolas Méric | f30ff6a967 | |
Nicolas Méric | b448b87395 | |
Nicolas Méric | 57c984d4dc | |
Nicolas Méric | 35b50e419f | |
Burkhart Wolff | 230336f0a4 | |
Burkhart Wolff | 8e056e0e8a | |
Nicolas Méric | 9770f8c332 | |
Nicolas Méric | 8aba7f272a | |
Nicolas Méric | 9b396b6096 | |
Burkhart Wolff | 53333d7096 | |
Burkhart Wolff | 624b0e4af1 | |
Burkhart Wolff | 301d4ac5c2 | |
Burkhart Wolff | 908827e0ea | |
Burkhart Wolff | 5e97dfb6fe | |
Burkhart Wolff | ee3428d384 | |
Burkhart Wolff | c9beb99b7a | |
Idir AIT SADOUNE | aa6d11f73c | |
Idir AIT SADOUNE | 681b8f828d | |
Burkhart Wolff | 6e928f5af6 | |
Burkhart Wolff | c11124966b | |
Burkhart Wolff | 818cb34a0b | |
Burkhart Wolff | c9e2f22c8b | |
Burkhart Wolff | d422e78849 | |
Burkhart Wolff | 346b28f732 | |
Burkhart Wolff | de1354625e | |
Burkhart Wolff | d0bedee42d | |
Nicolas Méric | 46841c4d1b | |
Burkhart Wolff | 081309b5cd | |
Burkhart Wolff | 8722b21c3d | |
Nicolas Méric | 26ddfe5b0c | |
Idir AIT SADOUNE | abeb2f7f5e | |
Idir AIT SADOUNE | acbaa582ef | |
Burkhart Wolff | 76cff5beab | |
Burkhart Wolff | 1c31a11bdc | |
Burkhart Wolff | 11ef56965f | |
Burkhart Wolff | ca418f60b0 | |
Burkhart Wolff | 6f4259cc2c | |
Nicolas Méric | 7201101a6c | |
Burkhart Wolff | 65e6240fa8 | |
Burkhart Wolff | 0496ce3080 | |
Burkhart Wolff | 4f6ac66167 | |
Burkhart Wolff | 74ba9aa892 | |
Nicolas Méric | d6468274d7 | |
Nicolas Méric | 1c550a962a | |
Nicolas Méric | 4a5523ac17 | |
Nicolas Méric | 7f02cbbde4 | |
Nicolas Méric | e125775350 | |
Nicolas Méric | 7bd1b61ddd | |
Nicolas Méric | b3fd073b38 | |
Nicolas Méric | 8f2e194501 | |
Nicolas Méric | 258fcc2f8e | |
Nicolas Méric | d95a5aa2a8 | |
Nicolas Méric | f96db62396 | |
Burkhart Wolff | 913bf49b3f | |
Burkhart Wolff | 51b3e74c36 | |
Burkhart Wolff | 6863995671 | |
Burkhart Wolff | 01c196086b | |
Burkhart Wolff | c2e39edd99 | |
Burkhart Wolff | 62c816781e | |
Nicolas Méric | 930630e368 | |
Nicolas Méric | f681ab54a7 | |
Nicolas Méric | b10cb9d54d | |
Nicolas Méric | eea88bccfd | |
Nicolas Méric | 2d33ad814c | |
Nicolas Méric | 9711b079a4 | |
Nicolas Méric | 12588fa6e9 | |
Nicolas Méric | 8b7162d104 | |
Burkhart Wolff | a4708957d5 | |
Burkhart Wolff | f1e01a5b86 | |
Burkhart Wolff | 27d90fc87e | |
Burkhart Wolff | f93e62d54b | |
Burkhart Wolff | 676edd7d54 | |
Burkhart Wolff | 7acd0af628 | |
Nicolas Méric | 4cfb33856b | |
Idir AIT SADOUNE | 31de87dfca | |
Nicolas Méric | f99be4d3a3 | |
Nicolas Méric | 00511848ed | |
Nicolas Méric | d96d6124ef | |
Nicolas Méric | 1a23140534 | |
Nicolas Méric | 4b05c9a9a1 | |
Nicolas Méric | 1fca5a37b7 | |
Nicolas Méric | e511049ba8 | |
Nicolas Méric | 4edcb1acd1 | |
Nicolas Méric | 8bfe06db9e | |
Idir AIT SADOUNE | 28fecba621 | |
Idir AIT SADOUNE | 9b9a4dcfb0 | |
Nicolas Méric | f2f6cfad98 | |
Nicolas Méric | 41e8c4975a | |
Idir AIT SADOUNE | ca21aa81f4 | |
Idir AIT SADOUNE | f674ca8ca3 | |
Idir AIT SADOUNE | 7cd7a4d15d | |
Idir AIT SADOUNE | b9c72124c0 | |
Burkhart Wolff | fe728ef9af | |
Burkhart Wolff | c2fa80953a | |
Burkhart Wolff | f180a87fbf | |
Burkhart Wolff | 3226fdf0c8 | |
Idir AIT SADOUNE | f35a7eb5bd | |
Nicolas Méric | 34a57b2c9f | |
Burkhart Wolff | 17c6e87b8d | |
Idir AIT SADOUNE | 861986a443 | |
Idir AIT SADOUNE | 57a32ce39d | |
Nicolas Méric | 9d3b701c27 | |
Idir AIT SADOUNE | 893e239ce5 | |
Idir AIT SADOUNE | d4f8ee344b | |
Nicolas Méric | 46450e951c | |
Nicolas Méric | 307bb4e3d4 | |
Nicolas Méric | 1051e2cd7b | |
Nicolas Méric | c914b201ee | |
Nicolas Méric | b2aac7288d | |
Nicolas Méric | 94baf69f25 | |
Nicolas Méric | 33d991a1c7 | |
Nicolas Méric | 36276fc3b4 | |
Nicolas Méric | 68417e01d3 | |
Nicolas Méric | 30793f5c51 | |
Nicolas Méric | 35a117a361 | |
Nicolas Méric | 09129bfbf8 | |
Idir AIT SADOUNE | 8663703d53 | |
Nicolas Méric | 2e645ed5ff | |
Burkhart Wolff | 092d552ef8 | |
Nicolas Méric | 927f0446a0 | |
Idir AIT SADOUNE | b0a29ac00d | |
Idir AIT SADOUNE | 4b43756eb3 | |
Idir AIT SADOUNE | 4936a8142e | |
Nicolas Méric | 7033335e3f | |
Nicolas Méric | a2f3057545 | |
Burkhart Wolff | e6721b548d | |
Burkhart Wolff | d319ab2555 | |
Nicolas Méric | 502f5c5cd2 | |
Nicolas Méric | 7ac669e52e | |
Nicolas Méric | d9f2d5c0c4 | |
Burkhart Wolff | ae96c7cef0 | |
Burkhart Wolff | 9d9fd03b72 | |
Burkhart Wolff | b243f30252 | |
Burkhart Wolff | 05b896291b | |
Burkhart Wolff | cc151291f6 | |
Burkhart Wolff | 3e06e659b6 | |
Burkhart Wolff | eaef236dcc | |
Burkhart Wolff | b35c774d27 | |
Burkhart Wolff | c5cdf5f826 | |
Nicolas Méric | a38d13198c | |
Idir AIT SADOUNE | f8c125bcff | |
Burkhart Wolff | 9cd021d7fa | |
Burkhart Wolff | 9d7fc957c6 | |
Burkhart Wolff | b1fb509d8b | |
Burkhart Wolff | 63145e0d3a | |
Burkhart Wolff | be0352cab7 | |
Nicolas Méric | 6d42f122b1 | |
Burkhart Wolff | e53984feea | |
Burkhart Wolff | d7c7a98138 | |
Idir AIT SADOUNE | 8629dda85e | |
Nicolas Méric | 688e823463 |
|
@ -1,2 +0,0 @@
|
|||
core.autocrlf false
|
||||
core.eol lf
|
|
@ -2,4 +2,3 @@ output
|
|||
.afp
|
||||
*~
|
||||
*#
|
||||
Isabelle_DOF-Unit-Tests/latex_test/
|
||||
|
|
|
@ -15,4 +15,3 @@ It may also contain additional tools and script that are useful for preparing a
|
|||
* pdflatex
|
||||
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/)
|
||||
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)
|
||||
* [Isabelle_DOF-Unreleased_Isabelle2022.tar.xz](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/Isabelle_DOF-Unreleased_Isabelle2022.tar.xz)
|
||||
|
|
|
@ -1,38 +1,20 @@
|
|||
pipeline:
|
||||
build:
|
||||
image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle2023:latest
|
||||
pull: true
|
||||
image: docker.io/logicalhacking/isabelle2021-1
|
||||
commands:
|
||||
- hg log --limit 2 /root/isabelle
|
||||
- ./.woodpecker/check_dangling_theories
|
||||
- ./.woodpecker/check_external_file_refs
|
||||
- ./.woodpecker/check_quick_and_dirty
|
||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||
- mkdir -p $ARTIFACT_DIR
|
||||
- export `isabelle getenv ISABELLE_HOME_USER`
|
||||
- mkdir -p $ISABELLE_HOME_USER/etc
|
||||
- echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
|
||||
- isabelle build -x HOL-Proofs -x Isabelle_DOF-Proofs -D . -o browser_info
|
||||
- if [ "$LATEX" = "lualatex" ]; then isabelle build -o 'timeout_scale=2' -D . -o browser_info; else echo "Skipping Isabelle_DOF-Proofs for pdflatex build."; fi
|
||||
- find . -name 'root.tex' -prune -o -name 'output' -type f | xargs latexmk -$LATEX -cd -quiet -Werror
|
||||
- isabelle components -u .
|
||||
- isabelle dof_mkroot -q DOF_test
|
||||
- isabelle components -u `pwd`
|
||||
- isabelle build -D . -o browser_info
|
||||
- isabelle mkroot_DOF DOF_test
|
||||
- isabelle build -D DOF_test
|
||||
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
|
||||
- cd $ARTIFACT_DIR
|
||||
- cd ../..
|
||||
- ln -s * latest
|
||||
archive:
|
||||
image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle2023:latest
|
||||
commands:
|
||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||
- mkdir -p $ARTIFACT_DIR
|
||||
- export ISABELLE_VERSION=`isabelle version`
|
||||
- ./.woodpecker/mk_release -d
|
||||
- cp Isabelle_DOF-Unreleased_$ISABELLE_VERSION.tar.xz $ARTIFACT_DIR/../
|
||||
when:
|
||||
matrix:
|
||||
LATEX: lualatex
|
||||
deploy:
|
||||
image: docker.io/drillster/drone-rsync
|
||||
settings:
|
||||
|
@ -45,7 +27,7 @@ pipeline:
|
|||
from_secret: artifacts_ssh
|
||||
user: artifacts
|
||||
notify:
|
||||
image: docker.io/drillster/drone-email
|
||||
image: drillster/drone-email
|
||||
settings:
|
||||
host: smtp.0x5f.org
|
||||
username: woodpecker
|
||||
|
@ -53,9 +35,9 @@ pipeline:
|
|||
from_secret: email
|
||||
from: ci@logicalhacking.com
|
||||
when:
|
||||
status: [ failure ]
|
||||
status: [ changed, failure ]
|
||||
|
||||
matrix:
|
||||
LATEX:
|
||||
- lualatex
|
||||
- pdflatex
|
||||
- lualatex
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
failuremsg="Error"
|
||||
failurecode=1
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
--warning|-w)
|
||||
failuremsg="Warning"
|
||||
failurecode=0;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
echo "Checking for theories that are not part of an Isabelle session:"
|
||||
echo "==============================================================="
|
||||
|
||||
PWD=`pwd`
|
||||
TMPDIR=`mktemp -d`
|
||||
isabelle build -D . -l -n | grep $PWD | sed -e "s| *${PWD}/||" | sort -u | grep thy$ > ${TMPDIR}/sessions-thy-files.txt
|
||||
find * -type f | sort -u | grep thy$ > ${TMPDIR}/actual-thy-files.txt
|
||||
thylist=`comm -13 ${TMPDIR}/sessions-thy-files.txt ${TMPDIR}/actual-thy-files.txt`
|
||||
if [ -z "$thylist" ] ; then
|
||||
echo " * Success: No dangling theories found."
|
||||
exit 0
|
||||
else
|
||||
echo -e "$thylist"
|
||||
echo "$failuremsg: Dangling theories found (see list above)!"
|
||||
exit $failurecode
|
||||
fi
|
|
@ -1,45 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
|
||||
|
||||
failuremsg="Error"
|
||||
failurecode=1
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
--warning|-w)
|
||||
failuremsg="Warning"
|
||||
failurecode=0;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
DIRREGEXP="\\.\\./"
|
||||
|
||||
echo "Checking for references pointing outside of session directory:"
|
||||
echo "=============================================================="
|
||||
|
||||
REGEXP=$DIRREGEXP
|
||||
DIR=$DIRMATCH
|
||||
failed=0
|
||||
for i in $(seq 1 10); do
|
||||
FILES=`find * -mindepth $((i-1)) -maxdepth $i -type f | xargs`
|
||||
if [ -n "$FILES" ]; then
|
||||
grep -s ${REGEXP} ${FILES}
|
||||
exit=$?
|
||||
if [ "$exit" -eq 0 ] ; then
|
||||
failed=1
|
||||
fi
|
||||
fi
|
||||
REGEXP="${DIRREGEXP}${REGEXP}"
|
||||
done
|
||||
|
||||
|
||||
if [ "$failed" -ne 0 ] ; then
|
||||
echo "$failuremsg: Forbidden reference to files outside of their session directory!"
|
||||
exit $failurecode
|
||||
fi
|
||||
|
||||
echo " * Success: No relative references to files outside of their session directory found."
|
||||
exit 0
|
|
@ -1,30 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
failuremsg="Error"
|
||||
failurecode=1
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
--warning|-w)
|
||||
failuremsg="Warning"
|
||||
failurecode=0;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
echo "Checking for sessions with quick_and_dirty mode enabled:"
|
||||
echo "========================================================"
|
||||
|
||||
rootlist=`find -name 'ROOT' -exec grep -l 'quick_and_dirty *= *true' {} \;`
|
||||
|
||||
if [ -z "$rootlist" ] ; then
|
||||
echo " * Success: No sessions with quick_and_dirty mode enabled found."
|
||||
exit 0
|
||||
else
|
||||
echo -e "$rootlist"
|
||||
echo "$failuremsg: Sessions with quick_and_dirty mode enabled found (see list above)!"
|
||||
exit $failurecode
|
||||
fi
|
|
@ -46,9 +46,7 @@ print_help()
|
|||
echo " --tag tag, -t tag use tag for release archive"
|
||||
echo " (default: use master branch)"
|
||||
echo " --publish, -p publish generated artefact"
|
||||
echo " (default: $PUBLISH)"
|
||||
echo " --quick-and-dirty, -d only build required artifacts, no complete test"
|
||||
echo " (default: $DIRTY)"
|
||||
echo " (use master: $PUBLISH)"
|
||||
}
|
||||
|
||||
check_isabelle_version() {
|
||||
|
@ -70,7 +68,7 @@ clone_repo()
|
|||
echo " * Switching to tag $TAG"
|
||||
(cd $ISADOF_WORK_DIR && git checkout $TAG)
|
||||
else
|
||||
echo " * No tag specified, using master branch"
|
||||
echo " * Not tag specified, using master branch"
|
||||
fi
|
||||
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/etc/settings
|
||||
|
||||
|
@ -80,40 +78,19 @@ build_and_install_manuals()
|
|||
{
|
||||
echo "* Building manual"
|
||||
|
||||
if [ "$DIRTY" = "true" ]; then
|
||||
if [ -z ${ARTIFACT_DIR+x} ]; then
|
||||
echo " * Quick and Dirty Mode (local build)"
|
||||
$ISABELLE build -d . Isabelle_DOF Isabelle_DOF-Example-I
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
cp Isabelle_DOF-Example-I/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF/output/
|
||||
cp Isabelle_DOF/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF/output/;
|
||||
else
|
||||
echo " * Quick and Dirty Mode (running on CI)"
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
cp $ARTIFACT_DIR/browser_info/AFP/Isabelle_DOF-Example-I/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF/output/
|
||||
cp $ARTIFACT_DIR/browser_info/AFP/Isabelle_DOF/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF/output/;
|
||||
fi
|
||||
else
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||
fi
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||
|
||||
mkdir -p $ISADOF_WORK_DIR/doc
|
||||
echo "Isabelle/DOF Manuals!" > $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
cp $ISADOF_WORK_DIR/Isabelle_DOF/output/document.pdf \
|
||||
cp $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/doc/Isabelle_DOF-Manual.pdf
|
||||
echo " Isabelle_DOF-Manual User and Implementation Manual for Isabelle/DOF" >> $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
cp $ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/doc/Isabelle_DOF-Example-I.pdf
|
||||
echo " Isabelle_DOF-Example-I Example academic paper" >> $ISADOF_WORK_DIR/doc/Contents
|
||||
cp $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/doc/2018-cicm-isabelle_dof-applications.pdf
|
||||
echo " 2018-cicm-isabelle_dof-applications Example academic paper" >> $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
find $ISADOF_WORK_DIR -type d -name "output" -exec rm -rf {} \; &> /dev/null || true
|
||||
rm -rf $ISADOF_WORK_DIR/.git* $ISADOF_WORK_DIR/.woodpecker $ISADOF_WORK_DIR/.afp
|
||||
|
@ -127,6 +104,7 @@ create_archive()
|
|||
(mv $ISADOF_WORK_DIR $ISADOF_DIR)
|
||||
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
|
||||
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
|
||||
rm -rf $BUILD_DIR
|
||||
}
|
||||
|
||||
sign_archive()
|
||||
|
@ -143,12 +121,14 @@ publish_archive()
|
|||
ssh 0x5f.org chmod go+u-w -R www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR
|
||||
}
|
||||
|
||||
|
||||
ISABELLE=`which isabelle`
|
||||
USE_TAG="false"
|
||||
SIGN="false"
|
||||
PUBLISH="false"
|
||||
DIRTY="false"
|
||||
BUILD_DIR=`mktemp -d`
|
||||
ISABELLE_HOME_USER=`mktemp -d`
|
||||
export ISABELLE_HOME_USER
|
||||
ISADOF_WORK_DIR="$BUILD_DIR/Isabelle_DOF"
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
|
@ -164,8 +144,6 @@ do
|
|||
SIGN="true";;
|
||||
--publish|-p)
|
||||
PUBLISH="true";;
|
||||
--quick-and-dirty|-d)
|
||||
DIRTY="true";;
|
||||
--help|-h)
|
||||
print_help
|
||||
exit 0;;
|
||||
|
@ -178,24 +156,14 @@ done
|
|||
|
||||
clone_repo
|
||||
|
||||
ISADOF_MAIN_DIR=`pwd`
|
||||
$ISABELLE components -x `pwd`
|
||||
$ISABELLE components -u $ISADOF_WORK_DIR
|
||||
|
||||
if [ "$DIRTY" = "true" ]; then
|
||||
echo "Running in Quick and Dirty mode!"
|
||||
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||
else
|
||||
$ISABELLE components -x $ISADOF_MAIN_DIR
|
||||
$ISABELLE components -u $ISADOF_WORK_DIR
|
||||
fi
|
||||
|
||||
VARS=`$ISABELLE getenv ISABELLE_TOOL`
|
||||
VARS=`$ISABELLE getenv ISABELLE_VERSION DOF_VERSION ISABELLE_HOME_USER`
|
||||
for i in $VARS; do
|
||||
export "$i"
|
||||
done
|
||||
|
||||
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
|
||||
DOF_VERSION="$($ISABELLE_TOOL dof_param -b dof_version)"
|
||||
|
||||
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
|
||||
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
|
||||
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
||||
|
@ -203,11 +171,6 @@ ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
|||
check_isabelle_version
|
||||
build_and_install_manuals
|
||||
|
||||
if [ "$DIRTY" != "true" ]; then
|
||||
$ISABELLE components -x $ISADOF_WORK_DIR
|
||||
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||
fi
|
||||
|
||||
create_archive
|
||||
|
||||
if [ "$SIGN" = "true" ]; then
|
||||
|
@ -218,5 +181,10 @@ if [ "$PUBLISH" = "true" ]; then
|
|||
publish_archive
|
||||
fi
|
||||
|
||||
rm -rf $BUILD_DIR
|
||||
$ISABELLE components -x $ISADOF_WORK_DIR
|
||||
$ISABELLE components -u `pwd`
|
||||
|
||||
rm -rf ISABELLE_HOME_USER
|
||||
rm -rf ISADOF_WORK_DIR
|
||||
|
||||
exit 0
|
||||
|
|
34
CHANGELOG.md
34
CHANGELOG.md
|
@ -5,30 +5,23 @@ All notable changes to this project will be documented in this file.
|
|||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased]
|
||||
## Unreleased
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
- removed explicit use of build script. Requires removing "build" script entry
|
||||
from ROOT files.
|
||||
- lipics support is now official, requires document option "document_comment_latex=true"
|
||||
in the ROOT file.
|
||||
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
||||
"isabelle components" command. The installation script is now only an convenience
|
||||
means for installing the required AFP entries.
|
||||
|
||||
- Updated Isabelle version to Isabelle 2023
|
||||
## 1.2.0 - 2022-03-26
|
||||
|
||||
## [1.3.0] - 2022-07-08
|
||||
|
||||
### Changed
|
||||
|
||||
- The project-specific configuration is not part of the `ROOT` file, the formerly
|
||||
used `isadof.cfg` is obsolete and no longer supported.
|
||||
- Removed explicit use of `document/build` script. Requires removing the `build` script
|
||||
entry from ROOT files.
|
||||
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
||||
`isabelle components` command. The installation script is now only a convenient way
|
||||
of installing the required AFP entries.
|
||||
- `mkroot_DOF` has been renamed to `dof_mkroot` (and reimplemented in Scala).
|
||||
|
||||
## [1.2.0] - 2022-03-26
|
||||
|
||||
## [1.1.0] - 2021-03-20
|
||||
## 1.1.0 - 2021-03-20
|
||||
|
||||
### Added
|
||||
|
||||
|
@ -45,7 +38,4 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
|
||||
- First public release
|
||||
|
||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.3.0/Isabelle2021...HEAD
|
||||
[1.3.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...v1.3.0/Isabelle2021-1
|
||||
[1.2.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.1.0/Isabelle2021...v1.2.0/Isabelle2021
|
||||
[1.1.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.0.0/Isabelle2019...v1.1.0/Isabelle2021
|
||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...HEAD
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
session "PikeOS_study" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = false]
|
||||
theories
|
||||
"PikeOS_ST"
|
|
@ -1 +0,0 @@
|
|||
PikeOS_study
|
|
@ -1,3 +0,0 @@
|
|||
\input{mini_odo}
|
||||
\input{CENELEC_50128}
|
||||
|
|
@ -1,675 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory
|
||||
mini_odo
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
"Isabelle_DOF.technical_report"
|
||||
"Physical_Quantities.SI" "Physical_Quantities.SI_Pretty"
|
||||
begin
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology technical_report and "Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
declare[[strict_monitor_checking=true]]
|
||||
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
|
||||
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
|
||||
(*>*)
|
||||
|
||||
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
|
||||
subtitle*[subtitle::subtitle]\<open>Case Study: An Odometer-Subsystem\<close>
|
||||
|
||||
chapter*[casestudy::technical]\<open>An Odometer-Subsystem\<close>
|
||||
text\<open>
|
||||
In our case study, we will follow the phases of analysis, design, and implementation of the
|
||||
odometry function of a train. This \<^cenelec_term>\<open>SF\<close> processes data from an odometer to compute
|
||||
the position, speed, and acceleration of a train. This system provides the basis for many
|
||||
safety critical decisions, \<^eg>, the opening of the doors. Due to its relatively small size, it
|
||||
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
|
||||
physical model of the environment, the physical and architectural model of the odometer,
|
||||
but also the \<^cenelec_term>\<open>SFRS\<close> aspects including the problem of numerical sampling and the
|
||||
boundaries of efficient computations. The interplay between environment and measuring-device as
|
||||
well as the implementation problems on a platform with limited resources makes the odometer a
|
||||
fairly typical \<^cenelec_term>\<open>safety\<close> critical \<^cenelec_term>\<open>component\<close> of an embedded system.
|
||||
|
||||
The case-study is presented in form of an \<^emph>\<open>integrated source\<close> in \<^isadof> containing all four
|
||||
reports from the phases:
|
||||
\<^item> \<^term>\<open>software_requirements\<close> with deliverable \<^doc_class>\<open>SWRS\<close>
|
||||
(or long:\<^typ>\<open>software_requirements_specification\<close>(-report))
|
||||
\<^item> \<^term>\<open>software_architecture_and_design\<close> with deliverable \<^doc_class>\<open>SWDS\<close>
|
||||
(or long: \<^typ>\<open>software_design_specification\<close>(-report))
|
||||
\<^item> \<^term>\<open>software_component_design\<close> with deliverable \<^doc_class>\<open>SWCDVR\<close>
|
||||
(or long: \<^typ>\<open>software_component_design_verification\<close>(-report).)
|
||||
\<^item> \<^term>\<open>component_implementation_and_testing\<close> with deliverable \<^doc_class>\<open>SWADVR\<close>
|
||||
(or long: \<^typ>\<open>software_architecture_and_design_verification\<close>(-report))
|
||||
|
||||
The objective of this case study is to demonstrate deep-semantical ontologoies in
|
||||
software developments targeting certifications, and in particular, how \<^isadof>'s
|
||||
integrated source concept permits to assure \<^cenelec_term>\<open>traceability\<close>.
|
||||
|
||||
\<^bold>\<open>NOTE\<close> that this case study has aspects that were actually covered by CENELEC 50126 -
|
||||
the 'systems'-counterpart covering hardware aspects. Recall that the CENELEC 50128 covers
|
||||
software.
|
||||
|
||||
Due to space reasons, we will focus on the analysis part of the integrated
|
||||
document; the design and code parts will only be outlined in a final resume. The
|
||||
\<^emph>\<open>ontological embedding\<close>, which represents a main contribution of this paper, will be presented
|
||||
in the next two sections.
|
||||
|
||||
We start with the capture of a number of informal documents available at the beginning of the
|
||||
development.
|
||||
\<close>
|
||||
|
||||
section\<open>A CENELEC-conform development as an \<^emph>\<open>Integrated Source\<close>\<close>
|
||||
|
||||
text\<open>Accurate information of a train's location along a track is in an important prerequisite
|
||||
to safe railway operation. Position, speed and acceleration measurement usually lies on a
|
||||
set of independent measurements based on different physical principles---as a way to enhance
|
||||
precision and availability. One of them is an \<^emph>\<open>odometer\<close>, which allows estimating a relative
|
||||
location while the train runs positions established by other measurements. \<close>
|
||||
|
||||
subsection\<open>Capturing ``Basic Principles of Motion and Motion Measurement.''\<close>
|
||||
text\<open>
|
||||
A rotary encoder measures the motion of a train. To achieve this, the encoder's shaft is fixed to
|
||||
the trains wheels axle. When the train moves, the encoder produces a signal pattern directly
|
||||
related to the trains progress. By measuring the fractional rotation of the encoders shaft and
|
||||
considering the wheels effective ratio, relative movement of the train can be calculated.
|
||||
|
||||
\begin{wrapfigure}[8]{l}{4.6cm}
|
||||
\centering
|
||||
\vspace{-.5cm}
|
||||
\includegraphics[width=3.4cm]{figures/wheel-df}
|
||||
\caption{Motion sensing via an odometer.}
|
||||
\label{wheel-df}
|
||||
\end{wrapfigure}
|
||||
\autoref{wheel-df} shows that we model a train, seen from a pure kinematics standpoint, as physical
|
||||
system characterized by a one-dimensional continuous distance function, which represents the
|
||||
observable of the physical system. Concepts like speed and acceleration were derived concepts
|
||||
defined as their (gradient) derivatives. We assume the use of the meter, kilogram, and second
|
||||
(MKS) system.
|
||||
|
||||
This model is already based on several fundamental assumptions relevant for the correct
|
||||
functioning of the system and for its integration into the system as a whole. In
|
||||
particular, we need to make the following assumptions explicit: \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
|
||||
text*["perfect_wheel"::assumption]
|
||||
\<open>\<^item> the wheel is perfectly circular with a given, constant radius. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["no_slip"::assumption]
|
||||
\<open>\<^item> the slip between the trains wheel and the track negligible. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["constant_teeth_dist"::assumption]
|
||||
\<open>\<^item> the distance between all teeth of a wheel is the same and constant, and \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["constant_sampling_rate"::assumption]
|
||||
\<open>\<^item> the sampling rate of positions is a given constant.\<close>
|
||||
|
||||
text\<open>
|
||||
These assumptions have to be traced throughout the certification process as
|
||||
|
||||
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
|
||||
also reflected by their tracing throughout the body of certification documents. This may result
|
||||
in operational regulations, \<^eg>, regular checks for tolerable wheel defects. As for the
|
||||
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
|
||||
slip can be neglected: the device can only produce reliable results under certain physical
|
||||
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
|
||||
architectural arrangements for situations where this assumption cannot be assured (as is the
|
||||
case, for example, of an emergency breaking) together with error-detection and error-recovery.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing ``System Architecture.''\<close>
|
||||
|
||||
figure*["three_phase"::figure,relative_width="70",file_src="''figures/three-phase-odo.pdf''"]
|
||||
\<open>An odometer with three sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.\<close>
|
||||
|
||||
text\<open>
|
||||
The requirements analysis also contains a document \<^doc_class>\<open>SYSAD\<close>
|
||||
(\<^typ>\<open>system_architecture_description\<close>) that contains technical drawing of the odometer,
|
||||
a timing diagram (see \<^figure>\<open>three_phase\<close>), and tables describing the encoding of the position
|
||||
for the possible signal transitions of the sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing ``System Interfaces.''\<close>
|
||||
text\<open>
|
||||
The requirements analysis also contains a sub-document \<^doc_class>\<open>FnI\<close> (\<^typ>\<open>functions_and_interfaces\<close>)
|
||||
describing the technical format of the output of the odometry function.
|
||||
This section, \<^eg>, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
|
||||
``Estimation of the speed (in mm/sec) evaluated over the latest \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> samples''
|
||||
where the speed refers to the physical speed of the train and \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> a parameter of the
|
||||
sub-system configuration. \<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["df_numerics_encshaft"::figure]
|
||||
(*>*)
|
||||
subsection\<open>Capturing ``Required Performances.''\<close>
|
||||
text\<open>
|
||||
The given analysis document is relatively implicit on the expected precision of the measurements;
|
||||
however, certain interface parameters like \<open>Odometric_Position_TimeStamp\<close>
|
||||
(a counter on the number of samplings) and \<open>Relative_Position\<close> are defined by as
|
||||
unsigned 32 bit integer. These definitions imply that exported constraints concerning the acceptable
|
||||
time of service as well the maximum distance before a necessary reboot of the subsystem.
|
||||
For our case-study, we assume maximum deviation of the \<open>Relative_Position\<close> to the
|
||||
theoretical distance.
|
||||
|
||||
The requirement analysis document describes the physical environment, the architecture
|
||||
of the measuring device, and the required format and precision of the measurements of the odometry
|
||||
function as represented (see @{figure (unchecked) "df_numerics_encshaft"}).\<close>
|
||||
|
||||
figure*["df_numerics_encshaft"::figure,relative_width="76",file_src="''figures/df-numerics-encshaft.png''"]
|
||||
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
|
||||
|
||||
|
||||
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
|
||||
text\<open>
|
||||
The design provides a function that manages an internal first-in-first-out buffer of
|
||||
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
|
||||
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
|
||||
recovery, another one, fatal, signaling, \<^eg>, a defect of the receiver hardware),
|
||||
calculating the relative position, speed and acceleration.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing the ``Software Implementation'' (Resume).\<close>
|
||||
text\<open>
|
||||
While the design is executable on a Linux system, it turns out that the generated code from an
|
||||
Isabelle model is neither executable on the resource-constrained target platform, an ARM-based
|
||||
Sabre-light card, nor certifiable, since the compilation chain via ML to C implies the
|
||||
inclusion of a run-time system and quite complex libraries.
|
||||
We adopted therefore a similar approach as used in the seL4 project~@{cite "Klein2014"}: we use a
|
||||
hand-written implementation in C and verify it via
|
||||
AutoCorres~@{cite "greenaway.ea:bridging:2012"} against
|
||||
the design model. The hand-written C-source is integrated into Isabelle/HOL technically by
|
||||
registering it in the build-configuration and logically by a trusted C-to-HOL compiler included
|
||||
in AutoCorres.
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
definition wheel_diameter ::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s ::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||
(*>*)
|
||||
|
||||
|
||||
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
|
||||
text\<open>
|
||||
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
|
||||
documents as well as code into an integrated Isabelle document, we entered into the phase of
|
||||
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
|
||||
the definitions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
definition wheel_diameter::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||
\<close>}
|
||||
|
||||
Here, \<open>real\<close> refers to the real numbers as defined in the HOL-Analysis library, which provides
|
||||
concepts such as Cauchy Sequences, limits, differentiability, and a very substantial part of
|
||||
classical Calculus. \<open>SOME\<close> is the Hilbert choice operator from HOL; the definitions of the
|
||||
model parameters admit all possible positive values as uninterpreted constants. Our
|
||||
\<^assumption>\<open>perfect_wheel\<close> is translated into a calculation of the circumference of the
|
||||
wheel, while \<open>\<delta>s\<^sub>r\<^sub>e\<^sub>s\<close>, the resolution of the odometer, can be calculated
|
||||
from these parameters. HOL-Analysis permits to formalize the fundamental physical observables:
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
type_synonym distance_function = "real[s] \<Rightarrow> real[m]"
|
||||
consts Speed::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
consts Accel::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>2]"
|
||||
consts Speed\<^sub>M\<^sub>a\<^sub>x::"real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
|
||||
(* Non-SI-conform common abbreviations *)
|
||||
definition "kmh \<equiv> kilo *\<^sub>Q metre \<^bold>/ hour :: 'a::{field,ring_char_0}[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
definition "kHz \<equiv> kilo *\<^sub>Q hertz :: 'a::{field,ring_char_0}[s\<^sup>-\<^sup>1]"
|
||||
|
||||
(*>*)
|
||||
text\<open>
|
||||
@{theory_text [display]\<open>
|
||||
type_synonym distance_function = "real[s]\<Rightarrow>real[m]"
|
||||
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
|
||||
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Accel f \<equiv> deriv (deriv f)"
|
||||
\<close>}
|
||||
|
||||
which permits to constrain the central observable \<open>distance_function\<close> in a
|
||||
way that it describes the space of ``normal behavior'' where we expect the odometer to produce
|
||||
reliable measurements over a \<open>distance_function df\<close>.
|
||||
|
||||
The essence of the physics of the train is covered by the following definition:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
|
||||
where normally_behaved_distance_function df =
|
||||
( \<forall> t. df(t) \<in> \<real>\<^sub>\<ge>\<^sub>0 \<and> (\<forall> t \<in> \<real> - \<real>\<^sub>\<ge>\<^sub>0. df(t) = 0)
|
||||
\<and> df differentiable on \<real>\<^sub>\<ge>\<^sub>0 \<and> (Speed df) differentiable on \<real>\<^sub>\<ge>\<^sub>0
|
||||
\<and> (Accel df) differentiable on \<real>\<^sub>\<ge>\<^sub>0
|
||||
\<and> (\<forall> t. (Speed df) t \<in> {Speed\<^sub>M\<^sub>i\<^sub>n .. Speed\<^sub>M\<^sub>a\<^sub>x})
|
||||
\<and> (\<forall> t. (Accel df) t \<in> {Accel\<^sub>M\<^sub>i\<^sub>n .. Accel\<^sub>M\<^sub>a\<^sub>x}))
|
||||
\<close>}
|
||||
|
||||
which constrains the distance functions to the bounds described in the informal descriptions and
|
||||
states them as three-fold differentiable functions in certain bounds concerning speed and
|
||||
acceleration. Note that violations, in particular of the constraints on speed and acceleration,
|
||||
\<^emph>\<open>do\<close> occur in practice. In such cases, the global system adapts recovery strategies that are out
|
||||
of the scope of our model. Concepts like \<open>shaft_encoder_state\<close> (a triple with the sensor values
|
||||
\<open>C1\<close>, \<open>C2\<close>, \<open>C3\<close>) were formalized as types, while tables were
|
||||
defined as recursive functions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
fun phase\<^sub>0 :: "nat \<Rightarrow> shaft_encoder_state" where
|
||||
"phase\<^sub>0 (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 x = phase\<^sub>0(x - 6)"
|
||||
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where "Phase(x) = phase\<^sub>0(x-1)"
|
||||
\<close>}
|
||||
|
||||
We now define shaft encoder sequences as translations of distance functions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
|
||||
where "encoding df init\<^sub>p\<^sub>o\<^sub>s \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s\<^sub>r\<^sub>e\<^sub>s\<rfloor> + init\<^sub>p\<^sub>o\<^sub>s)"
|
||||
\<close>}
|
||||
|
||||
where \<open>init\<^sub>p\<^sub>o\<^sub>s\<close> is the initial position of the wheel.
|
||||
\<open>sampling\<close>'s were constructed from encoding sequences over discretized time points:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition sampling::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft_encoder_state"
|
||||
where "sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t \<equiv> \<lambda>n::nat. encoding df init\<^sub>p\<^sub>o\<^sub>s (n * \<delta>t)"
|
||||
\<close>}
|
||||
|
||||
where the sampling interval \<open>\<delta>t\<close> is a parameter of the configuration of a system.
|
||||
|
||||
Finally, we can formally define the required performances. From the interface description
|
||||
and the global model parameters such as wheel diameter, the number of teeth per wheel, the
|
||||
sampling frequency etc., we can infer the maximal time of service as well as the maximum distance
|
||||
the device can measure. As an example configuration, choosing:
|
||||
|
||||
\<^item> \<^term>\<open>(1 *\<^sub>Q metre):: real[m]\<close> for \<^term>\<open>w\<^sub>d\<close> (wheel-diameter),
|
||||
\<^item> \<^term>\<open>100 :: real\<close> for \<^term>\<open>tpw\<close> (teeth per wheel),
|
||||
\<^item> \<^term>\<open>80 *\<^sub>Q kmh :: real[m\<cdot>s\<^sup>-\<^sup>1]\<close> for \<^term>\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close>,
|
||||
\<^item> \<^term>\<open>14.4 *\<^sub>Q kHz :: real[s\<^sup>-\<^sup>1]\<close> for the sampling frequency,
|
||||
|
||||
results in an odometer resolution of \<^term>\<open>2.3 *\<^sub>Q milli *\<^sub>Q metre\<close>, a maximum distance of
|
||||
\<^term>\<open>9878 *\<^sub>Q kilo *\<^sub>Q metre\<close>, and a maximal system up-time of \<^term>\<open>123.4 *\<^sub>Q hour\<close>s.
|
||||
The required precision of an odometer can be defined by a constant describing
|
||||
the maximally allowed difference between \<open>df(n*\<delta>t)\<close> and
|
||||
\<open>sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t n\<close> for all \<open>init\<^sub>p\<^sub>o\<^sub>s \<in>{0..5}\<close>.
|
||||
\<close>
|
||||
(*<*)
|
||||
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;
|
||||
val dist_max = 0.0023 * (real two_thirty2) / 1000.0;
|
||||
val dist_h = dist_max / 80.0\<close>
|
||||
(*>*)
|
||||
|
||||
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
|
||||
text\<open>The original documents contained already various statements that motivate certain safety
|
||||
properties of the device. For example, the \<open>Phase\<close>-table excludes situations in which
|
||||
all sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close> are ``off'' or situations in
|
||||
which all sensors are ``on,'' reflecting a physical or electrical error in the odometer. It can be
|
||||
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed
|
||||
from the above definitions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
|
||||
proof (cases x)
|
||||
case 0 then show ?thesis by (simp add: Phase_def)
|
||||
next
|
||||
case (Suc n) then show ?thesis
|
||||
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
|
||||
qed
|
||||
\<close>}
|
||||
|
||||
for all positions \<open>x\<close>. Similarly, it is proved that the table is indeed cyclic:
|
||||
|
||||
\<open>phase\<^sub>0 x = phase\<^sub>0(x mod 6)\<close>
|
||||
|
||||
and locally injective:
|
||||
|
||||
\<open>\<forall>x<6. \<forall>y<6. phase\<^sub>0 x = phase\<^sub>0 y \<longrightarrow> x = y\<close>
|
||||
|
||||
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
|
||||
that we would like to present in more detail.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
theorem minimal_sampling :
|
||||
assumes * : normally_behaved_distance_function df
|
||||
and ** : \<delta>t * Speed\<^sub>M\<^sub>a\<^sub>x < \<delta>s\<^sub>r\<^sub>e\<^sub>s
|
||||
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
|
||||
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
|
||||
sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>X = (sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t) o f
|
||||
|
||||
\<close>}
|
||||
|
||||
This theorem states for \<open>normally_behaved_distance_function\<close>s that there is
|
||||
a minimal sampling frequency assuring the safety of the measurements; samplings on
|
||||
some \<open>df\<close> gained from this minimal sampling frequency can be ``pumped up''
|
||||
to samplings of these higher sampling frequencies; they do not contain more information.
|
||||
Of particular interest is the second assumption, labelled ``\<open>**\<close>'' which
|
||||
establishes a lower bound from \<open>w\<^sub>0\<close>, \<open>tpw\<close>,
|
||||
\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close> for the sampling frequency. Methodologically, this represents
|
||||
an exported constraint that can not be represented \<^emph>\<open>inside\<close> the design model: it means that the
|
||||
computations have to be fast enough on the computing platform in order to assure that the
|
||||
calculations are valid. It was in particular this exported constraint that forced us to give up
|
||||
the original plan to generate the code from the design model and to execute this directly on the
|
||||
target platform.
|
||||
|
||||
For our example configuration (1m diameter, 100 teeth per wheel, 80km/h max), this theorem justifies
|
||||
that 14.4 kHz is indeed enough to assure valid samplings. Such properties are called
|
||||
``internal consistency of the software requirements specification'' in the CENELEC
|
||||
standard~@{cite "bsi:50128:2014"}, 7.2.4.22, and are usually addressed in a dedicated report.
|
||||
\<close>
|
||||
|
||||
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
|
||||
|
||||
text\<open>
|
||||
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
|
||||
like any other modeling activity, not a simple one-to-one translation of some
|
||||
concepts to some formalism. Rather, implicit and self-understood principles
|
||||
have to be made explicit, abstractions have to be made, and decisions about
|
||||
the kind of desirable user-interaction may have an influence similarly to
|
||||
design decisions influenced by strengths or weaknesses of a programming language.
|
||||
\<close>
|
||||
|
||||
section*[lhf::text_section]
|
||||
\<open>Tracking Concepts and Definitions\<close>
|
||||
|
||||
text\<open>
|
||||
\<^isadof> is designed to annotate text elements with structured meta-information and to reference
|
||||
these text elements throughout the integrated source. A classical application of this capability
|
||||
is the annotation of concept and term definitions---be they informal, semi-formal or formal---and
|
||||
their consistent referencing. In the context of our CENELEC ontology, \<^eg>, we can translate the
|
||||
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
|
||||
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
|
||||
of the concept \<^cenelec_term>\<open>traceability\<close> in paragraphs 3.1.46 (a notion referenced 31 times in
|
||||
the standard), which we translated directly into:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
Definition*[traceability, short_name="''traceability''"]
|
||||
\<open>degree to which relationship can be established between two or more products of a
|
||||
development process, especially those having a predecessor/successor or
|
||||
master/subordinate relationship to one another.\<close>
|
||||
\<close>}
|
||||
|
||||
In the integrated source of the odometry study, we can reference in a text element to this
|
||||
concept as follows:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[...]\<open> ... to assure <@>{cenelec_term traceability} for
|
||||
<@>{requirement bitwiseAND}, we prove ... \<close>
|
||||
\<close>}
|
||||
|
||||
|
||||
\<^isadof> also uses the underlying ontology to generate the navigation markup inside the IDE, \<^ie>
|
||||
the presentation of this document element inside \<^isadof> is immediately hyperlinked against the
|
||||
@{theory_text \<open> Definition* \<close>}-element shown above; this serves as documentation of
|
||||
the standard for the development team working on the integrated source. The PDF presentation
|
||||
of such links depends on the actual configurations for the document generation; we will explain
|
||||
this later.
|
||||
CENELEC foresees also a number of roles, phases, safety integration levels, etc., which were
|
||||
directly translated into HOL enumeration types usable in ontological concepts of ODL.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
datatype role =
|
||||
PM (* Program Manager *) | RQM (* Requirements Manager *)
|
||||
| DES (* Designer *) | IMP (* Implementer *) |
|
||||
| VER (* Verifier *) | VAL (* Validator *) | ...
|
||||
datatype phase =
|
||||
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
|
||||
| SR (* Software Requirement *) | SA (* Software Architecture *)
|
||||
| SDES (* Software Design *) | ...
|
||||
\<close>}
|
||||
|
||||
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
|
||||
a classification of \<^emph>\<open>verification and testing techniques\<close>:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
datatype vnt_technique =
|
||||
formal_proof "thm list" | stat_analysis
|
||||
| dyn_analysis dyn_ana_kind | ...
|
||||
\<close>}
|
||||
|
||||
In contrast to the standard, we can parameterize \<open>formal_proof\<close> with a list of
|
||||
theorems, an entity known in the Isabelle kernel. Here, \<^isadof> assures for text elements
|
||||
annotated with theorem names, that they refer indeed to established theorems in the Isabelle
|
||||
environment. Additional checks could be added to make sure that these theorems have a particular
|
||||
form.
|
||||
|
||||
While we claim that this possibility to link to theorems (and test-results) is unique in the
|
||||
world of systems attempting to assure \<^cenelec_term>\<open>traceability\<close>, referencing a particular
|
||||
(proven) theorem is definitively not sufficient to satisfy the claimed requirement. Human
|
||||
evaluators will always have to check that the provided theorem \<open>adequately\<close> represents the claim;
|
||||
we do not in the slightest suggest that their work is superfluous. Our framework allows to
|
||||
statically check that tests or proofs have been provided, at places where the ontology requires
|
||||
them to be, and both assessors and developers can rely on this check and navigate through
|
||||
related information easily. It does not guarantee that intended concepts for, \<^eg>, safety
|
||||
or security have been adequately modeled.
|
||||
\<close>
|
||||
|
||||
section*[moe::text_section]
|
||||
\<open>Major Ontological Entities: Requirements and Evidence\<close>
|
||||
text\<open>
|
||||
We introduce the central concept of a \<^emph>\<open>requirement\<close> as an ODL \<^theory_text>\<open>doc_class\<close>
|
||||
based on the generic basic library \<^doc_class>\<open>text_element\<close> providing basic layout attributes.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class requirement = text_element +
|
||||
long_name :: "string option"
|
||||
is_concerned :: "role set"
|
||||
\<close>}
|
||||
|
||||
the groups of stakeholders in the CENELEC process. Therefore, the \<open>is_concerned\<close>-attribute
|
||||
allows expressing who ``owns'' this text-element. \<^isadof> supports a role-based
|
||||
presentation, \<^eg>, different presentation styles of the integrated source may decide to highlight,
|
||||
to omit, to defer into an annex, text entities according to the role-set.
|
||||
|
||||
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
|
||||
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class sub_requirement =
|
||||
decomposes :: "requirement"
|
||||
relates_to :: "requirement set"
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
|
||||
text\<open>As an example of making implicit principles explicit,
|
||||
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.: \<^vs>\<open>-0.15cm\<close>
|
||||
|
||||
\begin{quote}\small
|
||||
The objective of software verification is to examine and arrive at a judgment based on
|
||||
evidence that output items (process, documentation, software or application) of a specific
|
||||
development phase fulfill the requirements and plans with respect to completeness, correctness
|
||||
and consistency.
|
||||
\end{quote} \<^vs>\<open>-0.15cm\<close>
|
||||
|
||||
The terms \<^onto_class>\<open>judgement\<close> based on \<^term>\<open>evidence\<close> are used as a kind of leitmotif throughout
|
||||
the CENELEC standard, but they are neither explained nor even listed in the general glossary.
|
||||
However, the standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that
|
||||
different stakeholders should have in the process. Our version to express this key concept of
|
||||
\<^onto_class>\<open>judgement\<close> , \<^eg>, by the following concept:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class judgement =
|
||||
refers_to :: requirement
|
||||
evidence :: "vnt_technique list"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\<close>}
|
||||
|
||||
As one can see, the role set is per default set to the verification team, the assessors and the
|
||||
validation team.
|
||||
|
||||
There are different views possible here: an alternative would be to define \<^term>\<open>evidence\<close>
|
||||
as ontological concept with \<^typ>\<open>vnt_technique\<close>'s (rather than an attribute of judgement)
|
||||
and consider the basis of a summary containing the relation between requirements and evidence:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class summary =
|
||||
based_on :: "(requirement \<times> evidence) set"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\<close>}
|
||||
|
||||
More experimentation will be needed to find out what kind of ontological modeling is most
|
||||
adequate for developers in the context of \<^isadof>.
|
||||
\<close>
|
||||
|
||||
section*[ontocontrol::text_section]\<open>Ontological Compliance\<close>
|
||||
|
||||
text\<open>From the variety of different possibilities for adding CENELEC annotations to the
|
||||
integrated source, we will, in the following, point out three scenarios.\<close>
|
||||
|
||||
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
|
||||
text\<open>In our case, the \<^term>\<open>SR\<close>-team early on detected a property necessary
|
||||
for error-detection of the device (c.f. @{technical verific}):
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[encoder_props::requirement]\<open> The requirement specification team identifies the property:
|
||||
C1 & C2 & C3 = 0 (bitwise logical AND operation)
|
||||
C1 | C2 | C3 = 1 (bitwise logical OR operation) \<close>
|
||||
\<close>}
|
||||
|
||||
After the Isabelle proofs shown in @{technical verific}, we can either register the theorems
|
||||
directly in an evidence statement:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[J1::judgement, refers_to="@{docitem <open>encoder_props<close>}",
|
||||
evidence="[formal_proof[@{thm <open>Encoder_Property_1<close>},
|
||||
@{thm <open>Encoder_Property_2<close>}]]"]
|
||||
\<open>The required encoder properties are in fact verified to be consistent
|
||||
with the formalization of @{term "phase\<^sub>0"}.\<close>
|
||||
\<close>}
|
||||
|
||||
The references \<open>@{...}\<close>, called antiquotation, allow us not only to reference to
|
||||
formal concepts, they are checked for consistency and there are also antiquotations that
|
||||
print the formally checked content (\<^eg>, the statement of a theorem).
|
||||
\<close>
|
||||
|
||||
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
|
||||
|
||||
text\<open>By definition, the main purpose of the requirement specification is the identification of
|
||||
the safety requirements. As an example, we state the required precision of an odometric function:
|
||||
for any normally behaved distance function \<open>df\<close>, and any representable and valid
|
||||
sampling sequence that can be constructed for \<open>df\<close>, we require that the difference
|
||||
between the physical distance and distance calculable from the @{term Odometric_Position_Count}
|
||||
is bound by the minimal resolution of the odometer.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[R5::safety_requirement]\<open>We can now state ... \<close>
|
||||
definition Odometric_Position_Count_precise :: "(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool"
|
||||
where "Odometric_Position_Count_precise odofunction \<equiv>
|
||||
(\<forall> df. \<forall>S. normally_behaved_distance_function df
|
||||
\<longrightarrow> representable S
|
||||
\<longrightarrow> valid_sampling S df
|
||||
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
|
||||
in \<bar>df((length S - 1)*\<delta>t\<^sub>o\<^sub>d\<^sub>o) - (\<delta>s\<^sub>r\<^sub>e\<^sub>s * pos)\<bar> \<le> \<delta>s\<^sub>r\<^sub>e\<^sub>s))"
|
||||
|
||||
update_instance*[R5::safety_requirement,
|
||||
formal_definition:="[@{thm \<open>Odometric_Position_Count_precise_def\<close>}]"]
|
||||
\<close>}
|
||||
|
||||
By \<^theory_text>\<open>update_instance*\<close>, we book the property \<open>Position_Count_precise_def\<close> as
|
||||
\<^onto_class>\<open>safety_requirement\<close>, a specific sub-class of \<^onto_class>\<open>requirement\<close>s
|
||||
requesting a formal definition in Isabelle.\<close>
|
||||
|
||||
subsection\<open>Exporting Derived Requirements.\<close>
|
||||
|
||||
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
|
||||
for a major theorem necessary for the safety requirements; this was in our development the case for
|
||||
the condition labelled ``\<open>**\<close>'' in @{docitem verific}. The current CENELEC standard clearly separates
|
||||
``requirement specifications'' from ``verification reports,'' which is probably motivated
|
||||
by the overall concern of organizational separation and of document consistency. While this
|
||||
document organization is possible in \<^isadof>, it is in our experience often counter-productive
|
||||
in practice: organizations tend to defend their documents because the impact of changes is more and more
|
||||
difficult to oversee. This effect results in a dramatic development slow-down and an increase of
|
||||
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
|
||||
invariants, but can not communicate them to the verification team because the precise formalization
|
||||
is not known in time. Rather than advocating document separation, we tend to integrate these documents,
|
||||
keep proof as close as possible to definitions, and plead for consequent version control of the
|
||||
integrated source, together with the proposed methods to strengthen the links between the informal
|
||||
and formal parts by anti-quotations and continuous ontological checking. Instead of separation
|
||||
of the documents, we would rather emphasize the \<^emph>\<open>separation of the views\<close> of the different
|
||||
document representations. Such views were systematically generated out of the integrated source in
|
||||
different PDF versions and for each version, document specific consistency guarantees can be
|
||||
automatically enforced.
|
||||
|
||||
In our case study, we define this condition as predicate, declare an explanation of it as
|
||||
\<^onto_class>\<open>SRAC\<close> (CENELEC for: safety-related application condition; ontologically, this is a
|
||||
derived class from \<^onto_class>\<open>requirement\<close>.) and add the definition of the predicate into the
|
||||
document instance as described in the previous section.\<close>
|
||||
|
||||
|
||||
|
||||
chapter\<open>Appendix\<close>
|
||||
text\<open>
|
||||
\<^item> \<open>@{thm refl}\<close> : @{thm refl}
|
||||
\<^item> \<open>@{thm [source] refl}\<close> : @{thm [source] refl}
|
||||
\<^item> \<open>@{thm[mode=Rule] conjI}\<close> : @{thm[mode=Rule] conjI}
|
||||
\<^item> \<open>@{file "mini_odo.thy"}\<close> : @{file "mini_odo.thy"}
|
||||
\<^item> \<open>@{value "3+4::int"}\<close> : @{value "3+4::int"}
|
||||
\<^item> \<open>@{const hd}\<close> : @{const hd}
|
||||
\<^item> \<open>@{theory HOL.List}\<close> : @{theory HOL.List}
|
||||
\<^item> \<open>@{term "3"}\<close> : @{term "3"}
|
||||
\<^item> \<open>@{type bool}\<close> : @{type bool}
|
||||
\<^item> \<open>@{term [show_types] "f x = a + x"}\<close> : @{term [show_types] "f x = a + x"}
|
||||
\<close>
|
||||
|
||||
text\<open>Examples for declaration of typed doc-classes "assumption" (sic!) and "hypothesis" (sic!!),
|
||||
concepts defined in the underlying ontology @{theory "Isabelle_DOF-Ontologies.CENELEC_50128"}. \<close>
|
||||
text*[ass2::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
|
||||
text*[hyp1::hypothesis] \<open> \<open>P \<noteq> NP\<close> \<close>
|
||||
|
||||
text\<open>
|
||||
A real example fragment from a larger project, declaring a text-element as a
|
||||
"safety-related application condition", a concept defined in the
|
||||
@{theory "Isabelle_DOF-Ontologies.CENELEC_50128"} ontology:\<close>
|
||||
|
||||
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass2\<close>} we establish the following: ... \<close>
|
||||
|
||||
text*[ass122::SRAC, long_name="Some ''ass122''"]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[ass123::SRAC]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[ass124::EC, long_name="Some ''ass124''"]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[t10::test_result]
|
||||
\<open> This is a meta-test. This could be an ML-command that governs the external
|
||||
test-execution via, \<^eg>, a makefile or specific calls to a test-environment or test-engine. \<close>
|
||||
|
||||
|
||||
text \<open> Finally some examples of references to doc-items, i.e. text-elements
|
||||
with declared meta-information and status. \<close>
|
||||
|
||||
text \<open> As established by @{test_result \<open>t10\<close>}\<close>
|
||||
text \<open> the @{test_result \<open>t10\<close>}
|
||||
as well as the @{SRAC \<open>ass122\<close>}\<close>
|
||||
text \<open> represent a justification of the safety related applicability
|
||||
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
|
||||
|
||||
text \<open> due to notational conventions for antiquotations, one may even write:
|
||||
|
||||
"represent a justification of the safety related applicability
|
||||
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>."\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,2 +0,0 @@
|
|||
poster
|
||||
presentation
|
|
@ -1,8 +0,0 @@
|
|||
chapter AFP
|
||||
|
||||
session "poster-example" (AFP) = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
theories
|
||||
"poster"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1,2 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
(*<*)
|
||||
theory "poster"
|
||||
imports "Isabelle_DOF.scholarly_paper"
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
begin
|
||||
|
||||
use_template "beamerposter-UNSUPPORTED"
|
||||
use_ontology "scholarly_paper"
|
||||
(*>*)
|
||||
|
||||
title*[tit::title]\<open>Example Presentation\<close>
|
||||
|
||||
author*[safouan,email="\<open>example@example.org\<close>",affiliation="\<open>Example Org\<close>"]\<open>Eliza Example\<close>
|
||||
|
||||
text\<open>
|
||||
\vfill
|
||||
\begin{block}{\large Fontsizes}
|
||||
\centering
|
||||
{\tiny tiny}\par
|
||||
{\scriptsize scriptsize}\par
|
||||
{\footnotesize footnotesize}\par
|
||||
{\normalsize normalsize}\par
|
||||
{\large large}\par
|
||||
{\Large Large}\par
|
||||
{\LARGE LARGE}\par
|
||||
{\veryHuge veryHuge}\par
|
||||
{\VeryHuge VeryHuge}\par
|
||||
{\VERYHuge VERYHuge}\par
|
||||
\end{block}
|
||||
\vfill
|
||||
\<close>
|
||||
|
||||
text\<open>
|
||||
@{block (title = "\<open>Title\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close>") "\<open>Block content\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close>"}
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,9 +0,0 @@
|
|||
chapter AFP
|
||||
|
||||
session "presentation-example" (AFP) = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
theories
|
||||
"presentation"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"figures/A.png"
|
|
@ -1,2 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
||||
|
|
@ -1,69 +0,0 @@
|
|||
(*<*)
|
||||
theory "presentation"
|
||||
imports "Isabelle_DOF.scholarly_paper"
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
begin
|
||||
|
||||
use_template "beamer-UNSUPPORTED"
|
||||
use_ontology "scholarly_paper"
|
||||
(*>*)
|
||||
|
||||
title*[tit::title]\<open>Example Presentation\<close>
|
||||
|
||||
author*[safouan,email="\<open>example@example.org\<close>",affiliation="\<open>Example Org\<close>"]\<open>Eliza Example\<close>
|
||||
|
||||
text\<open>
|
||||
\begin{frame}
|
||||
\frametitle{Example Slide}
|
||||
\centering\huge This is an example!
|
||||
\end{frame}
|
||||
\<close>
|
||||
|
||||
|
||||
frame*[test_frame
|
||||
, frametitle = \<open>\<open>\<open>Example Slide\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close> with items @{thm "HOL.refl"}\<close>\<close>
|
||||
, framesubtitle = "''Subtitle''"]
|
||||
\<open>This is an example!
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame\<close>}\<close>\<close>
|
||||
|
||||
frame*[test_frame2
|
||||
, frametitle = "''Example Slide''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>Test frame env \<^term>\<open>refl\<close>\<close>
|
||||
|
||||
frame*[test_frame3, frametitle = "''A slide with a Figure''"]
|
||||
\<open>A figure
|
||||
@{figure_content (width=45, caption=\<open>\<open>Figure\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close> is not the \<^term>\<open>refl\<close> theorem (@{thm "refl"}).\<close>)
|
||||
"figures/A.png"}\<close>
|
||||
|
||||
frame*[test_frame4
|
||||
, options = "''allowframebreaks''"
|
||||
, frametitle = "''Example Slide with frame break''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame4\<close>}\<close>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,4 +0,0 @@
|
|||
session "Cytology" = "Isabelle_DOF" +
|
||||
options [document = false]
|
||||
theories
|
||||
"Cytology"
|
|
@ -1,9 +0,0 @@
|
|||
template-beamerposter-UNSUPPORTED
|
||||
template-beamer-UNSUPPORTED
|
||||
template-lipics-v2021-UNSUPPORTED
|
||||
template-lncs
|
||||
template-scrartcl
|
||||
template-scrreprt
|
||||
template-scrreprt-modern
|
||||
template-sn-article-UNSUPPORTED
|
||||
template-svjour3-UNSUPPORTED
|
|
@ -1,9 +0,0 @@
|
|||
session "template-beamer-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-beamer-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,72 +0,0 @@
|
|||
(*<*)
|
||||
theory
|
||||
"template-beamer-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "beamer-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
(*>*)
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
(*
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
*)
|
||||
|
||||
text\<open>
|
||||
\begin{frame}
|
||||
\frametitle{Example Slide}
|
||||
\centering\huge This is an example!
|
||||
\end{frame}
|
||||
\<close>
|
||||
|
||||
|
||||
frame*[test_frame
|
||||
, frametitle = \<open>\<open>\<open>Example Slide\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close> with items @{thm "HOL.refl"}\<close>\<close>
|
||||
, framesubtitle = "''Subtitle''"]
|
||||
\<open>This is an example!
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame\<close>}\<close>\<close>
|
||||
|
||||
frame*[test_frame2
|
||||
, frametitle = "''Example Slide''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>Test frame env \<^term>\<open>refl\<close>\<close>
|
||||
|
||||
frame*[test_frame3
|
||||
, options = "''allowframebreaks''"
|
||||
, frametitle = "''Example Slide with frame break''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame3\<close>}\<close>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,9 +0,0 @@
|
|||
session "template-beamerposter-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-beamerposter-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-beamerposter-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "beamerposter-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,10 +0,0 @@
|
|||
session "template-lipics-v2021-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-lipics-v2021-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"lipics-v2021.cls"
|
File diff suppressed because it is too large
Load Diff
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-lipics-v2021-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "lipics-v2021-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,9 +0,0 @@
|
|||
session "template-lncs" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-lncs"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-lncs"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "lncs"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,9 +0,0 @@
|
|||
session "template-scrartcl" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-scrartcl"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-scrartcl"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "scrartcl"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,9 +0,0 @@
|
|||
session "template-scrreprt-modern" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-scrreprt-modern"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-scrreprt-modern"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.technical_report
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "scrreprt-modern"
|
||||
list_ontologies
|
||||
use_ontology "technical_report"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,9 +0,0 @@
|
|||
session "template-scrreprt" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-scrreprt"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-scrreprt"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.technical_report
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "scrreprt"
|
||||
list_ontologies
|
||||
use_ontology "technical_report"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,10 +0,0 @@
|
|||
session "template-sn-article-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-sn-article-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"sn-jnl.cls"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
File diff suppressed because it is too large
Load Diff
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-sn-article-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "sn-article-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,11 +0,0 @@
|
|||
session "template-svjour3-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-svjour3-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"svjour3.cls"
|
||||
"svglov3.clo"
|
|
@ -1 +0,0 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -1,101 +0,0 @@
|
|||
% SVJour3 DOCUMENT CLASS OPTION SVGLOV3 -- for standardised journals
|
||||
%
|
||||
% This is an enhancement for the LaTeX
|
||||
% SVJour3 document class for Springer journals
|
||||
%
|
||||
%%
|
||||
%%
|
||||
%% \CharacterTable
|
||||
%% {Upper-case \A\B\C\D\E\F\G\H\I\J\K\L\M\N\O\P\Q\R\S\T\U\V\W\X\Y\Z
|
||||
%% Lower-case \a\b\c\d\e\f\g\h\i\j\k\l\m\n\o\p\q\r\s\t\u\v\w\x\y\z
|
||||
%% Digits \0\1\2\3\4\5\6\7\8\9
|
||||
%% Exclamation \! Double quote \" Hash (number) \#
|
||||
%% Dollar \$ Percent \% Ampersand \&
|
||||
%% Acute accent \' Left paren \( Right paren \)
|
||||
%% Asterisk \* Plus \+ Comma \,
|
||||
%% Minus \- Point \. Solidus \/
|
||||
%% Colon \: Semicolon \; Less than \<
|
||||
%% Equals \= Greater than \> Question mark \?
|
||||
%% Commercial at \@ Left bracket \[ Backslash \\
|
||||
%% Right bracket \] Circumflex \^ Underscore \_
|
||||
%% Grave accent \` Left brace \{ Vertical bar \|
|
||||
%% Right brace \} Tilde \~}
|
||||
\ProvidesFile{svglov3.clo}
|
||||
[2006/02/03 v3.1
|
||||
style option for standardised journals]
|
||||
\typeout{SVJour Class option: svglov3.clo for standardised journals}
|
||||
\def\validfor{svjour3}
|
||||
\ExecuteOptions{final,10pt,runningheads}
|
||||
% No size changing allowed, hence a "copy" of size10.clo is included
|
||||
\renewcommand\normalsize{%
|
||||
\if@twocolumn
|
||||
\@setfontsize\normalsize\@xpt{12.5pt}%
|
||||
\else
|
||||
\if@smallext
|
||||
\@setfontsize\normalsize\@xpt\@xiipt
|
||||
\else
|
||||
\@setfontsize\normalsize{9.5pt}{11.5pt}%
|
||||
\fi
|
||||
\fi
|
||||
\abovedisplayskip=3 mm plus6pt minus 4pt
|
||||
\belowdisplayskip=3 mm plus6pt minus 4pt
|
||||
\abovedisplayshortskip=0.0 mm plus6pt
|
||||
\belowdisplayshortskip=2 mm plus4pt minus 4pt
|
||||
\let\@listi\@listI}
|
||||
\normalsize
|
||||
\newcommand\small{%
|
||||
\if@twocolumn
|
||||
\@setfontsize\small{8.5pt}\@xpt
|
||||
\else
|
||||
\if@smallext
|
||||
\@setfontsize\small\@viiipt{9.5pt}%
|
||||
\else
|
||||
\@setfontsize\small\@viiipt{9.25pt}%
|
||||
\fi
|
||||
\fi
|
||||
\abovedisplayskip 8.5\p@ \@plus3\p@ \@minus4\p@
|
||||
\abovedisplayshortskip \z@ \@plus2\p@
|
||||
\belowdisplayshortskip 4\p@ \@plus2\p@ \@minus2\p@
|
||||
\def\@listi{\leftmargin\leftmargini
|
||||
\parsep 0\p@ \@plus1\p@ \@minus\p@
|
||||
\topsep 4\p@ \@plus2\p@ \@minus4\p@
|
||||
\itemsep0\p@}%
|
||||
\belowdisplayskip \abovedisplayskip
|
||||
}
|
||||
\let\footnotesize\small
|
||||
\newcommand\scriptsize{\@setfontsize\scriptsize\@viipt\@viiipt}
|
||||
\newcommand\tiny{\@setfontsize\tiny\@vpt\@vipt}
|
||||
\if@twocolumn
|
||||
\newcommand\large{\@setfontsize\large\@xiipt\@xivpt}
|
||||
\newcommand\LARGE{\@setfontsize\LARGE{16pt}{18pt}}
|
||||
\else
|
||||
\newcommand\large{\@setfontsize\large\@xipt\@xiipt}
|
||||
\newcommand\LARGE{\@setfontsize\LARGE{13pt}{15pt}}
|
||||
\fi
|
||||
\newcommand\Large{\@setfontsize\Large\@xivpt{16dd}}
|
||||
\newcommand\huge{\@setfontsize\huge\@xxpt{25}}
|
||||
\newcommand\Huge{\@setfontsize\Huge\@xxvpt{30}}
|
||||
%
|
||||
\def\runheadhook{\rlap{\smash{\lower6.5pt\hbox to\textwidth{\hrulefill}}}}
|
||||
\if@twocolumn
|
||||
\setlength{\textwidth}{17.4cm}
|
||||
\setlength{\textheight}{234mm}
|
||||
\AtEndOfClass{\setlength\columnsep{6mm}}
|
||||
\else
|
||||
\if@smallext
|
||||
\setlength{\textwidth}{11.9cm}
|
||||
\setlength{\textheight}{19.4cm}
|
||||
\else
|
||||
\setlength{\textwidth}{12.2cm}
|
||||
\setlength{\textheight}{19.8cm}
|
||||
\fi
|
||||
\fi
|
||||
%
|
||||
\AtBeginDocument{%
|
||||
\@ifundefined{@journalname}
|
||||
{\typeout{Unknown journal: specify \string\journalname\string{%
|
||||
<name of your journal>\string} in preambel^^J}}{}}
|
||||
%
|
||||
\endinput
|
||||
%%
|
||||
%% End of file `svglov3.clo'.
|
File diff suppressed because it is too large
Load Diff
|
@ -1,21 +0,0 @@
|
|||
theory
|
||||
"template-svjour3-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "svjour3-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -1,57 +0,0 @@
|
|||
%% Copyright (C) University of Exeter
|
||||
%% University of Paris-Saclay
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-CC_terminology}
|
||||
[00/00/0000 Document-Type Support Framework for Isabelle (CC).]
|
||||
|
||||
\RequirePackage{DOF-COL}
|
||||
\usepackage{etex}
|
||||
\ifdef{\reserveinserts}{\reserveinserts{28}}{}
|
||||
|
||||
|
||||
|
||||
|
||||
\newkeycommand*{\mathcc}[label=,type=%
|
||||
, scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTshortUNDERSCOREname ={}%
|
||||
, scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc = %
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel =%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable =%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants =%
|
||||
, scholarlyUNDERSCOREpaperDOTtextUNDERSCOREsectionDOTmainUNDERSCOREauthor =%
|
||||
, scholarlyUNDERSCOREpaperDOTtextUNDERSCOREsectionDOTfixmeUNDERSCORElist =%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel =%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTdefinitionUNDERSCORElist =%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTstatus =%
|
||||
, CCUNDERSCOREterminologyDOTconceptUNDERSCOREdefinitionDOTtag=%
|
||||
, CCUNDERSCOREterminologyDOTconceptUNDERSCOREdefinitionDOTshortUNDERSCOREtag=%
|
||||
]
|
||||
[1]
|
||||
{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTshortUNDERSCOREname}} {} }
|
||||
{%
|
||||
\begin{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}\label{\commandkey{label}}
|
||||
#1
|
||||
\end{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}
|
||||
}{%
|
||||
\begin{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}[\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTshortUNDERSCOREname}]\label{\commandkey{label}}
|
||||
#1
|
||||
\end{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}
|
||||
}
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
|
||||
\expandafter\def\csname isaDofDOTtextDOTscholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent\endcsname{\mathcc}
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
|
@ -1,397 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<<*)
|
||||
theory
|
||||
CENELEC_50128_Documentation
|
||||
imports
|
||||
CENELEC_50128
|
||||
|
||||
begin
|
||||
|
||||
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
|
||||
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
|
||||
define_shortcut* TeXLive \<rightleftharpoons> \<open>\TeXLive\<close>
|
||||
BibTeX \<rightleftharpoons> \<open>\BibTeX{}\<close>
|
||||
LaTeX \<rightleftharpoons> \<open>\LaTeX{}\<close>
|
||||
TeX \<rightleftharpoons> \<open>\TeX{}\<close>
|
||||
pdf \<rightleftharpoons> \<open>PDF\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun boxed_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
DOF_lib.gen_text_antiquotation name DOF_lib.report_text
|
||||
(fn ctxt => DOF_lib.string_2_text_antiquotation ctxt
|
||||
#> DOF_lib.enclose_env false ctxt "isarbox")
|
||||
|
||||
val neant = K(Latex.text("",\<^here>))
|
||||
|
||||
fun boxed_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
DOF_lib.gen_text_antiquotation name DOF_lib.report_theory_text
|
||||
(fn ctxt => DOF_lib.string_2_theory_text_antiquotation ctxt
|
||||
#> DOF_lib.enclose_env false ctxt "isarbox"
|
||||
(* #> neant *)) (*debugging *)
|
||||
|
||||
fun boxed_sml_text_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "sml")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_pdf_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "out")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_latex_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "ltx")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_bash_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "bash")
|
||||
(* the simplest conversion possible *)
|
||||
\<close>
|
||||
|
||||
setup\<open>(* std_text_antiquotation \<^binding>\<open>my_text\<close> #> *)
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_text\<close> #>
|
||||
(* std_text_antiquotation \<^binding>\<open>my_cartouche\<close> #> *)
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_cartouche\<close> #>
|
||||
(* std_theory_text_antiquotation \<^binding>\<open>my_theory_text\<close>#> *)
|
||||
boxed_theory_text_antiquotation \<^binding>\<open>boxed_theory_text\<close> #>
|
||||
|
||||
boxed_sml_text_antiquotation \<^binding>\<open>boxed_sml\<close> #>
|
||||
boxed_pdf_antiquotation \<^binding>\<open>boxed_pdf\<close> #>
|
||||
boxed_latex_antiquotation \<^binding>\<open>boxed_latex\<close>#>
|
||||
boxed_bash_antiquotation \<^binding>\<open>boxed_bash\<close>
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
(*>>*)
|
||||
|
||||
section*[cenelec_onto::example]\<open>Writing Certification Documents \<^boxed_theory_text>\<open>CENELEC_50128\<close>\<close>
|
||||
subsection\<open>The CENELEC 50128 Example\<close>
|
||||
text\<open>
|
||||
The ontology \<^verbatim>\<open>CENELEC_50128\<close>\index{ontology!CENELEC\_50128} is a small ontology modeling
|
||||
documents for a certification following CENELEC 50128~@{cite "boulanger:cenelec-50128:2015"}.
|
||||
The \<^isadof> distribution contains a small example using the ontology ``CENELEC\_50128'' in
|
||||
the directory \nolinkurl{examples/CENELEC_50128/mini_odo/}. You can inspect/edit the
|
||||
integrated source example by either
|
||||
\<^item> starting Isabelle/jEdit using your graphical user interface (\<^eg>, by clicking on the
|
||||
Isabelle-Icon provided by the Isabelle installation) and loading the file
|
||||
\nolinkurl{examples/CENELEC_50128/mini_odo/mini_odo.thy}.
|
||||
\<^item> starting Isabelle/jEdit from the command line by calling:
|
||||
|
||||
@{boxed_bash [display]\<open>ë\prompt{\isadofdirn}ë
|
||||
isabelle jedit examples/CENELEC_50128/mini_odo/mini_odo.thy \<close>}
|
||||
\<close>
|
||||
text\<open>\<^noindent> Finally, you
|
||||
\<^item> can build the \<^pdf>-document by calling:
|
||||
@{boxed_bash [display]\<open>ë\prompt{\isadofdirn}ë isabelle build mini_odo \<close>}
|
||||
\<close>
|
||||
|
||||
subsection\<open>Modeling CENELEC 50128\<close>
|
||||
|
||||
text\<open>
|
||||
Documents to be provided in formal certifications (such as CENELEC
|
||||
50128~@{cite "boulanger:cenelec-50128:2015"} or Common Criteria~@{cite "cc:cc-part3:2006"}) can
|
||||
much profit from the control of ontological consistency: a substantial amount of the work
|
||||
of evaluators in formal certification processes consists in tracing down the links from
|
||||
requirements over assumptions down to elements of evidence, be it in form of semi-formal
|
||||
documentation, models, code, or tests. In a certification process, traceability becomes a major
|
||||
concern; and providing mechanisms to ensure complete traceability already at the development of
|
||||
the integrated source can in our view increase the speed and reduce the risk certification
|
||||
processes. Making the link-structure machine-checkable, be it between requirements, assumptions,
|
||||
their implementation and their discharge by evidence (be it tests, proofs, or authoritative
|
||||
arguments), has the potential in our view to decrease the cost of software developments
|
||||
targeting certifications.
|
||||
|
||||
As in many other cases, formal certification documents come with an own terminology and pragmatics
|
||||
of what has to be demonstrated and where, and how the traceability of requirements through
|
||||
design-models over code to system environment assumptions has to be assured.
|
||||
|
||||
In the sequel, we present a simplified version of an ontological model used in a
|
||||
case-study~@{cite "bezzecchi.ea:making:2018"}. We start with an introduction of the concept of
|
||||
requirement:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class requirement = long_name :: "string option"
|
||||
|
||||
doc_class hypothesis = requirement +
|
||||
hyp_type :: hyp_type <= physical (* default *)
|
||||
|
||||
datatype ass_kind = informal | semiformal | formal
|
||||
|
||||
doc_class assumption = requirement +
|
||||
assumption_kind :: ass_kind <= informal
|
||||
\<close>}
|
||||
|
||||
Such ontologies can be enriched by larger explanations and examples, which may help
|
||||
the team of engineers substantially when developing the central document for a certification,
|
||||
like an explication of what is precisely the difference between an \<^typ>\<open>hypothesis\<close> and an
|
||||
\<^typ>\<open>assumption\<close> in the context of the evaluation standard. Since the PIDE makes for each
|
||||
document class its definition available by a simple mouse-click, this kind on meta-knowledge
|
||||
can be made far more accessible during the document evolution.
|
||||
|
||||
For example, the term of category \<^typ>\<open>assumption\<close> is used for domain-specific assumptions.
|
||||
It has \<^const>\<open>formal\<close>, \<^const>\<open>semiformal\<close> and \<^const>\<open>informal\<close> sub-categories. They have to be
|
||||
tracked and discharged by appropriate validation procedures within a
|
||||
certification process, be it by test or proof. It is different from a \<^typ>\<open>hypothesis\<close>, which is
|
||||
globally assumed and accepted.
|
||||
|
||||
In the sequel, the category \<^typ>\<open>exported_constraint\<close> (or \<^typ>\<open>EC\<close> for short)
|
||||
is used for formal assumptions, that arise during the analysis,
|
||||
design or implementation and have to be tracked till the final
|
||||
evaluation target, and discharged by appropriate validation procedures
|
||||
within the certification process, be it by test or proof. A particular class of interest
|
||||
is the category \<^typ>\<open>safety_related_application_condition\<close> (or \<^typ>\<open>SRAC\<close>
|
||||
for short) which is used for \<^typ>\<open>EC\<close>'s that establish safety properties
|
||||
of the evaluation target. Their traceability throughout the certification
|
||||
is therefore particularly critical. This is naturally modeled as follows:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class EC = assumption +
|
||||
assumption_kind :: ass_kind <= (*default *) formal
|
||||
|
||||
doc_class SRAC = EC +
|
||||
assumption_kind :: ass_kind <= (*default *) formal
|
||||
\<close>}
|
||||
|
||||
We now can, \<^eg>, write
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text*[ass123::SRAC]\<open>
|
||||
The overall sampling frequence of the odometer subsystem is therefore
|
||||
14 khz, which includes sampling, computing and result communication
|
||||
times \ldots
|
||||
\<close>
|
||||
\<close>}
|
||||
|
||||
This will be shown in the \<^pdf> as follows:
|
||||
\<close>
|
||||
text*[ass123::SRAC] \<open> The overall sampling frequency of the odometer
|
||||
subsystem is therefore 14 khz, which includes sampling, computing and
|
||||
result communication times \ldots \<close>
|
||||
|
||||
text\<open>Note that this \<^pdf>-output is the result of a specific setup for \<^typ>\<open>SRAC\<close>s.\<close>
|
||||
|
||||
subsection*[ontopide::technical]\<open>Editing Support for CENELEC 50128\<close>
|
||||
figure*[figfig3::figure,relative_width="95",file_src="''figures/antiquotations-PIDE.png''"]
|
||||
\<open> Standard antiquotations referring to theory elements.\<close>
|
||||
text\<open> The corresponding view in @{docitem \<open>figfig3\<close>} shows core part of a document
|
||||
conforming to the \<^verbatim>\<open>CENELEC_50128\<close> ontology. The first sample shows standard Isabelle antiquotations
|
||||
@{cite "wenzel:isabelle-isar:2020"} into formal entities of a theory. This way, the informal parts
|
||||
of a document get ``formal content'' and become more robust under change.\<close>
|
||||
|
||||
figure*[figfig5::figure, relative_width="95", file_src="''figures/srac-definition.png''"]
|
||||
\<open> Defining a \<^typ>\<open>SRAC\<close> in the integrated source ... \<close>
|
||||
|
||||
figure*[figfig7::figure, relative_width="95", file_src="''figures/srac-as-es-application.png''"]
|
||||
\<open> Using a \<^typ>\<open>SRAC\<close> as \<^typ>\<open>EC\<close> document element. \<close>
|
||||
text\<open> The subsequent sample in @{figure \<open>figfig5\<close>} shows the definition of a
|
||||
\<^emph>\<open>safety-related application condition\<close>, a side-condition of a theorem which
|
||||
has the consequence that a certain calculation must be executed sufficiently fast on an embedded
|
||||
device. This condition can not be established inside the formal theory but has to be
|
||||
checked by system integration tests. Now we reference in @{figure \<open>figfig7\<close>} this
|
||||
safety-related condition; however, this happens in a context where general \<^emph>\<open>exported constraints\<close>
|
||||
are listed. \<^isadof>'s checks and establishes that this is legal in the given ontology.
|
||||
\<close>
|
||||
|
||||
text\<open>
|
||||
\<^item> \<^theory_text>\<open>@{term_ \<open>term\<close> }\<close> parses and type-checks \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>@{term_ \<open>@{cenelec-term \<open>FT\<close>}\<close>}\<close> will parse and check
|
||||
that \<open>FT\<close> is indeed an instance of the class \<^typ>\<open>cenelec_term\<close>,
|
||||
\<close>
|
||||
|
||||
subsection\<open>A Domain-Specific Ontology: \<^verbatim>\<open>CENELEC_50128\<close>\<close>
|
||||
(*<*)
|
||||
ML\<open>val toLaTeX = String.translate (fn c => if c = #"_" then "\\_" else String.implode[c])\<close>
|
||||
ML\<open>writeln (DOF_core.print_doc_class_tree
|
||||
@{context} (fn (n,l) => true (* String.isPrefix "technical_report" l
|
||||
orelse String.isPrefix "Isa_COL" l *))
|
||||
toLaTeX)\<close>
|
||||
(*>*)
|
||||
text\<open> The \<^verbatim>\<open>CENELEC_50128\<close> ontology in \<^theory>\<open>Isabelle_DOF-Ontologies.CENELEC_50128\<close>
|
||||
is an example of a domain-specific ontology.
|
||||
It is based on \<^verbatim>\<open>technical_report\<close> since we assume that this kind of format will be most
|
||||
appropriate for this type of long-and-tedious documents,
|
||||
|
||||
%
|
||||
\begin{center}
|
||||
\begin{minipage}{.9\textwidth}\footnotesize
|
||||
\dirtree{%
|
||||
.0 .
|
||||
.1 CENELEC\_50128.judgement\DTcomment{...}.
|
||||
.1 CENELEC\_50128.test\_item\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_case\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_tool\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_result\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_adm\_role\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_environment\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_requirement\DTcomment{...}.
|
||||
.2 CENELEC\_50128.test\_specification\DTcomment{...}.
|
||||
.1 CENELEC\_50128.objectives\DTcomment{...}.
|
||||
.1 CENELEC\_50128.design\_item\DTcomment{...}.
|
||||
.2 CENELEC\_50128.interface\DTcomment{...}.
|
||||
.1 CENELEC\_50128.sub\_requirement\DTcomment{...}.
|
||||
.1 CENELEC\_50128.test\_documentation\DTcomment{...}.
|
||||
.1 Isa\_COL.text\_element\DTcomment{...}.
|
||||
.2 CENELEC\_50128.requirement\DTcomment{...}.
|
||||
.3 CENELEC\_50128.TC\DTcomment{...}.
|
||||
.3 CENELEC\_50128.FnI\DTcomment{...}.
|
||||
.3 CENELEC\_50128.SIR\DTcomment{...}.
|
||||
.3 CENELEC\_50128.CoAS\DTcomment{...}.
|
||||
.3 CENELEC\_50128.HtbC\DTcomment{...}.
|
||||
.3 CENELEC\_50128.SILA\DTcomment{...}.
|
||||
.3 CENELEC\_50128.assumption\DTcomment{...}.
|
||||
.4 CENELEC\_50128.AC\DTcomment{...}.
|
||||
.5 CENELEC\_50128.EC\DTcomment{...}.
|
||||
.6 CENELEC\_50128.SRAC\DTcomment{...}.
|
||||
.3 CENELEC\_50128.hypothesis\DTcomment{...}.
|
||||
.4 CENELEC\_50128.security\_hyp\DTcomment{...}.
|
||||
.3 CENELEC\_50128.safety\_requirement\DTcomment{...}.
|
||||
.2 CENELEC\_50128.cenelec\_text\DTcomment{...}.
|
||||
.3 CENELEC\_50128.SWAS\DTcomment{...}.
|
||||
.3 [...].
|
||||
.2 scholarly\_paper.text\_section\DTcomment{...}.
|
||||
.3 scholarly\_paper.technical\DTcomment{...}.
|
||||
.4 scholarly\_paper.math\_content\DTcomment{...}.
|
||||
.5 CENELEC\_50128.semi\_formal\_content\DTcomment{...}.
|
||||
.1 ...
|
||||
}
|
||||
\end{minipage}
|
||||
\end{center}
|
||||
\<close>
|
||||
|
||||
(* TODO : Rearrange ontology hierarchies. *)
|
||||
|
||||
subsubsection\<open>Examples\<close>
|
||||
|
||||
text\<open>
|
||||
The category ``exported constraint (EC)'' is, in the file
|
||||
\<^file>\<open>CENELEC_50128.thy\<close> defined as follows:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class requirement = text_element +
|
||||
long_name :: "string option"
|
||||
is_concerned :: "role set"
|
||||
doc_class assumption = requirement +
|
||||
assumption_kind :: ass_kind <= informal
|
||||
doc_class AC = assumption +
|
||||
is_concerned :: "role set" <= "UNIV"
|
||||
doc_class EC = AC +
|
||||
assumption_kind :: ass_kind <= (*default *) formal
|
||||
\<close>}
|
||||
\<close>
|
||||
text\<open>
|
||||
We now define the document representations, in the file
|
||||
\<^file>\<open>DOF-CENELEC_50128.sty\<close>. Let us assume that we want to
|
||||
register the definition of EC's in a dedicated table of contents (\<^boxed_latex>\<open>tos\<close>)
|
||||
and use an earlier defined environment \inlineltx|\begin{EC}...\end{EC}| for their graphical
|
||||
representation. Note that the \inlineltx|\newisadof{}[]{}|-command requires the
|
||||
full-qualified names, \<^eg>, \<^boxed_theory_text>\<open>text.CENELEC_50128.EC\<close> for the document class and
|
||||
\<^boxed_theory_text>\<open>CENELEC_50128.requirement.long_name\<close> for the attribute \<^const>\<open>long_name\<close>,
|
||||
inherited from the document class \<^typ>\<open>requirement\<close>. The representation of \<^typ>\<open>EC\<close>'s
|
||||
can now be defined as follows:
|
||||
% TODO:
|
||||
% Explain the text qualifier of the long_name text.CENELEC_50128.EC
|
||||
|
||||
\begin{ltx}
|
||||
\newisadof{text.CENELEC_50128.EC}%
|
||||
[label=,type=%
|
||||
,Isa_COL.text_element.level=%
|
||||
,Isa_COL.text_element.referentiable=%
|
||||
,Isa_COL.text_element.variants=%
|
||||
,CENELEC_50128.requirement.is_concerned=%
|
||||
,CENELEC_50128.requirement.long_name=%
|
||||
,CENELEC_50128.EC.assumption_kind=][1]{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{CENELEC_50128.requirement.long_name}}{}}{%
|
||||
% If long_name is not defined, we only create an entry in the table tos
|
||||
% using the auto-generated number of the EC
|
||||
\begin{EC}%
|
||||
\addxcontentsline{tos}{chapter}[]{\autoref{\commandkey{label}}}%
|
||||
}{%
|
||||
% If long_name is defined, we use the long_name as title in the
|
||||
% layout of the EC, in the table "tos" and as index entry. .
|
||||
\begin{EC}[\commandkey{CENELEC_50128.requirement.long_name}]%
|
||||
\addxcontentsline{toe}{chapter}[]{\autoref{\commandkey{label}}: %
|
||||
\commandkey{CENELEC_50128.requirement.long_name}}%
|
||||
\DOFindex{EC}{\commandkey{CENELEC_50128.requirement.long_name}}%
|
||||
}%
|
||||
\label{\commandkey{label}}% we use the label attribute as anchor
|
||||
#1% The main text of the EC
|
||||
\end{EC}
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
\end{ltx}
|
||||
\<close>
|
||||
text\<open>
|
||||
For example, the @{docitem "ass123"} is mapped to
|
||||
|
||||
@{boxed_latex [display]
|
||||
\<open>\begin{isamarkuptext*}%
|
||||
[label = {ass122},type = {CENELEC_50128.SRAC},
|
||||
args={label = {ass122}, type = {CENELEC_50128.SRAC},
|
||||
CENELEC_50128.EC.assumption_kind = {formal}}
|
||||
] The overall sampling frequence of the odometer subsystem is therefore
|
||||
14 khz, which includes sampling, computing and result communication
|
||||
times ...
|
||||
\end{isamarkuptext*}\<close>}
|
||||
|
||||
This environment is mapped to a plain \<^LaTeX> command via:
|
||||
@{boxed_latex [display]
|
||||
\<open> \NewEnviron{isamarkuptext*}[1][]{\isaDof[env={text},#1]{\BODY}} \<close>}
|
||||
\<close>
|
||||
text\<open>
|
||||
For the command-based setup, \<^isadof> provides a dispatcher that selects the most specific
|
||||
implementation for a given \<^boxed_theory_text>\<open>doc_class\<close>:
|
||||
|
||||
@{boxed_latex [display]
|
||||
\<open>%% The Isabelle/DOF dispatcher:
|
||||
\newkeycommand+[\|]\isaDof[env={UNKNOWN},label=,type={dummyT},args={}][1]{%
|
||||
\ifcsname isaDof.\commandkey{type}\endcsname%
|
||||
\csname isaDof.\commandkey{type}\endcsname%
|
||||
[label=\commandkey{label},\commandkey{args}]{#1}%
|
||||
\else\relax\fi%
|
||||
\ifcsname isaDof.\commandkey{env}.\commandkey{type}\endcsname%
|
||||
\csname isaDof.\commandkey{env}.\commandkey{type}\endcsname%
|
||||
[label=\commandkey{label},\commandkey{args}]{#1}%
|
||||
\else%
|
||||
\message{Isabelle/DOF: Using default LaTeX representation for concept %
|
||||
"\commandkey{env}.\commandkey{type}".}%
|
||||
\ifcsname isaDof.\commandkey{env}\endcsname%
|
||||
\csname isaDof.\commandkey{env}\endcsname%
|
||||
[label=\commandkey{label}]{#1}%
|
||||
\else%
|
||||
\errmessage{Isabelle/DOF: No LaTeX representation for concept %
|
||||
"\commandkey{env}.\commandkey{type}" defined and no default %
|
||||
definition for "\commandkey{env}" available either.}%
|
||||
\fi%
|
||||
\fi%
|
||||
}\<close>}
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
(*<<*)
|
||||
end
|
||||
(*>>*)
|
|
@ -1,220 +0,0 @@
|
|||
%% Copyright (C) 2019 University of Exeter
|
||||
%% 2018 University of Paris-Saclay
|
||||
%% 2018 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-cenelec_50128}
|
||||
[00/00/0000 Document-Type Support Framework for Isabelle (CENELEC 50128).]
|
||||
|
||||
\RequirePackage{DOF-COL}
|
||||
\usepackage{etex}
|
||||
\ifdef{\reserveinserts}{\reserveinserts{28}}{}
|
||||
\usepackage[many]{tcolorbox}
|
||||
\usepackage{marginnote}
|
||||
|
||||
% Index setup
|
||||
\usepackage{index}
|
||||
\makeindex
|
||||
\AtEndDocument{\printindex}
|
||||
|
||||
\newcommand{\DOFindex}[2]{%
|
||||
\marginnote{\normalfont\textbf{#1}: #2}%
|
||||
\expandafter\index\expandafter{\expanded{#2 (#1)}}%
|
||||
}%
|
||||
|
||||
|
||||
%% SRAC
|
||||
\providecolor{SRAC}{named}{green}
|
||||
\ifcsdef{DeclareNewTOC}{%
|
||||
\DeclareNewTOC[%
|
||||
owner=\jobname,
|
||||
type=SRAC,%
|
||||
types=SRACs,%
|
||||
listname={List of SRACs}%
|
||||
]{tos}
|
||||
\setuptoc{tos}{chapteratlist}
|
||||
\AtEndEnvironment{frontmatter}{\listofSRACs}
|
||||
}{}
|
||||
|
||||
\newtheorem{SRAC}{SRAC}
|
||||
\tcolorboxenvironment{SRAC}{
|
||||
boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback={white!90!SRAC}
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{SRAC}
|
||||
,sharp corners
|
||||
,before skip=10pt
|
||||
,after skip=10pt
|
||||
,breakable
|
||||
}
|
||||
|
||||
\newcommand{\SRACautorefname}{SRAC}
|
||||
\newisadof{textDOTCENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRAC}%
|
||||
[label=,type=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTisUNDERSCOREconcerned=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTformalUNDERSCORErepr=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTassumptionUNDERSCOREkind=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTECDOTassumptionUNDERSCOREkind=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTassumptionDOTassumptionUNDERSCOREkind=%
|
||||
][1]{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}{}}{%
|
||||
\begin{SRAC}%
|
||||
\addxcontentsline{tos}{chapter}[]{\autoref{\commandkey{label}}}%
|
||||
}{%
|
||||
\begin{SRAC}[\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}]%
|
||||
\addxcontentsline{tos}{chapter}[]{\autoref{\commandkey{label}}: \commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}%
|
||||
\DOFindex{SRAC}{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}%
|
||||
}\label{\commandkey{label}}%
|
||||
#1%
|
||||
\end{SRAC}
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
|
||||
% EC
|
||||
\providecolor{EC}{named}{blue}
|
||||
\ifcsdef{DeclareNewTOC}{%
|
||||
\DeclareNewTOC[%
|
||||
owner=\jobname,
|
||||
type=EC,%
|
||||
types=ECs,%
|
||||
listname={List of ECs}%
|
||||
]{toe}
|
||||
\setuptoc{toe}{chapteratlist}
|
||||
\AtEndEnvironment{frontmatter}{\listofECs}
|
||||
}{}
|
||||
|
||||
\newtheorem{EC}{EC}
|
||||
\tcolorboxenvironment{EC}{
|
||||
boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback={white!90!EC}
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{EC}
|
||||
,sharp corners
|
||||
,before skip=10pt
|
||||
,after skip=10pt
|
||||
,breakable
|
||||
}
|
||||
|
||||
\newcommand{\ECautorefname}{EC}
|
||||
\newisadof{textDOTCENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTEC}%
|
||||
[label=,type=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTisUNDERSCOREconcerned=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTformalUNDERSCORErepr=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTassumptionUNDERSCOREkind=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTECDOTassumptionUNDERSCOREkind=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTassumptionDOTassumptionUNDERSCOREkind=%
|
||||
][1]{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}{}}{%
|
||||
\begin{EC}%
|
||||
\addxcontentsline{toe}{chapter}[]{\autoref{\commandkey{label}}}%
|
||||
}{%
|
||||
\begin{EC}[\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}]%
|
||||
\addxcontentsline{toe}{chapter}[]{\autoref{\commandkey{label}}: \commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}%
|
||||
\DOFindex{EC}{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}%
|
||||
}\label{\commandkey{label}}%
|
||||
#1%
|
||||
\end{EC}
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
|
||||
% assumptions
|
||||
\providecolor{assumption}{named}{orange}
|
||||
\newtheorem{assumption}{assumption}
|
||||
\tcolorboxenvironment{assumption}{
|
||||
boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback={white!90!assumption}
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{assumption}
|
||||
,sharp corners
|
||||
,before skip=10pt
|
||||
,after skip=10pt
|
||||
,breakable
|
||||
}
|
||||
|
||||
\newcommand{\assumptionautorefname}{assumption}
|
||||
\newisadof{textDOTCENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTassumption}%
|
||||
[label=,type=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTisUNDERSCOREconcerned=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTformalUNDERSCORErepr=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTassumptionUNDERSCOREkind=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTassumptionDOTassumptionUNDERSCOREkind=%
|
||||
][1]{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}{}}{%
|
||||
\begin{assumption}%
|
||||
}{%
|
||||
\begin{assumption}[\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}]%
|
||||
\DOFindex{assumption}{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}%
|
||||
}\label{\commandkey{label}}%
|
||||
#1%
|
||||
\end{assumption}
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
|
||||
|
||||
% hypotheses
|
||||
\providecolor{hypothesis}{named}{teal}
|
||||
\newtheorem{hypothesis}{hypothesis}
|
||||
\tcolorboxenvironment{hypothesis}{
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback={white!90!hypothesis}
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{hypothesis}
|
||||
,sharp corners
|
||||
,before skip=10pt
|
||||
,after skip=10pt
|
||||
,breakable
|
||||
}
|
||||
|
||||
|
||||
\newcommand{\hypothesisautorefname}{hypothesis}
|
||||
\newisadof{textDOTCENELECUNDERSCOREFIVEZEROONETWOEIGHTDOThypothesis}%
|
||||
[label=,type=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=%
|
||||
,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTisUNDERSCOREconcerned=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOTformalUNDERSCORErepr=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTSRACDOThypothesisUNDERSCOREkind=%
|
||||
,CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOThypothesisDOThypUNDERSCOREtype=%
|
||||
][1]{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}{}}{%
|
||||
\begin{hypothesis}%
|
||||
}{%
|
||||
\begin{hypothesis}[\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}]%
|
||||
\DOFindex{hypothesis}{\commandkey{CENELECUNDERSCOREFIVEZEROONETWOEIGHTDOTrequirementDOTlongUNDERSCOREname}}%
|
||||
}\label{\commandkey{label}}%
|
||||
#1%
|
||||
\end{hypothesis}
|
||||
\end{isamarkuptext}%
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
session "Isabelle_DOF-Ontologies" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
directories
|
||||
"CC_v3_1_R5"
|
||||
"Conceptual"
|
||||
"small_math"
|
||||
"CENELEC_50128"
|
||||
theories
|
||||
"document_setup"
|
||||
"document_templates"
|
||||
"CC_v3_1_R5/CC_v3_1_R5"
|
||||
"CC_v3_1_R5/CC_terminology"
|
||||
"Conceptual/Conceptual"
|
||||
"small_math/small_math"
|
||||
"CENELEC_50128/CENELEC_50128"
|
||||
"CENELEC_50128/CENELEC_50128_Documentation"
|
||||
document_files
|
||||
"root.bib"
|
||||
"lstisadof-manual.sty"
|
||||
"preamble.tex"
|
||||
"figures/antiquotations-PIDE.png"
|
||||
"figures/srac-as-es-application.png"
|
||||
"figures/srac-definition.png"
|
|
@ -1,65 +0,0 @@
|
|||
%% Copyright (c) University of Exeter
|
||||
%% University of Paris-Saclay
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrartcl class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\documentclass[16x9,9pt]{beamer}
|
||||
\PassOptionsToPackage{force}{DOF-scholarly_paper}
|
||||
\title{No Title Given}
|
||||
\usepackage{DOF-core}
|
||||
|
||||
\usepackage{textcomp}
|
||||
\bibliographystyle{abbrvnat}
|
||||
\RequirePackage{subcaption}
|
||||
|
||||
\providecommand{\institute}[1]{}%
|
||||
\providecommand{\inst}[1]{}%
|
||||
\providecommand{\orcidID}[1]{}%
|
||||
\providecommand{\email}[1]{}%
|
||||
|
||||
|
||||
\usepackage[numbers, sort&compress, sectionbib]{natbib}
|
||||
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{3}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\newenvironment{frontmatter}{}{}
|
||||
\raggedbottom
|
||||
\begin{document}
|
||||
\begin{frame}
|
||||
\maketitle
|
||||
\end{frame}
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
|
@ -1,65 +0,0 @@
|
|||
%% Copyright (c) University of Exeter
|
||||
%% University of Paris-Saclay
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrartcl class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\documentclass[]{beamer}
|
||||
\PassOptionsToPackage{force}{DOF-scholarly_paper}
|
||||
\title{No Title Given}
|
||||
\usepackage{beamerposter}
|
||||
\usepackage{DOF-core}
|
||||
|
||||
\usepackage{textcomp}
|
||||
\bibliographystyle{abbrvnat}
|
||||
\RequirePackage{subcaption}
|
||||
|
||||
\providecommand{\institute}[1]{}%
|
||||
\providecommand{\inst}[1]{}%
|
||||
\providecommand{\orcidID}[1]{}%
|
||||
\providecommand{\email}[1]{}%
|
||||
|
||||
|
||||
\usepackage[numbers, sort&compress, sectionbib]{natbib}
|
||||
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{3}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\newenvironment{frontmatter}{}{}
|
||||
\raggedbottom
|
||||
\begin{document}
|
||||
\begin{frame}[fragile]
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{frame}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
|
@ -1,68 +0,0 @@
|
|||
%% Copyright (c) 2019-2022 University of Exeter
|
||||
%% 2018-2022 University of Paris-Saclay
|
||||
%% 2018-2019 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrartcl class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\documentclass[iicol]{sn-jnl}
|
||||
\PassOptionsToPackage{force}{DOF-scholarly_paper}
|
||||
\title{No Title Given}
|
||||
\usepackage{DOF-core}
|
||||
\bibliographystyle{sn-basic}
|
||||
\let\proof\relax
|
||||
\let\endproof\relax
|
||||
\newcommand{\inst}[1]{}%
|
||||
\newcommand{\institute}[1]{}
|
||||
\usepackage{manyfoot}
|
||||
\usepackage{DOF-core}
|
||||
\setcounter{tocdepth}{3}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\usepackage{subcaption}
|
||||
\usepackage[size=footnotesize]{caption}
|
||||
|
||||
\let\DOFauthor\relax
|
||||
\begin{document}
|
||||
\selectlanguage{USenglish}%
|
||||
\renewcommand{\bibname}{References}%
|
||||
\renewcommand{\figurename}{Fig.}
|
||||
\renewcommand{\abstractname}{Abstract.}
|
||||
\renewcommand{\subsubsectionautorefname}{Sect.}
|
||||
\renewcommand{\subsectionautorefname}{Sect.}
|
||||
\renewcommand{\sectionautorefname}{Sect.}
|
||||
\renewcommand{\figureautorefname}{Fig.}
|
||||
\newcommand{\lstnumberautorefname}{Line}
|
||||
|
||||
\maketitle
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
|
@ -1,65 +0,0 @@
|
|||
%% Copyright (c) 2019-2022 University of Exeter
|
||||
%% 2018-2022 University of Paris-Saclay
|
||||
%% 2018-2019 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrartcl class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\RequirePackage{fix-cm}
|
||||
\documentclass[]{svjour3}
|
||||
|
||||
\title{No Title Given}
|
||||
\usepackage{DOF-core}
|
||||
\usepackage{mathptmx}
|
||||
\bibliographystyle{abbrvnat}
|
||||
\newcommand{\inst}[1]{}%
|
||||
|
||||
\usepackage[numbers, sort&compress, sectionbib]{natbib}
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{3}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
%\allowdisplaybreaks[4]
|
||||
|
||||
\begin{document}
|
||||
\selectlanguage{USenglish}%
|
||||
\renewcommand{\bibname}{References}%
|
||||
\renewcommand{\figurename}{Fig.}
|
||||
\renewcommand{\abstractname}{Abstract.}
|
||||
\renewcommand{\subsubsectionautorefname}{Sect.}
|
||||
\renewcommand{\subsectionautorefname}{Sect.}
|
||||
\renewcommand{\sectionautorefname}{Sect.}
|
||||
\renewcommand{\figureautorefname}{Fig.}
|
||||
\newcommand{\lstnumberautorefname}{Line}
|
||||
|
||||
\maketitle
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
Binary file not shown.
Before Width: | Height: | Size: 96 KiB |
Binary file not shown.
Before Width: | Height: | Size: 67 KiB |
Binary file not shown.
Before Width: | Height: | Size: 50 KiB |
|
@ -1,327 +0,0 @@
|
|||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018-2021 The University of Paris-Saclay
|
||||
%% 2019-2021 The University of Exeter
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
\usepackage{listings}
|
||||
\usepackage{listingsutf8}
|
||||
\usepackage{tikz}
|
||||
\usepackage[many]{tcolorbox}
|
||||
\tcbuselibrary{listings}
|
||||
\tcbuselibrary{skins}
|
||||
\usepackage{xstring}
|
||||
|
||||
\definecolor{OliveGreen} {cmyk}{0.64,0,0.95,0.40}
|
||||
\definecolor{BrickRed} {cmyk}{0,0.89,0.94,0.28}
|
||||
\definecolor{Blue} {cmyk}{1,1,0,0}
|
||||
\definecolor{CornflowerBlue}{cmyk}{0.65,0.13,0,0}
|
||||
|
||||
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <antiquotations>
|
||||
%% Hack: re-defining tag types for supporting highlighting of antiquotations
|
||||
\gdef\lst@tagtypes{s}
|
||||
\gdef\lst@TagKey#1#2{%
|
||||
\lst@Delim\lst@tagstyle #2\relax
|
||||
{Tag}\lst@tagtypes #1%
|
||||
{\lst@BeginTag\lst@EndTag}%
|
||||
\@@end\@empty{}}
|
||||
\lst@Key{tag}\relax{\lst@TagKey\@empty{#1}}
|
||||
\lst@Key{tagstyle}{}{\def\lst@tagstyle{#1}}
|
||||
\lst@AddToHook{EmptyStyle}{\let\lst@tagstyle\@empty}
|
||||
\gdef\lst@BeginTag{%
|
||||
\lst@DelimOpen
|
||||
\lst@ifextags\else
|
||||
{\let\lst@ifkeywords\iftrue
|
||||
\lst@ifmarkfirstintag \lst@firstintagtrue \fi}}
|
||||
\lst@AddToHookExe{ExcludeDelims}{\let\lst@ifextags\iffalse}
|
||||
\gdef\lst@EndTag{\lst@DelimClose\lst@ifextags\else}
|
||||
\lst@Key{usekeywordsintag}t[t]{\lstKV@SetIf{#1}\lst@ifusekeysintag}
|
||||
\lst@Key{markfirstintag}f[t]{\lstKV@SetIf{#1}\lst@ifmarkfirstintag}
|
||||
\gdef\lst@firstintagtrue{\global\let\lst@iffirstintag\iftrue}
|
||||
\global\let\lst@iffirstintag\iffalse
|
||||
\lst@AddToHook{PostOutput}{\lst@tagresetfirst}
|
||||
\lst@AddToHook{Output}
|
||||
{\gdef\lst@tagresetfirst{\global\let\lst@iffirstintag\iffalse}}
|
||||
\lst@AddToHook{OutputOther}{\gdef\lst@tagresetfirst{}}
|
||||
\lst@AddToHook{Output}
|
||||
{\ifnum\lst@mode=\lst@tagmode
|
||||
\lst@iffirstintag \let\lst@thestyle\lst@gkeywords@sty \fi
|
||||
\lst@ifusekeysintag\else \let\lst@thestyle\lst@gkeywords@sty\fi
|
||||
\fi}
|
||||
\lst@NewMode\lst@tagmode
|
||||
\gdef\lst@Tag@s#1#2\@empty#3#4#5{%
|
||||
\lst@CArg #1\relax\lst@DefDelimB {}{}%
|
||||
{\ifnum\lst@mode=\lst@tagmode \expandafter\@gobblethree \fi}%
|
||||
#3\lst@tagmode{#5}%
|
||||
\lst@CArg #2\relax\lst@DefDelimE {}{}{}#4\lst@tagmode}%
|
||||
\gdef\lst@BeginCDATA#1\@empty{%
|
||||
\lst@TrackNewLines \lst@PrintToken
|
||||
\lst@EnterMode\lst@GPmode{}\let\lst@ifmode\iffalse
|
||||
\lst@mode\lst@tagmode #1\lst@mode\lst@GPmode\relax\lst@modetrue}
|
||||
%
|
||||
\def\beginlstdelim#1#2#3%
|
||||
{%
|
||||
\def\endlstdelim{\texttt{\textbf{\color{black!60}#2}}\egroup}%
|
||||
\ttfamily\textbf{\color{black!60}#1}\bgroup\rmfamily\color{#3}\aftergroup\endlstdelim%
|
||||
}
|
||||
%% </antiquotations>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <isar>
|
||||
\providecolor{isar}{named}{blue}
|
||||
\renewcommand{\isacommand}[1]{\textcolor{OliveGreen!60}{\ttfamily\bfseries #1}}
|
||||
\newcommand{\inlineisarbox}[1]{#1}
|
||||
\NewTColorBox[]{isarbox}{}{
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!isar
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{isar!60!black}
|
||||
,sharp corners
|
||||
%,before skip balanced=0.5\baselineskip plus 2pt % works only with Tex Live 2020 and later
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=isar!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {Isar};}
|
||||
}
|
||||
%% </isar>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <out>
|
||||
\providecolor{out}{named}{green}
|
||||
\newtcblisting{out}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!out
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{out!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=out!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {Document};}
|
||||
,listing options={
|
||||
breakatwhitespace=true
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\rmfamily
|
||||
,mathescape
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </out>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <sml>
|
||||
\lstloadlanguages{ML}
|
||||
\providecolor{sml}{named}{red}
|
||||
\lstdefinestyle{sml}{
|
||||
,escapechar=ë%
|
||||
,basicstyle=\ttfamily%
|
||||
,commentstyle=\itshape%
|
||||
,keywordstyle=\bfseries\color{CornflowerBlue}%
|
||||
,ndkeywordstyle=\color{green}%
|
||||
,language=ML
|
||||
% ,literate={%
|
||||
% {<@>}{@}1%
|
||||
% }
|
||||
,keywordstyle=[6]{\itshape}%
|
||||
,morekeywords=[6]{args_type}%
|
||||
,tag=**[s]{@\{}{\}}%
|
||||
,tagstyle=\color{CornflowerBlue}%
|
||||
,markfirstintag=true%
|
||||
}%
|
||||
\def\inlinesml{\lstinline[style=sml,breaklines=true,breakatwhitespace=true]}
|
||||
\newtcblisting{sml}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!sml
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{sml!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=sml!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {SML};}
|
||||
,listing options={
|
||||
style=sml
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </sml>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <latex>
|
||||
\lstloadlanguages{TeX}
|
||||
\providecolor{ltx}{named}{yellow}
|
||||
\lstdefinestyle{lltx}{language=[AlLaTeX]TeX,
|
||||
,basicstyle=\ttfamily%
|
||||
,showspaces=false%
|
||||
,escapechar=ë
|
||||
,showlines=false%
|
||||
,morekeywords={newisadof}
|
||||
% ,keywordstyle=\bfseries%
|
||||
% Defining 2-keywords
|
||||
,keywordstyle=[1]{\color{BrickRed!60}\bfseries}%
|
||||
% Defining 3-keywords
|
||||
,keywordstyle=[2]{\color{OliveGreen!60}\bfseries}%
|
||||
% Defining 4-keywords
|
||||
,keywordstyle=[3]{\color{black!60}\bfseries}%
|
||||
% Defining 5-keywords
|
||||
,keywordstyle=[4]{\color{Blue!70}\bfseries}%
|
||||
% Defining 6-keywords
|
||||
,keywordstyle=[5]{\itshape}%
|
||||
%
|
||||
}
|
||||
\lstdefinestyle{ltx}{style=lltx,
|
||||
basicstyle=\ttfamily\small}%
|
||||
\def\inlineltx{\lstinline[style=ltx, breaklines=true,columns=fullflexible]}
|
||||
% see
|
||||
% https://tex.stackexchange.com/questions/247643/problem-with-tcblisting-first-listed-latex-command-is-missing
|
||||
\NewTCBListing{ltx}{ !O{} }{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!ltx
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{ltx!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=ltx!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {\LaTeX};}
|
||||
,listing options={
|
||||
style=lltx,
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </latex>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <bash>
|
||||
\providecolor{bash}{named}{black}
|
||||
\lstloadlanguages{bash}
|
||||
\lstdefinestyle{bash}{%
|
||||
language=bash
|
||||
,escapechar=ë
|
||||
,basicstyle=\ttfamily%
|
||||
,showspaces=false%
|
||||
,showlines=false%
|
||||
,columns=flexible%
|
||||
% ,keywordstyle=\bfseries%
|
||||
% Defining 2-keywords
|
||||
,keywordstyle=[1]{\color{BrickRed!60}\bfseries}%
|
||||
% Defining 3-keywords
|
||||
,keywordstyle=[2]{\color{OliveGreen!60}\bfseries}%
|
||||
% Defining 4-keywords
|
||||
,keywordstyle=[3]{\color{black!60}\bfseries}%
|
||||
% Defining 5-keywords
|
||||
,keywordstyle=[4]{\color{Blue!80}\bfseries}%
|
||||
,alsoletter={*,-,:,~,/}
|
||||
,morekeywords=[4]{}%
|
||||
% Defining 6-keywords
|
||||
,keywordstyle=[5]{\itshape}%
|
||||
%
|
||||
}
|
||||
\def\inlinebash{\lstinline[style=bash, breaklines=true,columns=fullflexible]}
|
||||
\newcommand\@isabsolutepath[3]{%
|
||||
\StrLeft{#1}{1}[\firstchar]%
|
||||
\IfStrEq{\firstchar}{/}{#2}{#3}%
|
||||
}
|
||||
|
||||
\newcommand{\@homeprefix}[1]{%
|
||||
\ifthenelse{\equal{#1}{}}{\textasciitilde}{\textasciitilde/}%
|
||||
}
|
||||
|
||||
\newcommand{\prompt}[1]{%
|
||||
\color{Blue!80}\textbf{\texttt{%
|
||||
achim@logicalhacking:{\@isabsolutepath{#1}{#1}{\@homeprefix{#1}#1}}\$}}%
|
||||
}
|
||||
\newtcblisting{bash}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!bash
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{bash!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=bash!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {Bash};}
|
||||
,listing options={
|
||||
style=bash
|
||||
,columns=flexible%
|
||||
,breaklines=true%
|
||||
,prebreak=\mbox{\space\textbackslash}%
|
||||
,basicstyle=\small\ttfamily%
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </bash>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <config>
|
||||
\providecolor{config}{named}{gray}
|
||||
\newtcblisting{config}[2][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!config
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{config!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=config!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {#2};}
|
||||
,listing options={
|
||||
breakatwhitespace=true
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,mathescape
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </config>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
\usepackage{dirtree}
|
||||
\renewcommand*\DTstylecomment{\ttfamily\itshape}
|
||||
|
||||
\usepackage{lstisadof-manual}
|
|
@ -1,20 +0,0 @@
|
|||
(*<*)
|
||||
theory "document_setup"
|
||||
imports
|
||||
"Isabelle_DOF.technical_report"
|
||||
"Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
"Isabelle_DOF-Ontologies.CC_terminology"
|
||||
begin
|
||||
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology "Isabelle_DOF.technical_report" and "Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
and "Isabelle_DOF-Ontologies.CC_terminology"
|
||||
|
||||
(*>*)
|
||||
|
||||
title*[title::title] \<open>Isabelle/DOF\<close>
|
||||
subtitle*[subtitle::subtitle]\<open>Ontologies\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,30 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
theory
|
||||
"document_templates"
|
||||
imports
|
||||
"Isabelle_DOF.Isa_DOF"
|
||||
begin
|
||||
|
||||
define_template "./document-templates/root-lipics-v2021-UNSUPPORTED.tex"
|
||||
"Unsupported template for LIPICS (v2021). Not for general use."
|
||||
define_template "./document-templates/root-svjour3-UNSUPPORTED.tex"
|
||||
"Unsupported template for SVJOUR. Not for general use."
|
||||
define_template "./document-templates/root-sn-article-UNSUPPORTED.tex"
|
||||
"Unsupported template for Springer Nature's template. Not for general use."
|
||||
define_template "./document-templates/root-beamer-UNSUPPORTED.tex"
|
||||
"Unsupported template for presentations. Not for general use."
|
||||
define_template "./document-templates/root-beamerposter-UNSUPPORTED.tex"
|
||||
"Unsupported template for poster. Not for general use."
|
||||
end
|
|
@ -1,10 +0,0 @@
|
|||
session "Isabelle_DOF-Proofs" (proofs) = "HOL-Proofs" +
|
||||
options [document = false, record_proofs = 2, parallel_limit = 500, document_build = dof]
|
||||
sessions
|
||||
"Isabelle_DOF"
|
||||
Metalogic_ProofChecker
|
||||
theories
|
||||
Isabelle_DOF.ontologies
|
||||
Isabelle_DOF.Isa_DOF
|
||||
Very_Deep_DOF
|
||||
Reification_Test
|
|
@ -1,739 +0,0 @@
|
|||
theory Reification_Test
|
||||
imports "Isabelle_DOF-Proofs.Very_Deep_DOF"
|
||||
|
||||
begin
|
||||
|
||||
ML\<open>
|
||||
val ty1 = Meta_ISA_core.reify_typ @{typ "int"}
|
||||
val ty2 = Meta_ISA_core.reify_typ @{typ "int \<Rightarrow> bool"}
|
||||
val ty3 = Meta_ISA_core.reify_typ @{typ "prop"}
|
||||
val ty4 = Meta_ISA_core.reify_typ @{typ "'a list"}
|
||||
\<close>
|
||||
|
||||
term*\<open>@{typ \<open>int\<close>}\<close>
|
||||
value*\<open>@{typ \<open>int\<close>}\<close>
|
||||
value*\<open>@{typ \<open>int \<Rightarrow> bool\<close>}\<close>
|
||||
term*\<open>@{typ \<open>prop\<close>}\<close>
|
||||
value*\<open>@{typ \<open>prop\<close>}\<close>
|
||||
term*\<open>@{typ \<open>'a list\<close>}\<close>
|
||||
value*\<open>@{typ \<open>'a list\<close>}\<close>
|
||||
|
||||
ML\<open>
|
||||
val t1 = Meta_ISA_core.reify_term @{term "1::int"}
|
||||
val t2 = Meta_ISA_core.reify_term @{term "\<lambda>x. x = 1"}
|
||||
val t3 = Meta_ISA_core.reify_term @{term "[2, 3::int]"}
|
||||
\<close>
|
||||
term*\<open>@{term \<open>1::int\<close>}\<close>
|
||||
value*\<open>@{term \<open>1::int\<close>}\<close>
|
||||
term*\<open>@{term \<open>\<lambda>x. x = 1\<close>}\<close>
|
||||
value*\<open>@{term \<open>\<lambda>x. x = 1\<close>}\<close>
|
||||
term*\<open>@{term \<open>[2, 3::int]\<close>}\<close>
|
||||
value*\<open>@{term \<open>[2, 3::int]\<close>}\<close>
|
||||
|
||||
prf refl
|
||||
full_prf refl
|
||||
|
||||
term*\<open>@{thm \<open>HOL.refl\<close>}\<close>
|
||||
value*\<open>proof @{thm \<open>HOL.refl\<close>}\<close>
|
||||
value*\<open>proof @{thm \<open>HOL.refl\<close>}\<close>
|
||||
value*\<open>depth (proof @{thm \<open>HOL.refl\<close>})\<close>
|
||||
value*\<open>size (proof @{thm \<open>HOL.refl\<close>})\<close>
|
||||
value*\<open>fv_Proof (proof @{thm \<open>HOL.refl\<close>})\<close>
|
||||
term*\<open>@{thms-of \<open>HOL.refl\<close>}\<close>
|
||||
value*\<open>@{thms-of \<open>HOL.refl\<close>}\<close>
|
||||
|
||||
ML\<open>
|
||||
val t_schematic = TVar(("'a",0), [])
|
||||
val t = @{term "Tv (Var (STR '''a'', 0)) {}"}
|
||||
val rt_schematic = Meta_ISA_core.reify_typ t_schematic
|
||||
val true = rt_schematic = t
|
||||
\<close>
|
||||
|
||||
lemma test : "A \<and> B \<longrightarrow> B \<and> A"
|
||||
by auto
|
||||
|
||||
lemma test2 : "A \<and> B \<Longrightarrow> B \<and> A"
|
||||
by auto
|
||||
|
||||
lemma test3: "A \<and> B \<longrightarrow> B \<and> A"
|
||||
proof
|
||||
assume "A \<and> B"
|
||||
then obtain B and A ..
|
||||
then show "B \<and> A" ..
|
||||
qed
|
||||
|
||||
lemma test4:
|
||||
assumes "(A \<and> B)"
|
||||
shows "B \<and> A"
|
||||
apply (insert assms)
|
||||
by auto
|
||||
|
||||
lemma test_subst : "\<lbrakk>x = f x; odd(f x)\<rbrakk> \<Longrightarrow> odd x"
|
||||
by (erule ssubst)
|
||||
|
||||
inductive_set even' :: "nat set" where
|
||||
"0 \<in> even'"
|
||||
| "n \<in> even' \<Longrightarrow> (Suc (Suc n)) \<in> even'"
|
||||
|
||||
find_theorems name:"even'.induct"
|
||||
|
||||
|
||||
(*lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
|
||||
proof(induct n)
|
||||
case 0 then show ?case by simp
|
||||
next
|
||||
case (Suc n) then show ?case
|
||||
apply (simp add: dvd_def)
|
||||
apply (rule_tac x ="Suc k" in exI)
|
||||
apply clarify*)
|
||||
|
||||
theorem "((A \<longrightarrow> B) \<longrightarrow> A) \<longrightarrow> A"
|
||||
proof
|
||||
assume "(A \<longrightarrow> B) \<longrightarrow> A"
|
||||
show A
|
||||
proof (rule classical)
|
||||
assume "\<not> A"
|
||||
have "A \<longrightarrow> B"
|
||||
proof
|
||||
assume A
|
||||
with \<open>\<not> A\<close> show B by contradiction
|
||||
qed
|
||||
with \<open>(A \<longrightarrow> B) \<longrightarrow> A\<close> show A ..
|
||||
qed
|
||||
qed
|
||||
|
||||
(*lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
|
||||
using [[simp_trace]]
|
||||
apply (induct n)
|
||||
apply (subst even_zero)
|
||||
apply(rule TrueI)
|
||||
|
||||
apply(simp)*)
|
||||
|
||||
lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
|
||||
apply (erule even'.induct)
|
||||
apply (simp_all add: dvd_def)
|
||||
using [[simp_trace]]
|
||||
apply clarify
|
||||
find_theorems name:"_ = 2 * _"
|
||||
apply (rule_tac x ="Suc k" in exI)
|
||||
using [[simp_trace]]
|
||||
apply simp
|
||||
done
|
||||
|
||||
(*
|
||||
lemma even_dvd : "n \<in> even' \<Longrightarrow> 2 dvd n"
|
||||
apply (induct_tac rule:even'.induct)*)
|
||||
|
||||
inductive ev :: " nat \<Rightarrow> bool " where
|
||||
ev0: " ev 0 " |
|
||||
evSS: " ev n \<Longrightarrow> ev (n + 2) "
|
||||
|
||||
fun evn :: " nat \<Rightarrow> bool " where
|
||||
" evn 0 = True " |
|
||||
" evn (Suc 0) = False " |
|
||||
" evn (Suc (Suc n)) = evn n "
|
||||
|
||||
(*lemma assumes a: " ev (Suc(Suc m)) " shows" ev m "
|
||||
proof(induction "Suc (Suc m)" arbitrary: " m " rule: ev.induct)*)
|
||||
|
||||
(*lemma " ev (Suc (Suc m)) \<Longrightarrow> ev m "
|
||||
proof(induction " Suc (Suc m) " arbitrary: " m " rule: ev.induct)
|
||||
case ev0
|
||||
then show ?case sorry
|
||||
next
|
||||
case (evSS n)
|
||||
then show ?case sorry
|
||||
qed*)
|
||||
|
||||
(* And neither of these can apply the induction *)
|
||||
(*
|
||||
lemma assumes a1: " ev n " and a2: " n = (Suc (Suc m)) " shows " ev m "
|
||||
proof (induction " n " arbitrary: " m " rule: ev.induct)
|
||||
|
||||
lemma assumes a1: " n = (Suc (Suc m)) " and a2: "ev n " shows " ev m "
|
||||
proof (induction " n " arbitrary: " m " rule: ev.induct)
|
||||
*)
|
||||
|
||||
(* But this one can ?! *)
|
||||
(*
|
||||
lemma assumes a1: " ev n " and a2: " n = (Suc (Suc m)) " shows " ev m "
|
||||
proof -
|
||||
from a1 and a2 show " ev m "
|
||||
proof (induction " n " arbitrary: " m " rule: ev.induct)
|
||||
case ev0
|
||||
then show ?case by simp
|
||||
next
|
||||
case (evSS n) thus ?case by simp
|
||||
qed
|
||||
qed
|
||||
*)
|
||||
|
||||
inductive_set even :: "int set" where
|
||||
zero[intro!]: "0 \<in> even" |
|
||||
plus[intro!]: "n \<in> even \<Longrightarrow> n+2 \<in> even " |
|
||||
min[intro!]: "n \<in> even \<Longrightarrow> n-2 \<in> even "
|
||||
|
||||
lemma a : "2+2=4" by simp
|
||||
|
||||
lemma b : "(0::int)+2=2" by simp
|
||||
|
||||
lemma test_subst_2 : "4 \<in> even"
|
||||
apply (subst a[symmetric])
|
||||
apply (rule plus)
|
||||
apply (subst b[symmetric])
|
||||
apply (rule plus)
|
||||
apply (rule zero)
|
||||
done
|
||||
|
||||
|
||||
(*lemma "\<lbrakk>P x y z; Suc x < y\<rbrakk> \<Longrightarrow> f z = x * y"
|
||||
(*using [[simp_trace]]*)
|
||||
(*apply (simp add: mult.commute)*)
|
||||
apply (subst mult.commute)
|
||||
apply (rule mult.commute [THEN ssubst])*)
|
||||
|
||||
datatype 'a seq = Empty | Seq 'a "'a seq"
|
||||
find_consts name:"Reification_Test*seq*"
|
||||
fun conc :: "'a seq \<Rightarrow> 'a seq \<Rightarrow> 'a seq"
|
||||
where
|
||||
c1 : "conc Empty ys = ys"
|
||||
| c2 : "conc (Seq x xs) ys = Seq x (conc xs ys)"
|
||||
|
||||
lemma seq_not_eq : "Seq x xs \<noteq> xs"
|
||||
using [[simp_trace]]
|
||||
proof (induct xs arbitrary: x)
|
||||
case Empty
|
||||
show "Seq x Empty \<noteq> Empty" by simp
|
||||
next
|
||||
case (Seq y ys)
|
||||
show "Seq x (Seq y ys) \<noteq> Seq y ys"
|
||||
using \<open>Seq y ys \<noteq> ys\<close> by simp
|
||||
qed
|
||||
|
||||
lemma identity_conc : "conc xs Empty = xs"
|
||||
using [[simp_trace]]
|
||||
using[[simp_trace_depth_limit=8]]
|
||||
using [[unify_trace_simp]]
|
||||
using[[unify_trace_types]]
|
||||
using [[unify_trace_bound=0]]
|
||||
(* using [[simp_trace_new depth=10]] *)
|
||||
apply (induct xs)
|
||||
apply (subst c1)
|
||||
apply (rule refl)
|
||||
apply (subst c2)
|
||||
apply (rule_tac s="xs" and P="\<lambda>X. Seq x1 X = Seq x1 xs" in subst)
|
||||
apply (rule sym)
|
||||
apply assumption
|
||||
apply (rule refl)
|
||||
done
|
||||
|
||||
lemma imp_ex : "(\<exists>x. \<forall>y. P x y) \<longrightarrow> (\<forall>y. \<exists>x. P x y)"
|
||||
using [[simp_trace]]
|
||||
using[[simp_trace_depth_limit=8]]
|
||||
apply (auto)
|
||||
done
|
||||
|
||||
lemma length_0_conv' [iff]: "(length [] = 0)"
|
||||
apply (subst List.list.size(3))
|
||||
apply (rule refl)
|
||||
done
|
||||
|
||||
lemma cons_list : "a#xs = [a]@xs"
|
||||
using [[simp_trace]]
|
||||
apply (subst List.append.append_Cons)
|
||||
apply (subst List.append.append_Nil)
|
||||
apply (rule refl)
|
||||
done
|
||||
lemma replacement: "\<lbrakk> a = b; c = d \<rbrakk> \<Longrightarrow> f a c = f b d"
|
||||
apply (erule ssubst)+
|
||||
apply (rule refl )
|
||||
done
|
||||
lemma assoc_append : "k @ (l @ m) = (k @ l ) @ m"
|
||||
apply (induct_tac k )
|
||||
apply (subst append_Nil )+
|
||||
apply (rule refl )
|
||||
apply (subst append_Cons)
|
||||
apply (subst append_Cons)
|
||||
apply (subst append_Cons)
|
||||
apply (rule_tac f ="Cons" in replacement)
|
||||
apply (rule refl)
|
||||
apply assumption
|
||||
done
|
||||
|
||||
lemma length_cons : "length (xs @ ys) = length xs + length ys"
|
||||
using [[simp_trace]]
|
||||
apply (subst List.length_append)
|
||||
apply (rule refl)
|
||||
done
|
||||
lemma length_plus : "(length [a] + length xs = 0) = ([a] @ xs = [])"
|
||||
using [[simp_trace]]
|
||||
apply (subst List.list.size(4))
|
||||
apply (subst List.list.size(3))
|
||||
apply (subst Nat.add_Suc_right)
|
||||
apply (subst Groups.monoid_add_class.add.right_neutral)
|
||||
apply (subst Nat.plus_nat.add_Suc)
|
||||
apply (subst Groups.monoid_add_class.add.left_neutral)
|
||||
apply (subst Nat.old.nat.distinct(2))
|
||||
by simp
|
||||
lemma empty_list : "(length [] = 0) = ([] = []) = True"
|
||||
using [[simp_trace]]
|
||||
by simp
|
||||
lemma TrueI: True
|
||||
using [[simp_trace]]
|
||||
unfolding True_def
|
||||
by (rule refl)
|
||||
|
||||
lemma length_0_conv [iff]: "(length xs = 0) = (xs = [])"
|
||||
using [[simp_trace]]
|
||||
apply (induct xs)
|
||||
apply (subst List.list.size(3))
|
||||
apply(subst HOL.simp_thms(6))
|
||||
apply(subst HOL.simp_thms(6))
|
||||
apply(rule refl)
|
||||
|
||||
apply (subst cons_list)
|
||||
apply (subst(2) cons_list)
|
||||
apply (subst length_cons)
|
||||
apply (subst length_plus)
|
||||
apply (subst HOL.simp_thms(6))
|
||||
apply (rule TrueI)
|
||||
done
|
||||
(*by (induct xs) auto*)
|
||||
|
||||
find_theorems (50) name:"HOL.simp_thms"
|
||||
find_theorems (50) name:"List.list*size"
|
||||
find_theorems (50) name:"List.list*length"
|
||||
find_theorems "_ @ _"
|
||||
find_theorems (500) "List.length [] = 0"
|
||||
find_theorems (550) "length _ = length _ + length _"
|
||||
|
||||
|
||||
lemma identity_list : "xs @ [] = xs"
|
||||
using [[simp_trace]]
|
||||
using[[simp_trace_depth_limit=8]]
|
||||
using [[unify_trace_simp]]
|
||||
using[[unify_trace_types]]
|
||||
using [[unify_trace_bound=0]]
|
||||
apply (induct xs)
|
||||
apply (subst List.append_Nil2)
|
||||
apply (subst HOL.simp_thms(6))
|
||||
apply(rule TrueI)
|
||||
apply (subst List.append_Nil2)
|
||||
apply (subst HOL.simp_thms(6))
|
||||
apply(rule TrueI)
|
||||
done
|
||||
|
||||
lemma identity_list' : "xs @ [] = xs"
|
||||
using [[simp_trace]]
|
||||
using[[simp_trace_depth_limit=8]]
|
||||
using [[unify_trace_simp]]
|
||||
using[[unify_trace_types]]
|
||||
using [[unify_trace_bound=0]]
|
||||
(* using [[simp_trace_new depth=10]] *)
|
||||
apply (induct "length xs")
|
||||
apply (subst (asm) zero_reorient)
|
||||
apply(subst(asm) length_0_conv)
|
||||
apply (subst List.append_Nil2)
|
||||
apply (subst HOL.simp_thms(6))
|
||||
apply (rule TrueI)
|
||||
apply (subst List.append_Nil2)
|
||||
apply (subst HOL.simp_thms(6))
|
||||
apply (rule TrueI)
|
||||
done
|
||||
|
||||
lemma conj_test : "A \<and> B \<and> C \<longrightarrow> B \<and> A"
|
||||
apply (rule impI)
|
||||
apply (rule conjI)
|
||||
apply (drule conjunct2)
|
||||
apply (drule conjunct1)
|
||||
apply assumption
|
||||
apply (drule conjunct1)
|
||||
apply assumption
|
||||
done
|
||||
|
||||
declare[[show_sorts]]
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
ML\<open>
|
||||
val full = true
|
||||
val thm = @{thm "test"}
|
||||
val hyps = Thm.hyps_of thm
|
||||
val prems = Thm.prems_of thm
|
||||
val reconstruct_proof = Thm.reconstruct_proof_of thm
|
||||
val standard_proof = Proof_Syntax.standard_proof_of
|
||||
{full = full, expand_name = Thm.expand_name thm} thm
|
||||
val term_of_proof = Proof_Syntax.term_of_proof standard_proof
|
||||
\<close>
|
||||
|
||||
lemma identity_conc' : "conc xs Empty = xs"
|
||||
using [[simp_trace]]
|
||||
using[[simp_trace_depth_limit=8]]
|
||||
using [[unify_trace_simp]]
|
||||
using[[unify_trace_types]]
|
||||
using [[unify_trace_bound=0]]
|
||||
(* using [[simp_trace_new depth=10]] *)
|
||||
apply (induct xs)
|
||||
apply (subst c1)
|
||||
apply (rule refl)
|
||||
apply (subst c2)
|
||||
apply (rule_tac s="xs" and P="\<lambda>X. Seq x1 X = Seq x1 xs" in subst)
|
||||
apply (rule sym)
|
||||
apply assumption
|
||||
apply (rule refl)
|
||||
done
|
||||
|
||||
declare[[show_sorts = false]]
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm "identity_conc'"};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
term*\<open>@{thm \<open>Reification_Test.identity_conc\<close>}\<close>
|
||||
value*\<open>proof @{thm \<open>Reification_Test.identity_conc\<close>}\<close>
|
||||
|
||||
lemma cons_list' : "a#xs = [a]@xs"
|
||||
using [[simp_trace]]
|
||||
apply (subst List.append.append_Cons)
|
||||
apply (subst List.append.append_Nil)
|
||||
apply (rule refl)
|
||||
done
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm "cons_list'"};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
declare[[show_sorts = false]]
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm "test"};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
prf test
|
||||
full_prf test
|
||||
term*\<open>@{thm \<open>Reification_Test.test\<close>}\<close>
|
||||
value*\<open>proof @{thm \<open>Reification_Test.test\<close>}\<close>
|
||||
term*\<open>@{thms-of \<open>Reification_Test.test\<close>}\<close>
|
||||
value*\<open>@{thms-of \<open>Reification_Test.test\<close>}\<close>
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm test2};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
prf test2
|
||||
full_prf test2
|
||||
term*\<open>@{thm \<open>Reification_Test.test2\<close>}\<close>
|
||||
value*\<open>proof @{thm \<open>Reification_Test.test2\<close>}\<close>
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm test3};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
prf test3
|
||||
full_prf test3
|
||||
term*\<open>@{thm \<open>Reification_Test.test3\<close>}\<close>
|
||||
value*\<open>@{thm \<open>Reification_Test.test3\<close>}\<close>
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm test4};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
prf test4
|
||||
full_prf test4
|
||||
term*\<open>@{thm \<open>Reification_Test.test4\<close>}\<close>
|
||||
value*\<open>@{thm \<open>Reification_Test.test4\<close>}\<close>
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm Pure.symmetric};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
prf symmetric
|
||||
full_prf symmetric
|
||||
term*\<open>@{thm \<open>Pure.symmetric\<close>}\<close>
|
||||
value*\<open>proof @{thm \<open>Pure.symmetric\<close>}\<close>
|
||||
|
||||
ML\<open>
|
||||
val full = true
|
||||
val thm = @{thm "Groups.minus_class.super"}
|
||||
val standard_proof = Proof_Syntax.standard_proof_of
|
||||
{full = full, expand_name = Thm.expand_name thm} thm
|
||||
val term_of_proof = Proof_Syntax.term_of_proof standard_proof
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val thm = Proof_Context.get_thm \<^context> "Groups.minus_class.super"
|
||||
val prop = Thm.prop_of thm
|
||||
val proof = Thm.proof_of thm
|
||||
\<close>
|
||||
|
||||
prf Groups.minus_class.super
|
||||
full_prf Groups.minus_class.super
|
||||
term*\<open>@{thm \<open>Groups.minus_class.super\<close>}\<close>
|
||||
value*\<open>@{thm \<open>Groups.minus_class.super\<close>}\<close>
|
||||
|
||||
(*ML\<open>
|
||||
val full = true
|
||||
val thm = @{thm "Homotopy.starlike_imp_contractible"}
|
||||
val standard_proof = Proof_Syntax.standard_proof_of
|
||||
{full = full, expand_name = Thm.expand_name thm} thm
|
||||
val term_of_proof = Proof_Syntax.term_of_proof standard_proof
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val thm = Proof_Context.get_thm \<^context> "Homotopy.starlike_imp_contractible"
|
||||
val prop = Thm.prop_of thm
|
||||
val proof = Thm.proof_of thm
|
||||
\<close>
|
||||
|
||||
prf Homotopy.starlike_imp_contractible
|
||||
full_prf Homotopy.starlike_imp_contractible
|
||||
term*\<open>@{thm \<open>Homotopy.starlike_imp_contractible\<close>}\<close>
|
||||
value*\<open>@{thm \<open>Homotopy.starlike_imp_contractible\<close>}\<close>*)
|
||||
|
||||
(* stefan bergofer phd thesis example proof construction 2.3.2 *)
|
||||
|
||||
lemma stefan_example : "(\<exists>x. \<forall>y. P x y) \<longrightarrow> (\<forall>y. \<exists>x. P x y)"
|
||||
apply (rule impI)
|
||||
apply(rule allI)
|
||||
apply (erule exE)
|
||||
apply(rule exI)
|
||||
apply(erule allE)
|
||||
apply (assumption)
|
||||
done
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm stefan_example};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
ML\<open>
|
||||
val thy = \<^theory>;
|
||||
val prf =
|
||||
Proof_Syntax.read_proof thy true false
|
||||
"mp \<cdot> _ \<cdot> _ \<bullet> (impI \<cdot> _ \<cdot> _ \<bullet> (conjI \<cdot> _ \<cdot> _ ))";
|
||||
(*"conjI \<cdot> _ \<cdot> _ ";*)
|
||||
(*"(\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H)";*)
|
||||
(*val t = Proofterm.reconstruct_proof thy \<^prop>\<open>(A \<longrightarrow> B) \<Longrightarrow> A \<Longrightarrow> B\<close> prf*)
|
||||
(* val thm =
|
||||
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<Longrightarrow> B\<close> prf
|
||||
|> Proof_Checker.thm_of_proof thy
|
||||
|> Drule.export_without_context
|
||||
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);*)
|
||||
\<close>
|
||||
|
||||
extract_type
|
||||
"typeof (Trueprop P) \<equiv> typeof P"
|
||||
|
||||
realizers
|
||||
impI (P, Q): "\<lambda>pq. pq"
|
||||
"\<^bold>\<lambda>(c: _) (d: _) P Q pq (h: _). allI \<cdot> _ \<bullet> c \<bullet> (\<^bold>\<lambda>x. impI \<cdot> _ \<cdot> _ \<bullet> (h \<cdot> x))"
|
||||
find_consts name:"MinProof"
|
||||
|
||||
ML_val \<open>
|
||||
val thy = \<^theory>;
|
||||
val prf =
|
||||
Proof_Syntax.read_proof thy true false
|
||||
"impI \<cdot> _ \<cdot> _ \<bullet> \
|
||||
\ (\<^bold>\<lambda>H: _. \
|
||||
\ conjE \<cdot> _ \<cdot> _ \<cdot> _ \<bullet> H \<bullet> \
|
||||
\ (\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H))";
|
||||
val thm =
|
||||
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<and> B \<longrightarrow> B \<and> A\<close> prf
|
||||
|> Proof_Checker.thm_of_proof thy
|
||||
|> Drule.export_without_context;
|
||||
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
\<close>
|
||||
ML_file "~~/src/Provers/classical.ML"
|
||||
lemma testtest : "A \<and> B \<longrightarrow> B \<and> A"
|
||||
apply (rule impI)
|
||||
apply (erule conjE)
|
||||
apply(erule conjI)
|
||||
apply assumption
|
||||
done
|
||||
|
||||
ML\<open> (*See: *) \<^file>\<open>~~/src/HOL/Proofs/ex/Proof_Terms.thy\<close>\<close>
|
||||
ML\<open>
|
||||
val thm = @{thm testtest};
|
||||
|
||||
(*proof body with digest*)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
|
||||
(*proof term only*)
|
||||
val prf = Proofterm.proof_of body;
|
||||
|
||||
(*clean output*)
|
||||
Pretty.writeln (Proof_Syntax.pretty_proof \<^context> prf);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> false thm);
|
||||
Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
|
||||
(*all theorems used in the graph of nested proofs*)
|
||||
val all_thms =
|
||||
Proofterm.fold_body_thms
|
||||
(fn {name, ...} => insert (op =) name) [body] [];
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val thy = \<^theory>
|
||||
val prf =
|
||||
Proof_Syntax.read_proof thy true false
|
||||
"impI \<cdot> _ \<cdot> _ \<bullet> \
|
||||
\ (\<^bold>\<lambda>H: _. \
|
||||
\ conjE \<cdot> _ \<cdot> _ \<cdot> _ \<bullet> H \<bullet> \
|
||||
\ (\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H))";
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val thy = \<^theory>
|
||||
val prf =
|
||||
Proof_Syntax.read_proof thy true false
|
||||
"\<^bold>\<lambda>(H: A \<and> B). conjE \<cdot> A \<cdot> B \<cdot> A \<and> B \<bullet> H";
|
||||
(* val thm =
|
||||
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<Longrightarrow> B \<Longrightarrow> B \<and> A\<close> prf
|
||||
|> Proof_Checker.thm_of_proof thy
|
||||
|> Drule.export_without_context;
|
||||
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);*)
|
||||
\<close>
|
||||
ML\<open>
|
||||
val thy = \<^theory>
|
||||
val prf =
|
||||
Proof_Syntax.read_proof thy true false
|
||||
"\<^bold>\<lambda>(H: _) Ha: _. conjI \<cdot> _ \<cdot> _ \<bullet> Ha \<bullet> H";
|
||||
val thm =
|
||||
Proofterm.reconstruct_proof thy \<^prop>\<open>A \<Longrightarrow> B \<Longrightarrow> B \<and> A\<close> prf
|
||||
|> Proof_Checker.thm_of_proof thy
|
||||
|> Drule.export_without_context;
|
||||
val pretty = Pretty.writeln (Proof_Syntax.pretty_standard_proof_of \<^context> true thm);
|
||||
\<close>
|
||||
|
||||
end
|
|
@ -1,19 +0,0 @@
|
|||
theory Very_Deep_DOF
|
||||
imports "Isabelle_DOF-Proofs.Very_Deep_Interpretation"
|
||||
|
||||
begin
|
||||
|
||||
(* tests *)
|
||||
term "@{typ ''int => int''}"
|
||||
term "@{term ''Bound 0''}"
|
||||
term "@{thm ''refl''}"
|
||||
term "@{docitem ''<doc_ref>''}"
|
||||
ML\<open> @{term "@{docitem ''<doc_ref>''}"}\<close>
|
||||
|
||||
term "@{typ \<open>int \<Rightarrow> int\<close>}"
|
||||
term "@{term \<open>\<forall>x. P x \<longrightarrow> Q\<close>}"
|
||||
term "@{thm \<open>refl\<close>}"
|
||||
term "@{docitem \<open>doc_ref\<close>}"
|
||||
ML\<open> @{term "@{docitem \<open>doc_ref\<close>}"}\<close>
|
||||
|
||||
end
|
|
@ -1,237 +0,0 @@
|
|||
theory Very_Deep_Interpretation
|
||||
imports "Isabelle_DOF.Isa_COL"
|
||||
Metalogic_ProofChecker.ProofTerm
|
||||
|
||||
begin
|
||||
|
||||
subsection\<open> Syntax \<close>
|
||||
|
||||
\<comment> \<open>and others in the future : file, http, thy, ...\<close>
|
||||
|
||||
(* Delete shallow interpretation notations (mixfixes) of the term anti-quotations,
|
||||
so we can use them for the deep interpretation *)
|
||||
no_notation "Isabelle_DOF_typ" ("@{typ _}")
|
||||
no_notation "Isabelle_DOF_term" ("@{term _}")
|
||||
no_notation "Isabelle_DOF_thm" ("@{thm _}")
|
||||
no_notation "Isabelle_DOF_file" ("@{file _}")
|
||||
no_notation "Isabelle_DOF_thy" ("@{thy _}")
|
||||
no_notation "Isabelle_DOF_docitem" ("@{docitem _}")
|
||||
no_notation "Isabelle_DOF_docitem_attr" ("@{docitemattr (_) :: (_)}")
|
||||
no_notation "Isabelle_DOF_trace_attribute" ("@{trace'_-attribute _}")
|
||||
|
||||
consts Isabelle_DOF_typ :: "string \<Rightarrow> typ" ("@{typ _}")
|
||||
consts Isabelle_DOF_term :: "string \<Rightarrow> term" ("@{term _}")
|
||||
datatype "thm" = Isabelle_DOF_thm string ("@{thm _}") | Thm_content ("proof":proofterm)
|
||||
datatype "thms_of" = Isabelle_DOF_thms_of string ("@{thms-of _}")
|
||||
datatype "file" = Isabelle_DOF_file string ("@{file _}")
|
||||
datatype "thy" = Isabelle_DOF_thy string ("@{thy _}")
|
||||
consts Isabelle_DOF_docitem :: "string \<Rightarrow> 'a" ("@{docitem _}")
|
||||
datatype "docitem_attr" = Isabelle_DOF_docitem_attr string string ("@{docitemattr (_) :: (_)}")
|
||||
consts Isabelle_DOF_trace_attribute :: "string \<Rightarrow> (string * string) list" ("@{trace'_-attribute _}")
|
||||
|
||||
subsection\<open> Semantics \<close>
|
||||
|
||||
ML\<open>
|
||||
structure Meta_ISA_core =
|
||||
struct
|
||||
|
||||
fun ML_isa_check_trace_attribute thy (term, _, pos) s =
|
||||
let
|
||||
val oid = (HOLogic.dest_string term
|
||||
handle TERM(_,[t]) => error ("wrong term format: must be string constant: "
|
||||
^ Syntax.string_of_term_global thy t ))
|
||||
val _ = DOF_core.get_instance_global oid thy
|
||||
in SOME term end
|
||||
|
||||
fun reify_typ (Type (s, typ_list)) =
|
||||
\<^Const>\<open>Ty\<close> $ HOLogic.mk_literal s $ HOLogic.mk_list \<^Type>\<open>typ\<close> (map reify_typ typ_list)
|
||||
| reify_typ (TFree (name, sort)) =
|
||||
\<^Const>\<open>Tv\<close> $(\<^Const>\<open>Free\<close> $ HOLogic.mk_literal name)
|
||||
$ (HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort))
|
||||
| reify_typ (TVar (indexname, sort)) =
|
||||
let val (name, index_value) = indexname
|
||||
in \<^Const>\<open>Tv\<close>
|
||||
$ (\<^Const>\<open>Var\<close>
|
||||
$ HOLogic.mk_prod (HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value))
|
||||
$ (HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort)) end
|
||||
|
||||
fun ML_isa_elaborate_typ (thy:theory) _ _ term_option _ =
|
||||
case term_option of
|
||||
NONE => error("Wrong term option. You must use a defined term")
|
||||
| SOME term => let
|
||||
val typ_name = HOLogic.dest_string term
|
||||
val typ = Syntax.read_typ_global thy typ_name
|
||||
in reify_typ typ end
|
||||
|
||||
fun reify_term (Const (name, typ)) =\<^Const>\<open>Ct\<close> $ HOLogic.mk_literal name $ reify_typ typ
|
||||
| reify_term (Free (name, typ)) =
|
||||
\<^Const>\<open>Fv\<close> $ (\<^Const>\<open>Free\<close> $ HOLogic.mk_literal name) $ reify_typ typ
|
||||
| reify_term (Var (indexname, typ)) =
|
||||
let val (name, index_value) = indexname
|
||||
in \<^Const>\<open>Fv\<close>
|
||||
$ (\<^Const>\<open>Var\<close>
|
||||
$ HOLogic.mk_prod (HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value))
|
||||
$ reify_typ typ end
|
||||
| reify_term (Bound i) = \<^Const>\<open>Bv\<close> $ HOLogic.mk_nat i
|
||||
| reify_term (Abs (_, typ, term)) = \<^Const>\<open>Abs\<close> $ reify_typ typ $ reify_term term
|
||||
| reify_term (Term.$ (t1, t2)) = \<^Const>\<open>App\<close> $ reify_term t1 $ reify_term t2
|
||||
|
||||
fun ML_isa_elaborate_term (thy:theory) _ _ term_option _ =
|
||||
case term_option of
|
||||
NONE => error("Wrong term option. You must use a defined term")
|
||||
| SOME term => let
|
||||
val term_name = HOLogic.dest_string term
|
||||
val term = Syntax.read_term_global thy term_name
|
||||
in reify_term term end
|
||||
|
||||
fun reify_proofterm (PBound i) =\<^Const>\<open>PBound\<close> $ (HOLogic.mk_nat i)
|
||||
| reify_proofterm (Abst (_, typ_option, proof)) =
|
||||
\<^Const>\<open>Abst\<close> $ reify_typ (the typ_option) $ reify_proofterm proof
|
||||
| reify_proofterm (AbsP (_, term_option, proof)) =
|
||||
\<^Const>\<open>AbsP\<close> $ reify_term (the term_option) $ reify_proofterm proof
|
||||
| reify_proofterm (op % (proof, term_option)) =
|
||||
\<^Const>\<open>Appt\<close> $ reify_proofterm proof $ reify_term (the term_option)
|
||||
| reify_proofterm (op %% (proof1, proof2)) =
|
||||
\<^Const>\<open>AppP\<close> $ reify_proofterm proof1 $ reify_proofterm proof2
|
||||
| reify_proofterm (Hyp term) = \<^Const>\<open>Hyp\<close> $ (reify_term term)
|
||||
| reify_proofterm (PAxm (_, term, typ_list_option)) =
|
||||
let
|
||||
val tvars = rev (Term.add_tvars term [])
|
||||
val meta_tvars = map (fn ((name, index_value), sort) =>
|
||||
HOLogic.mk_prod
|
||||
(\<^Const>\<open>Var\<close>
|
||||
$ HOLogic.mk_prod
|
||||
(HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value)
|
||||
, HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort))) tvars
|
||||
val meta_typ_list =
|
||||
HOLogic.mk_list @{typ "tyinst"} (map2 (fn x => fn y => HOLogic.mk_prod (x, y))
|
||||
meta_tvars (map reify_typ (the typ_list_option)))
|
||||
in \<^Const>\<open>PAxm\<close> $ reify_term term $ meta_typ_list end
|
||||
| reify_proofterm (PClass (typ, class)) =
|
||||
\<^Const>\<open>OfClass\<close> $ reify_typ typ $ HOLogic.mk_literal class
|
||||
| reify_proofterm (PThm ({prop = prop, types = types, ...}, _)) =
|
||||
let
|
||||
val tvars = rev (Term.add_tvars prop [])
|
||||
val meta_tvars = map (fn ((name, index_value), sort) =>
|
||||
HOLogic.mk_prod
|
||||
(\<^Const>\<open>Var\<close>
|
||||
$ HOLogic.mk_prod
|
||||
(HOLogic.mk_literal name, HOLogic.mk_number \<^Type>\<open>int\<close> index_value)
|
||||
, HOLogic.mk_set \<^typ>\<open>class\<close> (map HOLogic.mk_literal sort))) tvars
|
||||
val meta_typ_list =
|
||||
HOLogic.mk_list \<^typ>\<open>tyinst\<close> (map2 (fn x => fn y => HOLogic.mk_prod (x, y))
|
||||
meta_tvars (map reify_typ (the types)))
|
||||
in \<^Const>\<open>PAxm\<close> $ reify_term prop $ meta_typ_list end
|
||||
|
||||
fun ML_isa_elaborate_thm (thy:theory) _ _ term_option pos =
|
||||
case term_option of
|
||||
NONE => ISA_core.err ("Malformed term annotation") pos
|
||||
| SOME term =>
|
||||
let
|
||||
val thm_name = HOLogic.dest_string term
|
||||
val _ = writeln ("In ML_isa_elaborate_thm thm_name: " ^ \<^make_string> thm_name)
|
||||
val thm = Proof_Context.get_thm (Proof_Context.init_global thy) thm_name
|
||||
val _ = writeln ("In ML_isa_elaborate_thm thm: " ^ \<^make_string> thm)
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm);
|
||||
val prf = Proofterm.proof_of body;
|
||||
(* Proof_Syntax.standard_proof_of reconstructs the proof and seems to rewrite
|
||||
the option arguments (with a value NONE) of the proof datatype constructors,
|
||||
at least for PAxm, with "SOME (typ/term)",
|
||||
allowing us the use the projection function "the".
|
||||
Maybe the function can deal with
|
||||
all the option types of the proof datatype constructors *)
|
||||
val proof = Proof_Syntax.standard_proof_of
|
||||
{full = true, expand_name = Thm.expand_name thm} thm
|
||||
val _ = writeln ("In ML_isa_elaborate_thm proof: " ^ \<^make_string> proof)
|
||||
(* After a small discussion with Simon Roßkopf, It seems preferable to use
|
||||
Thm.reconstruct_proof_of instead of Proof_Syntax.standard_proof_of
|
||||
whose operation is not well known.
|
||||
Thm.reconstruct_proof_of seems sufficient to have a reifiable PAxm
|
||||
in the metalogic. *)
|
||||
val proof' = Thm.reconstruct_proof_of thm
|
||||
(*in \<^Const>\<open>Thm_content\<close> $ reify_proofterm prf end*)
|
||||
(*in \<^Const>\<open>Thm_content\<close> $ reify_proofterm proof end*)
|
||||
in \<^Const>\<open>Thm_content\<close> $ reify_proofterm proof end
|
||||
|
||||
|
||||
fun ML_isa_elaborate_thms_of (thy:theory) _ _ term_option pos =
|
||||
case term_option of
|
||||
NONE => ISA_core.err ("Malformed term annotation") pos
|
||||
| SOME term =>
|
||||
let
|
||||
val thm_name = HOLogic.dest_string term
|
||||
val thm = Proof_Context.get_thm (Proof_Context.init_global thy) thm_name
|
||||
val body = Proofterm.strip_thm_body (Thm.proof_body_of thm)
|
||||
val all_thms_name = Proofterm.fold_body_thms (fn {name, ...} => insert (op =) name) [body] []
|
||||
(*val all_thms = map (Proof_Context.get_thm (Proof_Context.init_global thy)) all_thms_name*)
|
||||
(*val all_proofs = map (Proof_Syntax.standard_proof_of
|
||||
{full = true, expand_name = Thm.expand_name thm}) all_thms*)
|
||||
(*in HOLogic.mk_list \<^Type>\<open>thm\<close> (map (fn proof => \<^Const>\<open>Thm_content\<close> $ reify_proofterm proof) all_proofs) end*)
|
||||
in HOLogic.mk_list \<^typ>\<open>string\<close> (map HOLogic.mk_string all_thms_name) end
|
||||
|
||||
fun ML_isa_elaborate_trace_attribute (thy:theory) _ _ term_option pos =
|
||||
case term_option of
|
||||
NONE => ISA_core.err ("Malformed term annotation") pos
|
||||
| SOME term =>
|
||||
let
|
||||
val oid = HOLogic.dest_string term
|
||||
val traces = ISA_core.compute_attr_access (Context.Theory thy) "trace" oid NONE pos
|
||||
fun conv (\<^Const>\<open>Pair \<^typ>\<open>doc_class rexp\<close> \<^typ>\<open>string\<close>\<close>
|
||||
$ (\<^Const>\<open>Atom \<^typ>\<open>doc_class\<close>\<close> $ (\<^Const>\<open>mk\<close> $ s)) $ S) =
|
||||
let val s' = DOF_core.get_onto_class_name_global (HOLogic.dest_string s) thy
|
||||
in \<^Const>\<open>Pair \<^typ>\<open>string\<close> \<^typ>\<open>string\<close>\<close> $ HOLogic.mk_string s' $ S end
|
||||
val traces' = map conv (HOLogic.dest_list traces)
|
||||
in HOLogic.mk_list \<^Type>\<open>prod \<^typ>\<open>string\<close> \<^typ>\<open>string\<close>\<close> traces' end
|
||||
|
||||
end; (* struct *)
|
||||
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val ty1 = Meta_ISA_core.reify_typ @{typ "int"}
|
||||
val ty2 = Meta_ISA_core.reify_typ @{typ "int \<Rightarrow> bool"}
|
||||
val ty3 = Meta_ISA_core.reify_typ @{typ "prop"}
|
||||
val ty4 = Meta_ISA_core.reify_typ @{typ "'a list"}
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val t1 = Meta_ISA_core.reify_term @{term "1::int"}
|
||||
val t2 = Meta_ISA_core.reify_term @{term "\<lambda>x. x = 1"}
|
||||
val t3 = Meta_ISA_core.reify_term @{term "[2, 3::int]"}
|
||||
\<close>
|
||||
|
||||
subsection\<open> Isar - Setup\<close>
|
||||
(* Isa_transformers declaration for Isabelle_DOF term anti-quotations (typ, term, thm, etc.).
|
||||
They must be declared in the same theory file as the one of the declaration
|
||||
of Isabelle_DOF term anti-quotations !!! *)
|
||||
setup\<open>
|
||||
[(\<^type_name>\<open>thm\<close>, ISA_core.ML_isa_check_thm, Meta_ISA_core.ML_isa_elaborate_thm)
|
||||
, (\<^type_name>\<open>thms_of\<close>, ISA_core.ML_isa_check_thm, Meta_ISA_core.ML_isa_elaborate_thms_of)
|
||||
, (\<^type_name>\<open>file\<close>, ISA_core.ML_isa_check_file, ISA_core.ML_isa_elaborate_generic)]
|
||||
|> fold (fn (n, check, elaborate) => fn thy =>
|
||||
let val ns = Sign.tsig_of thy |> Type.type_space
|
||||
val name = n
|
||||
val {pos, ...} = Name_Space.the_entry ns name
|
||||
val bname = Long_Name.base_name name
|
||||
val binding = Binding.make (bname, pos)
|
||||
|> Binding.prefix_name DOF_core.ISA_prefix
|
||||
|> Binding.prefix false bname
|
||||
in DOF_core.add_isa_transformer binding ((check, elaborate) |> DOF_core.make_isa_transformer) thy
|
||||
end)
|
||||
#>
|
||||
([(\<^const_name>\<open>Isabelle_DOF_typ\<close>, ISA_core.ML_isa_check_typ, Meta_ISA_core.ML_isa_elaborate_typ)
|
||||
,(\<^const_name>\<open>Isabelle_DOF_term\<close>, ISA_core.ML_isa_check_term, Meta_ISA_core.ML_isa_elaborate_term)
|
||||
,(\<^const_name>\<open>Isabelle_DOF_docitem\<close>,
|
||||
ISA_core.ML_isa_check_docitem, ISA_core.ML_isa_elaborate_generic)
|
||||
,(\<^const_name>\<open>Isabelle_DOF_trace_attribute\<close>,
|
||||
ISA_core.ML_isa_check_trace_attribute, ISA_core.ML_isa_elaborate_trace_attribute)]
|
||||
|> fold (fn (n, check, elaborate) => fn thy =>
|
||||
let val ns = Sign.consts_of thy |> Consts.space_of
|
||||
val name = n
|
||||
val {pos, ...} = Name_Space.the_entry ns name
|
||||
val bname = Long_Name.base_name name
|
||||
val binding = Binding.make (bname, pos)
|
||||
in DOF_core.add_isa_transformer binding ((check, elaborate) |> DOF_core.make_isa_transformer) thy
|
||||
end))
|
||||
\<close>
|
||||
end
|
|
@ -1,237 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Testing Freeform and Formal Elements from the scholarly-paper Ontology\<close>
|
||||
|
||||
theory
|
||||
AssnsLemmaThmEtc
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.Conceptual"
|
||||
"Isabelle_DOF.scholarly_paper"
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
TestKit
|
||||
begin
|
||||
|
||||
section\<open>Test Objective\<close>
|
||||
|
||||
text\<open>Testing Core Elements for \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close> wrt. to
|
||||
existance, controlability via implicit and explicit default classes, and potential
|
||||
LaTeX Layout.\<close>
|
||||
|
||||
text\<open>Current status:\<close>
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
|
||||
section\<open>An Example for use-before-declaration of Formal Content\<close>
|
||||
|
||||
text*[aa::F, properties = "[@{term ''True''}]"]
|
||||
\<open>Our definition of the HOL-Logic has the following properties:\<close>
|
||||
assert*\<open>F.properties @{F \<open>aa\<close>} = [@{term ''True''}]\<close>
|
||||
|
||||
text\<open>For now, as the term annotation is not bound to a meta logic which will translate
|
||||
\<^term>\<open>[@{term ''True''}]\<close> to \<^term>\<open>[True]\<close>, we can not use the HOL \<^const>\<open>True\<close> constant
|
||||
in the assertion.\<close>
|
||||
|
||||
ML\<open> @{term_ "[@{term \<open>True \<longrightarrow> True \<close>}]"}; (* with isa-check *) \<close>
|
||||
|
||||
ML\<open>
|
||||
(* Checking the default classes which should be in a neutral(unset) state. *)
|
||||
(* Note that in this state, the "implicit default" is "math_content". *)
|
||||
@{assert} (Config.get_global @{theory} Definition_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Lemma_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Theorem_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Proposition_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Premise_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Corollary_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Consequence_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Assumption_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Hypothesis_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Consequence_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Assertion_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Proof_default_class = "");
|
||||
@{assert} (Config.get_global @{theory} Example_default_class = "");
|
||||
\<close>
|
||||
|
||||
|
||||
Definition*[e1]\<open>Lorem ipsum dolor sit amet, ... \<close>
|
||||
text\<open>Note that this should yield a warning since \<^theory_text>\<open>Definition*\<close> uses as "implicit default" the class
|
||||
\<^doc_class>\<open>math_content\<close> which has no \<^term>\<open>text_element.level\<close> set, however in this context,
|
||||
it is required to be a positive number since it is \<^term>\<open>text_element.referentiable\<close> .
|
||||
This is intended behaviour in order to give the user a nudge to be more specific.\<close>
|
||||
|
||||
text\<open>A repair looks like this:\<close>
|
||||
declare [[Definition_default_class = "definition"]]
|
||||
|
||||
text\<open>Now, define a forward reference to the formal content: \<close>
|
||||
|
||||
declare_reference*[e1bisbis::"definition"]
|
||||
|
||||
text\<open>... which makes it possible to refer in a freeform definition to its formal counterpart
|
||||
which will appear textually later. With this pragmatics, an "out-of- order-presentation"
|
||||
can be achieved within \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close> for the most common cases.\<close>
|
||||
|
||||
|
||||
(*<*) (* PDF references to definition* not implemented *)
|
||||
Definition*[e1bis::"definition", short_name="\<open>Nice lemma.\<close>"]
|
||||
\<open>Lorem ipsum dolor sit amet, ...
|
||||
This is formally defined as follows in @{definition (unchecked) "e1bisbis"}\<close>
|
||||
definition*[e1bisbis, status=formal] e :: int where "e = 2"
|
||||
(*>*)
|
||||
section\<open>Tests for Theorems, Assertions, Assumptions, Hypothesis, etc.\<close>
|
||||
|
||||
declare [[Theorem_default_class = "theorem",
|
||||
Premise_default_class = "premise",
|
||||
Hypothesis_default_class = "hypothesis",
|
||||
Assumption_default_class = "assumption",
|
||||
Conclusion_default_class = "conclusion",
|
||||
Consequence_default_class = "consequence",
|
||||
Assertion_default_class = "assertion",
|
||||
Corollary_default_class = "corollary",
|
||||
Proof_default_class = "math_proof",
|
||||
Conclusion_default_class = "conclusion_stmt"]]
|
||||
|
||||
Theorem*[e2]\<open>... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
|
||||
|
||||
theorem*[e2bis::"theorem", status=formal] f : "e = 1+1" unfolding e_def by simp
|
||||
|
||||
(*<*) (* @{theorem "e2bis"} breaks LaTeX generation ... *)
|
||||
Lemma*[e3,level="Some 2"]
|
||||
\<open>... phasellus amet id massa nunc, pede suscipit repellendus, ... @{theorem "e2bis"} \<close>
|
||||
(*>*)
|
||||
Proof*[d10, short_name="\<open>Induction over Tinea pedis.\<close>"]\<open>Freeform Proof\<close>
|
||||
|
||||
lemma*[dfgd::"lemma"] q: "All (\<lambda>x. X \<and> Y \<longrightarrow> True)" oops
|
||||
text-assert-error\<open>@{lemma dfgd} \<close>\<open>Undefined instance:\<close> \<comment> \<open>oops‘ed objects are not referentiable.\<close>
|
||||
|
||||
text\<open>... in ut tortor eleifend augue pretium consectetuer...
|
||||
Lectus accumsan velit ultrices, ...\<close>
|
||||
|
||||
Proposition*[d2::"proposition"]\<open>"Freeform Proposition"\<close>
|
||||
|
||||
Assumption*[d3] \<open>"Freeform Assertion"\<close>
|
||||
|
||||
Premise*[d4]\<open>"Freeform Premise"\<close>
|
||||
|
||||
Corollary*[d5]\<open>"Freeform Corollary"\<close>
|
||||
|
||||
Consequence*[d6::scholarly_paper.consequence]\<open>"Freeform Consequence"\<close> \<comment> \<open>longname just for test\<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*[ababa::scholarly_paper.assertion]
|
||||
Assertion*[d7]\<open>Freeform Assumption with forward reference to the formal
|
||||
@{assertion (unchecked) ababa}.\<close>
|
||||
assert*[ababa::assertion] "3 < (4::int)"
|
||||
assert*[ababab::assertion] "0 < (4::int)"
|
||||
(*>*)
|
||||
|
||||
Conclusion*[d8]\<open>"Freeform Conclusion"\<close>
|
||||
|
||||
Hypothesis*[d9]\<open>"Freeform Hypothesis"\<close>
|
||||
|
||||
Example*[d11::math_example]\<open>"Freeform Example"\<close>
|
||||
|
||||
text\<open>An example for the ontology specification character of the short-cuts such as
|
||||
@{command "assert*"}: in the following, we use the same notation referring to a completely
|
||||
different class. "F" and "assertion" have only in common that they posses the attribute
|
||||
@{const [names_short] \<open>properties\<close>}: \<close>
|
||||
|
||||
section\<open>Exhaustive Scholarly\_paper Test\<close>
|
||||
|
||||
subsection\<open>Global Structural Elements\<close>
|
||||
(* maybe it is neither necessary nor possible to test these here... title is unique in
|
||||
a document, for example. To be commented out of needed. *)
|
||||
text*[tt1::scholarly_paper.title]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt2::scholarly_paper.author]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt3::scholarly_paper.article]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt4::scholarly_paper.annex]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt5::scholarly_paper.abstract]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt6::scholarly_paper.subtitle]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt7::scholarly_paper.bibliography]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt8::scholarly_paper.introduction]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt9::scholarly_paper.related_work]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt11::scholarly_paper.text_section]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt12::scholarly_paper.background ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt13::scholarly_paper.conclusion ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
|
||||
subsection\<open>Technical Content Specific Elements\<close>
|
||||
|
||||
text*[tu1::scholarly_paper.axiom ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu1bis::scholarly_paper.math_content, mcc="axm" ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu2::scholarly_paper.lemma ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu3::scholarly_paper.example ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu4::scholarly_paper.premise ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu5::scholarly_paper.theorem ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu6::scholarly_paper.assertion]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu7::scholarly_paper.corollary]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu9::scholarly_paper.technical]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu10::scholarly_paper.assumption ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu13::scholarly_paper.definition ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu15::scholarly_paper.experiment ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu16::scholarly_paper.hypothesis ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu17::scholarly_paper.math_proof ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu18::scholarly_paper.consequence]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu19::scholarly_paper.math_formal]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu20::scholarly_paper.proposition]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu21::scholarly_paper.math_content ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu22::scholarly_paper.math_example ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu23::scholarly_paper.conclusion_stmt ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu24::scholarly_paper.math_motivation ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu25::scholarly_paper.tech_definition ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu28::scholarly_paper.eng_example ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tt10::scholarly_paper.tech_example]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu8::scholarly_paper.tech_code] \<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu27::scholarly_paper.engineering_content]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
text*[tu14::scholarly_paper.evaluation ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
|
||||
text\<open> @{axiom tu1} @{lemma tu2} @{example tu3} @{premise tu4} @{theorem tu5} @{assertion tu6}
|
||||
@{technical tu9} @{assumption tu10 } @{definition tu13 }
|
||||
@{experiment tu15 } @{hypothesis tu16 } @{math_proof tu17 }
|
||||
@{consequence tu18 } @{math_formal tu19 } @{proposition tu20 }
|
||||
@{math_content tu21 } @{math_example tu22 } @{conclusion_stmt tu23 }
|
||||
@{math_motivation tu24 } @{tech_definition tu25 } @{eng_example tu28 }
|
||||
@{tech_example tt10 } @{tech_code tu8 } @{engineering_content tu27 }
|
||||
@{evaluation tu14 }
|
||||
\<close>
|
||||
|
||||
subsection\<open>The Use in Macros\<close>
|
||||
|
||||
Lemma*[ttu2::scholarly_paper.lemma ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Example*[ttu3::scholarly_paper.math_example ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Premise*[ttu4::scholarly_paper.premise ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Theorem*[ttu5::scholarly_paper.theorem ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Assertion*[ttu6::scholarly_paper.assertion]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Corollary*[ttu7::scholarly_paper.corollary]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Assumption*[ttu10::scholarly_paper.assumption ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Definition*[ttu13::scholarly_paper.definition ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Hypothesis*[ttu16::scholarly_paper.hypothesis ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Proof*[ttu17::scholarly_paper.math_proof ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Consequence*[ttu18::scholarly_paper.consequence]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Proposition*[ttu20::scholarly_paper.proposition]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
Conclusion*[ttu23::scholarly_paper.conclusion_stmt ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
(* Definition*[ttu25::scholarly_paper.tech_definition ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
interesting modeling bug.
|
||||
*)
|
||||
(*Example*[ttu28::scholarly_paper.eng_example ]\<open>Lectus accumsan velit ultrices, ...\<close>
|
||||
interesting modeling bug.
|
||||
*)
|
||||
text\<open> @{lemma ttu2} @{math_example ttu3} @{premise ttu4} @{theorem ttu5} @{assertion ttu6}
|
||||
@{assumption ttu10 } @{definition ttu13 }
|
||||
@{hypothesis ttu16 } @{math_proof ttu17 }
|
||||
@{consequence ttu18 } @{proposition ttu20 }
|
||||
@{math_content tu21 } @{conclusion_stmt ttu23 }
|
||||
@ \<open>{eng_example ttu28 }\<close>
|
||||
@ \<open>{tech_example tt10 }\<close>
|
||||
\<close>
|
||||
|
||||
end
|
|
@ -1,59 +0,0 @@
|
|||
theory
|
||||
COL_Test
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
begin
|
||||
|
||||
|
||||
print_doc_items
|
||||
print_doc_classes
|
||||
|
||||
section\<open>General Heading COL Elements\<close>
|
||||
|
||||
chapter*[S1::"chapter"]\<open>Chapter\<close>
|
||||
text*[S1'::"chapter"]\<open>Chapter\<close>
|
||||
|
||||
section*[S2::"section"]\<open>Section\<close>
|
||||
text*[S2'::"section"]\<open>Section\<close>
|
||||
|
||||
subsection*[S3::"subsection"]\<open>Subsection\<close>
|
||||
text*[S3'::"subsection"]\<open>Subsection\<close>
|
||||
|
||||
subsubsection*[S4::"subsubsection"]\<open>Subsubsection\<close>
|
||||
text*[S4'::"subsubsection"]\<open>Subsubsection\<close>
|
||||
|
||||
paragraph*[S5::"paragraph"]\<open>PAragraph\<close>
|
||||
text*[S5'::"paragraph"]\<open>Paragraph\<close>
|
||||
|
||||
|
||||
section\<open>General Figure COL Elements\<close>
|
||||
|
||||
figure*[fig1_test,relative_width="95",file_src="''figures/A.png''"]
|
||||
\<open> This is the label text \<^term>\<open>\<sigma>\<^sub>i+2\<close> \<close>
|
||||
|
||||
(*<*) (* text* with type figure not supported *)
|
||||
text*[fig2_test::figure, relative_width="95",file_src="''figures/A.png''"
|
||||
]\<open> This is the label text\<close>
|
||||
|
||||
text\<open>check @{figure fig1_test} cmp to @{figure fig2_test}\<close>
|
||||
(*>*)
|
||||
|
||||
|
||||
(* And a side-chick ... *)
|
||||
|
||||
text*[inlinefig::float,
|
||||
main_caption="\<open>The Caption.\<close>"]
|
||||
\<open>@{theory_text [display, margin = 5] \<open>lemma A :: "a \<longrightarrow> b"\<close>}\<close>
|
||||
|
||||
text*[dupl_graphics::float,
|
||||
main_caption="\<open>The Caption.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=40, height=35, caption="This is a left test") "figures/A.png"
|
||||
}\<^hfill>@{fig_content (width=40, height=35, caption="This is a right \<^term>\<open>\<sigma>\<^sub>i + 1\<close> test") "figures/B.png"}
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
end
|
||||
(*>*)
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
theory
|
||||
Cenelec_Test
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
begin
|
||||
|
||||
declare[[strict_monitor_checking = true]]
|
||||
declare[[invariants_checking = true]]
|
||||
declare[[invariants_checking_with_tactics = true]]
|
||||
|
||||
print_doc_items
|
||||
print_doc_classes
|
||||
|
||||
open_monitor*[SIL0Test::monitor_SIL0]
|
||||
|
||||
text*[sqap_instance::SQAP, sil="SIL0", written_by="Some RQM", fst_check="Some VER", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[sqavr_instance::SQAVR, sil= "SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[scmp_instance::SCMP, sil="SIL0", written_by="Some CM", fst_check="Some VER", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[svp_instance::SVP, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[svap_instance::SVAP, sil="SIL0", written_by="Some VAL", fst_check="Some VER", snd_check="None"]\<open>\<close>
|
||||
text*[swrs_instance::SWRS, sil="SIL0", written_by="Some RQM", fst_check="Some VER", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[oswts_instance::OSWTS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swrvr_instance::SWRVR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swas_instance::SWAS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swds_instance::SWDS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swis_instance::SWIS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swits_instance::SWITS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swhits_instance::SWHITS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swadvr_instance::SWADVR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swcds_instance::SWCDS, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swcts_instance::SWCTS, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swcdvr_instance::SWCDVR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swscd_instance::SWSCD, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swctr_instance::SWCTR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swscvr_instance::SWSCVR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[switr_instance::SWITR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swhaitr_instance::SWHAITR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swivr_instance::SWIVR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[oswtr_instance::OSWTR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swvalr_instance::SWVALR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[tvalr_instance::TVALR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swvrn_instance::SWVRN, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[ars_instance::ARS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[app_instance::APP, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[ats_instance::ATS, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[aad_instance::AAD, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[apvr_instance::APVR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[atr_instance::ATR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[socoada_instance::SOCOADA, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[adavr_instance::ADAVR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swrdp_instance::SWRDP, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swdm_instance::SWDM, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swdrn_instance::SWDRN, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swdr_instance::SWDR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swdvr_instance::SWDVR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swmp_instance::SWMP, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swcr_instance::SWCR, sil="SIL0", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swmr_instance::SWMR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swmvr_instance::SWMVR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swap_instance::SWAP, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
text*[swar_instance::SWAR, sil="SIL0", nlvl="R", written_by="Some VER", fst_check="None", snd_check="Some VAL"]\<open>\<close>
|
||||
|
||||
close_monitor*[SIL0Test]
|
||||
|
||||
declare[[strict_monitor_checking = true]]
|
||||
declare[[invariants_checking = true]]
|
||||
declare[[invariants_checking_with_tactics = true]]
|
||||
|
||||
end
|
|
@ -1,162 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Testing hand-programmed (low-level) Invariants\<close>
|
||||
|
||||
theory Concept_Example_Low_Level_Invariant
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"Isabelle_DOF-Ontologies.Conceptual" (* we use the generic "Conceptual" ontology *)
|
||||
TestKit
|
||||
begin
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open> Via @{ML "DOF_core.add_ml_invariant"} it is possible to attach user-defined
|
||||
ML-code to classes which is executed at each creation or modification of
|
||||
class instances. We test exection of creation and updates. \<close>
|
||||
|
||||
text\<open>Consult the status of the DOF engine:\<close>
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
|
||||
section\<open>Example: Standard Class Invariant\<close>
|
||||
|
||||
|
||||
text\<open>Watch out: The current programming interface to document class invariants is pretty low-level:
|
||||
\<^item> No inheritance principle
|
||||
\<^item> No high-level notation in HOL
|
||||
\<^item> Typing on ML level is assumed to be correct.
|
||||
The implementor of an ontology must know what he does ...
|
||||
\<close>
|
||||
|
||||
text\<open>Setting a sample invariant, which simply produces some side-effect:\<close>
|
||||
|
||||
setup\<open>
|
||||
fn thy =>
|
||||
let val ctxt = Proof_Context.init_global thy
|
||||
val cid_long = DOF_core.get_onto_class_name_global "A" thy
|
||||
val bind = Binding.name "Sample_Echo"
|
||||
val exec = (fn oid => fn {is_monitor = b} => fn ctxt =>
|
||||
(writeln ("sample echo : "^oid); true))
|
||||
in DOF_core.add_ml_invariant bind (DOF_core.make_ml_invariant (exec, cid_long)) thy end
|
||||
\<close>
|
||||
|
||||
text\<open>The checker \<open>exec\<close> above is set. Just used to provoke output: "sample echo : b"\<close>
|
||||
text*[b::A, x = "5"] \<open> Lorem ipsum dolor sit amet, ... \<close>
|
||||
|
||||
text\<open>Setting a sample invariant, referring to attribute value "x":\<close>
|
||||
setup\<open>
|
||||
fn thy =>
|
||||
let fun check_A_invariant oid {is_monitor:bool} ctxt =
|
||||
let val term = ISA_core.compute_attr_access ctxt "x" oid NONE @{here}
|
||||
val (@{typ "int"},x_value) = HOLogic.dest_number term
|
||||
in if x_value > 5 then error("class A invariant violation") else true end
|
||||
val cid_long = DOF_core.get_onto_class_name_global "A" thy
|
||||
val bind = Binding.name "Check_A_Invariant"
|
||||
in DOF_core.add_ml_invariant bind (DOF_core.make_ml_invariant (check_A_invariant, cid_long)) thy end
|
||||
\<close>
|
||||
|
||||
(* borderline test *)
|
||||
text*[d0::A, x = "5"] \<open>Lorem ipsum dolor sit amet, ...\<close>
|
||||
text-assert-error[d1::A, x = "6"]\<open>Lorem ipsum dolor sit amet, ...\<close>\<open>class A invariant violation\<close>
|
||||
|
||||
subsection*[d::A, x = "4"] \<open> Lorem ipsum dolor sit amet, ... \<close>
|
||||
|
||||
(* invariant still valid *)
|
||||
update_instance*[d::A, x += "1"]
|
||||
|
||||
(* invariant no longer holds*)
|
||||
update_instance-assert-error[d::A, x += "1"]\<open>class A invariant violation\<close>
|
||||
|
||||
|
||||
section\<open>Example: Monitor Class Invariant\<close>
|
||||
|
||||
text\<open>Of particular interest are class invariants attached to monitor classes: since the
|
||||
latter manage a trace-attribute, a class invariant on them can assure a global form of consistency.
|
||||
It is possible to express:
|
||||
\<^item> that attributes of a document element must satisfy particular conditions depending on the
|
||||
prior document elements --- as long they have been observed in a monitor.
|
||||
\<^item> non-regular properties on a trace not expressible in a regular expression
|
||||
(like balanced ness of opening and closing text elements)
|
||||
\<^item> etc.
|
||||
\<close>
|
||||
|
||||
text\<open>A simple global trace-invariant is expressed in the following: it requires
|
||||
that instances of class C occur more often as those of class D; note that this is meant
|
||||
to take sub-classing into account:
|
||||
\<close>
|
||||
|
||||
setup\<open>
|
||||
fn thy =>
|
||||
let fun check_M_invariant oid {is_monitor} ctxt =
|
||||
let val term = ISA_core.compute_attr_access ctxt "trace" oid NONE @{here}
|
||||
fun conv (\<^Const>\<open>Pair \<^typ>\<open>doc_class rexp\<close> \<^typ>\<open>string\<close>\<close>
|
||||
$ (\<^Const>\<open>Atom \<^typ>\<open>doc_class\<close>\<close> $ (\<^Const>\<open>mk\<close> $ s)) $ S) =
|
||||
let val s' = DOF_core.get_onto_class_name_global' (HOLogic.dest_string s) thy
|
||||
in (s', HOLogic.dest_string S) end
|
||||
val string_pair_list = map conv (HOLogic.dest_list term)
|
||||
val cid_list = map fst string_pair_list
|
||||
val ctxt' = Proof_Context.init_global(Context.theory_of ctxt)
|
||||
fun is_C x = DOF_core.is_subclass ctxt' x "Conceptual.C"
|
||||
fun is_D x = DOF_core.is_subclass ctxt' x "Conceptual.D"
|
||||
val n = length (filter is_C cid_list)
|
||||
val m = length (filter is_D cid_list)
|
||||
in if m > n then error("class M invariant violation") else true end
|
||||
val cid_long = DOF_core.get_onto_class_name_global "M" thy
|
||||
val binding = Binding.name "Check_M_Invariant"
|
||||
in DOF_core.add_ml_invariant binding (DOF_core.make_ml_invariant (check_M_invariant, cid_long)) thy end
|
||||
\<close>
|
||||
|
||||
|
||||
section\<open>Example: Monitor Class Invariant\<close>
|
||||
|
||||
open_monitor*[struct::M]
|
||||
|
||||
subsection*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
|
||||
|
||||
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
|
||||
|
||||
text*[d1::E, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus, ... \<close>
|
||||
|
||||
text*[c2:: C, x = "''delta''"] \<open> ... in ut tortor eleifend augue pretium consectetuer... \<close>
|
||||
|
||||
subsection*[f::E] \<open> Lectus accumsan velit ultrices, ... \<close>
|
||||
|
||||
|
||||
text-assert-error[f2::E] \<open> Lectus accumsan velit ultrices, ... \<close>\<open>class M invariant violation\<close>
|
||||
|
||||
|
||||
ML\<open>val ctxt = @{context}
|
||||
val term = ISA_core.compute_attr_access
|
||||
(Context.Proof ctxt) "trace" "struct" NONE @{here} ;
|
||||
fun conv (Const(@{const_name "Pair"},_) $ Const(s,_) $ S) = (s, HOLogic.dest_string S)
|
||||
fun conv' (\<^Const>\<open>Pair \<^typ>\<open>doc_class rexp\<close> \<^typ>\<open>string\<close>\<close>
|
||||
$ (\<^Const>\<open>Atom \<^typ>\<open>doc_class\<close>\<close> $ (\<^Const>\<open>mk\<close> $ s)) $ S) =
|
||||
let val s' = DOF_core.get_onto_class_name_global'
|
||||
(HOLogic.dest_string s)
|
||||
(Proof_Context.theory_of ctxt)
|
||||
in (s', HOLogic.dest_string S) end
|
||||
val string_pair_list = map conv' (HOLogic.dest_list term);
|
||||
@{assert} (string_pair_list =
|
||||
[("Conceptual.A", "a"), ("Conceptual.C", "c1"),
|
||||
("Conceptual.E", "d1"), ("Conceptual.C", "c2"),
|
||||
("Conceptual.E", "f")])
|
||||
\<close>
|
||||
|
||||
|
||||
close_monitor*[struct]
|
||||
|
||||
|
||||
end
|
||||
|
|
@ -1,280 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>High-level Class Invariants\<close>
|
||||
|
||||
theory Concept_High_Level_Invariants
|
||||
imports "Isabelle_DOF.Isa_DOF"
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
TestKit
|
||||
begin
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open>
|
||||
Without invariants, ontological classes as such are too liberal in many situations.
|
||||
Similarly to UML constraints, invariants or hand-programmed checking functions
|
||||
can be added in ODL ontologies in order to constrain class instances or
|
||||
(via monitor traces) impose structural constraints over an entire document.
|
||||
|
||||
While hand-programmed checking functions were tested in test-case
|
||||
\<^verbatim>\<open>Concept_Example_Low_Level_Invariant\<close>, in this text case, we test
|
||||
high-level invariants, i.e. data-constraints speicified as executable
|
||||
HOL-predicates in the @{theory_text \<open>invariant\<close>} clause of ODL definitions.
|
||||
|
||||
To enable the checking of the invariants, the \<open>invariants_checking\<close>
|
||||
theory attribute must be set:\<close>
|
||||
|
||||
|
||||
section\<open>The Scenario.\<close>
|
||||
|
||||
text\<open> This is merely an example that shows that the generated invariants
|
||||
fit nicely together; i.e. allow for sensible consistency and invariant
|
||||
preservation proofs related to ontological matchings. \<close>
|
||||
|
||||
|
||||
text\<open>Using HOL, we can define a mapping between two ontologies.
|
||||
It is called ontology matching or ontology alignment.
|
||||
Here is an example which show how to map two classes.
|
||||
HOL also allows us to map the invariants (ontological rules) of the classes!\<close>
|
||||
|
||||
text\<open>
|
||||
Ontological classes as described so far are too liberal in many situations.
|
||||
There is a first high-level syntax implementation for class invariants.
|
||||
These invariants can be checked when an instance of the class is defined.
|
||||
To enable the checking of the invariants, the \<open>invariants_checking\<close>
|
||||
theory attribute must be set:\<close>
|
||||
|
||||
|
||||
declare[[invariants_strict_checking = true]]
|
||||
|
||||
text\<open>For example, let's define the following two classes:\<close>
|
||||
|
||||
doc_class class_inv1 =
|
||||
int1 :: "int"
|
||||
invariant inv1 :: "int1 \<sigma> \<ge> 3"
|
||||
|
||||
doc_class class_inv2 = class_inv1 +
|
||||
int2 :: "int"
|
||||
invariant inv2 :: "int2 \<sigma> < 2"
|
||||
|
||||
text\<open>The symbol \<^term>\<open>\<sigma>\<close> is reserved and references the future instance class.
|
||||
By relying on the implementation of the Records
|
||||
in Isabelle/HOL~@{cite "wenzel:isabelle-isar:2020"},
|
||||
one can reference an attribute of an instance using its selector function.
|
||||
For example, \<^term>\<open>int1 \<sigma>\<close> denotes the value
|
||||
of the \<^term>\<open>int1\<close> attribute
|
||||
of the future instance of the class @{doc_class class_inv1}.
|
||||
Now let's define two instances, one of each class:\<close>
|
||||
|
||||
text*[testinv1::class_inv1, int1=4]\<open>lorem ipsum...\<close>
|
||||
update_instance*[testinv1::class_inv1, int1:="3"]
|
||||
(* When not commented, should violated the invariant:
|
||||
update_instance*[testinv1::class_inv1, int1:=1]
|
||||
*)
|
||||
|
||||
text*[testinv2::class_inv2, int1=3, int2=1]\<open>lorem ipsum...\<close>
|
||||
|
||||
text\<open>
|
||||
The value of each attribute defined for the instances is checked against their classes invariants.
|
||||
As the class @{doc_class class_inv2} is a subsclass of the class @{doc_class class_inv1},
|
||||
it inherits @{doc_class class_inv1} invariants.
|
||||
Hence the \<^term>\<open>int1\<close> invariant is checked when the instance @{docitem testinv2} is defined.\<close>
|
||||
|
||||
text\<open>Test invariant for attributes of attributes: \<close>
|
||||
|
||||
doc_class inv_test1 =
|
||||
a :: int
|
||||
|
||||
doc_class inv_test2 =
|
||||
b :: "inv_test1"
|
||||
c:: int
|
||||
invariant inv_test2 :: "c \<sigma> = 1"
|
||||
invariant inv_test2' :: "a (b \<sigma>) = 2"
|
||||
|
||||
doc_class inv_test3 = inv_test1 +
|
||||
b :: "inv_test1"
|
||||
c:: int
|
||||
invariant inv_test3 :: "a \<sigma> = 2"
|
||||
invariant inv_test3' :: "a (b \<sigma>) = 2"
|
||||
|
||||
doc_class inv_test4 = inv_test2 +
|
||||
d :: "inv_test3"
|
||||
invariant inv_test4 :: "a (inv_test2.b \<sigma>) = 2"
|
||||
invariant inv_test4' :: "a (d \<sigma>) = 2"
|
||||
|
||||
text*[inv_test1_instance::inv_test1, a=2]\<open>\<close>
|
||||
text*[inv_test3_instance::inv_test3, a=2, b="@{inv_test1 \<open>inv_test1_instance\<close>}" ]\<open>\<close>
|
||||
text*[inv_test4_instance::inv_test4, b="@{inv_test1 \<open>inv_test1_instance\<close>}"
|
||||
, c=1, d="@{inv_test3 \<open>inv_test3_instance\<close>}"]\<open>\<close>
|
||||
|
||||
text\<open>To support invariant on attributes in attributes
|
||||
and invariant on attributes of the superclasses,
|
||||
we check that the type of the attribute of the subclass is ground:\<close>
|
||||
ML\<open>
|
||||
val Type(st, [ty]) = \<^typ>\<open>inv_test1\<close>
|
||||
val Type(st', [ty']) = \<^typ>\<open>'a inv_test1_scheme\<close>
|
||||
val t = ty = \<^typ>\<open>unit\<close>
|
||||
\<close>
|
||||
|
||||
text\<open>Now assume the following ontology:\<close>
|
||||
|
||||
doc_class title =
|
||||
short_title :: "string option" <= "None"
|
||||
|
||||
doc_class author =
|
||||
email :: "string" <= "''''"
|
||||
|
||||
datatype classification = SIL0 | SIL1 | SIL2 | SIL3 | SIL4
|
||||
|
||||
doc_class abstract =
|
||||
keywordlist :: "string list" <= "[]"
|
||||
safety_level :: "classification" <= "SIL3"
|
||||
|
||||
doc_class text_section =
|
||||
authored_by :: "author set" <= "{}"
|
||||
level :: "int option" <= "None"
|
||||
|
||||
type_synonym notion = string
|
||||
|
||||
doc_class introduction = text_section +
|
||||
authored_by :: "author set" <= "UNIV"
|
||||
uses :: "notion set"
|
||||
invariant author_finite :: "finite (authored_by \<sigma>)"
|
||||
and force_level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 1"
|
||||
|
||||
doc_class claim = introduction +
|
||||
based_on :: "notion list"
|
||||
|
||||
doc_class technical = text_section +
|
||||
formal_results :: "thm list"
|
||||
|
||||
doc_class "definition" = technical +
|
||||
is_formal :: "bool"
|
||||
property :: "term list" <= "[]"
|
||||
|
||||
datatype kind = expert_opinion | argument | "proof"
|
||||
|
||||
doc_class result = technical +
|
||||
evidence :: kind
|
||||
property :: "thm list" <= "[]"
|
||||
invariant has_property :: "evidence \<sigma> = proof \<longleftrightarrow> property \<sigma> \<noteq> []"
|
||||
|
||||
doc_class example = technical +
|
||||
referring_to :: "(notion + definition) set" <= "{}"
|
||||
|
||||
doc_class conclusion = text_section +
|
||||
establish :: "(claim \<times> result) set"
|
||||
invariant establish_defined :: "\<forall> x. x \<in> Domain (establish \<sigma>)
|
||||
\<longrightarrow> (\<exists> y \<in> Range (establish \<sigma>). (x, y) \<in> establish \<sigma>)"
|
||||
|
||||
text\<open>Next we define some instances (docitems): \<close>
|
||||
|
||||
declare[[invariants_checking_with_tactics = true]]
|
||||
|
||||
text*[church::author, email="\<open>church@lambda.org\<close>"]\<open>\<close>
|
||||
|
||||
text\<open>We can also reference instances of classes defined in parent theories:\<close>
|
||||
text*[church'::scholarly_paper.author, email="\<open>church'@lambda.org\<close>"]\<open>\<close>
|
||||
|
||||
text*[resultProof::result, evidence = "proof", property="[@{thm \<open>HOL.refl\<close>}]"]\<open>\<close>
|
||||
text*[resultArgument::result, evidence = "argument"]\<open>\<close>
|
||||
|
||||
text\<open>The invariants \<^theory_text>\<open>author_finite\<close> and \<^theory_text>\<open>establish_defined\<close> can not be checked directly
|
||||
and need a little help.
|
||||
We can set the \<open>invariants_checking_with_tactics\<close> theory attribute to help the checking.
|
||||
It will enable a basic tactic, using unfold and auto:\<close>
|
||||
|
||||
declare[[invariants_checking_with_tactics = true]]
|
||||
|
||||
text*[curry::author, email="\<open>curry@lambda.org\<close>"]\<open>\<close>
|
||||
|
||||
text*[introduction2::introduction, authored_by = "{@{author \<open>church\<close>}}", level = "Some 2"]\<open>\<close>
|
||||
(* When not commented, should violated the invariant:
|
||||
update_instance*[introduction2::Introduction
|
||||
, authored_by := "{@{Author \<open>church\<close>}}"
|
||||
, level := "Some 1"]
|
||||
*)
|
||||
text\<open>Use of the instance @{docitem_name "church'"}
|
||||
to instantiate a \<^doc_class>\<open>scholarly_paper.introduction\<close> class:\<close>
|
||||
text*[introduction2'::scholarly_paper.introduction,
|
||||
main_author = "Some @{scholarly_paper.author \<open>church'\<close>}", level = "Some 2"]\<open>\<close>
|
||||
|
||||
value*\<open>@{scholarly_paper.author \<open>church'\<close>}\<close>
|
||||
value*\<open>@{author \<open>church\<close>}\<close>
|
||||
value*\<open>@{Concept_High_Level_Invariants.author \<open>church\<close>}\<close>
|
||||
|
||||
value*\<open>@{instances_of \<open>scholarly_paper.author\<close>}\<close>
|
||||
value*\<open>@{instances_of \<open>author\<close>}\<close>
|
||||
value*\<open>@{instances_of \<open>Concept_High_Level_Invariants.author\<close>}\<close>
|
||||
|
||||
text*[introduction3::introduction, authored_by = "{@{author \<open>church\<close>}}", level = "Some 2"]\<open>\<close>
|
||||
text*[introduction4::introduction, authored_by = "{@{author \<open>curry\<close>}}", level = "Some 4"]\<open>\<close>
|
||||
|
||||
text*[resultProof2::result, evidence = "proof", property="[@{thm \<open>HOL.sym\<close>}]"]\<open>\<close>
|
||||
|
||||
text\<open>Then we can evaluate expressions with instances:\<close>
|
||||
|
||||
term*\<open>authored_by @{introduction \<open>introduction2\<close>} = authored_by @{introduction \<open>introduction3\<close>}\<close>
|
||||
value*\<open>authored_by @{introduction \<open>introduction2\<close>} = authored_by @{introduction \<open>introduction3\<close>}\<close>
|
||||
value*\<open>authored_by @{introduction \<open>introduction2\<close>} = authored_by @{introduction \<open>introduction4\<close>}\<close>
|
||||
|
||||
value*\<open>@{introduction \<open>introduction2\<close>}\<close>
|
||||
|
||||
value*\<open>{@{author \<open>curry\<close>}} = {@{author \<open>church\<close>}}\<close>
|
||||
|
||||
term*\<open>property @{result \<open>resultProof\<close>} = property @{result \<open>resultProof2\<close>}\<close>
|
||||
value*\<open>property @{result \<open>resultProof\<close>} = property @{result \<open>resultProof2\<close>}\<close>
|
||||
|
||||
value*\<open>evidence @{result \<open>resultProof\<close>} = evidence @{result \<open>resultProof2\<close>}\<close>
|
||||
|
||||
declare[[invariants_checking_with_tactics = false]]
|
||||
|
||||
declare[[invariants_strict_checking = false]]
|
||||
|
||||
text\<open>Invariants can have term anti-quotations\<close>
|
||||
doc_class invA =
|
||||
a :: int
|
||||
|
||||
text*[invA_inst::invA, a = 3]\<open>\<close>
|
||||
|
||||
doc_class invB = invA +
|
||||
b :: int
|
||||
invariant a_pos :: "a \<sigma> \<ge> 0"
|
||||
|
||||
text*[invB_inst::invB, a = 3]\<open>\<close>
|
||||
|
||||
doc_class invC =
|
||||
c :: invB
|
||||
invariant a_invB_pos :: "a (c \<sigma>) \<ge> a @{invA \<open>invA_inst\<close>}"
|
||||
|
||||
text*[invC_inst::invC, c = "@{invB \<open>invB_inst\<close>}"]\<open>\<close>
|
||||
|
||||
text\<open>Bug:
|
||||
With the polymorphic class implementation, invariants type inference is to permissive:
|
||||
\<close>
|
||||
doc_class invA' =
|
||||
a :: int
|
||||
|
||||
doc_class invB' = invA' +
|
||||
b :: int
|
||||
invariant a_pos :: "a \<sigma> \<ge> 0"
|
||||
|
||||
doc_class ('a, 'b) invC' =
|
||||
c :: invB'
|
||||
d :: "'a list"
|
||||
e :: "'b list"
|
||||
invariant a_pos :: "a \<sigma> \<ge> 0"
|
||||
text\<open>The \<^const>\<open>a\<close> selector in the \<^const>\<open>a_pos_inv\<close> invariant of the class \<^doc_class>\<open>('a, 'b) invC'\<close>
|
||||
should be rejected as the class does not have nor inherit an \<^const>\<open>a\<close> attribute
|
||||
\<close>
|
||||
end
|
|
@ -1,127 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Testing Nested Monitors\<close>
|
||||
|
||||
theory
|
||||
Concept_MonitorTest1
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"Isabelle_DOF-Ontologies.Conceptual" (* we use the generic "Conceptual" ontology *)
|
||||
TestKit
|
||||
begin
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open> Creation of document parts that are controlled by (nested, locally defined) monitors. \<close>
|
||||
|
||||
open_monitor*[aaa::Conceptual.M]
|
||||
text*[test::A]\<open>For Test and Validation\<close>
|
||||
|
||||
text\<open>Defining some document elements to be referenced in later on in another theory: \<close>
|
||||
text*[sdf] \<open> Lorem ipsum ... \<close> \<comment> \<open>anonymous reference, ignored by monitor.\<close>
|
||||
text*[sdfg :: F] \<open> Lorem ipsum ...\<close> \<comment> \<open>causes just warnings for invariant violations
|
||||
due to non-strict checking mode\<close>
|
||||
close_monitor*[aaa] \<comment> \<open>causes warning: accept clause 1
|
||||
not in final state .\<close>
|
||||
|
||||
section\<open>A Local Monitor Class Definition\<close>
|
||||
|
||||
doc_class test_monitor_free =
|
||||
tmhd :: int
|
||||
doc_class test_monitor_head =
|
||||
tmhd :: int
|
||||
doc_class test_monitor_A = test_monitor_head +
|
||||
tmA :: int
|
||||
doc_class test_monitor_B = test_monitor_A +
|
||||
tmB :: int
|
||||
doc_class test_monitor_C = test_monitor_A +
|
||||
tmC :: int
|
||||
doc_class test_monitor_D = test_monitor_B +
|
||||
tmD :: int
|
||||
doc_class test_monitor_E = test_monitor_D +
|
||||
tmE :: int
|
||||
|
||||
doc_class monitor_M =
|
||||
tmM :: int
|
||||
rejects "test_monitor_A"
|
||||
accepts "test_monitor_head ~~ test_monitor_B ~~ test_monitor_C"
|
||||
|
||||
section\<open>A more Complex Monitoring Example \<close>
|
||||
|
||||
text\<open>Consult the status of the DOF engine:\<close>
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
|
||||
|
||||
declare[[free_class_in_monitor_checking]]
|
||||
|
||||
open_monitor*[test_monitor_M::monitor_M]
|
||||
|
||||
text*[testFree::test_monitor_free]\<open>...\<close>
|
||||
|
||||
open_monitor*[test_monitor_M2::monitor_M]
|
||||
|
||||
declare[[strict_monitor_checking]]
|
||||
text-assert-error[test_monitor_A1::test_monitor_A]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest1.test_monitor_M rejected\<close>
|
||||
declare[[strict_monitor_checking=false]]
|
||||
text*[test_monitor_A1::test_monitor_A]\<open>\<close> \<comment> \<open>the same in non-strict monitor checking.\<close>
|
||||
declare[[free_class_in_monitor_strict_checking]]
|
||||
text-assert-error[testFree2::test_monitor_free]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest1.test_monitor_M not enabled\<close>
|
||||
declare[[free_class_in_monitor_strict_checking=false]]
|
||||
text*[test_monitor_head1::test_monitor_head]\<open>\<close>
|
||||
text*[testFree3::test_monitor_free]\<open>\<close>
|
||||
text*[test_monitor_B1::test_monitor_B]\<open>\<close>
|
||||
text*[testFree4::test_monitor_free]\<open>\<close>
|
||||
text*[test_monitor_D1::test_monitor_D]\<open>\<close>
|
||||
text*[testFree5::test_monitor_free]\<open>\<close>
|
||||
text*[test_monitor_C1::test_monitor_C]\<open>\<close>
|
||||
text*[testFree6::test_monitor_free]\<open>\<close>
|
||||
|
||||
close_monitor*[test_monitor_M2]
|
||||
|
||||
close_monitor*[test_monitor_M]
|
||||
|
||||
declare[[free_class_in_monitor_checking = false]]
|
||||
|
||||
text\<open>Consult the final status of the DOF engine:\<close>
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
ML\<open>
|
||||
val (oid, DOF_core.Instance {value, ...}) =
|
||||
Name_Space.check (Context.Proof \<^context>) (DOF_core.get_instances \<^context>) ("aaa", Position.none)
|
||||
\<close>
|
||||
term*\<open>map fst @{trace_attribute \<open>test_monitor_M\<close>}\<close>
|
||||
value*\<open>map fst @{trace_attribute \<open>test_monitor_M\<close>}\<close>
|
||||
|
||||
ML\<open>@{assert} ([("Conceptual.A", "test"), ("Conceptual.F", "sdfg")] = @{trace_attribute aaa}) \<close>
|
||||
|
||||
|
||||
open_monitor*[test_monitor_M3::monitor_M]
|
||||
|
||||
declare[[strict_monitor_checking]]
|
||||
text-assert-error[test_monitor_A2::test_monitor_A]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest1.test_monitor_M3 rejected\<close>
|
||||
declare[[strict_monitor_checking=false]]
|
||||
text*[test_monitor_A3::test_monitor_A]\<open>\<close> \<comment> \<open>the same in non-strict monitor checking.\<close>
|
||||
declare[[free_class_in_monitor_strict_checking]]
|
||||
text-assert-error[testFree7::test_monitor_free]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest1.test_monitor_M3 not enabled\<close>
|
||||
declare[[free_class_in_monitor_strict_checking=false]]
|
||||
|
||||
|
||||
end
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Testing Nested Monitors\<close>
|
||||
|
||||
theory
|
||||
Concept_MonitorTest2
|
||||
imports
|
||||
Concept_MonitorTest1
|
||||
begin
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open> Creation of document parts that are controlled by (nested, locally defined) monitors. \<close>
|
||||
|
||||
doc_class test_monitor_B =
|
||||
tmB :: int
|
||||
|
||||
doc_class monitor_M =
|
||||
tmM :: int
|
||||
rejects "Concept_MonitorTest1.test_monitor_B"
|
||||
accepts "test_monitor_E ~~ test_monitor_C"
|
||||
|
||||
doc_class test_monitor_head =
|
||||
tmhd :: int
|
||||
|
||||
declare[[free_class_in_monitor_checking]]
|
||||
|
||||
declare[[free_class_in_monitor_strict_checking]]
|
||||
text-assert-error[test_monitor_head1::test_monitor_head]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest1.test_monitor_M3 not enabled\<close>
|
||||
declare[[free_class_in_monitor_strict_checking=false]]
|
||||
text*[test_monitor_head2::Concept_MonitorTest1.test_monitor_head]\<open>\<close>
|
||||
|
||||
open_monitor*[test_monitor_M3::monitor_M]
|
||||
|
||||
text*[test_monitor_head3::Concept_MonitorTest1.test_monitor_head]\<open>\<close>
|
||||
text*[testFree3::test_monitor_free]\<open>\<close>
|
||||
declare[[strict_monitor_checking]]
|
||||
text-assert-error[test_monitor_B1::test_monitor_B]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest2.test_monitor_M3 rejected\<close>
|
||||
declare[[strict_monitor_checking=false]]
|
||||
text*[testFree4::test_monitor_free]\<open>\<close>
|
||||
declare[[strict_monitor_checking]]
|
||||
text-assert-error[test_monitor_D1::test_monitor_D]\<open>\<close>
|
||||
\<open>accepts clause 1 of monitor Concept_MonitorTest2.test_monitor_M3 rejected\<close>
|
||||
declare[[strict_monitor_checking=false]]
|
||||
text*[testFree5::test_monitor_free]\<open>\<close>
|
||||
text*[test_monitor_E1::test_monitor_E]\<open>\<close>
|
||||
text*[test_monitor_C1::test_monitor_C]\<open>\<close>
|
||||
text*[testFree6::test_monitor_free]\<open>\<close>
|
||||
|
||||
close_monitor*[Concept_MonitorTest1.test_monitor_M3]
|
||||
|
||||
close_monitor*[test_monitor_M3]
|
||||
|
||||
declare[[free_class_in_monitor_checking = false]]
|
||||
|
||||
end
|
|
@ -1,177 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Creating and Referencing Ontological Instances\<close>
|
||||
|
||||
theory Concept_OntoReferencing
|
||||
imports "TestKit"
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"Isabelle_DOF-Ontologies.Conceptual"
|
||||
begin
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open> Creation of ontological instances along the \<^theory>\<open>Isabelle_DOF-Ontologies.Conceptual\<close>
|
||||
Ontology. Emphasis is put on type-safe (ontologically consistent) referencing of text, code and
|
||||
proof elements. Some tests cover also the critical cases concerning name spaces of oid's. \<close>
|
||||
|
||||
section\<open>Setting up a monitor.\<close>
|
||||
text\<open>\<^theory>\<open>Isabelle_DOF-Ontologies.Conceptual\<close> provides a monitor \<^typ>\<open>M\<close> enforcing a
|
||||
particular document structure. Here, we say: From now on, this structural rules are
|
||||
respected wrt. all \<^theory_text>\<open>doc_classes M\<close> is enabled for.\<close>
|
||||
|
||||
open_monitor*[struct::M]
|
||||
|
||||
section\<open>Defining Text Elements and Referring to them... \<close>
|
||||
|
||||
text\<open> This uses elements of two ontologies, notably
|
||||
\<^theory>\<open>Isabelle_DOF-Ontologies.Conceptual\<close> and \<^theory>\<open>Isabelle_DOF.Isa_COL\<close>.\<close>
|
||||
|
||||
(*<*)
|
||||
|
||||
title*[ag::title, short_title="Some\<open>ooups.\<close>"]\<open>Lorem ipsum dolor sit amet ...\<close>
|
||||
subtitle*[af::subtitle, abbrev = "Some\<open>ooups-oups.\<close>"]\<open>Lorem ipsum dolor sit amet ...\<close>
|
||||
chapter*[a0::A, x = "3"] \<open> Lorem ipsum dolor sit amet ... \<close>
|
||||
section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
|
||||
subsection*[ab::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ...
|
||||
As mentioned in the @{title \<open>ag\<close>}... \<close> \<comment> \<open>old-style and ...\<close>
|
||||
subsubsection*[ac::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ...
|
||||
As mentioned in the \<^title>\<open>ag\<close>\<close> \<comment> \<open>new-style references to
|
||||
ontological instances
|
||||
assigned to text
|
||||
elements ...\<close>
|
||||
|
||||
|
||||
text\<open>Meta-Objects are typed, and references have to respect this : \<close>
|
||||
text-assert-error[ad]\<open> \<^title>\<open>a\<close> \<close> \<open>reference ontologically inconsistent\<close>
|
||||
text-assert-error[ae]\<open> \<^title>\<open>af\<close> \<close>\<open>reference ontologically inconsistent\<close>
|
||||
\<comment> \<open>erroneous reference: please consider class hierarchy!\<close>
|
||||
(*>*)
|
||||
|
||||
text\<open>References to Meta-Objects can be forward-declared:\<close>
|
||||
|
||||
text-assert-error[ae1]\<open>@{C \<open>c1\<close>}\<close>\<open>Undefined instance:\<close>
|
||||
|
||||
declare_reference*[c1::C] \<comment> \<open>forward declaration\<close>
|
||||
|
||||
text-assert-error\<open>@{C \<open>c1\<close>} \<close>\<open>Instance declared but not defined, try option unchecked\<close>
|
||||
|
||||
text\<open>@{C (unchecked) \<open>c1\<close>} \<close>
|
||||
|
||||
text*[a1::A, level="Some 0", x = 3]\<open>... phasellus amet id massa nunc, ...\<close>
|
||||
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
|
||||
|
||||
text-assert-error[c1::C, x = "''gamma''"]
|
||||
\<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
|
||||
\<open>Duplicate instance declaration\<close>
|
||||
|
||||
\<comment> \<open>Referencing from a text context:\<close>
|
||||
text*[d::D, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus,
|
||||
... @{C "c1"} or @{C \<open>c1\<close>} or \<^C>\<open>c1\<close>
|
||||
similar to @{thm "refl"} and \<^thm>"refl"\<close> \<comment> \<open>ontological and built-in
|
||||
references\<close>
|
||||
|
||||
text\<open>Not only text-elements are "ontology-aware", proofs and code can this be too !\<close>
|
||||
|
||||
\<comment> \<open>Referencing from and to a ML-code context:\<close>
|
||||
|
||||
ML*[c4::C, z = "Some @{A \<open>a1\<close>}"]\<open>
|
||||
fun fac x = if x = 0 then 1 else x * (fac(x-1))
|
||||
val v = \<^value_>\<open>A.x (the (z @{C \<open>c4\<close>}))\<close> |> HOLogic.dest_number |> snd |> fac
|
||||
\<close>
|
||||
|
||||
definition*[a2::A, x=5, level="Some 1"] xx' where "xx' \<equiv> A.x @{A \<open>a1\<close>}" if "A.x @{A \<open>a1\<close>} = 5"
|
||||
|
||||
lemma*[e5::E] testtest : "xx + A.x @{A \<open>a1\<close>} = yy + A.x @{A \<open>a1\<close>} \<Longrightarrow> xx = yy" by simp
|
||||
|
||||
doc_class cc_assumption_test =
|
||||
a :: int
|
||||
text*[cc_assumption_test_ref::cc_assumption_test]\<open>\<close>
|
||||
|
||||
definition tag_l :: "'a \<Rightarrow> 'b \<Rightarrow> 'b" where "tag_l \<equiv> \<lambda>x y. y"
|
||||
|
||||
lemma* tagged : "tag_l @{cc_assumption_test \<open>cc_assumption_test_ref\<close>} AA \<Longrightarrow> AA"
|
||||
by (simp add: tag_l_def)
|
||||
|
||||
find_theorems name:tagged "(_::cc_assumption_test \<Rightarrow> _ \<Rightarrow> _) _ _ \<Longrightarrow>_"
|
||||
|
||||
declare_reference-assert-error[c1::C]\<open>Duplicate instance declaration\<close> \<comment> \<open>forward declaration\<close>
|
||||
|
||||
declare_reference*[e6::E]
|
||||
(*<*) (* pdf GENERATION NEEDS TO BE IMPLEMENTED IN FRONT AND BACKEND *)
|
||||
text\<open>This is the answer to the "OutOfOrder Presentation Problem": @{E (unchecked) \<open>e6\<close>} \<close>
|
||||
|
||||
definition*[e6::E] facu :: "nat \<Rightarrow> nat" where "facu arg = undefined"
|
||||
|
||||
text\<open>As shown in @{E \<open>e5\<close>} following from @{E \<open>e6\<close>}\<close>
|
||||
|
||||
|
||||
text\<open>As shown in @{C \<open>c4\<close>}\<close>
|
||||
(*>*)
|
||||
|
||||
|
||||
|
||||
text\<open>Ontological information ("class instances") is mutable: \<close>
|
||||
|
||||
update_instance*[d::D, a1 := X2]
|
||||
(*<*)
|
||||
text\<open> ... in ut tortor ... @{docitem \<open>a\<close>} ... @{A \<open>a\<close>} ... \<close> \<comment> \<open>untyped or typed referencing \<close>
|
||||
(*>*)
|
||||
text-assert-error[ae::text_element]\<open>the function @{C [display] "c4"} \<close>\<open>referred text-element is no macro!\<close>
|
||||
|
||||
text*[c2::C, x = "\<open>delta\<close>"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>
|
||||
|
||||
text\<open>Note that both the notations @{term "''beta''"} and @{term "\<open>beta\<close>"} are possible;
|
||||
the former is a more ancient format only supporting pure ascii, while the latter also supports
|
||||
fancy unicode such as: @{term "\<open>\<beta>\<^sub>i''\<close>"} \<close>
|
||||
|
||||
text*[f::F] \<open> Lectus accumsan velit ultrices, ... \<close>
|
||||
|
||||
theorem some_proof : "True" by simp
|
||||
|
||||
text\<open>This is an example where we add a theorem into a kind of "result-list" of the doc-item f.\<close>
|
||||
update_instance*[f::F,r:="[@{thm ''Concept_OntoReferencing.some_proof''}]"]
|
||||
(*<*)
|
||||
text\<open> ..., mauris amet, id elit aliquam aptent id, ... @{docitem \<open>a\<close>} \<close>
|
||||
(*>*)
|
||||
text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...\<close>
|
||||
update_instance*[f::F,b:="{(@{A \<open>a\<close>}::A,@{C \<open>c1\<close>}::C),
|
||||
(@{A \<open>a\<close>}, @{C \<open>c2\<close>})}"]
|
||||
|
||||
section\<open>Closing the Monitor and testing the Results.\<close>
|
||||
|
||||
close_monitor*[struct]
|
||||
|
||||
text\<open>And the trace of the monitor is:\<close>
|
||||
ML\<open>val trace = @{trace_attribute struct}\<close>
|
||||
ML\<open>@{assert} (trace =
|
||||
[("Conceptual.A", "a0"), ("Conceptual.A", "a"), ("Conceptual.A", "ab"),
|
||||
("Conceptual.A", "ac"), ("Conceptual.A", "a1"),
|
||||
("Conceptual.C", "c1"), ("Conceptual.D", "d"), ("Conceptual.C", "c4"),
|
||||
("Conceptual.A", "a2"), ("Conceptual.E", "e5"),
|
||||
("Conceptual.E", "e6"), ("Conceptual.C", "c2"), ("Conceptual.F", "f")]) \<close>
|
||||
|
||||
|
||||
text\<open>Note that the monitor \<^typ>\<open>M\<close> of the ontology \<^theory>\<open>Isabelle_DOF-Ontologies.Conceptual\<close> does
|
||||
not observe the common entities of \<^theory>\<open>Isabelle_DOF.Isa_COL\<close>, but just those defined in the
|
||||
accept- clause of \<^typ>\<open>M\<close>.\<close>
|
||||
|
||||
text\<open>One final check of the status DOF core: observe that no new classes have been defined,
|
||||
just a couple of new document elements have been introduced.\<close>
|
||||
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
|
||||
|
||||
end
|
||||
|
|
@ -1,182 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Term Antiquotations\<close>
|
||||
|
||||
text\<open>Terms are represented by "Inner Syntax" parsed by an Earley parser in Isabelle.
|
||||
For historical reasons, \<^emph>\<open>term antiquotations\<close> are called therefore somewhat misleadingly
|
||||
"Inner Syntax Antiquotations". \<close>
|
||||
|
||||
theory
|
||||
Concept_TermAntiquotations
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"Isabelle_DOF-Ontologies.Conceptual"
|
||||
TestKit
|
||||
begin
|
||||
|
||||
section\<open>Context\<close>
|
||||
|
||||
text\<open>Since the syntax chosen for values of doc-class attributes is HOL-syntax --- requiring
|
||||
a fast read on the ``What's in Main''-documentation, but not additional knowledge on, say,
|
||||
SML --- an own syntax for references to types, terms, theorems, etc. are necessary. These are
|
||||
the ``Term Antiquotations'' (in earlier papers also called: ``Inner Syntax Antiquotations'').
|
||||
|
||||
They are the key-mechanism to denote
|
||||
\<^item> Ontological Links, i.e. attributes refering to document classes defined by the ontology
|
||||
\<^item> Ontological F-Links, i.e. attributes referring to formal entities inside Isabelle (such as thm's)
|
||||
|
||||
This file contains a number of examples resulting from the
|
||||
@{theory "Isabelle_DOF-Ontologies.Conceptual"} - ontology; the emphasis of this presentation is to
|
||||
present the expressivity of ODL on a paradigmatical example.
|
||||
\<close>
|
||||
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
|
||||
text\<open>Testing Standard Term-Antiquotations and Code-Term-Antiquotations. \<close>
|
||||
|
||||
text\<open>Just a check of the status DOF core: observe that no new classes have been defined.\<close>
|
||||
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
|
||||
section\<open>Term-Antiquotations Referring to \<^verbatim>\<open>thm\<close>‘s\<close>
|
||||
|
||||
text\<open>Some sample lemma:\<close>
|
||||
lemma*[l::E] murks : "Example = @{thm ''refl''}" oops
|
||||
|
||||
text-assert-error\<open>... @{E "l"}\<close>\<open>Undefined instance:\<close> \<comment> \<open>oops retracts the ENTIRE system state,
|
||||
thus also the creation of an instance of E\<close>
|
||||
|
||||
lemma*[l::E] local_sample_lemma :
|
||||
"@{thm \<open>refl\<close>} = @{thm ''refl''}" by simp
|
||||
\<comment> \<open>un-evaluated references are similar to
|
||||
uninterpreted constants. Not much is known
|
||||
about them, but that doesn't mean that we
|
||||
can't prove some basics over them...\<close>
|
||||
|
||||
|
||||
lemma*[l2::E] local_sample_lemma2 :
|
||||
"@{thm ''local_sample_lemma''} = @{thm ''local_sample_lemma''}" by simp
|
||||
|
||||
|
||||
value*\<open>@{thm ''local_sample_lemma''}\<close>
|
||||
value-assert-error\<open> @{thm \<open>Conxept_TermAntiquotations.local_sample_lemma\<close>}\<close>\<open>Undefined fact\<close>
|
||||
|
||||
section\<open>Testing the Standard ("Built-in") Term-Antiquotations\<close>
|
||||
|
||||
text\<open>Example for a meta-attribute of ODL-type @{typ "file"} with an
|
||||
appropriate ISA for the file @{file "Concept_TermAntiquotations.thy"}\<close>
|
||||
|
||||
|
||||
|
||||
text*[xcv1::A, x=5]\<open>Lorem ipsum ...\<close>
|
||||
text*[xcv3::A, x=7]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
text\<open>Example for a meta-attribute of ODL-type @{typ "typ"} with an appropriate ISA for the
|
||||
theorem @{thm "refl"}\<close>
|
||||
text*[xcv2::C, g="@{thm ''HOL.refl''}"]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
text\<open>A warning about the usage of the \<open>docitem\<close> TA:
|
||||
The \<open>docitem\<close> TA offers a way to check the reference of class instances
|
||||
without checking the instances type.
|
||||
So one will be able to reference \<open>docitem\<close>s (class instances) and have them checked,
|
||||
without the burden of the type checking required otherwise.
|
||||
But it may give rise to unwanted behaviors, due to its polymorphic type.
|
||||
It must not be used for certification.
|
||||
\<close>
|
||||
|
||||
section\<open>Other Built-In Term Antiquotations\<close>
|
||||
text-assert-error[ae::text_element]\<open>@{file "non-existing.thy"}\<close>\<open>No such file: \<close>
|
||||
text\<open>A text-antiquotation from Main: @{file "TestKit.thy"}\<close>
|
||||
|
||||
value-assert-error\<open>@{file \<open>non-existing.thy\<close>}\<close>\<open>No such file: \<close>
|
||||
value*\<open>@{file \<open>TestKit.thy\<close>}\<close>
|
||||
|
||||
text*[xcv::F, u="@{file ''TestKit.thy''}"]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
|
||||
value*\<open>@{term \<open>aa + bb\<close>}\<close>
|
||||
value*\<open>@{typ \<open>'a list\<close>}\<close>
|
||||
|
||||
|
||||
section\<open>Putting everything together\<close>
|
||||
|
||||
text\<open>Major sample: test-item of doc-class \<open>F\<close> with a relational link between class instances,
|
||||
and links to formal Isabelle items like \<open>typ\<close>, \<open>term\<close> and \<open>thm\<close>. \<close>
|
||||
declare[[ML_print_depth = 10000]]
|
||||
text*[xcv4::F, r="[@{thm ''HOL.refl''},
|
||||
@{thm \<open>Concept_TermAntiquotations.local_sample_lemma\<close>}]", (* long names required *)
|
||||
b="{(@{A ''xcv1''},@{C \<open>xcv2\<close>})}", (* notations \<open>...\<close> vs. ''...'' *)
|
||||
s="[@{typ \<open>int list\<close>}]",
|
||||
properties = "[@{term \<open>H \<longrightarrow> H\<close>}]" (* notation \<open>...\<close> required for UTF8*)
|
||||
]\<open>Lorem ipsum ...\<close>
|
||||
declare[[ML_print_depth = 20]]
|
||||
text*[xcv5::G, g="@{thm \<open>HOL.sym\<close>}"]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
text\<open>... and here we add a relation between @{docitem \<open>xcv3\<close>} and @{docitem \<open>xcv2\<close>}
|
||||
into the relation \verb+b+ of @{docitem \<open>xcv5\<close>}. Note that in the link-relation,
|
||||
a @{typ "C"}-type is required, so if a @{typ "G"}-type is offered, it is considered illegal
|
||||
in \verb+Isa_DOF+ despite the sub-class relation between those classes: \<close>
|
||||
update_instance-assert-error[xcv4::F, b+="{(@{docitem ''xcv3''},@{docitem ''xcv5''})}"]
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
text\<open>And here is the results of some ML-term antiquotations:\<close>
|
||||
ML\<open> @{docitem_attribute b::xcv4} \<close>
|
||||
ML\<open> @{docitem xcv4} \<close>
|
||||
ML\<open> @{docitem_name xcv4} \<close>
|
||||
|
||||
text\<open>Now we might need to reference a class instance in a term command and we would like
|
||||
Isabelle to check that this instance is indeed an instance of this class.
|
||||
Here, we want to reference the instance @{docitem_name "xcv4"} previously defined.
|
||||
We can use the term* command which extends the classic term command
|
||||
and does the appropriate checking.\<close>
|
||||
term*\<open>@{F \<open>xcv4\<close>}\<close>
|
||||
|
||||
text\<open>We can also reference an attribute of the instance.
|
||||
Here we reference the attribute r of the class F which has the type @{typ \<open>thm list\<close>}.\<close>
|
||||
term*\<open>r @{F \<open>xcv4\<close>}\<close>
|
||||
|
||||
text\<open>We declare a new text element. Note that the class name contains an underscore "\_".\<close>
|
||||
text*[te::text_element]\<open>Lorem ipsum...\<close>
|
||||
|
||||
term*\<open>@{text_element \<open>te\<close>}\<close>
|
||||
|
||||
text\<open>Terms containing term antiquotations can be checked and evaluated
|
||||
using \<^theory_text>\<open>term_\<close> and \<^theory_text>\<open>value_\<close> text antiquotations respectively:
|
||||
We can print the term @{term_ \<open>r @{F \<open>xcv4\<close>}\<close>} with \<open>@{term_ \<open>r @{F \<open>xcv4\<close>}\<close>}\<close>
|
||||
or get the value of the \<^const>\<open>F.r\<close> attribute of @{docitem \<open>xcv4\<close>} with \<open>@{value_ \<open>r @{F \<open>xcv4\<close>}\<close>}\<close>
|
||||
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator but this
|
||||
argument must be specified after a default optional argument already defined
|
||||
by the text antiquotation implementation.
|
||||
So one must use the following syntax if he does not want to specify the first optional argument:
|
||||
\<open>@{value_ [] [nbe] \<open>r @{F \<open>xcv4\<close>}\<close>}\<close>. Note the empty brackets.
|
||||
|
||||
\<close>
|
||||
|
||||
text\<open>There also are \<^theory_text>\<open>term_\<close> and \<^theory_text>\<open>value_\<close> ML antiquotations:
|
||||
\<^ML>\<open>@{term_ \<open>r @{F \<open>xcv4\<close>}\<close>}\<close> will return the ML representation of the term \<^term_>\<open>r @{F \<open>xcv4\<close>}\<close>,
|
||||
and \<^ML>\<open>@{value_ \<open>r @{F \<open>xcv4\<close>}\<close>}\<close> will return the ML representation
|
||||
of the value of the \<^const>\<open>F.r\<close> attribute of @{docitem \<open>xcv4\<close>}.
|
||||
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator:
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
val t = @{term_ \<open>r @{F \<open>xcv4\<close>}\<close>}
|
||||
val tt = @{value_ [nbe] \<open>r @{F \<open>xcv4\<close>}\<close>}
|
||||
\<close>
|
||||
|
||||
end
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
(*<*)
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
theory "Isabelle_DOF_Unit_Tests_document"
|
||||
imports
|
||||
"Isabelle_DOF.technical_report"
|
||||
(* "Isabelle_DOF-Ontologies.CENELEC_50128" where do we use this - bu *)
|
||||
|
||||
begin
|
||||
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology "technical_report" (* and "Isabelle_DOF-Ontologies.CENELEC_50128" *)
|
||||
(*>*)
|
||||
|
||||
title*[title::title] \<open>The Isabelle/DOF Implementation\<close>
|
||||
subtitle*[subtitle::subtitle]\<open>The Unit-Test Suite\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,444 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>LaTeX Generation Tests\<close>
|
||||
|
||||
|
||||
theory Latex_Tests
|
||||
imports "TestKit"
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
keywords "Figure*" :: document_body (* still experimental feature *)
|
||||
|
||||
begin
|
||||
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open> Testing the generation of LaTeX code. Serves in particular during development. \<close>
|
||||
|
||||
text\<open>Output status:\<close>
|
||||
print_doc_classes
|
||||
print_doc_items
|
||||
|
||||
|
||||
section\<open>Elementary Creation of Doc-items and Access of their Attibutes\<close>
|
||||
|
||||
|
||||
text\<open>And here a tex - text macro.\<close>
|
||||
text\<open>Pythons ReStructuredText (RST).
|
||||
@{url \<open>https://de.wikipedia.org/wiki/ReStructuredText\<close>}. Tool: Sphinx.
|
||||
\<close>
|
||||
|
||||
text*[aaaa::B]\<open>dfg @{thm [display] refl}\<close>
|
||||
(*<*)
|
||||
text-[dfgdfg::B]
|
||||
\<open> Lorem ipsum ... @{thm [display] refl} Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
|
||||
text-latex\<open> Lorem ipsum ... @{thm [display] refl} Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
text-[asd::B]
|
||||
\<open>... and here is its application macro expansion:
|
||||
@{B [display] "dfgdfg"}
|
||||
\textbf{TEST}
|
||||
@{cartouche [display]
|
||||
\<open>text*[dfgdfg::B]
|
||||
\<open> Lorem ipsum ... @{thm refl} Frederic \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
text-latex\<open>... and here is its application macro expansion:
|
||||
@{B [display] "dfgdfg"}
|
||||
\textbf{TEST}
|
||||
@{cartouche [display]
|
||||
\<open>text*[dfgdfg::B]
|
||||
\<open> Lorem ipsum ... @{thm refl} Fr\'ed\'eric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
\<close>}\<close>
|
||||
|
||||
text-latex\<open> \<^theory_text>\<open>definition df = ...
|
||||
\<close>
|
||||
@{ML [display] \<open> let val x = 3 + 4 in true end
|
||||
\<close>}
|
||||
|
||||
@{ML_text [display] \<open> val x = ...
|
||||
\<close>}
|
||||
|
||||
@{verbatim [display] \<open> Lorem ipsum ... @{thm refl}
|
||||
Fr\'ed\'eric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>}
|
||||
@{theory_text [display] \<open>definition df = ... \<lambda>x.
|
||||
\<close>}
|
||||
@{cartouche [display] \<open> @{figure "cfgdfg"}\<close>} \<close>
|
||||
(*>*)
|
||||
|
||||
text\<open>Final Status:\<close>
|
||||
print_doc_items
|
||||
print_doc_classes
|
||||
|
||||
section\<open>Experiments on Inline-Textelements\<close>
|
||||
text\<open>Std Abbreviations. Inspired by the block *\<open>control spacing\<close>
|
||||
in @{file \<open>$ISABELLE_HOME/src/Pure/Thy/document_antiquotations.ML\<close>}.
|
||||
We mechanize the table-construction and even attach the LaTeX
|
||||
quirks to be dumped into the prelude. \<close>
|
||||
|
||||
ML\<open>
|
||||
val _ =
|
||||
Theory.setup
|
||||
( Document_Output.antiquotation_raw \<^binding>\<open>doof\<close> (Scan.succeed ())
|
||||
(fn _ => fn () => Latex.string "\\emph{doof}")
|
||||
#> Document_Output.antiquotation_raw \<^binding>\<open>LATEX\<close> (Scan.succeed ())
|
||||
(fn _ => fn () => Latex.string "\\textbf{LaTeX}")
|
||||
)
|
||||
\<close>
|
||||
|
||||
text-latex\<open> \<^doof> \<^LATEX> \<close>
|
||||
|
||||
(* the same effect is achieved with : *)
|
||||
setup \<open>DOF_lib.define_shortcut (Binding.make("bla",\<^here>)) "\\blabla"\<close>
|
||||
(* Note that this assumes that the generated LaTeX macro call "\blabla" is defined somewhere in the
|
||||
target document, for example, in the tex prelude. Note that the "Binding.make" can be avoided
|
||||
using the alternative \<^binding> notation (see above).*)
|
||||
|
||||
|
||||
setup\<open>DOF_lib.define_macro (Binding.make("blong",\<^here>)) "\\blong{" "}" (K(K()))\<close>
|
||||
|
||||
(*<*)
|
||||
text-latex\<open> \<^blong>\<open>asd\<close> outer \<^blong>\<open>syntax| ! see {syntax, outer}\<close> \<close>
|
||||
(*>*)
|
||||
|
||||
section\<open>Experimental Code and Test of advanced LaTeX for free-form text units\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun report_text ctxt text =
|
||||
let val pos = Input.pos_of text in
|
||||
Context_Position.reports ctxt
|
||||
[(pos, Markup.language_text (Input.is_delimited text)),
|
||||
(pos, Markup.raw_text)]
|
||||
end;
|
||||
|
||||
fun report_theory_text ctxt text =
|
||||
let val keywords = Thy_Header.get_keywords' ctxt;
|
||||
val _ = report_text ctxt text;
|
||||
val _ =
|
||||
Input.source_explode text
|
||||
|> Token.tokenize keywords {strict = true}
|
||||
|> maps (Token.reports keywords)
|
||||
|> Context_Position.reports_text ctxt;
|
||||
in () end
|
||||
|
||||
fun prepare_text ctxt =
|
||||
Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt;
|
||||
(* This also produces indent-expansion and changes space to "\_" and the introduction of "\newline",
|
||||
I believe. Otherwise its in Thy_Output.output_source, the compiler from string to LaTeX.text. *)
|
||||
|
||||
fun string_2_text_antiquotation ctxt text =
|
||||
prepare_text ctxt text
|
||||
|> Document_Output.output_source ctxt
|
||||
|> Document_Output.isabelle ctxt
|
||||
|
||||
fun string_2_theory_text_antiquotation ctxt text =
|
||||
let
|
||||
val keywords = Thy_Header.get_keywords' ctxt;
|
||||
in
|
||||
prepare_text ctxt text
|
||||
|> Token.explode0 keywords
|
||||
|> maps (Document_Output.output_token ctxt)
|
||||
|> Document_Output.isabelle ctxt
|
||||
end
|
||||
|
||||
fun gen_text_antiquotation name reportNcheck compile =
|
||||
Document_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
|
||||
(fn ctxt => fn text:Input.source =>
|
||||
let
|
||||
val _ = reportNcheck ctxt text;
|
||||
in
|
||||
compile ctxt text
|
||||
end);
|
||||
|
||||
fun std_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_text string_2_text_antiquotation
|
||||
|
||||
(* should be the same as (2020):
|
||||
fun text_antiquotation name =
|
||||
Thy_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
|
||||
(fn ctxt => fn text =>
|
||||
let
|
||||
val _ = report_text ctxt text;
|
||||
in
|
||||
prepare_text ctxt text
|
||||
|> Thy_Output.output_source ctxt
|
||||
|> Thy_Output.isabelle ctxt
|
||||
end);
|
||||
*)
|
||||
|
||||
fun std_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_theory_text string_2_theory_text_antiquotation
|
||||
|
||||
(* should be the same as (2020):
|
||||
fun theory_text_antiquotation name =
|
||||
Thy_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
|
||||
(fn ctxt => fn text =>
|
||||
let
|
||||
val keywords = Thy_Header.get_keywords' ctxt;
|
||||
|
||||
val _ = report_text ctxt text;
|
||||
val _ =
|
||||
Input.source_explode text
|
||||
|> Token.tokenize keywords {strict = true}
|
||||
|> maps (Token.reports keywords)
|
||||
|> Context_Position.reports_text ctxt;
|
||||
in
|
||||
prepare_text ctxt text
|
||||
|> Token.explode0 keywords
|
||||
|> maps (Thy_Output.output_token ctxt)
|
||||
|> Thy_Output.isabelle ctxt
|
||||
|> enclose_env ctxt "isarbox"
|
||||
end);
|
||||
*)
|
||||
|
||||
|
||||
|
||||
fun enclose_env ctxt block_env body =
|
||||
if Config.get ctxt Document_Antiquotation.thy_output_display
|
||||
then Latex.environment block_env body
|
||||
else body;
|
||||
|
||||
|
||||
fun boxed_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_text
|
||||
(fn ctxt => string_2_text_antiquotation ctxt
|
||||
#> enclose_env ctxt "isarbox")
|
||||
|
||||
|
||||
fun boxed_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_theory_text
|
||||
(fn ctxt => string_2_theory_text_antiquotation ctxt
|
||||
#> enclose_env ctxt "isarbox")
|
||||
|
||||
|
||||
(* #> enclose_env ctxt "isarbox" *)
|
||||
|
||||
val _ = Theory.setup
|
||||
(std_text_antiquotation \<^binding>\<open>my_text\<close> #>
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_text\<close> #>
|
||||
std_text_antiquotation \<^binding>\<open>my_cartouche\<close> #>
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_cartouche\<close> #>
|
||||
std_theory_text_antiquotation \<^binding>\<open>my_theory_text\<close>#>
|
||||
boxed_theory_text_antiquotation \<^binding>\<open>boxed_theory_text\<close>); (* is overriding possible ?*)
|
||||
|
||||
\<close>
|
||||
(*<*)
|
||||
text-latex\<open>
|
||||
@{boxed_cartouche [display] \<open>definition dfg = \<lambda>x. x\<close>}
|
||||
@{boxed_theory_text [display] \<open>doc_class dfg = \<lambda>x... \<Gamma>\<close>} \<close>
|
||||
(*>*)
|
||||
|
||||
|
||||
|
||||
section\<open>Experimental Section for Multiple Figure Content\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
val thy_output_display = Attrib.setup_option_bool ("thy_output_display", \<^here>);
|
||||
|
||||
val caption_param = Config.declare_string ("caption", \<^here>) (K "");
|
||||
val width_param = Config.declare_int ("width", \<^here>) (K 80); \<comment> \<open>char per line\<close>
|
||||
val scale_param = Config.declare_int ("scale", \<^here>) (K 100); \<comment> \<open>in percent\<close>
|
||||
|
||||
Config.put caption_param;
|
||||
Config.put_global;
|
||||
Config.get ;
|
||||
|
||||
(*
|
||||
Latex.string (enclose "[" "]" (String.concat [ label_and_type, ", args={", (commas str_args), "}"]))
|
||||
*)
|
||||
|
||||
(*
|
||||
\begin{figure}[h]
|
||||
\centering
|
||||
|
||||
\includegraphics[scale=0.5]{graph_a}
|
||||
\caption{An example graph}
|
||||
|
||||
\label{fig:x cubed graph}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}
|
||||
\centering
|
||||
|
||||
\begin{subfigure}[b]{0.3\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{graph1}
|
||||
\caption{$y=x$}
|
||||
|
||||
\label{fig:y equals x} (* PROBLEM *)
|
||||
\end{subfigure}
|
||||
|
||||
\hfill
|
||||
|
||||
\begin{subfigure}[b]{0.3\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{graph2}
|
||||
\caption{$y=3sinx$}
|
||||
|
||||
\label{fig:three sin x} (* PROBLEM *)
|
||||
\end{subfigure}
|
||||
|
||||
\hfill
|
||||
|
||||
\begin{subfigure}[b]{0.3\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{graph3}
|
||||
\caption{$y=5/x$}
|
||||
|
||||
\label{fig:five over x} (* PROBLEM *)
|
||||
\end{subfigure}
|
||||
|
||||
\caption{Three simple graphs}
|
||||
\label{fig:three graphs}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{wrapfigure}{l}{0.5\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=1.5cm]{logo.png}
|
||||
\caption{$y=5/x$}
|
||||
|
||||
\end{wrapfigure}
|
||||
|
||||
*)
|
||||
|
||||
datatype figure_type = single | subfigure | float_embedded
|
||||
|
||||
(* to check if this can be done more properly: user-state or attributes ??? *)
|
||||
val figure_mode = Unsynchronized.ref(float_embedded)
|
||||
val figure_label = Unsynchronized.ref(NONE:string option)
|
||||
val figure_proportions = Unsynchronized.ref([]:int list)
|
||||
(* invariant : !figure_mode = subfigure_embedded ==> length(!figure_proportions) > 1 *)
|
||||
|
||||
fun figure_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) =
|
||||
Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
|
||||
(check ctxt NONE source;
|
||||
let val cap = Config.get ctxt caption_param
|
||||
val cap_txt = if cap = "" then "" else (Library.enclose "\n\\caption{" "}\n" cap)
|
||||
\<comment> \<open>this is naive. one should add an evaluation of doc antiquotations here\<close>
|
||||
val wdth= Config.get ctxt width_param
|
||||
val wdth_ltx = (if wdth = 100 then ""
|
||||
else if 10<=wdth andalso wdth<=99
|
||||
then "width=0."^(Int.toString wdth)
|
||||
else if 1<=wdth then "width=0.0"^(Int.toString wdth)
|
||||
else error "width out of range (must be between 1 and 100"
|
||||
)^"\\textwidth"
|
||||
val scl = Config.get ctxt scale_param
|
||||
val scl_ltx = if scl = 100 then ""
|
||||
else if 10<=scl andalso scl<=99 then "scale=0."^(Int.toString scl)
|
||||
else if 1<=scl then "scale=0.0"^(Int.toString scl)
|
||||
else error "scale out of range (must be between 1 and 100"
|
||||
val fig_args = Library.enclose "[" "]" (commas [wdth_ltx,scl_ltx])
|
||||
val _ = writeln cap
|
||||
fun proportion () = "0."^ (Int.toString (100 div List.length(!figure_proportions)))
|
||||
\<comment> \<open>naive: assumes equal proportions\<close>
|
||||
fun core arg = "\n\\centering\n"
|
||||
^"\\includegraphics"
|
||||
^fig_args^(Library.enclose "{" "}" arg)
|
||||
^cap_txt
|
||||
\<comment> \<open>add internal labels here\<close>
|
||||
fun pat arg = case !figure_mode of
|
||||
single => core arg
|
||||
|subfigure => "\n\\begin{subfigure}[b]{"^proportion ()^"\\textwidth}"
|
||||
^ core arg
|
||||
^"\n\\end{subfigure}\n"
|
||||
|float_embedded => "\n\\begin{wrapfigure}{r}{"^wdth_ltx^"}"
|
||||
^ core arg
|
||||
^"\n\\end{wrapfigure}\n"
|
||||
|
||||
in (Latex.output_ascii_breakable "/" (Input.string_of source))
|
||||
|> pat
|
||||
|> Latex.string
|
||||
end));
|
||||
|
||||
val _ = Theory.setup
|
||||
(Document_Antiquotation.setup_option \<^binding>\<open>width\<close>
|
||||
(Config.put width_param o Document_Antiquotation.integer) #>
|
||||
Document_Antiquotation.setup_option \<^binding>\<open>scale\<close>
|
||||
(Config.put scale_param o Document_Antiquotation.integer) #>
|
||||
Document_Antiquotation.setup_option \<^binding>\<open>caption\<close>
|
||||
(Config.put caption_param) #>
|
||||
Document_Output.antiquotation_raw_embedded \<^binding>\<open>figure_content\<close>
|
||||
(figure_antiq Resources.check_file) (K I)
|
||||
|
||||
);
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
text-latex\<open>
|
||||
@{figure_content [width=40, scale=35, caption="This is a test"] "ROOT"}
|
||||
\<close>
|
||||
(*>*)
|
||||
ML\<open>
|
||||
|
||||
fun gen_enriched_document_command3 name {body} cid_transform attr_transform markdown
|
||||
((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t,
|
||||
xstring_opt:(xstring * Position.T) option),
|
||||
toks:Input.source list)
|
||||
= gen_enriched_document_command2 name {body=body} cid_transform attr_transform markdown
|
||||
((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t,
|
||||
xstring_opt:(xstring * Position.T) option),
|
||||
toks) \<comment> \<open>Hack : drop second and thrd args.\<close>
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command ("Figure*", @{here}) "multiple figure"
|
||||
(ODL_Meta_Args_Parser.attributes -- Parse.opt_target -- Scan.repeat1 Parse.document_source
|
||||
>> (Toplevel.theory o (gen_enriched_document_command2 "TTT" {body=true} I I {markdown = true} )));
|
||||
|
||||
|
||||
\<close>
|
||||
(*
|
||||
Figure*[fff::figure,src="\<open>this is a side-by-side\<close>"]
|
||||
\<open>@{figure_content [width=40, scale=35, caption="This is a test"] "ROOT"}\<close>
|
||||
\<open> \<^doof> \<^LATEX> \<close>
|
||||
\<open> \<^theory_text>\<open>definition df = ... \<close>
|
||||
@{ML [display] \<open> let val x = 3 + 4 in true end\<close>}
|
||||
@{cartouche [display] \<open> @{figure "cfgdfg"}\<close>}
|
||||
\<close>
|
||||
*)
|
||||
|
||||
|
||||
(*<*)
|
||||
|
||||
Figure*[figxxx::float,main_caption="\<open>Proofs establishing an Invariant Preservation.\<close>"]
|
||||
\<open> @{fig_content (width=40, height=35, caption="This is a right test") "figures/A.png"}
|
||||
@{fig_content (width=40, height=35, caption="This is a left \<^term>\<open>\<sigma>\<^sub>i + 1\<close> test") "figures/A.png"}
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
(* proposed syntax for sub-figure labels : text\<open> @{figure "ffff(2)"}\<close> *)
|
||||
|
||||
Figure*[figxxxx::float,main_caption="\<open>Proofs establishing an Invariant Preservation.\<close>"]
|
||||
\<open>@{boxed_theory_text [display]
|
||||
\<open>lemma inv_c2_preserved : "c2_inv \<sigma> \<Longrightarrow> c1_inv (\<sigma> \<langle>Hardware\<rangle>\<^sub>C\<^sub>o\<^sub>m\<^sub>p\<^sub>u\<^sub>t\<^sub>e\<^sub>r\<^sub>H\<^sub>a\<^sub>r\<^sub>d\<^sub>w\<^sub>a\<^sub>r\<^sub>e)"
|
||||
unfolding c1_inv_def c2_inv_def
|
||||
Computer_Hardware_to_Hardware_morphism_def
|
||||
Product_to_Component_morphism_def
|
||||
by (auto simp: comp_def)
|
||||
|
||||
lemma Computer_Hardware_to_Hardware_total :
|
||||
"Computer_Hardware_to_Hardware_morphism ` ({X. c2_inv X})
|
||||
\<subseteq> ({X::Hardware. c1_inv X})"
|
||||
using inv_c2_preserved by auto\<close>}\<close>
|
||||
|
||||
end
|
||||
(*>*)
|
|
@ -1,95 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>Ontologys Matching\<close>
|
||||
|
||||
theory Ontology_Matching_Example
|
||||
imports TestKit
|
||||
"Isabelle_DOF.Isa_DOF"
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
|
||||
begin
|
||||
|
||||
section\<open>Test Purpose.\<close>
|
||||
text\<open> This is merely an example that shows that the generated invariants
|
||||
fit nicely together; i.e. allow for sensible consistency and invariant
|
||||
preservation proofs related to ontological matchings. \<close>
|
||||
|
||||
section\<open>The Scenario.\<close>
|
||||
|
||||
text\<open>Using HOL, we can define a mapping between two ontologies.
|
||||
It is called ontology matching or ontology alignment.
|
||||
Here is an example which show how to map two classes.
|
||||
HOL also allows us to map the invariants (ontological rules) of the classes!\<close>
|
||||
|
||||
type_synonym UTF8 = string
|
||||
|
||||
definition utf8_to_string
|
||||
where "utf8_to_string xx = xx"
|
||||
|
||||
|
||||
doc_class A =
|
||||
first_name :: UTF8
|
||||
last_name :: UTF8
|
||||
age :: nat
|
||||
married_to :: "string option"
|
||||
invariant a :: "age \<sigma> < 18 \<longrightarrow> married_to \<sigma> = None"
|
||||
|
||||
|
||||
doc_class B =
|
||||
name :: string
|
||||
adult :: bool
|
||||
is_married :: bool
|
||||
invariant b :: "is_married \<sigma> \<longrightarrow> adult \<sigma>"
|
||||
|
||||
text\<open>We define the mapping between the two classes,
|
||||
i.e. how to transform the class @{doc_class A} in to the class @{doc_class B}:\<close>
|
||||
|
||||
definition A_to_B_morphism
|
||||
where "A_to_B_morphism X =
|
||||
\<lparr> tag_attribute = A.tag_attribute X
|
||||
, name = utf8_to_string (first_name X) @ '' '' @ utf8_to_string (last_name X)
|
||||
, adult = (age X \<ge> 18)
|
||||
, is_married = (married_to X \<noteq> None) \<rparr>"
|
||||
|
||||
text\<open>Sanity check: Invariants are non-contradictory, i.e. there is a witness.\<close>
|
||||
|
||||
lemma inv_a_satisfyable : " Ex (a_inv::A \<Rightarrow> bool)"
|
||||
unfolding a_inv_def
|
||||
apply(rule_tac x ="\<lparr>Ontology_Matching_Example.A.tag_attribute = xxx,
|
||||
first_name = yyy, last_name = zzz, age = 17,
|
||||
married_to = None\<rparr>" in exI)
|
||||
by auto
|
||||
|
||||
text\<open>Now we check that the invariant is preserved through the morphism:\<close>
|
||||
|
||||
lemma inv_a_preserved :
|
||||
"a_inv X \<Longrightarrow> b_inv (A_to_B_morphism X)"
|
||||
unfolding a_inv_def b_inv_def A_to_B_morphism_def
|
||||
by auto
|
||||
|
||||
text\<open>This also implies that B invariants are non-contradictory: \<close>
|
||||
|
||||
lemma inv_b_preserved : "\<exists>x. (b_inv::B \<Rightarrow> bool) x"
|
||||
apply(rule_tac x ="A_to_B_morphism \<lparr>Ontology_Matching_Example.A.tag_attribute = xxx,
|
||||
first_name = yyy, last_name = zzz, age = 17,
|
||||
married_to = None\<rparr>" in exI)
|
||||
by(rule inv_a_preserved,auto simp: a_inv_def)
|
||||
|
||||
|
||||
lemma A_morphism_B_total :
|
||||
"A_to_B_morphism ` ({X::A. a_inv X}) \<subseteq> ({X::B. b_inv X})"
|
||||
using inv_a_preserved
|
||||
by auto
|
||||
|
||||
end
|
|
@ -1,433 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
theory
|
||||
OutOfOrderPresntn
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"TestKit"
|
||||
"Isabelle_DOF-Ontologies.Conceptual"
|
||||
keywords "Figure*" :: document_body
|
||||
|
||||
begin
|
||||
|
||||
section\<open>Elementary Creation of Doc-items and Access of their Attibutes\<close>
|
||||
|
||||
|
||||
|
||||
text\<open>And here a tex - text macro.\<close>
|
||||
text\<open>Pythons ReStructuredText (RST).
|
||||
@{url \<open>https://de.wikipedia.org/wiki/ReStructuredText\<close>}. Tool: Sphinx.
|
||||
\<close>
|
||||
|
||||
text*[aaaa::B]\<open>dfg @{thm [display] refl}\<close>
|
||||
|
||||
(*<*)
|
||||
|
||||
text-[dfgdfg::B]
|
||||
\<open> Lorem ipsum ... @{thm [display] refl} Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
|
||||
text-latex\<open> Lorem ipsum ... @{thm [display] refl} Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
|
||||
text-[asd::B]
|
||||
\<open>... and here is its application macro expansion:
|
||||
@{B [display] "dfgdfg"}
|
||||
\textbf{TEST}
|
||||
@{cartouche [display]
|
||||
\<open>text*[dfgdfg::B]
|
||||
\<open> Lorem ipsum ... @{thm refl} Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
text-latex\<open>... and here is its application macro expansion:
|
||||
@{B [display] "dfgdfg"}
|
||||
\textbf{TEST}
|
||||
@{cartouche [display]
|
||||
\<open>text*[dfgdfg::B]
|
||||
\<open> Lorem ipsum ... @{thm refl} Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>
|
||||
\<close>}\<close>
|
||||
|
||||
text-latex\<open> \<^theory_text>\<open>definition df = ...
|
||||
\<close>
|
||||
@{ML [display] \<open> let val x = 3 + 4 in true end
|
||||
\<close>}
|
||||
|
||||
@{ML_text [display] \<open> val x = ...
|
||||
\<close>}
|
||||
|
||||
@{verbatim [display] \<open> Lorem ipsum ... @{thm refl}
|
||||
Frédéric \textbf{TEST} \verb+sdf+ \<open>dfgdfg\<close> \<close>}
|
||||
@{theory_text [display] \<open>definition df = ... \<lambda>x.
|
||||
\<close>}
|
||||
@{cartouche [display] \<open> @{figure "cfgdfg"}\<close>} \<close>
|
||||
|
||||
|
||||
text\<open>Final Status:\<close>
|
||||
print_doc_items
|
||||
print_doc_classes
|
||||
|
||||
section\<open>Experiments on Inline-Textelements\<close>
|
||||
text\<open>Std Abbreviations. Inspired by the block *\<open>control spacing\<close>
|
||||
in @{file \<open>$ISABELLE_HOME/src/Pure/Thy/document_antiquotations.ML\<close>}.
|
||||
We mechanize the table-construction and even attach the LaTeX
|
||||
quirks to be dumped into the prelude. \<close>
|
||||
|
||||
ML\<open>
|
||||
val _ =
|
||||
Theory.setup
|
||||
( Document_Output.antiquotation_raw \<^binding>\<open>doof\<close> (Scan.succeed ())
|
||||
(fn _ => fn () => Latex.string "\\emph{doof}")
|
||||
#> Document_Output.antiquotation_raw \<^binding>\<open>LATEX\<close> (Scan.succeed ())
|
||||
(fn _ => fn () => Latex.string "\\textbf{LaTeX}")
|
||||
)
|
||||
\<close>
|
||||
|
||||
text-latex\<open> \<^doof> \<^LATEX> \<close>
|
||||
|
||||
(* the same effect is achieved with : *)
|
||||
setup \<open>DOF_lib.define_shortcut (Binding.make("bla",\<^here>)) "\\blabla"\<close>
|
||||
(* Note that this assumes that the generated LaTeX macro call "\blabla" is defined somewhere in the
|
||||
target document, for example, in the tex prelude. Note that the "Binding.make" can be avoided
|
||||
using the alternative \<^binding> notation (see above).*)
|
||||
|
||||
setup\<open>DOF_lib.define_macro (Binding.make("blong",\<^here>)) "\\blong{" "}" (K(K()))\<close>
|
||||
|
||||
text-latex\<open> \<^blong>\<open>asd\<close> outer \<^blong>\<open>syntax| ! see {syntax, outer}\<close> \<close>
|
||||
(*>*)
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun report_text ctxt text =
|
||||
let val pos = Input.pos_of text in
|
||||
Context_Position.reports ctxt
|
||||
[(pos, Markup.language_text (Input.is_delimited text)),
|
||||
(pos, Markup.raw_text)]
|
||||
end;
|
||||
|
||||
fun report_theory_text ctxt text =
|
||||
let val keywords = Thy_Header.get_keywords' ctxt;
|
||||
val _ = report_text ctxt text;
|
||||
val _ =
|
||||
Input.source_explode text
|
||||
|> Token.tokenize keywords {strict = true}
|
||||
|> maps (Token.reports keywords)
|
||||
|> Context_Position.reports_text ctxt;
|
||||
in () end
|
||||
|
||||
fun prepare_text ctxt =
|
||||
Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt;
|
||||
(* This also produces indent-expansion and changes space to "\_" and the introduction of "\newline",
|
||||
I believe. Otherwise its in Thy_Output.output_source, the compiler from string to LaTeX.text. *)
|
||||
|
||||
fun string_2_text_antiquotation ctxt text =
|
||||
prepare_text ctxt text
|
||||
|> Document_Output.output_source ctxt
|
||||
|> Document_Output.isabelle ctxt
|
||||
|
||||
fun string_2_theory_text_antiquotation ctxt text =
|
||||
let
|
||||
val keywords = Thy_Header.get_keywords' ctxt;
|
||||
in
|
||||
prepare_text ctxt text
|
||||
|> Token.explode0 keywords
|
||||
|> maps (Document_Output.output_token ctxt)
|
||||
|> Document_Output.isabelle ctxt
|
||||
end
|
||||
|
||||
fun gen_text_antiquotation name reportNcheck compile =
|
||||
Document_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
|
||||
(fn ctxt => fn text:Input.source =>
|
||||
let
|
||||
val _ = reportNcheck ctxt text;
|
||||
in
|
||||
compile ctxt text
|
||||
end);
|
||||
|
||||
fun std_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_text string_2_text_antiquotation
|
||||
|
||||
(* should be the same as (2020):
|
||||
fun text_antiquotation name =
|
||||
Thy_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
|
||||
(fn ctxt => fn text =>
|
||||
let
|
||||
val _ = report_text ctxt text;
|
||||
in
|
||||
prepare_text ctxt text
|
||||
|> Thy_Output.output_source ctxt
|
||||
|> Thy_Output.isabelle ctxt
|
||||
end);
|
||||
*)
|
||||
|
||||
fun std_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_theory_text string_2_theory_text_antiquotation
|
||||
|
||||
(* should be the same as (2020):
|
||||
fun theory_text_antiquotation name =
|
||||
Thy_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
|
||||
(fn ctxt => fn text =>
|
||||
let
|
||||
val keywords = Thy_Header.get_keywords' ctxt;
|
||||
|
||||
val _ = report_text ctxt text;
|
||||
val _ =
|
||||
Input.source_explode text
|
||||
|> Token.tokenize keywords {strict = true}
|
||||
|> maps (Token.reports keywords)
|
||||
|> Context_Position.reports_text ctxt;
|
||||
in
|
||||
prepare_text ctxt text
|
||||
|> Token.explode0 keywords
|
||||
|> maps (Thy_Output.output_token ctxt)
|
||||
|> Thy_Output.isabelle ctxt
|
||||
|> enclose_env ctxt "isarbox"
|
||||
end);
|
||||
*)
|
||||
|
||||
|
||||
|
||||
fun enclose_env ctxt block_env body =
|
||||
if Config.get ctxt Document_Antiquotation.thy_output_display
|
||||
then Latex.environment block_env body
|
||||
else body;
|
||||
|
||||
|
||||
fun boxed_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_text
|
||||
(fn ctxt => string_2_text_antiquotation ctxt
|
||||
#> enclose_env ctxt "isarbox")
|
||||
|
||||
|
||||
fun boxed_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
gen_text_antiquotation name report_theory_text
|
||||
(fn ctxt => string_2_theory_text_antiquotation ctxt
|
||||
#> enclose_env ctxt "isarbox")
|
||||
|
||||
|
||||
(* #> enclose_env ctxt "isarbox" *)
|
||||
|
||||
val _ = Theory.setup
|
||||
(std_text_antiquotation \<^binding>\<open>my_text\<close> #>
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_text\<close> #>
|
||||
std_text_antiquotation \<^binding>\<open>my_cartouche\<close> #>
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_cartouche\<close> #>
|
||||
std_theory_text_antiquotation \<^binding>\<open>my_theory_text\<close>#>
|
||||
boxed_theory_text_antiquotation \<^binding>\<open>boxed_theory_text\<close>); (* is overriding possible ?*)
|
||||
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
text-latex\<open>
|
||||
@{boxed_cartouche [display] \<open>definition dfg = \<lambda>x. x\<close>}
|
||||
@{boxed_theory_text [display] \<open>doc_class dfg = \<lambda>x... \<Gamma>\<close>} \<close>
|
||||
(*>*)
|
||||
|
||||
|
||||
section\<open>Section Experiments of picture-content\<close>
|
||||
|
||||
(*<*)
|
||||
|
||||
ML\<open>
|
||||
|
||||
val thy_output_display = Attrib.setup_option_bool ("thy_output_display", \<^here>);
|
||||
|
||||
val caption_param = Config.declare_string ("caption", \<^here>) (K "");
|
||||
val width_param = Config.declare_int ("width", \<^here>) (K 80); \<comment> \<open>char per line\<close>
|
||||
val scale_param = Config.declare_int ("scale", \<^here>) (K 100); \<comment> \<open>in percent\<close>
|
||||
|
||||
Config.put caption_param;
|
||||
Config.put_global;
|
||||
Config.get ;
|
||||
|
||||
(*
|
||||
Latex.string (enclose "[" "]" (String.concat [ label_and_type, ", args={", (commas str_args), "}"]))
|
||||
*)
|
||||
|
||||
(*
|
||||
\begin{figure}[h]
|
||||
\centering
|
||||
|
||||
\includegraphics[scale=0.5]{graph_a}
|
||||
\caption{An example graph}
|
||||
|
||||
\label{fig:x cubed graph}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}
|
||||
\centering
|
||||
|
||||
\begin{subfigure}[b]{0.3\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{graph1}
|
||||
\caption{$y=x$}
|
||||
|
||||
\label{fig:y equals x} (* PROBLEM *)
|
||||
\end{subfigure}
|
||||
|
||||
\hfill
|
||||
|
||||
\begin{subfigure}[b]{0.3\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{graph2}
|
||||
\caption{$y=3sinx$}
|
||||
|
||||
\label{fig:three sin x} (* PROBLEM *)
|
||||
\end{subfigure}
|
||||
|
||||
\hfill
|
||||
|
||||
\begin{subfigure}[b]{0.3\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{graph3}
|
||||
\caption{$y=5/x$}
|
||||
|
||||
\label{fig:five over x} (* PROBLEM *)
|
||||
\end{subfigure}
|
||||
|
||||
\caption{Three simple graphs}
|
||||
\label{fig:three graphs}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{wrapfigure}{l}{0.5\textwidth}
|
||||
|
||||
\centering
|
||||
\includegraphics[width=1.5cm]{logo.png}
|
||||
\caption{$y=5/x$}
|
||||
|
||||
\end{wrapfigure}
|
||||
|
||||
*)
|
||||
|
||||
datatype figure_type = single | subfigure | float_embedded
|
||||
|
||||
(* to check if this can be done more properly: user-state or attributes ??? *)
|
||||
val figure_mode = Unsynchronized.ref(float_embedded)
|
||||
val figure_label = Unsynchronized.ref(NONE:string option)
|
||||
val figure_proportions = Unsynchronized.ref([]:int list)
|
||||
(* invariant : !figure_mode = subfigure_embedded ==> length(!figure_proportions) > 1 *)
|
||||
|
||||
fun figure_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) =
|
||||
Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
|
||||
(check ctxt NONE source;
|
||||
let val cap = Config.get ctxt caption_param
|
||||
val cap_txt = if cap = "" then "" else (Library.enclose "\n\\caption{" "}\n" cap)
|
||||
\<comment> \<open>this is naive. one should add an evaluation of doc antiquotations here\<close>
|
||||
val wdth= Config.get ctxt width_param
|
||||
val wdth_ltx = (if wdth = 100 then ""
|
||||
else if 10<=wdth andalso wdth<=99
|
||||
then "width=0."^(Int.toString wdth)
|
||||
else if 1<=wdth then "width=0.0"^(Int.toString wdth)
|
||||
else error "width out of range (must be between 1 and 100"
|
||||
)^"\\textwidth"
|
||||
val scl = Config.get ctxt scale_param
|
||||
val scl_ltx = if scl = 100 then ""
|
||||
else if 10<=scl andalso scl<=99 then "scale=0."^(Int.toString scl)
|
||||
else if 1<=scl then "scale=0.0"^(Int.toString scl)
|
||||
else error "scale out of range (must be between 1 and 100"
|
||||
val fig_args = Library.enclose "[" "]" (commas [wdth_ltx,scl_ltx])
|
||||
val _ = writeln cap
|
||||
fun proportion () = "0."^ (Int.toString (100 div List.length(!figure_proportions)))
|
||||
\<comment> \<open>naive: assumes equal proportions\<close>
|
||||
fun core arg = "\n\\centering\n"
|
||||
^"\\includegraphics"
|
||||
^fig_args^(Library.enclose "{" "}" arg)
|
||||
^cap_txt
|
||||
\<comment> \<open>add internal labels here\<close>
|
||||
fun pat arg = case !figure_mode of
|
||||
single => core arg
|
||||
|subfigure => "\n\\begin{subfigure}[b]{"^proportion ()^"\\textwidth}"
|
||||
^ core arg
|
||||
^"\n\\end{subfigure}\n"
|
||||
|float_embedded => "\n\\begin{wrapfigure}{r}{"^wdth_ltx^"}"
|
||||
^ core arg
|
||||
^"\n\\end{wrapfigure}\n"
|
||||
|
||||
in (Latex.output_ascii_breakable "/" (Input.string_of source))
|
||||
|> pat
|
||||
|> Latex.string
|
||||
end));
|
||||
|
||||
val _ = Theory.setup
|
||||
(Document_Antiquotation.setup_option \<^binding>\<open>width\<close>
|
||||
(Config.put width_param o Document_Antiquotation.integer) #>
|
||||
Document_Antiquotation.setup_option \<^binding>\<open>scale\<close>
|
||||
(Config.put scale_param o Document_Antiquotation.integer) #>
|
||||
Document_Antiquotation.setup_option \<^binding>\<open>caption\<close>
|
||||
(Config.put caption_param) #>
|
||||
Document_Output.antiquotation_raw_embedded \<^binding>\<open>figure_content\<close>
|
||||
(figure_antiq Resources.check_file) (K I)
|
||||
|
||||
);
|
||||
\<close>
|
||||
|
||||
text-latex\<open>
|
||||
@{figure_content [width=40, scale=35, caption="This is a test"] "ROOT"}
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun gen_enriched_document_command3 name {body} cid_transform attr_transform markdown
|
||||
((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t,
|
||||
xstring_opt:(xstring * Position.T) option),
|
||||
toks:Input.source list)
|
||||
= gen_enriched_document_command2 name {body=body} cid_transform attr_transform markdown
|
||||
((((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t,
|
||||
xstring_opt:(xstring * Position.T) option),
|
||||
toks) \<comment> \<open>Hack : drop second and thrd args.\<close>
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command ("Figure*", @{here}) "multiple figure"
|
||||
(ODL_Meta_Args_Parser.attributes -- Parse.opt_target -- Scan.repeat1 Parse.document_source
|
||||
>> (Toplevel.theory o (gen_enriched_document_command2 "TTT" {body=true} I I {markdown = true} )));
|
||||
|
||||
|
||||
\<close>
|
||||
(*
|
||||
Figure*[fff::figure,src="\<open>this is a side-by-side\<close>"]
|
||||
\<open>@{figure_content [width=40, scale=35, caption="This is a test"] "ROOT"}\<close>
|
||||
\<open> \<^doof> \<^LATEX> \<close>
|
||||
\<open> \<^theory_text>\<open>definition df = ... \<close>
|
||||
@{ML [display] \<open> let val x = 3 + 4 in true end\<close>}
|
||||
@{cartouche [display] \<open> @{figure "cfgdfg"}\<close>}
|
||||
\<close>
|
||||
*)
|
||||
|
||||
|
||||
Figure*[ffff::float, main_caption="\<open>this is another 2 side-by-side\<close>"]
|
||||
\<open>@{figure_content [width=40, scale=35, caption="This is a left test"] "ROOT"}\<close>
|
||||
\<open>@{figure_content [width=40, scale=35, caption="This is a right test"] "ROOT"}\<close>
|
||||
|
||||
(* proposed syntax for sub-figure labels :
|
||||
text\<open> @{figure "ffff(2)"}\<close>
|
||||
*)
|
||||
|
||||
Figure*[figxxx::float,main_caption="\<open>Proofs establishing an Invariant Preservation.\<close>"]
|
||||
\<open>@{boxed_theory_text [display]
|
||||
\<open>lemma inv_c2_preserved : "c2_inv \<sigma> \<Longrightarrow> c1_inv (\<sigma> \<langle>Hardware\<rangle>\<^sub>C\<^sub>o\<^sub>m\<^sub>p\<^sub>u\<^sub>t\<^sub>e\<^sub>r\<^sub>H\<^sub>a\<^sub>r\<^sub>d\<^sub>w\<^sub>a\<^sub>r\<^sub>e)"
|
||||
unfolding c1_inv_def c2_inv_def
|
||||
Computer_Hardware_to_Hardware_morphism_def
|
||||
Product_to_Component_morphism_def
|
||||
by (auto simp: comp_def)
|
||||
|
||||
lemma Computer_Hardware_to_Hardware_total :
|
||||
"Computer_Hardware_to_Hardware_morphism ` ({X. c2_inv X})
|
||||
\<subseteq> ({X::Hardware. c1_inv X})"
|
||||
using inv_c2_preserved by auto\<close>}\<close>
|
||||
|
||||
end
|
||||
(*>*)
|
|
@ -1,23 +0,0 @@
|
|||
session "Isabelle_DOF-Unit-Tests" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, document_variants = "document:overview=-proof,-ML,-unimportant"]
|
||||
theories
|
||||
"TestKit"
|
||||
"Latex_Tests"
|
||||
"Concept_OntoReferencing"
|
||||
"Concept_Example_Low_Level_Invariant"
|
||||
"Concept_High_Level_Invariants"
|
||||
"Concept_MonitorTest1"
|
||||
"Concept_MonitorTest2"
|
||||
"Concept_TermAntiquotations"
|
||||
"Concept_TermEvaluation"
|
||||
"Attributes"
|
||||
"AssnsLemmaThmEtc"
|
||||
"Ontology_Matching_Example"
|
||||
"Cenelec_Test"
|
||||
"OutOfOrderPresntn"
|
||||
"COL_Test"
|
||||
"Test_Polymorphic_Classes"
|
||||
document_files
|
||||
"root.bib"
|
||||
"figures/A.png"
|
||||
"figures/B.png"
|
|
@ -1,219 +0,0 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>The Isabelle/DOF TestKit\<close>
|
||||
|
||||
theory
|
||||
TestKit
|
||||
imports
|
||||
"Isabelle_DOF_Unit_Tests_document"
|
||||
"Isabelle_DOF-Ontologies.Conceptual"
|
||||
keywords "text-" "text-latex" :: document_body
|
||||
and "text-assert-error" :: document_body
|
||||
and "update_instance-assert-error" :: document_body
|
||||
and "declare_reference-assert-error" :: document_body
|
||||
and "value-assert-error" :: document_body
|
||||
and "definition-assert-error" :: document_body
|
||||
and "doc_class-assert-error" :: document_body
|
||||
|
||||
begin
|
||||
|
||||
section\<open>Testing Commands (exec-catch-verify - versions of DOF commands)\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun gen_enriched_document_command2 name {body} cid_transform attr_transform markdown
|
||||
((meta_args,
|
||||
xstring_opt:(xstring * Position.T) option),
|
||||
toks_list:Input.source list)
|
||||
: theory -> theory =
|
||||
let val toplvl = Toplevel.make_state o SOME
|
||||
val ((binding,cid_pos), doc_attrs) = meta_args
|
||||
val oid = Binding.name_of binding
|
||||
val oid' = if meta_args = ODL_Meta_Args_Parser.empty_meta_args
|
||||
then "output"
|
||||
else oid
|
||||
(* as side-effect, generates markup *)
|
||||
fun check_n_tex_text thy toks = let val ctxt = Toplevel.presentation_context (toplvl thy)
|
||||
val pos = Input.pos_of toks;
|
||||
val _ = Context_Position.reports ctxt
|
||||
[(pos, Markup.language_document (Input.is_delimited toks)),
|
||||
(pos, Markup.plain_text)];
|
||||
fun markup xml = let val m = if body then Markup.latex_body
|
||||
else Markup.latex_heading
|
||||
in [XML.Elem (m (Latex.output_name name),
|
||||
xml)] end;
|
||||
|
||||
val text = Document_Output.output_document
|
||||
(Proof_Context.init_global thy)
|
||||
markdown toks
|
||||
(* type file = {path: Path.T, pos: Position.T, content: string} *)
|
||||
|
||||
val strg = XML.string_of (hd (Latex.output text))
|
||||
val file = {path = Path.make [oid' ^ "_snippet.tex"],
|
||||
pos = @{here},
|
||||
content = Bytes.string strg}
|
||||
val dir = Path.append (Resources.master_directory thy) (Path.make ["latex_test"])
|
||||
val _ = Generated_Files.write_file dir file
|
||||
val _ = writeln (strg)
|
||||
in () end \<comment> \<open>important observation: thy is not modified.
|
||||
This implies that several text block can be
|
||||
processed in parallel in a future, as long
|
||||
as they are associated to one meta arg.\<close>
|
||||
val handle_margs_opt = (if meta_args = ODL_Meta_Args_Parser.empty_meta_args
|
||||
then I
|
||||
else
|
||||
Value_Command.Docitem_Parser.create_and_check_docitem
|
||||
{is_monitor = false} {is_inline = false} {define = true}
|
||||
binding (cid_transform cid_pos) (attr_transform doc_attrs))
|
||||
(* ... generating the level-attribute syntax *)
|
||||
in handle_margs_opt #> (fn thy => (app (check_n_tex_text thy) toks_list; thy))
|
||||
end;
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command ("text-", @{here}) "formal comment macro"
|
||||
(ODL_Meta_Args_Parser.attributes -- Parse.opt_target -- Scan.repeat1 Parse.document_source
|
||||
>> (Toplevel.theory o (gen_enriched_document_command2 "TTT" {body=true} I I {markdown = true} )));
|
||||
|
||||
(* copied from Pure_syn for experiments *)
|
||||
|
||||
fun output_document2 state markdown txt =
|
||||
let
|
||||
val ctxt = Toplevel.presentation_context state;
|
||||
val pos = Input.pos_of txt;
|
||||
val _ =
|
||||
Context_Position.reports ctxt
|
||||
[(pos, Markup.language_document (Input.is_delimited txt)),
|
||||
(pos, Markup.plain_text)];
|
||||
val txt' = Document_Output.output_document ctxt markdown txt
|
||||
val strg = XML.string_of (hd (Latex.output txt'))
|
||||
|
||||
val _ = writeln (strg)
|
||||
in Document_Output.output_document ctxt markdown txt end;
|
||||
|
||||
fun document_command2 markdown (loc, txt) =
|
||||
let fun doc2 state = (case loc of
|
||||
NONE => ignore (output_document2 state markdown txt)
|
||||
| SOME (_, pos) =>(ISA_core.err
|
||||
"Illegal target specification -- not a theory context"
|
||||
pos))
|
||||
fun out2 state = output_document2 state markdown txt
|
||||
in Toplevel.keep doc2 o Toplevel.present_local_theory loc out2
|
||||
end
|
||||
|
||||
fun gen_enriched_document_command3 assert name body trans at md (margs, src_list) thy
|
||||
= (gen_enriched_document_command2 name body trans at md (margs, src_list) thy)
|
||||
handle ERROR msg => (if assert src_list msg then (writeln ("Correct error: "^msg^": reported.");thy)
|
||||
else error"Wrong error reported")
|
||||
|
||||
fun error_match src msg = (String.isPrefix (Input.string_of src) msg)
|
||||
|
||||
fun error_match2 [_, src] msg = error_match src msg
|
||||
| error_match2 _ _ = error "Wrong text-assertion-error. Argument format <arg><match> required."
|
||||
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command ("text-assert-error", @{here}) "formal comment macro"
|
||||
(ODL_Meta_Args_Parser.opt_attributes -- Parse.opt_target -- Scan.repeat1 Parse.document_source
|
||||
>> (Toplevel.theory o (gen_enriched_document_command3 error_match2 "TTT" {body=true}
|
||||
I I {markdown = true} )));
|
||||
|
||||
fun update_instance_command (args,src) thy =
|
||||
(Monitor_Command_Parser.update_instance_command args thy
|
||||
handle ERROR msg => (if error_match src msg
|
||||
then (writeln ("Correct error: "^msg^": reported.");thy)
|
||||
else error"Wrong error reported"))
|
||||
val _ =
|
||||
Outer_Syntax.command \<^command_keyword>\<open>update_instance-assert-error\<close>
|
||||
"update meta-attributes of an instance of a document class"
|
||||
(ODL_Meta_Args_Parser.attributes_upd -- Parse.document_source
|
||||
>> (Toplevel.theory o update_instance_command));
|
||||
|
||||
val _ =
|
||||
let fun create_and_check_docitem (((binding,cid_pos),doc_attrs),src) thy =
|
||||
(Value_Command.Docitem_Parser.create_and_check_docitem
|
||||
{is_monitor = false} {is_inline=true}
|
||||
{define = false} binding (cid_pos) (doc_attrs) thy)
|
||||
handle ERROR msg => (if error_match src msg
|
||||
then (writeln ("Correct error: "^msg^": reported.");thy)
|
||||
else error"Wrong error reported")
|
||||
in Outer_Syntax.command \<^command_keyword>\<open>declare_reference-assert-error\<close>
|
||||
"declare document reference"
|
||||
(ODL_Meta_Args_Parser.attributes -- Parse.document_source
|
||||
>> (Toplevel.theory o create_and_check_docitem))
|
||||
end;
|
||||
|
||||
|
||||
val _ =
|
||||
let fun pass_trans_to_value_cmd (args, (((name, modes), t),src)) trans =
|
||||
let val pos = Toplevel.pos_of trans
|
||||
in trans |> Toplevel.theory
|
||||
(fn thy => Value_Command.value_cmd {assert=false} args name modes t pos thy
|
||||
handle ERROR msg => (if error_match src msg
|
||||
then (writeln ("Correct error: "^msg^": reported."); thy)
|
||||
else error"Wrong error reported"))
|
||||
end
|
||||
in Outer_Syntax.command \<^command_keyword>\<open>value-assert-error\<close> "evaluate and print term"
|
||||
(ODL_Meta_Args_Parser.opt_attributes --
|
||||
(Value_Command.opt_evaluator
|
||||
-- Value_Command.opt_modes
|
||||
-- Parse.term
|
||||
-- Parse.document_source)
|
||||
>> (pass_trans_to_value_cmd))
|
||||
end;
|
||||
|
||||
val _ =
|
||||
let fun definition_cmd' meta_args_opt decl params prems spec src bool ctxt =
|
||||
Local_Theory.background_theory (Value_Command.meta_args_exec meta_args_opt) ctxt
|
||||
|> (fn ctxt => Definition_Star_Command.definition_cmd decl params prems spec bool ctxt
|
||||
handle ERROR msg => if error_match src msg
|
||||
then (writeln ("Correct error: "^msg^": reported.")
|
||||
; pair "Bound 0" @{thm refl}
|
||||
|> pair (Bound 0)
|
||||
|> rpair ctxt)
|
||||
else error"Wrong error reported")
|
||||
in
|
||||
Outer_Syntax.local_theory' \<^command_keyword>\<open>definition-assert-error\<close> "constant definition"
|
||||
(ODL_Meta_Args_Parser.opt_attributes --
|
||||
(Scan.option Parse_Spec.constdecl -- (Parse_Spec.opt_thm_name ":" -- Parse.prop) --
|
||||
Parse_Spec.if_assumes -- Parse.for_fixes -- Parse.document_source)
|
||||
>> (fn (meta_args_opt, ((((decl, spec), prems), params), src)) =>
|
||||
#2 oo definition_cmd' meta_args_opt decl params prems spec src))
|
||||
end;
|
||||
|
||||
|
||||
val _ =
|
||||
let fun add_doc_class_cmd' ((((overloaded, hdr), (parent, attrs)),((rejects,accept_rex),invars)), src) =
|
||||
(fn thy => OntoParser.add_doc_class_cmd {overloaded = overloaded} hdr parent attrs rejects accept_rex invars thy
|
||||
handle ERROR msg => (if error_match src msg
|
||||
then (writeln ("Correct error: "^msg^": reported."); thy)
|
||||
else error"Wrong error reported"))
|
||||
in
|
||||
Outer_Syntax.command \<^command_keyword>\<open>doc_class-assert-error\<close>
|
||||
"define document class"
|
||||
((OntoParser.parse_doc_class -- Parse.document_source)
|
||||
>> (Toplevel.theory o add_doc_class_cmd'))
|
||||
end
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command ("text-latex", \<^here>) "formal comment (primary style)"
|
||||
(Parse.opt_target -- Parse.document_source >> document_command2 {markdown = true});
|
||||
|
||||
\<close>
|
||||
|
||||
(* a little auto-test *)
|
||||
text-latex\<open>dfg\<close>
|
||||
|
||||
text-assert-error[aaaa::A]\<open> @{A \<open>sdf\<close>}\<close>\<open>reference ontologically inconsistent\<close>
|
||||
|
||||
end
|
|
@ -1,719 +0,0 @@
|
|||
theory Test_Polymorphic_Classes
|
||||
imports Isabelle_DOF.Isa_DOF
|
||||
TestKit
|
||||
begin
|
||||
|
||||
text\<open>The name \<open>text\<close> is reserved by the implementation and refers to the default super class:\<close>
|
||||
doc_class-assert-error "text" =
|
||||
a::int
|
||||
\<open>text: This name is reserved by the implementation\<close>
|
||||
|
||||
doc_class title =
|
||||
short_title :: "string option" <= "None"
|
||||
|
||||
doc_class Author =
|
||||
email :: "string" <= "''''"
|
||||
|
||||
datatype classification = SIL0 | SIL1 | SIL2 | SIL3 | SIL4
|
||||
|
||||
doc_class abstract =
|
||||
keywordlist :: "string list" <= "[]"
|
||||
safety_level :: "classification" <= "SIL3"
|
||||
|
||||
doc_class text_section =
|
||||
authored_by :: "Author set" <= "{}"
|
||||
level :: "int option" <= "None"
|
||||
|
||||
doc_class ('a::one, 'b, 'c) test0 = text_section +
|
||||
testa :: "'a list"
|
||||
testb :: "'b list"
|
||||
testc :: "'c list"
|
||||
|
||||
typ\<open>('a, 'b, 'c) test0\<close>
|
||||
typ\<open>('a, 'b, 'c, 'd) test0_scheme\<close>
|
||||
|
||||
find_consts name:"test0"
|
||||
find_theorems name:"test0"
|
||||
|
||||
|
||||
doc_class 'a test1 = text_section +
|
||||
test1 :: "'a list"
|
||||
invariant author_finite_test :: "finite (authored_by \<sigma>)"
|
||||
invariant force_level_test :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 1"
|
||||
|
||||
find_consts name:"test1*inv"
|
||||
find_theorems name:"test1*inv"
|
||||
|
||||
text*[church::Author, email="\<open>b\<close>"]\<open>\<close>
|
||||
text\<open>@{Author "church"}\<close>
|
||||
value*\<open>@{Author \<open>church\<close>}\<close>
|
||||
|
||||
text\<open>\<^value_>\<open>@{Author \<open>church\<close>}\<close>\<close>
|
||||
|
||||
doc_class ('a, 'b) test2 = "'a test1" +
|
||||
test2 :: "'b list"
|
||||
type_synonym ('a, 'b) test2_syn = "('a, 'b) test2"
|
||||
|
||||
find_theorems name:"test2"
|
||||
|
||||
declare [[invariants_checking_with_tactics]]
|
||||
text*[testtest::"('a, int) test2", level = "Some 2", authored_by = "{@{Author \<open>church\<close>}}", test2 = "[1]"]\<open>\<close>
|
||||
value*\<open>test2 @{test2 \<open>testtest\<close>}\<close>
|
||||
|
||||
text*[testtest2''::"(nat, int) test2", test1 = "[2::nat, 3]", test2 = "[4::int, 5]", level = "Some (2::int)"]\<open>\<close>
|
||||
value*\<open>test1 @{test2 \<open>testtest2''\<close>}\<close>
|
||||
declare [[invariants_checking_with_tactics = false]]
|
||||
|
||||
ML\<open>
|
||||
val t = Syntax.parse_term \<^context> "@{test2 \<open>testtest\<close>}"
|
||||
|
||||
\<close>
|
||||
ML\<open>
|
||||
val t = \<^term>\<open>test2.make 8142730 Test_Parametric_Classes_2_test2_authored_by_Attribute_Not_Initialized Test_Parametric_Classes_2_test2_level_Attribute_Not_Initialized Test_Parametric_Classes_2_test2_test1_Attribute_Not_Initialized
|
||||
Test_Parametric_Classes_2_test2_test2_Attribute_Not_Initialized
|
||||
\<lparr>authored_by := bot, level := None\<rparr> \<close>
|
||||
\<close>
|
||||
|
||||
text\<open>test2 = "[1::'a::one]" should be test2 = "[1::int]" because the type of testtest4 is ('a::one, int) test2:\<close>
|
||||
text-assert-error[testtest4::"('a::one, int) test2", level = "Some 2", authored_by = "{@{Author \<open>church\<close>}}", test2 = "[1::'a::one]"]\<open>\<close>
|
||||
\<open>Type unification failed\<close>
|
||||
text\<open>Indeed this definition fails:\<close>
|
||||
definition-assert-error testtest2::"('a::one, int) test2" where "testtest2 \<equiv>
|
||||
test2.make 11953346
|
||||
{@{Author \<open>church\<close>}}
|
||||
(Some 2)
|
||||
[]
|
||||
[]
|
||||
\<lparr>authored_by := bot
|
||||
, level := None, level := Some 2
|
||||
, authored_by := insert \<lparr>Author.tag_attribute = 11953164, email = []\<rparr> bot
|
||||
, test2.test2 := [1::('a::one)]\<rparr> "
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
text\<open>For now, no more support of type synonyms as parent:\<close>
|
||||
doc_class ('a, 'b, 'c) A =
|
||||
a :: "'a list"
|
||||
b :: "'b list"
|
||||
c :: "'c list"
|
||||
type_synonym ('a, 'b, 'c) A_syn = "('a, 'b, 'c) A"
|
||||
|
||||
doc_class-assert-error ('a, 'b, 'c, 'd) B = "('b, 'c, 'd) A_syn" +
|
||||
d ::"'a::one list" <= "[1]"
|
||||
\<open>Undefined onto class: "A_syn"\<close>
|
||||
|
||||
|
||||
declare[[invariants_checking_with_tactics]]
|
||||
|
||||
definition* testauthor0 where "testauthor0 \<equiv> \<lparr>Author.tag_attribute = 5, email = \<open>test_author_email\<close>\<rparr>"
|
||||
definition* testauthor :: "Author" where "testauthor \<equiv> \<lparr>Author.tag_attribute = 5, email = \<open>test_author_email\<close>\<rparr>"
|
||||
definition* testauthor2 :: "Author" where "testauthor2 \<equiv> \<lparr>Author.tag_attribute = 5, email = \<open>test_author_email\<close>\<rparr> \<lparr>email := \<open>test_author_email_2\<close> \<rparr>"
|
||||
definition* testauthor3 :: "Author" where "testauthor3 \<equiv> testauthor \<lparr>email := \<open>test_author_email_2\<close> \<rparr>"
|
||||
|
||||
ML\<open>
|
||||
val ctxt = \<^context>
|
||||
val input0 = Syntax.read_input "@{Author \<open>church\<close>}"
|
||||
val source = Syntax.read_input "\<^term_>\<open>@{Author \<open>church\<close>}\<close>"
|
||||
val input = source
|
||||
val tt = Document_Output.output_document ctxt {markdown = false} input
|
||||
\<close>
|
||||
|
||||
doc_class ('a, 'b) elaborate1 =
|
||||
a :: "'a list"
|
||||
b :: "'b list"
|
||||
|
||||
doc_class ('a, 'b) elaborate2 =
|
||||
c :: "('a, 'b) elaborate1 list"
|
||||
|
||||
doc_class ('a, 'b) elaborate3 =
|
||||
d :: "('a, 'b) elaborate2 list"
|
||||
|
||||
text*[test_elaborate1::"('a::one, 'b) elaborate1", a = "[1]"]\<open>\<close>
|
||||
|
||||
term*\<open>@{elaborate1 \<open>test_elaborate1\<close>}\<close>
|
||||
value* [nbe]\<open>@{elaborate1 \<open>test_elaborate1\<close>}\<close>
|
||||
|
||||
|
||||
text*[test_elaborate2::"('a::one, 'b) elaborate2", c = "[@{elaborate1 \<open>test_elaborate1\<close>}]"]\<open>\<close>
|
||||
|
||||
text*[test_elaborate3::"('a::one, 'b) elaborate3", d = "[@{elaborate2 \<open>test_elaborate2\<close>}]"]\<open>\<close>
|
||||
|
||||
term*\<open>(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \<open>test_elaborate3\<close>})))\<close>
|
||||
value*\<open>(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \<open>test_elaborate3\<close>})))\<close>
|
||||
|
||||
|
||||
text\<open>
|
||||
The term antiquotation is considered a ground term.
|
||||
Then its type here is \<^typ>\<open>'a::one list\<close> with \<open>'a\<close> a fixed-type variable.
|
||||
So the following definition only works because the parameter of the class is also \<open>'a\<close>.\<close>
|
||||
declare[[ML_print_depth = 10000]]
|
||||
doc_class 'a elaborate4 =
|
||||
d :: "'a::one list" <= "(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \<open>test_elaborate3\<close>})))"
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
declare[[ML_print_depth = 10000]]
|
||||
text*[test_elaborate4::"'a::one elaborate4"]\<open>\<close>
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
|
||||
text\<open>Bug:
|
||||
As the term antiquotation is considered as a ground term,
|
||||
its type \<^typ>\<open>'a::one list\<close> conflicts with the type of the attribute \<^typ>\<open>int list\<close>.
|
||||
To support the instantiation of the term antiquotation as an \<^typ>\<open>int list\<close>,
|
||||
the term antiquotation should have the same behavior as a constant definition,
|
||||
which is not the case for now.\<close>
|
||||
declare[[ML_print_depth = 10000]]
|
||||
doc_class-assert-error elaborate4' =
|
||||
d :: "int list" <= "(concat o concat) ((map o map) a (map c (elaborate3.d @{elaborate3 \<open>test_elaborate3\<close>})))"
|
||||
\<open>Type unification failed\<close>
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
text\<open>The behavior we want to support: \<close>
|
||||
|
||||
definition one_list :: "'a::one list" where "one_list \<equiv> [1]"
|
||||
|
||||
text\<open>the constant \<^const>\<open>one_list\<close> can be instantiate as an \<^typ>\<open>int list\<close>:\<close>
|
||||
doc_class elaborate4'' =
|
||||
d :: "int list" <= "one_list"
|
||||
|
||||
declare[[ML_print_depth = 10000]]
|
||||
text*[test_elaborate4''::"elaborate4''"]\<open>\<close>
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
|
||||
term*\<open>concat (map a (elaborate2.c @{elaborate2 \<open>test_elaborate2\<close>}))\<close>
|
||||
value*\<open>concat (map a (elaborate2.c @{elaborate2 \<open>test_elaborate2\<close>}))\<close>
|
||||
|
||||
text\<open>
|
||||
The term antiquotation is considered a ground term.
|
||||
Then its type here is \<^typ>\<open>'a::one list\<close> with \<open>'a\<close> a fixed-type variable.
|
||||
So the following definition only works because the parameter of the class is also \<open>'a\<close>.\<close>
|
||||
declare[[ML_print_depth = 10000]]
|
||||
doc_class 'a elaborate5 =
|
||||
d :: "'a::one list" <= "concat (map a (elaborate2.c @{elaborate2 \<open>test_elaborate2\<close>}))"
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
text\<open>Bug: But when defining an instance, as we use a \<open>'b\<close> variable to specify the type
|
||||
of the instance (\<^typ>\<open>'b::one elaborate5\<close>, then the unification fails\<close>
|
||||
declare[[ML_print_depth = 10000]]
|
||||
text-assert-error[test_elaborate5::"'b::one elaborate5"]\<open>\<close>
|
||||
\<open>Inconsistent sort constraints for type variable "'b"\<close>
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
text\<open>Bug:
|
||||
The term antiquotation is considered a ground term.
|
||||
Then its type here is \<^typ>\<open>'a::one list\<close> with \<open>'a\<close> a fixed-type variable.
|
||||
So it is not compatible with the type of the attribute \<^typ>\<open>'a::numeral list\<close>\<close>
|
||||
doc_class-assert-error 'a elaborate5' =
|
||||
d :: "'a::numeral list" <= "concat (map a (elaborate2.c @{elaborate2 \<open>test_elaborate2\<close>}))"
|
||||
\<open>Sort constraint\<close>
|
||||
|
||||
text\<open>The behavior we want to support: \<close>
|
||||
|
||||
text\<open>the constant \<^const>\<open>one_list\<close> can be instantiate as an \<^typ>\<open>'a::numeral list\<close>:\<close>
|
||||
doc_class 'a elaborate5'' =
|
||||
d :: "'a::numeral list" <= "one_list"
|
||||
|
||||
|
||||
text*[test_elaborate1a::"('a::one, int) elaborate1", a = "[1]", b = "[2]"]\<open>\<close>
|
||||
|
||||
term*\<open>@{elaborate1 \<open>test_elaborate1a\<close>}\<close>
|
||||
value* [nbe]\<open>@{elaborate1 \<open>test_elaborate1a\<close>}\<close>
|
||||
|
||||
text*[test_elaborate2a::"('a::one, int) elaborate2", c = "[@{elaborate1 \<open>test_elaborate1a\<close>}]"]\<open>\<close>
|
||||
|
||||
text*[test_elaborate3a::"('a::one, int) elaborate3", d = "[@{elaborate2 \<open>test_elaborate2a\<close>}]"]\<open>\<close>
|
||||
|
||||
text\<open>
|
||||
The term antiquotation is considered a ground term.
|
||||
Then its type here is \<^typ>\<open>'a::one list\<close> with \<open>'a\<close> a fixed-type variable.
|
||||
So the following definition only works because the parameter of the class is also \<open>'a\<close>.\<close>
|
||||
definition* test_elaborate3_embedding ::"'a::one list"
|
||||
where "test_elaborate3_embedding \<equiv> (concat o concat) ((map o map) elaborate1.a (map elaborate2.c (elaborate3.d @{elaborate3 \<open>test_elaborate3a\<close>})))"
|
||||
|
||||
text\<open>Bug:
|
||||
The term antiquotation is considered a ground term.
|
||||
Then its type here is \<^typ>\<open>'a::one list\<close> with \<open>'a\<close> a fixed-type variable.
|
||||
So it is not compatible with the specified type of the definition \<^typ>\<open>int list\<close>:\<close>
|
||||
definition-assert-error test_elaborate3_embedding'::"int list"
|
||||
where "test_elaborate3_embedding' \<equiv> (concat o concat) ((map o map) elaborate1.a (map elaborate2.c (elaborate3.d @{elaborate3 \<open>test_elaborate3a\<close>})))"
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
term*\<open>@{elaborate1 \<open>test_elaborate1a\<close>}\<close>
|
||||
value* [nbe]\<open>@{elaborate1 \<open>test_elaborate1a\<close>}\<close>
|
||||
|
||||
|
||||
record ('a, 'b) elaborate1' =
|
||||
a :: "'a list"
|
||||
b :: "'b list"
|
||||
|
||||
record ('a, 'b) elaborate2' =
|
||||
c :: "('a, 'b) elaborate1' list"
|
||||
|
||||
record ('a, 'b) elaborate3' =
|
||||
d :: "('a, 'b) elaborate2' list"
|
||||
|
||||
doc_class 'a one =
|
||||
a::"'a list"
|
||||
|
||||
text*[test_one::"'a::one one", a = "[1]"]\<open>\<close>
|
||||
|
||||
value* [nbe] \<open>@{one \<open>test_one\<close>}\<close>
|
||||
|
||||
term*\<open>a @{one \<open>test_one\<close>}\<close>
|
||||
|
||||
text\<open>Bug:
|
||||
The term antiquotation is considered a ground term.
|
||||
Then its type here is \<^typ>\<open>'a::one list\<close> with \<open>'a\<close> a fixed-type variable.
|
||||
So it is not compatible with the specified type of the definition \<^typ>\<open>('b::one, 'a::numeral) elaborate1'\<close>
|
||||
because the term antiquotation can not be instantiate as a \<^typ>\<open>'b::one list\<close>
|
||||
and the \<open>'a\<close> is checked against the \<open>'a::numeral\<close> instance type parameter:\<close>
|
||||
definition-assert-error test_elaborate1'::"('b::one, 'a::numeral) elaborate1'"
|
||||
where "test_elaborate1' \<equiv> \<lparr> elaborate1'.a = a @{one \<open>test_one\<close>}, b = [2] \<rparr>"
|
||||
\<open>Sort constraint\<close>
|
||||
|
||||
text\<open>This is the same behavior as the following:\<close>
|
||||
definition-assert-error test_elaborate10::"('b::one, 'a::numeral) elaborate1'"
|
||||
where "test_elaborate10 \<equiv> \<lparr> elaborate1'.a = [1::'a::one], b = [2] \<rparr>"
|
||||
\<open>Sort constraint\<close>
|
||||
|
||||
definition-assert-error test_elaborate11::"('b::one, 'c::numeral) elaborate1'"
|
||||
where "test_elaborate11 \<equiv> \<lparr> elaborate1'.a = [1::'a::one], b = [2] \<rparr>"
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
text\<open>So this works:\<close>
|
||||
definition* test_elaborate1''::"('a::one, 'b::numeral) elaborate1'"
|
||||
where "test_elaborate1'' \<equiv> \<lparr> elaborate1'.a = a @{one \<open>test_one\<close>}, b = [2] \<rparr>"
|
||||
|
||||
term \<open>elaborate1'.a test_elaborate1''\<close>
|
||||
value [nbe] \<open>elaborate1'.a test_elaborate1''\<close>
|
||||
|
||||
text\<open>But if we embed the term antiquotation in a definition,
|
||||
the type unification works:\<close>
|
||||
definition* onedef where "onedef \<equiv> @{one \<open>test_one\<close>}"
|
||||
|
||||
definition test_elaborate1'''::"('b::one, 'a::numeral) elaborate1'"
|
||||
where "test_elaborate1''' \<equiv> \<lparr> elaborate1'.a = a onedef, b = [2] \<rparr>"
|
||||
|
||||
value [nbe] \<open>elaborate1'.a test_elaborate1'''\<close>
|
||||
|
||||
|
||||
definition test_elaborate2'::"(int, 'b::numeral) elaborate2'"
|
||||
where "test_elaborate2' \<equiv> \<lparr> c = [test_elaborate1''] \<rparr>"
|
||||
|
||||
definition test_elaborate3'::"(int, 'b::numeral) elaborate3'"
|
||||
where "test_elaborate3' \<equiv> \<lparr> d = [test_elaborate2'] \<rparr>"
|
||||
|
||||
|
||||
doc_class 'a test3' =
|
||||
test3 :: "int"
|
||||
test3' :: "'a list"
|
||||
|
||||
text*[testtest30::"'a::one test3'", test3'="[1]"]\<open>\<close>
|
||||
text-assert-error[testtest30::"'a test3'", test3'="[1]"]\<open>\<close>
|
||||
\<open>Type unification failed: Variable\<close>
|
||||
|
||||
find_consts name:"test3'.test3"
|
||||
definition testone :: "'a::one test3'" where "testone \<equiv> \<lparr>tag_attribute = 5, test3 = 3, test3' = [1] \<rparr>"
|
||||
definition* testtwo :: "'a::one test3'" where "testtwo \<equiv> \<lparr>tag_attribute = 5, test3 = 1, test3' = [1] \<rparr>\<lparr> test3 := 1\<rparr>"
|
||||
|
||||
text*[testtest3'::"'a test3'", test3 = "1"]\<open>\<close>
|
||||
|
||||
declare [[show_sorts = false]]
|
||||
definition* testtest30 :: "'a test3'" where "testtest30 \<equiv> \<lparr>tag_attribute = 12, test3 = 2, test3' = [] \<rparr>"
|
||||
update_instance*[testtest3'::"'a test3'", test3 := "2"]
|
||||
|
||||
ML\<open>
|
||||
val t = @{value_ [nbe] \<open>test3 @{test3' \<open>testtest3'\<close>}\<close>}
|
||||
val tt = HOLogic.dest_number t
|
||||
\<close>
|
||||
|
||||
text\<open>@{value_ [] [nbe] \<open>test3 @{test3' \<open>testtest3'\<close>}\<close>}\<close>
|
||||
|
||||
update_instance*[testtest3'::"'a test3'", test3 += "2"]
|
||||
|
||||
ML\<open>
|
||||
val t = @{value_ [nbe] \<open>test3 @{test3' \<open>testtest3'\<close>}\<close>}
|
||||
val tt = HOLogic.dest_number t
|
||||
\<close>
|
||||
|
||||
value\<open>test3 \<lparr> tag_attribute = 1, test3 = 2, test3' = [2::int, 3] \<rparr>\<close>
|
||||
value\<open>test3 \<lparr> tag_attribute = 1, test3 = 2, test3' = [2::int, 3] \<rparr>\<close>
|
||||
find_consts name:"test3'.test3"
|
||||
|
||||
ML\<open>
|
||||
val test_value = @{value_ \<open>@{test3' \<open>testtest3'\<close>}\<close>}
|
||||
|
||||
\<close>
|
||||
declare [[show_sorts = false]]
|
||||
update_instance*[testtest3'::"'a test3'", test3 += "3"]
|
||||
declare [[show_sorts = false]]
|
||||
value*\<open>test3 @{test3' \<open>testtest3'\<close>}\<close>
|
||||
value\<open>test3 \<lparr> tag_attribute = 12, test3 = 5, test3' = AAAAAA\<rparr>\<close>
|
||||
|
||||
find_consts name:"test3'.test3"
|
||||
|
||||
text*[testtest3''::"int test3'", test3 = "1"]\<open>\<close>
|
||||
|
||||
update_instance*[testtest3''::"int test3'", test3' += "[3]"]
|
||||
|
||||
value*\<open>test3' @{test3' \<open>testtest3''\<close>}\<close>
|
||||
|
||||
update_instance*[testtest3''::"int test3'", test3' := "[3]"]
|
||||
|
||||
value*\<open>test3' @{test3' \<open>testtest3''\<close>}\<close>
|
||||
|
||||
update_instance*[testtest3''::"int test3'", test3' += "[2,5]"]
|
||||
|
||||
value*\<open>test3' @{test3' \<open>testtest3''\<close>}\<close>
|
||||
|
||||
definition testeq where "testeq \<equiv> \<lambda>x. x"
|
||||
find_consts name:"test3'.ma"
|
||||
|
||||
text-assert-error[testtest3''::"int test3'", test3 = "1", test3' = "[3::'a::numeral]"]\<open>\<close>
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
text-assert-error[testtest3''::"int test3'", test3 = "1", test3' = "[3]"]\<open>\<close>
|
||||
\<open>Duplicate instance declaration\<close>
|
||||
|
||||
|
||||
declare[[ML_print_depth = 10000]]
|
||||
definition-assert-error testest3''' :: "int test3'"
|
||||
where "testest3''' \<equiv> \<lparr> tag_attribute = 12, test3 = 1, test3' = [2]\<rparr>\<lparr> test3' := [3::'a::numeral]\<rparr>"
|
||||
\<open>Type unification failed\<close>
|
||||
declare[[ML_print_depth = 20]]
|
||||
|
||||
value* \<open>test3 @{test3' \<open>testtest3''\<close>}\<close>
|
||||
value* \<open>\<lparr> tag_attribute = 12, test3 = 1, test3' = [2]\<rparr>\<lparr> test3' := [3::int]\<rparr>\<close>
|
||||
value* \<open>test3 (\<lparr> tag_attribute = 12, test3 = 1, test3' = [2]\<rparr>\<lparr> test3' := [3::int]\<rparr>)\<close>
|
||||
term*\<open>@{test3' \<open>testtest3''\<close>}\<close>
|
||||
|
||||
ML\<open>val t = \<^term_>\<open>test3 @{test3' \<open>testtest3''\<close>}\<close>\<close>
|
||||
|
||||
value\<open>test3 \<lparr> tag_attribute = 12, test3 = 2, test3' = [2::int ,3]\<rparr>\<close>
|
||||
|
||||
find_consts name:"test3'.test3"
|
||||
find_consts name:"Isabelle_DOF_doc_class_test3'"
|
||||
update_instance*[testtest3''::"int test3'", test3 := "2"]
|
||||
ML\<open>
|
||||
val t = @{value_ [nbe] \<open>test3 @{test3' \<open>testtest3''\<close>}\<close>}
|
||||
val tt = HOLogic.dest_number t |> snd
|
||||
\<close>
|
||||
|
||||
doc_class 'a testa =
|
||||
a:: "'a set"
|
||||
b:: "int set"
|
||||
|
||||
text*[testtesta::"'a testa", b = "{2}"]\<open>\<close>
|
||||
update_instance*[testtesta::"'a testa", b += "{3}"]
|
||||
|
||||
ML\<open>
|
||||
val t = @{value_ [nbe] \<open>b @{testa \<open>testtesta\<close>}\<close>}
|
||||
val tt = HOLogic.dest_set t |> map (HOLogic.dest_number #> snd)
|
||||
\<close>
|
||||
|
||||
update_instance-assert-error[testtesta::"'a::numeral testa", a := "{2::'a::numeral}"]
|
||||
\<open>incompatible classes:'a Test_Polymorphic_Classes.testa:'a::numeral Test_Polymorphic_Classes.testa\<close>
|
||||
|
||||
text*[testtesta'::"'a::numeral testa", a = "{2}"]\<open>\<close>
|
||||
|
||||
update_instance*[testtesta'::"'a::numeral testa", a += "{3}"]
|
||||
|
||||
ML\<open>
|
||||
val t = @{value_ [nbe] \<open>a @{testa \<open>testtesta'\<close>}\<close>}
|
||||
\<close>
|
||||
|
||||
update_instance-assert-error[testtesta'::"'a::numeral testa", a += "{3::int}"]
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
definition-assert-error testtesta'' :: "'a::numeral testa"
|
||||
where "testtesta'' \<equiv> \<lparr>tag_attribute = 5, a = {1}, b = {1} \<rparr>\<lparr> a := {1::int}\<rparr>"
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
update_instance*[testtesta'::"'a::numeral testa", b := "{3::int}"]
|
||||
ML\<open>
|
||||
val t = @{value_ [nbe] \<open>b @{testa \<open>testtesta'\<close>}\<close>}
|
||||
\<close>
|
||||
|
||||
value* [nbe] \<open>b @{testa \<open>testtesta'\<close>}\<close>
|
||||
|
||||
definition testtesta'' :: "'a::numeral testa"
|
||||
where "testtesta'' \<equiv> \<lparr>tag_attribute = 5, a = {1}, b = {1} \<rparr>\<lparr> b := {2::int}\<rparr>"
|
||||
|
||||
value [nbe]\<open>b testtesta''\<close>
|
||||
|
||||
doc_class 'a test3 =
|
||||
test3 :: "'a list"
|
||||
type_synonym 'a test3_syn = "'a test3"
|
||||
|
||||
text*[testtest3::"int test3", test3 = "[1]"]\<open>\<close>
|
||||
update_instance*[testtest3::"int test3", test3 := "[2]"]
|
||||
ML\<open>
|
||||
val t = \<^term_>\<open>test3 @{test3 \<open>testtest3\<close>}\<close>
|
||||
val tt = \<^value_>\<open>test3 @{test3 \<open>testtest3\<close>}\<close> |> HOLogic.dest_list |> map HOLogic.dest_number
|
||||
\<close>
|
||||
|
||||
update_instance*[testtest3::"int test3", test3 += "[3]"]
|
||||
value*\<open>test3 @{test3 \<open>testtest3\<close>}\<close>
|
||||
|
||||
|
||||
doc_class ('a, 'b) test4 = "'a test3" +
|
||||
test4 :: "'b list"
|
||||
|
||||
definition-assert-error testtest0'::"('a::one, int) test4" where "testtest0' \<equiv>
|
||||
test4.make 11953346
|
||||
[] [1::('a::one)]"
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
definition-assert-error testtest0''::"('a, int) test4" where "testtest0'' \<equiv>
|
||||
test4.make 11953346
|
||||
[1] Test_Parametric_Classes_2_test4_test4_Attribute_Not_Initialized"
|
||||
\<open>Type unification failed\<close>
|
||||
|
||||
text\<open>Must fail because the equivalent definition
|
||||
\<open>testtest0'\<close> below fails
|
||||
due to the constraint in the where [1::('a::one)] is not an \<^typ>\<open>int list\<close>
|
||||
but an \<^typ>\<open>'a::one list\<close> list \<close>
|
||||
text-assert-error[testtest0::"('a::one, int) test4", test4 = "[1::'a::one]"]\<open>\<close>
|
||||
\<open>Type unification failed\<close>
|
||||
update_instance-assert-error[testtest0::"('a::one, int) test4"]
|
||||
\<open>Undefined instance: "testtest0"\<close>
|
||||
|
||||
value-assert-error\<open>@{test4 \<open>testtest0\<close>}\<close>\<open>Undefined instance: "testtest0"\<close>
|
||||
|
||||
definition testtest0''::"('a, int) test4" where "testtest0'' \<equiv>
|
||||
\<lparr> tag_attribute = 11953346, test3 = [], test4 = [1]\<rparr>\<lparr>test4 := [2]\<rparr>"
|
||||
|
||||
definition testtest0'''::"('a, int) test4" where "testtest0''' \<equiv>
|
||||
\<lparr> tag_attribute = 11953346, test3 = [], test4 = [1]\<rparr>\<lparr>test4 := [2]\<rparr>"
|
||||
|
||||
|
||||
value [nbe] \<open>test3 testtest0''\<close>
|
||||
|
||||
type_synonym notion = string
|
||||
|
||||
doc_class Introduction = text_section +
|
||||
authored_by :: "Author set" <= "UNIV"
|
||||
uses :: "notion set"
|
||||
invariant author_finite :: "finite (authored_by \<sigma>)"
|
||||
and force_level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 1"
|
||||
|
||||
doc_class claim = Introduction +
|
||||
based_on :: "notion list"
|
||||
|
||||
doc_class technical = text_section +
|
||||
formal_results :: "thm list"
|
||||
|
||||
doc_class "definition" = technical +
|
||||
is_formal :: "bool"
|
||||
property :: "term list" <= "[]"
|
||||
|
||||
datatype kind = expert_opinion | argument | "proof"
|
||||
|
||||
doc_class result = technical +
|
||||
evidence :: kind
|
||||
property :: "thm list" <= "[]"
|
||||
invariant has_property :: "evidence \<sigma> = proof \<longleftrightarrow> property \<sigma> \<noteq> []"
|
||||
|
||||
doc_class example = technical +
|
||||
referring_to :: "(notion + definition) set" <= "{}"
|
||||
|
||||
doc_class conclusion = text_section +
|
||||
establish :: "(claim \<times> result) set"
|
||||
invariant establish_defined :: "\<forall> x. x \<in> Domain (establish \<sigma>)
|
||||
\<longrightarrow> (\<exists> y \<in> Range (establish \<sigma>). (x, y) \<in> establish \<sigma>)"
|
||||
|
||||
text\<open>Next we define some instances (docitems): \<close>
|
||||
|
||||
declare[[invariants_checking_with_tactics = true]]
|
||||
|
||||
text*[church1::Author, email="\<open>church@lambda.org\<close>"]\<open>\<close>
|
||||
|
||||
text*[resultProof::result, evidence = "proof", property="[@{thm \<open>HOL.refl\<close>}]"]\<open>\<close>
|
||||
text*[resultArgument::result, evidence = "argument"]\<open>\<close>
|
||||
|
||||
text\<open>The invariants \<^theory_text>\<open>author_finite\<close> and \<^theory_text>\<open>establish_defined\<close> can not be checked directly
|
||||
and need a little help.
|
||||
We can set the \<open>invariants_checking_with_tactics\<close> theory attribute to help the checking.
|
||||
It will enable a basic tactic, using unfold and auto:\<close>
|
||||
|
||||
declare[[invariants_checking_with_tactics = true]]
|
||||
|
||||
text*[curry::Author, email="\<open>curry@lambda.org\<close>"]\<open>\<close>
|
||||
text*[introduction2::Introduction, authored_by = "{@{Author \<open>church\<close>}}", level = "Some 2"]\<open>\<close>
|
||||
(* When not commented, should violated the invariant:
|
||||
update_instance*[introduction2::Introduction
|
||||
, authored_by := "{@{Author \<open>church\<close>}}"
|
||||
, level := "Some 1"]
|
||||
*)
|
||||
|
||||
text*[introduction_test_parsed_elaborate::Introduction, authored_by = "authored_by @{Introduction \<open>introduction2\<close>}", level = "Some 2"]\<open>\<close>
|
||||
term*\<open>authored_by @{Introduction \<open>introduction_test_parsed_elaborate\<close>}\<close>
|
||||
value*\<open>authored_by @{Introduction \<open>introduction_test_parsed_elaborate\<close>}\<close>
|
||||
text*[introduction3::Introduction, authored_by = "{@{Author \<open>church\<close>}}", level = "Some 2"]\<open>\<close>
|
||||
text*[introduction4::Introduction, authored_by = "{@{Author \<open>curry\<close>}}", level = "Some 4"]\<open>\<close>
|
||||
|
||||
text*[resultProof2::result, evidence = "proof", property="[@{thm \<open>HOL.sym\<close>}]"]\<open>\<close>
|
||||
|
||||
text\<open>Then we can evaluate expressions with instances:\<close>
|
||||
|
||||
term*\<open>authored_by @{Introduction \<open>introduction2\<close>} = authored_by @{Introduction \<open>introduction3\<close>}\<close>
|
||||
value*\<open>authored_by @{Introduction \<open>introduction2\<close>} = authored_by @{Introduction \<open>introduction3\<close>}\<close>
|
||||
value*\<open>authored_by @{Introduction \<open>introduction2\<close>} = authored_by @{Introduction \<open>introduction4\<close>}\<close>
|
||||
|
||||
value*\<open>@{Introduction \<open>introduction2\<close>}\<close>
|
||||
|
||||
value*\<open>{@{Author \<open>curry\<close>}} = {@{Author \<open>church\<close>}}\<close>
|
||||
|
||||
term*\<open>property @{result \<open>resultProof\<close>} = property @{result \<open>resultProof2\<close>}\<close>
|
||||
value*\<open>property @{result \<open>resultProof\<close>} = property @{result \<open>resultProof2\<close>}\<close>
|
||||
|
||||
value*\<open>evidence @{result \<open>resultProof\<close>} = evidence @{result \<open>resultProof2\<close>}\<close>
|
||||
|
||||
declare[[invariants_checking_with_tactics = false]]
|
||||
|
||||
declare[[invariants_strict_checking = false]]
|
||||
|
||||
doc_class test_A =
|
||||
level :: "int option"
|
||||
x :: int
|
||||
|
||||
doc_class test_B =
|
||||
level :: "int option"
|
||||
x :: "string" (* attributes live in their own name-space *)
|
||||
y :: "string list" <= "[]" (* and can have arbitrary type constructors *)
|
||||
(* LaTeX may have problems with this, though *)
|
||||
|
||||
text\<open>We may even use type-synonyms for class synonyms ...\<close>
|
||||
type_synonym test_XX = test_B
|
||||
|
||||
doc_class test_C0 = test_B +
|
||||
z :: "test_A option" <= None (* A LINK, i.e. an attribute that has a type
|
||||
referring to a document class. Mathematical
|
||||
relations over document items can be modeled. *)
|
||||
g :: "thm" (* a reference to the proxy-type 'thm' allowing
|
||||
|
||||
to denote references to theorems inside attributes *)
|
||||
|
||||
|
||||
doc_class test_C = test_B +
|
||||
z :: "test_A option" <= None (* A LINK, i.e. an attribute that has a type
|
||||
referring to a document class. Mathematical
|
||||
relations over document items can be modeled. *)
|
||||
g :: "thm" (* a reference to the proxy-type 'thm' allowing
|
||||
|
||||
to denote references to theorems inside attributes *)
|
||||
|
||||
datatype enum = X1 | X2 | X3 (* we add an enumeration type ... *)
|
||||
|
||||
|
||||
doc_class test_D = test_B +
|
||||
x :: "string" <= "\<open>def \<longrightarrow>\<close>" (* overriding default *)
|
||||
a1 :: enum <= "X2" (* class - definitions may be mixed
|
||||
with arbitrary HOL-commands, thus
|
||||
also local definitions of enumerations *)
|
||||
a2 :: int <= 0
|
||||
|
||||
doc_class test_E = test_D +
|
||||
x :: "string" <= "''qed''" (* overriding default *)
|
||||
|
||||
doc_class test_G = test_C +
|
||||
g :: "thm" <= "@{thm \<open>HOL.refl\<close>}" (* warning overriding attribute expected*)
|
||||
|
||||
doc_class 'a test_F =
|
||||
properties :: "term list"
|
||||
r :: "thm list"
|
||||
u :: "file"
|
||||
s :: "typ list"
|
||||
b :: "(test_A \<times> 'a test_C_scheme) set" <= "{}" (* This is a relation link, roughly corresponding
|
||||
to an association class. It can be used to track
|
||||
claims to result - relations, for example.*)
|
||||
b' :: "(test_A \<times> 'a test_C_scheme) list" <= "[]"
|
||||
invariant br :: "r \<sigma> \<noteq> [] \<and> card(b \<sigma>) \<ge> 3"
|
||||
and br':: "r \<sigma> \<noteq> [] \<and> length(b' \<sigma>) \<ge> 3"
|
||||
and cr :: "properties \<sigma> \<noteq> []"
|
||||
|
||||
lemma*[l::test_E] local_sample_lemma :
|
||||
"@{thm \<open>refl\<close>} = @{thm ''refl''}" by simp
|
||||
\<comment> \<open>un-evaluated references are similar to
|
||||
uninterpreted constants. Not much is known
|
||||
about them, but that doesn't mean that we
|
||||
can't prove some basics over them...\<close>
|
||||
|
||||
text*[xcv1::test_A, x=5]\<open>Lorem ipsum ...\<close>
|
||||
text*[xcv2::test_C, g="@{thm ''HOL.refl''}"]\<open>Lorem ipsum ...\<close>
|
||||
text*[xcv3::test_A, x=7]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
text\<open>Bug: For now, the implementation is no more compatible with the docitem term-antiquotation:\<close>
|
||||
text-assert-error[xcv10::"unit test_F", r="[@{thm ''HOL.refl''},
|
||||
@{thm \<open>local_sample_lemma\<close>}]", (* long names required *)
|
||||
b="{(@{docitem ''xcv1''},@{docitem \<open>xcv2\<close>})}", (* notations \<open>...\<close> vs. ''...'' *)
|
||||
s="[@{typ \<open>int list\<close>}]",
|
||||
properties = "[@{term \<open>H \<longrightarrow> H\<close>}]" (* notation \<open>...\<close> required for UTF8*)
|
||||
]\<open>Lorem ipsum ...\<close>\<open>Type unification failed\<close>
|
||||
|
||||
text*[xcv11::"unit test_F", r="[@{thm ''HOL.refl''},
|
||||
@{thm \<open>local_sample_lemma\<close>}]", (* long names required *)
|
||||
b="{(@{test_A ''xcv1''},@{test_C \<open>xcv2\<close>})}", (* notations \<open>...\<close> vs. ''...'' *)
|
||||
s="[@{typ \<open>int list\<close>}]",
|
||||
properties = "[@{term \<open>H \<longrightarrow> H\<close>}]" (* notation \<open>...\<close> required for UTF8*)
|
||||
]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
value*\<open>b @{test_F \<open>xcv11\<close>}\<close>
|
||||
|
||||
typ\<open>unit test_F\<close>
|
||||
|
||||
text*[xcv4::"unit test_F", r="[@{thm ''HOL.refl''},
|
||||
@{thm \<open>local_sample_lemma\<close>}]", (* long names required *)
|
||||
b="{(@{test_A ''xcv1''},@{test_C \<open>xcv2\<close>})}", (* notations \<open>...\<close> vs. ''...'' *)
|
||||
s="[@{typ \<open>int list\<close>}]",
|
||||
properties = "[@{term \<open>H \<longrightarrow> H\<close>}]" (* notation \<open>...\<close> required for UTF8*)
|
||||
]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
value*\<open>b @{test_F \<open>xcv4\<close>}\<close>
|
||||
|
||||
text*[xcv5::test_G, g="@{thm \<open>HOL.sym\<close>}"]\<open>Lorem ipsum ...\<close>
|
||||
|
||||
update_instance*[xcv4::"unit test_F", b+="{(@{test_A ''xcv3''},@{test_C ''xcv2''})}"]
|
||||
|
||||
update_instance-assert-error[xcv4::"unit test_F", b+="{(@{test_A ''xcv3''},@{test_G ''xcv5''})}"]
|
||||
\<open>Type unification failed: Clash of types\<close>
|
||||
|
||||
|
||||
|
||||
typ\<open>unit test_G_ext\<close>
|
||||
typ\<open>\<lparr>test_G.tag_attribute :: int\<rparr>\<close>
|
||||
text*[xcv6::"\<lparr>test_G.tag_attribute :: int\<rparr> test_F", b="{(@{test_A ''xcv3''},@{test_G ''xcv5''})}"]\<open>\<close>
|
||||
|
||||
|
||||
text\<open>\<open>lemma*\<close>, etc. do not support well polymorphic classes term antiquotations.
|
||||
For now only embedded term-antiquotation in a definition could work:\<close>
|
||||
definition* testtest_level where "testtest_level \<equiv> the (text_section.level @{test2 \<open>testtest2''\<close>})"
|
||||
lemma*[e5::E] testtest : "xx + testtest_level = yy + testtest_level \<Longrightarrow> xx = yy" by simp
|
||||
|
||||
text\<open>Indeed this fails:\<close>
|
||||
(*lemma*[e6::E] testtest : "xx + the (level @{test2 \<open>testtest2''\<close>}) = yy + the (level @{test2 \<open>testtest2''\<close>}) \<Longrightarrow> xx = yy" by simp*)
|
||||
|
||||
text\<open>Bug: Checking of \<^theory_text>\<open>text*\<close> type against \<^theory_text>\<open>declare_reference*\<close> is not done.
|
||||
Should be compatible with type unification mechanism. See the record package\<close>
|
||||
doc_class 'a AAA_test =
|
||||
aaa::"'a list"
|
||||
|
||||
doc_class 'a BBB_test =
|
||||
bbb::"'a list"
|
||||
|
||||
declare_reference*[aaa_test::"'a::one AAA_test"]
|
||||
text\<open>@{AAA_test (unchecked) \<open>aaa_test\<close>}\<close>
|
||||
|
||||
text\<open>\<open>aaa_test\<close> should fail and trigger an error because its type was \<^typ>\<open>'a::one AAA_test\<close>
|
||||
in the \<^theory_text>\<open>declare_reference*\<close> command:\<close>
|
||||
(*text*[aaa_test::"'a::one BBB_test"]\<open>\<close>*)
|
||||
|
||||
text*[aaa_test::"int AAA_test"]\<open>\<close>
|
||||
|
||||
text\<open>\<open>aaa_test'\<close> should fail and trigger an error because its type \<^typ>\<open>string AAA_test\<close>
|
||||
is not compatible with its type \<^typ>\<open>'a::one AAA_test\<close> declared in
|
||||
the \<^theory_text>\<open>declare_reference*\<close> command:\<close>
|
||||
text*[aaa_test'::"string AAA_test"]\<open>\<close>
|
||||
|
||||
|
||||
end
|
Binary file not shown.
Before Width: | Height: | Size: 12 KiB |
|
@ -1,6 +0,0 @@
|
|||
@Manual{ wenzel:isabelle-isar:2020,
|
||||
title = {The Isabelle/Isar Reference Manual},
|
||||
author = {Makarius Wenzel},
|
||||
year = 2020,
|
||||
note = {Part of the Isabelle distribution.}
|
||||
}
|
|
@ -1,59 +0,0 @@
|
|||
chapter AFP
|
||||
|
||||
session "Isabelle_DOF" (AFP) = "Functional-Automata" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
sessions
|
||||
"Regular-Sets"
|
||||
directories
|
||||
"thys"
|
||||
"thys/manual"
|
||||
"ontologies"
|
||||
"ontologies/scholarly_paper"
|
||||
"ontologies/technical_report"
|
||||
theories [document = false]
|
||||
"ontologies/ontologies"
|
||||
"ontologies/technical_report/technical_report"
|
||||
"ontologies/scholarly_paper/scholarly_paper"
|
||||
"thys/RegExpInterface"
|
||||
"thys/Isa_DOF"
|
||||
"thys/Isa_COL"
|
||||
theories [document = true]
|
||||
"thys/manual/M_00_Frontmatter"
|
||||
"thys/manual/M_01_Introduction"
|
||||
"thys/manual/M_02_Background"
|
||||
"thys/manual/M_03_GuidedTour"
|
||||
"thys/manual/M_04_Document_Ontology"
|
||||
"thys/manual/M_05_Proofs_Ontologies"
|
||||
"thys/manual/M_06_RefMan"
|
||||
"thys/manual/M_07_Implementation"
|
||||
"thys/manual/Isabelle_DOF_Manual"
|
||||
document_files
|
||||
"root.bib"
|
||||
"dof_session.tex"
|
||||
"root.mst"
|
||||
"preamble.tex"
|
||||
"lstisadof-manual.sty"
|
||||
"figures/cicm2018-combined.png"
|
||||
"figures/document-hierarchy.pdf"
|
||||
"figures/Dogfood-figures.png"
|
||||
"figures/Dogfood-II-bgnd1.png"
|
||||
"figures/Dogfood-III-bgnd-text_section.png"
|
||||
"figures/Dogfood-IV-jumpInDocCLass.png"
|
||||
"figures/Dogfood-V-attribute.png"
|
||||
"figures/Dogfood-VI-linkappl.png"
|
||||
"figures/isabelle-architecture.pdf"
|
||||
"figures/Isabelle_DOF-logo.pdf"
|
||||
"figures/header_CSP_pdf.png"
|
||||
"figures/header_CSP_source.png"
|
||||
"figures/definition-use-CSP-pdf.png"
|
||||
"figures/definition-use-CSP.png"
|
||||
"figures/MyCommentedIsabelle.png"
|
||||
"figures/doc-mod-generic.pdf"
|
||||
"figures/doc-mod-isar.pdf"
|
||||
"figures/doc-mod-onto-docinst.pdf"
|
||||
"figures/doc-mod-DOF.pdf"
|
||||
"figures/doc-mod-term-aq.pdf"
|
||||
"figures/ThisPaperWithPreviewer.png"
|
||||
export_classpath
|
||||
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
\input{M_00_Frontmatter.tex}
|
||||
\input{M_01_Introduction.tex}
|
||||
\input{M_02_Background.tex}
|
||||
\input{M_03_GuidedTour.tex}
|
||||
\input{M_04_Document_Ontology.tex}
|
||||
\input{M_05_Proofs_Ontologies.tex}
|
||||
\input{M_06_RefMan.tex}
|
||||
\input{M_07_Implementation.tex}
|
||||
\input{Isabelle_DOF_Manual.tex}
|
Binary file not shown.
Before Width: | Height: | Size: 541 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue