commit b88031ec27
Burkhart Wolff, 2019-07-21 16:07:01 +02:00
79 changed files with 299 additions and 1778 deletions

.ci/Jenkinsfile (vendored, 7 changed lines)

@@ -4,21 +4,22 @@ pipeline {
stages {
stage('Build Docker') {
steps {
sh 'cp patches/thy_output.ML .ci/isabelle4isadof/'
sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/'
sh 'docker build -t logicalhacking:isabelle4dof .ci/isabelle4isadof'
sh 'rm -f .ci/isabelle4isadof/thy_output.ML'
}
}
stage('Check Docker') {
when { changeset "patches/*" }
when { changeset "src/patches/*" }
steps {
sh 'cp patches/thy_output.ML .ci/isabelle4isadof/'
sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/'
sh 'docker build --no-cache -t logicalhacking:isabelle4dof .ci/isabelle4isadof'
sh 'rm -f .ci/isabelle4isadof/thy_output.ML'
}
}
stage('Build Isabelle/DOF') {
steps {
sh 'find -type d -name "output" -exec rm -rf {} \\; || true'
sh 'docker run -v $PWD:/DOF logicalhacking:isabelle4dof sh -c "cd /DOF && ./install && isabelle build -D ."'
}
}
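For reference, the same sequence can presumably be reproduced locally outside Jenkins, assuming Docker and a checkout of this repository (the commands are taken verbatim from the pipeline above):

```console
foo@bar:~$ cp src/patches/thy_output.ML .ci/isabelle4isadof/
foo@bar:~$ docker build -t logicalhacking:isabelle4dof .ci/isabelle4isadof
foo@bar:~$ docker run -v $PWD:/DOF logicalhacking:isabelle4dof sh -c "cd /DOF && ./install && isabelle build -D ."
```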


@@ -37,7 +37,7 @@ The installer will
* apply a patch to Isabelle that is necessary to use Isabelle/DOF.
If this patch installations fails, you need to manually replace
the file ``Isabelle2019/src/Pure/Thy/thy_output.ML`` in the Isabelle
distribution with the file ``patches/thy_output.ML`` from the
distribution with the file ``src/patches/thy_output.ML`` from the
Isabelle/DOF distribution:
```console
cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/
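# with this commit's move of the patch directory, presumably equivalently:
cp src/patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/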
@@ -86,7 +86,7 @@ editor.
The DOF-plugin provides an alternative to Isabelle's ``mkroot`` command.
Isabelle projects that use DOF need to be created using
```console
foo@bar:~$ isabelle DOF_mkroot -d
foo@bar:~$ isabelle DOF_mkroot
```
The ``DOF_mkroot`` command takes the same parameter as the standard
``mkroot`` command of Isabelle. Thereafter, the normal Isabelle
@@ -102,7 +102,6 @@ Usage: isabelle DOF_mkroot [OPTIONS] [DIR]
Options are:
-h print this help text and exit
-d enable document preparation
-n NAME alternative session name (default: DIR base name)
-o ONTOLOGY (default: core)
Available ontologies:
@@ -119,7 +118,7 @@ Usage: isabelle DOF_mkroot [OPTIONS] [DIR]
```
For example,
```console
foo@bar:~$ isabelle DOF_mkroot -d -o scholarly_paper -t lncs
foo@bar:~$ isabelle DOF_mkroot -o scholarly_paper -t lncs
```
creates a setup using the scholarly_paper ontology and Springer's
LNCS LaTeX class as document class. Note that the generated setup

ROOT (12 changed lines)

@@ -1,12 +0,0 @@
session "Isabelle_DOF" = "Functional-Automata" +
options [document = pdf, document_output = "output"]
sessions
"Regular-Sets"
theories
Isa_DOF
"ontologies/Conceptual"
"ontologies/CENELEC_50128"
"ontologies/scholarly_paper"
"ontologies/technical_report"
"ontologies/mathex_onto"

ROOTS (1 changed line)

@@ -1 +1,2 @@
src
examples


@@ -1,26 +0,0 @@
# Isabelle_DOF: Document Preparation Setup
This directory contains the LaTeX setup for Isabelle's
document generation system.
## Tips and Tricks
During debugging of LaTeX errors, it can be very helpful to use
more than 79 characters for error messages (otherwise, long errors
are truncated):
``` bash
max_print_line=200 error_line=200 half_error_line=100 pdflatex root.tex
```
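On web2c-based TeX distributions (e.g., TeX Live), these settings can presumably also be exported once per shell session instead of being prefixed to each run; a minimal variation of the tip above:

``` bash
export max_print_line=200 error_line=200 half_error_line=100
pdflatex root.tex
```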
## Team
Main contacts:
* [Achim D. Brucker](http://www.brucker.ch/)
* [Burkhart Wolff](https://www.lri.fr/~wolff/)
## License
This project is licensed under a 2-clause BSD license.
SPDX-License-Identifier: BSD-2-Clause


@@ -1,4 +1,4 @@
scholarly_paper
technical_report
math_exam
cenelec
CENELEC_50128


@@ -1,105 +0,0 @@
chapter\<open> Example : Forward and Standard (use-after-define) Referencing\<close>
theory Example
imports
Isabelle_DOF.CENELEC_50128
Isabelle_DOF.scholarly_paper
begin
section\<open> Some examples of Isabelle's standard antiquotations. \<close>
(* some show-off of standard anti-quotations: *)
text \<open>THIS IS A TEXT\<close>
term "[]"
text\<open> @{thm refl} of name @{thm [source] refl}
@{thm[mode=Rule] conjI}
@{file "../../Isa_DOF.thy"}
@{value "3+4::int"}
@{const hd}
@{theory HOL.List}}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
section\<open> Core Examples for stating text-elements as doc-items.\<close>
text\<open>An "anonymous" text-item, automatically coerced into the top-class "text".\<close>
text*[tralala] \<open> Brexit means Brexit \<close>
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the
@{theory "Draft.CENELEC_50128"}
ontology:\<close>
text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>
text*[ass122::SRAC] \<open> The overall sampling frequency of the odometer
subsystem is therefore 14 kHz, which includes sampling, computing and
result communication times... \<close>
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
that governs the external test-execution via, e.g., a makefile or specific calls
to a test-environment or test-engine \<close>
section\<open> References to doc-items.\<close>
text\<open>Finally some examples of references to doc-items, i.e. text-elements with declared
meta-information and status. \<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>} \<close>
text \<open> the @{docref \<open>t10\<close>}
as well as the @{docref \<open>ass122\<close>}\<close>
text \<open> represent a justification of the safety related applicability
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
section\<open> Some Tests for Ontology Framework and its CENELEC Instance \<close>
declare_reference* [lalala::requirement, alpha="main", beta=42]
declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1]
paragraph* [sdfk::introduction] \<open> just a paragraph - lexical variant \<close>
section*[h::example]\<open> Some global inspection commands for the status of docitem and doc-class tables ... \<close>
section*[i::example]\<open> Text Antiquotation Infrastructure ... \<close>
text\<open> @{docitem \<open>lalala\<close>} -- produces warning. \<close>
text\<open> @{docitem (unchecked) \<open>lalala\<close>} -- produces no warning. \<close>
text\<open> @{docitem \<open>ass122\<close>} -- global reference to a text-item in another file. \<close>
text\<open> @{EC \<open>ass122\<close>} -- global reference to an exported constraint in another file.
Note that the link is actually a srac, which, according to
the ontology, happens to be an "ec". \<close>
section*[h2::example]\<open> Snippets ... \<close>
text*[req1::requirement, is_concerned="UNIV"]
\<open>The operating system shall provide secure
memory separation. \<close>
text\<open>The recurring issue of the certification
is @{requirement \<open>req1\<close>} ...\<close>
end


@@ -1,46 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-2-Clause
set -e
if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
exit 1
fi
cp $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh .
source build_lib.sh


@@ -1,117 +0,0 @@
theory BAC2017
imports "Isabelle_DOF.mathex_onto"
Deriv
Transcendental
begin
open_monitor*[exam::MathExam]
(* currently rethinking on "deep ontologies" necessary ... Achim
text*[idir::Author,affiliation="''LRI, CentraleSupelec''",
email="''idir.aitsadoune@centralesupelec.fr''"]
{*Idir AIT SADOUNE*}
text*[keller::Author,affiliation="''LRI, Univ. Paris-Sud''",
email="''Chantal.Keller@lri.fr''"]
{*Chantal KELLER*}
text{* This example is an excerpt from the French baccalauréat 2017.
The textual explanations were kept in French.
*}
*)
text*[header::Header,examSubject="[analysis,geometry]", date="''21-06-2017''",
timeAllowed="240::int"]{* BACCALAUREAT GENERAL MATHEMATIQUES *}
text{*
\begin{itemize}
\item Les calculatrices électroniques de poche sont autorisées, conformément à la réglementation
en vigueur.
\item Le sujet est composé de 4 exercices indépendants.
\item Le candidat doit traiter tous les exercices.
\item Le candidat est invité à faire figurer sur la copie toute trace de recherche, même incomplète
ou non fructueuse, qu'il aura développée.
\item Il est rappelé que la qualité de la rédaction, la clarté et la précision des raisonnements
entreront pour une part importante dans l'appréciation des copies.
\end{itemize}
*}
text*[exo1 :: Exercise,
concerns= "{setter,student,checker,externalExaminer}"]
{* On considère la fonction h définie sur l'intervalle [0..+\<infinity>] par :
@{term "h(x) = x * exponent (-x)"}
*}
definition h :: "real \<Rightarrow> real"
where "h x \<equiv> x * exp (- x)"
text*[q1::Task, concerns= "{setter,student}",
level="oneStar", mark="1::int", type="formal"]
{* Déterminer la limite de la fonction @{term h} en +\<infinity>. *}
text*[a1::Answer_Formal_Step] {* Fill in term and justification*}
lemma q1 : "(h \<longlongrightarrow> (0::real)) at_top" sorry
text*[v1::Validation, proofs="[@{thm ''HOL.refl''}::thm]"] {* See lemma @{thm q1}. *}
text*[q2::Task, concerns= "{setter,checker,student}",
level="oneStar", mark="1::int", type="formal"]
{* Étudier les variations de la fonction @{term h} sur l'intervalle [0..+\<infinity>] et
dresser son tableau de variation *}
text*[a2::Answer_Formal_Step]
{* Fill in term and justification*}
definition h' :: "real \<Rightarrow> real"
where "h' x \<equiv> (1 - x) * exp (- x)"
lemma q2_a : "DERIV h x :> h' x"
proof -
have * : "DERIV (exp \<circ> uminus) x :> - (exp (-x))"
sorry (* by (simp add: has_derivative_compose) *)
have ** : "DERIV id x :> 1"
by (metis DERIV_ident eq_id_iff)
have *** : "DERIV h x :> x * (- (exp (- x))) + 1 * (exp (- x))"
sorry (* by (simp add: * ** has_derivative_mult comp_def) *)
show ?thesis
sorry (* by (metis "***" left_diff_distrib mult_minus_right uminus_add_conv_diff) *)
qed
lemma q2_b : "0 \<le> x \<and> x \<le> y \<and> y \<le> 1 \<Longrightarrow> h x \<le> h y"
sorry
lemma q2_c : "1 \<le> x \<and> x \<le> y \<Longrightarrow> h x \<ge> h y"
sorry
text*[v2::Validation, proofs="[@{thm ''BAC2017.q2_b''}, @{thm ''BAC2017.q2_c''}]"]
{* See lemmas @{thm q2_b} and @{thm q2_c}. *}
text*[q3a::Task, concerns= "{setter,checker,student}",
level="oneStar", mark="1::int", type="formal"]
{* Vérifier que pour tout nombre réel x appartenant à l'intervalle [0..+\<infinity>], on a :
@{term "h x = (exp (- x)) - (h' x)"}. *}
text*[a3a::Answer_Formal_Step]
{* Fill in term and justification*}
lemma q3a : "h x = (exp (- x)) - (h' x)"
by (simp add : h_def h'_def left_diff_distrib)
subsubsection*[v3a::Validation, proofs="[@{thm ''BAC2017.q3a''}::thm]"]
{* See lemma @{thm q3a}. *}
subsection*[sol1 :: Solution,
content="[exo1::Exercise]",
valids = "[v1::Validation,v2,v3a]"]
{* See validations. *}
close_monitor*[exam]
end


@@ -1,11 +0,0 @@
session "BAC2017" = "Isabelle_DOF" +
options [document = pdf, document_output = "output",quick_and_dirty=true]
theories [document = false]
"Deriv"
"Transcendental"
theories
BAC2017
document_files
"isadof.cfg"
"preamble.tex"
"build"


@@ -1,2 +0,0 @@
Template: scrartcl
Ontology: mathex


@@ -1,47 +0,0 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%%
%% License:
%% This program can be redistributed and/or modified under the terms
%% of the LaTeX Project Public License Distributed from CTAN
%% archives in directory macros/latex/base/lppl.txt; either
%% version 1 of the License, or any later version.
%% OR
%% The 2-clause BSD-style license.
%%
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
%% This is a placeholder for user-specific configuration and packages.
\title{<TITLE>}
\author{<AUTHOR>}
\newkeycommand\isaDofTextExercise[label=,type=,Exercise.content=,content=,concerns=,][1]{%
\begin{Exercise}
#1
\end{Exercise}
}
\newkeycommand\isaDofTextSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofSectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofSubsectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofSubsubsectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{%
#1
}
\newkeycommand\isaDofTextExercise[Task.concerns=,concerns=,content=,][1]{%
#1
}
\newkeycommand\isaDofTextValidation[tests=,proofs=,][1]{%
#1
}


@@ -1,22 +0,0 @@
\documentclass{article}
\usepackage{hyperref}
\begin{document}
\begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}]
\begin{tabular}{l}
\TextField{From } \\\\
\TextField{have 1} \\\\
\TextField{have 2} \\\\
\TextField{have 3} \\\\
\TextField{finally show} \\\\
\CheckBox[width=1em]{Has the polynomial as many solutions as its degree ? } \\\\
\Submit{Submit}\\
\end{tabular}
\end{Form}
\end{document}


@@ -1,44 +1,34 @@
(*<*)
theory MathExam
imports "Isabelle_DOF.mathex_onto"
Real
imports "Isabelle_DOF.math_exam"
HOL.Real
begin
(*>*)
open_monitor*[exam::MathExam]
(* open_monitor*[exam::MathExam] *)
section*[header::Header,examSubject= "[algebra]",
date="''02-05-2018''", timeAllowed="90::int"] {* Exam number 1 *}
text{*
date="''02-05-2018''", timeAllowed="90::int"] \<open>Exam number 1\<close>
text\<open>
\begin{itemize}
\item Use black ink or black ball-point pen.
\item Draw diagrams in pencil.
\item Answer all questions in the spaces provided.
\end{itemize}
*}
\<close>
text*[idir::Author, affiliation="''CentraleSupelec''",
email="''idir.aitsadoune@centralesupelec.fr''"]
{*Idir AIT SADOUNE*}
\<open>Idir AIT SADOUNE\<close>
(* should be in DOF-core
* causes crash on the LaTeX side:
( FP-DIV )
*** ! Undefined control sequence.
*** <argument> ...ative_width}}{100} \includegraphics
*** [width=\scale \textwidth ]...
*** l.44 {A Polynome.}
*)
figure*[figure::figure, spawn_columns=False,
relative_width="80",
src="''figures/Polynomialdeg5.png''"]
src="''figures/Polynomialdeg5''"]
\<open>A Polynome.\<close>
subsubsection*[exo1 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 1\<close>
text{*
text\<open>
Here are the first four lines of a number pattern.
\begin{itemize}
\item Line 1 : @{term "1*6 + 2*4 = 2*7"}
@@ -46,15 +36,15 @@ Here are the first four lines of a number pattern.
\item Line 3 : @{term "3*8 + 2*6 = 4*9"}
\item Line 4 : @{term "4*9 + 2*7 = 5*10"}
\end{itemize}
*}
\<close>
declare [[show_sorts=false]]
subsubsection*[exo2 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 2\<close>
text{* Find the roots of the polynome:
text\<open>Find the roots of the polynome:
@{term "(x^3) - 6 * x^2 + 5 * x + 12"}.
Note the intermediate steps in the following fields and submit the solution. *}
text{*
Note the intermediate steps in the following fields and submit the solution.\<close>
text\<open>
\begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}]
\begin{tabular}{l}
From @{term "(x^3) - 6 * x^2 + 5 * x + 12"} \\\\
@@ -66,7 +56,7 @@ text{*
\Submit{Submit}\\
\end{tabular}
\end{Form}
*}
\<close>
(* a bit brutal, as long as lemma* does not yet work *)
(*<*)
@@ -88,21 +78,21 @@ proof -
qed
(*>*)
text*[a1::Answer_Formal_Step]{* First Step: Fill in term and justification *}
text*[a2::Answer_Formal_Step]{* Next Step: Fill in term and justification *}
text*[a3::Answer_Formal_Step]{* Next Step: Fill in term and justification *}
text*[a4::Answer_Formal_Step]{* Next Step: Fill in term and justification *}
text*[a1::Answer_Formal_Step]\<open>First Step: Fill in term and justification\<close>
text*[a2::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[a3::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[a4::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close>
text*[q1::Task, local_grade="oneStar", mark="1::int", type="formal"]
{* Complete Line 10 : @{term "10*x + 2*y = 11*16"} *}
\<open>Complete Line 10 : @{term "10*x + 2*y = 11*16"}\<close>
subsubsection*[exo3 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 3\<close>
text*[q2::Task, local_grade="threeStars", mark="3::int", type="formal"]
{* Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers
\<open>Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers
with a difference of 5.
*}
\<close>
(* this does not work on the level of the LaTeX output for known restrictions of the Toplevel. *)
close_monitor*[exam :: MathExam]
(* close_monitor*[exam :: MathExam] *)
end


@@ -1,2 +1,2 @@
Template: scrartcl
Ontology: mathex
Ontology: math_exam


@@ -1,8 +0,0 @@
theory Noodles
imports "../../ontologies/small_math"
"../../ontologies/technical_report"
begin
title*[t::title]\<open>On Noodles\<close>
end


@@ -1,32 +0,0 @@
theory "On_Noodles"
imports "../../ontologies/small_math"
"../../ontologies/technical_report"
begin
open_monitor*[this::article]
title*[t1::title]\<open>On Noodles\<close>
text*[simon::author]\<open>Simon Foster\<close>
text*[a::abstract, keywordlist = "[''topology'']"]
\<open>We present the first fundamental results on the groundbreaking theory of noodles...\<close>
section*[intro::introduction]\<open>Introduction\<close>
text\<open> Authorities say that Noodles are unleavened dough which is stretched,
extruded, or rolled flat and cut into one or a variety of shapes which usually
include long, thin strips, or waves, helices, tubes, strings, or shells, or
folded over, or cut into other shapes. Noodles are usually cooked in boiling water,
sometimes with cooking oil or salt added. \<close>
section*[def_sec::technical]\<open>Basic definitions\<close>
text*[d1::"definition"]\<open>My first definition\<close>
definition noodle ::"bool" where "noodle = (THE x. True)"
(*
update_instance*[def1, formal_results:="[@{thm ''noodle_def''}]"]
*)
close_monitor*[this::article]
end


@@ -0,0 +1 @@
MathExam


@@ -4,9 +4,9 @@ theory IsaDofApplications
begin
open_monitor*[this::article]
declare[[strict_monitor_checking=false]]
(*>*)
declare[[strict_monitor_checking=false]]
title*[tit::title]\<open>Using the Isabelle Ontology Framework\<close>
subtitle*[stit::subtitle]\<open>Linking the Formal with the Informal\<close>
text*[adb:: author,
@@ -389,7 +389,7 @@ text\<open> The document class \inlineisar+figure+ --- supported by the \isadof
such as @{docitem_ref \<open>fig_figures\<close>}.
\<close>
subsection*[mathex_onto::example]\<open> The Math-Exam Scenario \<close>
subsection*[math_exam::example]\<open> The Math-Exam Scenario \<close>
text\<open> The Math-Exam Scenario is an application with mixed formal and
semi-formal content. It addresses applications where the author of the exam is not present
during the exam and the preparation requires a very rigorous process, as the French
@@ -729,7 +729,7 @@ text\<open> \isadof in its present form has a number of technical short-comings
paragraph\<open> Availability. \<close>
text\<open> The implementation of the framework, the discussed ontology definitions,
and examples are available at
\url{https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF/}.\<close>
\url{https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/}.\<close>
paragraph\<open> Acknowledgement. \<close>
text\<open> This work was partly supported by the framework of IRT SystemX, Paris-Saclay, France,
and therefore granted with public funds within the scope of the Program ``Investissements d'Avenir''.\<close>


@@ -1,55 +0,0 @@
chapter\<open>Setting and modifying attributes of doc-items\<close>
theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
text\<open>@{theory \<open>Draft.Conceptual\<close>} provides a monitor @{typ M} enforcing a particular document
structure. Here, we say: From now on, these structural rules are respected wrt. all
\<^theory_text>\<open>doc_class\<close>es @{typ M} is enabled for.\<close>
open_monitor*[struct::M]
section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
text*[d::D, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus,
... @{C c1} @{thm "refl"}\<close>
update_instance*[d::D, a1 := X2]
text\<open> ... in ut tortor ... @{docitem \<open>a\<close>} ... @{A \<open>a\<close>}\<close>
text*[c2::C, x = "''delta''"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>
text*[f::F] \<open> Lectus accumsan velit ultrices, ... }\<close>
theorem some_proof : "P" sorry
text\<open>This is an example where we add a theorem into a kind of "result-list" of the doc-item f.\<close>
update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"]
text\<open> ..., mauris amet, id elit aliquam aptent id, ... @{docitem \<open>a\<close>} \<close>
text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...
The type annotations with @{typ A} and @{typ C} are optional and may help to get
additional information at the HOL level, the arguments of the inner-syntax antiquotation
are strings that can be denoted in two different syntactic variants; the former is
more robust than the traditional latter.\<close>
update_instance*[f::F,b:="{(@{docitem \<open>a\<close>}::A,@{docitem \<open>c1\<close>}::C),
(@{docitem ''a''}, @{docitem ''c2''})}"]
close_monitor*[struct]
text\<open>And the trace of the monitor is:\<close>
ML\<open>@{trace_attribute struct}\<close>
print_doc_classes
print_doc_items
check_doc_global
end


@@ -251,7 +251,7 @@ doc_class srac = ec +
\end{isar}
\<close>
section*[mathex_onto::example]\<open> The Math-Exam Scenario \<close>
section*[math_exam::example]\<open> The Math-Exam Scenario \<close>
text\<open> The Math-Exam Scenario is an application with mixed formal and
semi-formal content. It addresses applications where the author of the exam is not present
during the exam and the preparation requires a very rigorous process, as the French


@@ -26,7 +26,7 @@ is based on several design-decisions:
\<close>
subsection*["sec:plugins"::technical]{*Writing \isadof as User-Defined Plugin in Isabelle/Isar*}
subsection*["sec:plugins"::technical]\<open>Writing \isadof as User-Defined Plugin in Isabelle/Isar\<close>
text\<open>
Writing an own plugin in Isabelle starts with defining the local data
and registering it in the framework. As mentioned before, contexts
@@ -142,7 +142,7 @@ front-end supporting PIDE, a popup window with the text: ``declare
document reference'' will appear.
\<close>
subsection*["sec:prog_anti"::technical]{*Programming Text Antiquotations*}
subsection*["sec:prog_anti"::technical]\<open>Programming Text Antiquotations\<close>
text\<open>
As mentioned in the introduction, Isabelle/Isar is configured with a
number of standard commands to annotate formal definitions and proofs


@@ -5,7 +5,7 @@ begin
(*>*)
chapter*[impl2::technical,main_author="Some(@{docitem ''bu''}::author)"]
{* \isadof: Design and Implementation*}
\<open>\isadof: Design and Implementation\<close>
text\<open>
In this section, we present the design and implementation of \isadof.
\subsection{Document Ontology Modeling with \isadof}
@@ -110,7 +110,7 @@ expression consisting of the class identifier \inlineisar+A+,
\inlineisar+B+, etc. Its use is discussed in \autoref{sec:monitor-class}.
\<close>
subsection*[editing::example]{*Editing a Document with Ontology-Conform Meta-Data*}
subsection*[editing::example]\<open>Editing a Document with Ontology-Conform Meta-Data\<close>
text\<open>
As already mentioned, Isabelle/Isar comes with a number of standard
\emph{text commands} such as \inlineisar+section{* ... *}+ or
@@ -193,7 +193,7 @@ referencing it, although the actual text-element will occur later in
the document.\<close>
subsection*[ontolinks::technical]{*Ontology-Conform Logical Links: \isadof Antiquotations*}
subsection*[ontolinks::technical]\<open>Ontology-Conform Logical Links: \isadof Antiquotations\<close>
text\<open>
Up to this point, the world of the formal and the informal document
parts are strictly separated. The main objective of \isadof are ways
@@ -305,7 +305,7 @@ enforce that terms or theorems have a particular form or correspond to
``claims'' (contributions) listed in the introduction of the paper.
\<close>
subsection*["sec:monitor-class"::technical]{*Monitor Document Classes*}
subsection*["sec:monitor-class"::technical]\<open>Monitor Document Classes\<close>
text\<open>
\autoref{lst:example} shows our conceptual running example in all
details. While inheritance on document classes allows for structuring
@@ -353,7 +353,7 @@ text\<open>
\end{isar}
\<close>
section{*Document Generation*}
section\<open>Document Generation\<close>
text\<open>
Up to now, we discussed the definition of ontologies and their
representation in an interactive development environment, \ie,


@@ -60,33 +60,67 @@
\def\endlstdelim{\texttt{\textbf{\color{black!60}#2}}\egroup}%
\ttfamily\textbf{\color{black!60}#1}\bgroup\rmfamily\color{#3}\aftergroup\endlstdelim%
}
\newcommand{\subscr}[1]{\ensuremath{_{\mbox{#1}}}}
\newcommand{\supscr}[1]{\ensuremath{^{\mbox{#1}}}}
\newcommand{\subscr}[1]{\ensuremath{_{\text{#1}}}}
\newcommand{\supscr}[1]{\ensuremath{^{\text{#1}}}}
\lstdefinestyle{ISAR}{%
language=%
,basicstyle=\ttfamily%
,showspaces=false%
,showlines=false%
,columns=flexible%
,keepspaces
,mathescape=false,
,morecomment=[s]{(*}{*)}%
% ,moredelim=*[s][\rmfamily]{\{*}{*\}}%
,moredelim = **[is][\beginlstdelim{\{*}{*\}}{black}]{\{*}{*\}}
,showstringspaces=false%
,moredelim=*[is][\supscr]{\\<^bsup>}{\\<^esup>}%
,moredelim=*[is][\supscr]{<bsup>}{<esup>}%
,moredelim=*[is][\subscr]{<bsub>}{<esub>}%
,literate={%
{...}{\,\ldots\,}3%
{\\<Open>}{\ensuremath{\isacartoucheopen}}1%
{\\at}{@}1%
{\\<Close>}{\ensuremath{\isacartoucheclose}}1%
{<Open>}{\ensuremath{\isacartoucheopen}}1%
{<open>}{\ensuremath{\isacartoucheopen}}1%
{<@>}{@}1%
{"}{}0%
{~}{\ }1%
{::}{:\!:}1%
{<Close>}{\ensuremath{\isacartoucheclose}}1%
{<close>}{\ensuremath{\isacartoucheclose}}1%
{\\<Gamma>}{\ensuremath{\Gamma}}1%
{\\<times>}{\ensuremath{\times}}1%
{\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1%
{\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1%
{\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1%
{\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1%
{\\<equiv>}{\ensuremath{\equiv}}1%
{\\<Rightarrow>}{\ensuremath{\Rightarrow}}1%
{\\<rightarrow>}{\ensuremath{\rightarrow}}1%
{\\<longrightarrow>}{\ensuremath{\rightarrow}}1%
{\\<and>}{\ensuremath{\land}}1%
{\\<or>}{\ensuremath{\lor}}1%
{\\<lfloor>}{\ensuremath{\lfloor}}1%
{\\<rfloor>}{\ensuremath{\rfloor}}1%
%{\\<lparr>}{\ensuremath{\lparr}}1%
%{\\<rparr>}{\ensuremath{\rparr}}1%
{\\<le>}{\ensuremath{\le}}1%
{\\<delta>}{\ensuremath{\delta}}1%
{\\<lambda>}{\ensuremath{\lambda}}1%
{\\<bar>}{\ensuremath{\vert}}1%
{\<sigma>}{\ensuremath{\sigma}}1%
{\\<lparr>}{\ensuremath{\isasymlparr}}1%
{\\<rparr>}{\ensuremath{\isasymrparr}}1%
{\\<leftrightarrow>}{\ensuremath{\leftrightarrow}}1%
{\{*}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{*\}}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<open>}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1%
{\\<close>}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1%
{\\<forall>}{\ensuremath{\forall}}1%
{\\<exists>}{\ensuremath{\exists}}1%
{\\<in>}{\ensuremath{\in}}1%
{\\<delta>}{\ensuremath{\delta}}1%
{\\<real>}{\ensuremath{\mathbb{R}}}1%
{\\<noteq>}{\ensuremath{\neq}}1%
{\\<Forall>}{\ensuremath{\bigwedge\,}}1%
{\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1%
{\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1%
{\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1%
{\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1%
}%
% % Defining "tags" (text-antiquotations) based on 1-keywords
,tag=**[s]{@\{}{\}}%
@@ -97,18 +131,18 @@
% Defining 2-keywords
,keywordstyle=[2]{\color{Blue!60}\bfseries}%
,alsoletter={*,-}
,morekeywords=[2]{theory, begin, end, ML,section,subsection,paragraph,chapter,text}%
,morekeywords=[2]{case, then, show, theory, begin, end, ML,section,subsection,paragraph,chapter,text}%
%,moredelim=[s][\textit]{<}{>}
% Defining 3-keywords
,keywordstyle=[3]{\color{OliveGreen!60}\bfseries}%
,morekeywords=[3]{doc_class,declare_reference,update_instance*,
open_monitor*, close_monitor*, figure*, title*, subtitle*,declare_reference*,section*,text*}%
open_monitor*, close_monitor*, declare_reference*,section*,text*,title*,abstract*}%
% Defining 4-keywords
,keywordstyle=[4]{\color{black!60}\bfseries}%
,morekeywords=[4]{where, imports}%
,morekeywords=[4]{where, imports, keywords}%
% Defining 5-keywords
,keywordstyle=[5]{\color{BrickRed!70}\bfseries}%
,morekeywords=[5]{datatype, typedecl, consts, theorem}%
,morekeywords=[5]{datatype, by, fun, Definition*, definition, type_synonym, typedecl, consts, assumes, and, shows, proof, next, qed, lemma, theorem}%
% Defining 6-keywords
,keywordstyle=[6]{\itshape}%
,morekeywords=[6]{meta-args, ref, expr, class_id}%
@@ -117,8 +151,7 @@
%%
\lstnewenvironment{isar}[1][]{\lstset{style=ISAR,
backgroundcolor=\color{black!2},
frame=lines,
mathescape=true,
frame=lines,mathescape,
basicstyle=\footnotesize\ttfamily,#1}}{}
%%%
\def\inlineisar{\lstinline[style=ISAR,breaklines=true,mathescape,breakatwhitespace=true]}

install (13 changed lines)

@@ -47,8 +47,8 @@ print_help()
echo " --isabelle, -i isabelle isabelle command used for installation"
echo " (default: $ISABELLE)"
echo " --skip-patch-and-afp, -s skip installation of Isabelle/DOF patch for"
echo " Isabelle and required AFP entries. USE AT"
echo " YOUR OWN RISK (default: $SKIP)"
echo " Isabelle and required AFP entries. "
echo " USE AT YOUR OWN RISK (default: $SKIP)"
}
@@ -125,7 +125,7 @@ check_afp_entries() {
check_isa_dof_patch() {
echo "* Check availability of Isabelle/DOF patch:"
src="patches/thy_output.ML"
src="src/patches/thy_output.ML"
dst="$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"
if command -v cmp > /dev/null 2>&1 && cmp -s "$src" "$dst" ; then
@@ -179,12 +179,13 @@ install_and_register(){
DIR="$ISABELLE_HOME_USER/DOF/document-template"
echo " - Installing document templates in $DIR"
mkdir -p "$DIR"
cp $GEN_DIR/document-template/* "$DIR"
cp $GEN_DIR/scripts/* "$DIR"
cp $GEN_DIR/document-templates/* "$DIR"
DIR="$ISABELLE_HOME_USER/DOF/latex"
echo " - Installing LaTeX styles in $DIR"
mkdir -p "$DIR"
cp $GEN_DIR/latex/*.sty "$DIR"
cp $GEN_DIR/*/*/*.sty "$DIR"
DIR="$ISABELLE_HOME_USER/etc"
echo " - Registering Isabelle/DOF"
@@ -220,7 +221,7 @@ done
ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
GEN_DIR=document-generator
GEN_DIR=src
PROG=`echo $0 | sed 's|.*/||'`;
VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
for i in $VARS; do
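For context, a plausible invocation of the updated installer from the repository root, using the `-i` option documented in the help text above (the Isabelle path is illustrative):

```console
foo@bar:~$ ./install -i /opt/Isabelle2019/bin/isabelle
```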


@@ -1,17 +0,0 @@
chapter \<open>An Outline of a Common Criteria Ontology\<close>
text{* NOTE: An Ontology-Model of a certification standard such as Comon Criteria or
Common Criteria (ISO15408) identifies:
- notions (conceptual \emph{categories}) having \emph{instances}
(similar to classes and objects),
- their subtype relation (e.g., a "security target" is a "requirement definition"),
- their syntactical structure
(for the moment: defined by regular expressions describing the
order of category instances in the overall document as a regular language)
*}
theory CC_ISO15408
imports "../Isa_DOF"
begin
end


@@ -1,543 +0,0 @@
(* Title: Pure/Thy/thy_output.ML
Author: Makarius
Theory document output.
*)
signature THY_OUTPUT =
sig
val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
val check_comments: Proof.context -> Symbol_Pos.T list -> unit
val output_token: Proof.context -> Token.T -> Latex.text list
val output_source: Proof.context -> string -> Latex.text list
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
val present_thy: Options.T -> theory -> segment list -> Latex.text list
val pretty_term: Proof.context -> term -> Pretty.T
val pretty_thm: Proof.context -> thm -> Pretty.T
val lines: Latex.text list -> Latex.text list
val items: Latex.text list -> Latex.text list
val isabelle: Proof.context -> Latex.text list -> Latex.text
val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
val typewriter: Proof.context -> string -> Latex.text
val verbatim: Proof.context -> string -> Latex.text
val source: Proof.context -> Token.src -> Latex.text
val pretty: Proof.context -> Pretty.T -> Latex.text
val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text
val pretty_items: Proof.context -> Pretty.T list -> Latex.text
val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text
val antiquotation_pretty:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_source:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_raw:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
val antiquotation_verbatim:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
end;
structure Thy_Output: THY_OUTPUT =
struct
(* output document source *)
val output_symbols = single o Latex.symbols_output;
fun output_comment ctxt (kind, syms) =
(case kind of
Comment.Comment =>
Input.cartouche_content syms
|> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
{markdown = false}
|> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
| Comment.Cancel =>
Symbol_Pos.cartouche_content syms
|> output_symbols
|> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
| Comment.Latex =>
[Latex.symbols (Symbol_Pos.cartouche_content syms)])
and output_comment_document ctxt (comment, syms) =
(case comment of
SOME kind => output_comment ctxt (kind, syms)
| NONE => [Latex.symbols syms])
and output_document_text ctxt syms =
Comment.read_body syms |> maps (output_comment_document ctxt)
and output_document ctxt {markdown} source =
let
val pos = Input.pos_of source;
val syms = Input.source_explode source;
val output_antiquotes =
maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
fun output_line line =
(if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
output_antiquotes (Markdown.line_content line);
fun output_block (Markdown.Par lines) =
Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
| output_block (Markdown.List {kind, body, ...}) =
Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
in
if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
then
let
val ants = Antiquote.parse_comments pos syms;
val reports = Antiquote.antiq_reports ants;
val blocks = Markdown.read_antiquotes ants;
val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
in output_blocks blocks end
else
let
val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
val reports = Antiquote.antiq_reports ants;
val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
in output_antiquotes ants end
end;
(* output tokens with formal comments *)
local
val output_symbols_antiq =
(fn Antiquote.Text syms => output_symbols syms
| Antiquote.Control {name = (name, _), body, ...} =>
Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
output_symbols body
| Antiquote.Antiq {body, ...} =>
Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
fun output_comment_symbols ctxt {antiq} (comment, syms) =
(case (comment, antiq) of
(NONE, false) => output_symbols syms
| (NONE, true) =>
Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
|> maps output_symbols_antiq
| (SOME comment, _) => output_comment ctxt (comment, syms));
fun output_body ctxt antiq bg en syms =
Comment.read_body syms
|> maps (output_comment_symbols ctxt {antiq = antiq})
|> Latex.enclose_body bg en;
in
fun output_token ctxt tok =
let
fun output antiq bg en =
output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
in
(case Token.kind_of tok of
Token.Comment NONE => []
| Token.Command => output false "\\isacommand{" "}"
| Token.Keyword =>
if Symbol.is_ascii_identifier (Token.content_of tok)
then output false "\\isakeyword{" "}"
else output false "" ""
| Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
| Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
| Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
| Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
| _ => output false "" "")
end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
fun output_source ctxt s =
output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
fun check_comments ctxt =
Comment.read_body #> List.app (fn (comment, syms) =>
let
val pos = #1 (Symbol_Pos.range syms);
val _ =
comment |> Option.app (fn kind =>
Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind)));
val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
end;
(** present theory source **)
(*NB: arranging white space around command spans is a black art*)
val is_white = Token.is_space orf Token.is_informal_comment;
val is_black = not o is_white;
val is_white_comment = Token.is_informal_comment;
val is_black_comment = Token.is_formal_comment;
(* presentation tokens *)
datatype token =
Ignore_Token
| Basic_Token of Token.T
| Markup_Token of string * Input.source
| Markup_Env_Token of string * Input.source
| Raw_Token of Input.source;
fun basic_token pred (Basic_Token tok) = pred tok
| basic_token _ _ = false;
val white_token = basic_token is_white;
val white_comment_token = basic_token is_white_comment;
val blank_token = basic_token Token.is_blank;
val newline_token = basic_token Token.is_newline;
fun present_token ctxt tok =
(case tok of
Ignore_Token => []
| Basic_Token tok => output_token ctxt tok
| Markup_Token (cmd, source) =>
Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
(output_document ctxt {markdown = false} source)
| Markup_Env_Token (cmd, source) =>
[Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
| Raw_Token source =>
Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
(* command spans *)
type command = string * Position.T * string list; (*name, position, tags*)
type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*)
datatype span = Span of command * (source * source * source * source) * bool;
fun make_span cmd src =
let
fun chop_newline (tok :: toks) =
if newline_token (fst tok) then ([tok], toks, true)
else ([], tok :: toks, false)
| chop_newline [] = ([], [], false);
val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
src
|> chop_prefix (white_token o fst)
||>> chop_suffix (white_token o fst)
||>> chop_prefix (white_comment_token o fst)
||> chop_newline;
in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
(* present spans *)
local
fun err_bad_nesting pos =
error ("Bad nesting of commands in presentation" ^ pos);
fun edge which f (x: string option, y) =
if x = y then I
else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
fun read_tag s =
(case space_explode "%" s of
["", b] => (SOME b, NONE)
| [a, b] => (NONE, SOME (a, b))
| _ => error ("Bad document_tags specification: " ^ quote s));
in
fun make_command_tag options keywords =
let
val document_tags =
map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
val document_tags_default = map_filter #1 document_tags;
val document_tags_command = map_filter #2 document_tags;
in
fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' =>
let
val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));
val keyword_tags =
if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
else Keyword.command_tags keywords cmd_name;
val command_tags =
the_list (AList.lookup (op =) document_tags_command cmd_name) @
keyword_tags @ document_tags_default;
val active_tag' =
if is_some tag' then tag'
else
(case command_tags of
default_tag :: _ => SOME default_tag
| [] =>
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
then active_tag
else NONE);
in {tag' = tag', active_tag' = active_tag'} end
end;
fun present_span thy command_tag span state state'
(tag_stack, active_tag, newline, latex, present_cont) =
let
val ctxt' =
Toplevel.presentation_context state'
handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN;
val present = fold (fn (tok, (flag, 0)) =>
fold cons (present_token ctxt' tok)
#> cons (Latex.string flag)
| _ => I);
val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;
val (tag, tags) = tag_stack;
val {tag', active_tag'} =
command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag}
state state';
val edge = (active_tag, active_tag');
val nesting = Toplevel.level state' - Toplevel.level state;
val newline' =
if is_none active_tag' then span_newline else newline;
val tag_stack' =
if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack
else if nesting >= 0 then (tag', replicate nesting tag @ tags)
else
(case drop (~ nesting - 1) tags of
tg :: tgs => (tg, tgs)
| [] => err_bad_nesting (Position.here cmd_pos));
val latex' =
latex
|> end_tag edge
|> close_delim (fst present_cont) edge
|> snd present_cont
|> open_delim (present (#1 srcs)) edge
|> begin_tag edge
|> present (#2 srcs);
val present_cont' =
if newline then (present (#3 srcs), present (#4 srcs))
else (I, present (#3 srcs) #> present (#4 srcs));
in (tag_stack', active_tag', newline', latex', present_cont') end;
fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
if not (null tags) then err_bad_nesting " at end of theory"
else
latex
|> end_tag (active_tag, NONE)
|> close_delim (fst present_cont) (active_tag, NONE)
|> snd present_cont;
end;
(* present_thy *)
local
val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";
val space_proper =
Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black;
val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore);
val improper = Scan.many is_improper;
val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
val opt_newline = Scan.option (Scan.one Token.is_newline);
val ignore =
Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
>> pair (d + 1)) ||
Scan.depend (fn d => Scan.one Token.is_end_ignore --|
(if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
>> pair (d - 1));
val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end);
val locale =
Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |--
Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")")));
in
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};
fun present_thy options thy (segments: segment list) =
let
val keywords = Thy_Header.get_keywords thy;
(* tokens *)
val ignored = Scan.state --| ignore
>> (fn d => (NONE, (Ignore_Token, ("", d))));
fun markup pred mk flag = Scan.peek (fn d =>
improper |--
Parse.position (Scan.one (fn tok =>
Token.is_command tok andalso pred keywords (Token.content_of tok))) --
Scan.repeat tag --
Parse.!!!! ((improper -- locale -- improper) |-- Parse.document_source --| improper_end)
>> (fn (((tok, pos'), tags), source) =>
let val name = Token.content_of tok
in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end));
val command = Scan.peek (fn d =>
Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] --
Scan.one Token.is_command -- Scan.repeat tag
>> (fn ((cmd_mod, cmd), tags) =>
map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
[(SOME (Token.content_of cmd, Token.pos_of cmd, tags),
(Basic_Token cmd, (markup_false, d)))]));
val cmt = Scan.peek (fn d =>
Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
val other = Scan.peek (fn d =>
Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
val tokens =
(ignored ||
markup Keyword.is_document_heading Markup_Token markup_true ||
markup Keyword.is_document_body Markup_Env_Token markup_true ||
markup Keyword.is_document_raw (Raw_Token o #2) "") >> single ||
command ||
(cmt || other) >> single;
(* spans *)
val is_eof = fn (_, (Basic_Token x, _)) => Token.is_eof x | _ => false;
val stopper = Scan.stopper (K (NONE, (Basic_Token Token.eof, ("", 0)))) is_eof;
val cmd = Scan.one (is_some o fst);
val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
val white_comments = Scan.many (white_comment_token o fst o snd);
val blank = Scan.one (blank_token o fst o snd);
val newline = Scan.one (newline_token o fst o snd);
val before_cmd =
Scan.option (newline -- white_comments) --
Scan.option (newline -- white_comments) --
Scan.option (blank -- white_comments) -- cmd;
val span =
Scan.repeat non_cmd -- cmd --
Scan.repeat (Scan.unless before_cmd non_cmd) --
Scan.option (newline >> (single o snd))
>> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
val spans = segments
|> maps (Command_Span.content o #span)
|> drop_suffix Token.is_space
|> Source.of_list
|> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
|> Source.source stopper (Scan.error (Scan.bulk span))
|> Source.exhaust;
val command_results =
segments |> map_filter (fn {command, state, ...} =>
if Toplevel.is_ignored command then NONE else SOME (command, state));
(* present commands *)
val command_tag = make_command_tag options keywords;
fun present_command tr span st st' =
Toplevel.setmp_thread_position tr (present_span thy command_tag span st st');
fun present _ [] = I
| present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
in
if length command_results = length spans then
((NONE, []), NONE, true, [], (I, I))
|> present Toplevel.toplevel (spans ~~ command_results)
|> present_trailer
|> rev
else error "Messed-up outer syntax for presentation"
end;
end;
(** standard output operations **)
(* pretty printing *)
fun pretty_term ctxt t =
Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
(* default output *)
val lines = separate (Latex.string "\\isanewline%\n");
val items = separate (Latex.string "\\isasep\\isanewline%\n");
fun isabelle ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabelle" body
else Latex.block (Latex.enclose_body "\\isa{" "}" body);
fun isabelle_typewriter ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabellett" body
else Latex.block (Latex.enclose_body "\\isatt{" "}" body);
fun typewriter ctxt s =
isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];
fun verbatim ctxt =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
fun source ctxt =
Token.args_of_src
#> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt)
#> space_implode " "
#> output_source ctxt
#> isabelle ctxt;
fun pretty ctxt =
Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;
fun pretty_source ctxt src prt =
if Config.get ctxt Document_Antiquotation.thy_output_source
then source ctxt src else pretty ctxt prt;
fun pretty_items ctxt =
map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;
fun pretty_items_source ctxt src prts =
if Config.get ctxt Document_Antiquotation.thy_output_source
then source ctxt src else pretty_items ctxt prts;
(* antiquotation variants *)
fun antiquotation_pretty name scan f =
Document_Antiquotation.setup name scan
(fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));
fun antiquotation_pretty_source name scan f =
Document_Antiquotation.setup name scan
(fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x));
fun antiquotation_raw name scan f =
Document_Antiquotation.setup name scan
(fn {context = ctxt, argument = x, ...} => f ctxt x);
fun antiquotation_verbatim name scan f =
antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt);
end;


@@ -7,7 +7,7 @@ begin
ML{*
ML\<open>
fun value_maybe_select some_name =
case some_name
@@ -30,11 +30,11 @@ fun value_cmd2 some_name modes raw_t state =
Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) ();
in Pretty.writeln p end;
*}
ML{* value_cmd2*}
\<close>
ML\<open>value_cmd2\<close>
definition ASSERT :: "bool \<Rightarrow> bool" where "ASSERT p == (p=True)"
ML{* val x = @{code "ASSERT"} *}
ML{*
ML\<open>val x = @{code "ASSERT"}\<close>
ML\<open>
val opt_modes =
Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.name --| @{keyword ")"})) [];
@@ -49,9 +49,9 @@ val _ =
(* Toplevel.keep (Value_Command.value_cmd some_name modes (enclose "ASSERT(" ")" t)) *)
Toplevel.keep (value_cmd2 some_name modes t)
end));
*}
\<close>
assert "True"
assert "True \<and> True "
ML{* !TT ;
@{term "True"} *}
ML\<open>!TT ;
@{term "True"}\<close>


@@ -731,7 +731,7 @@ typedecl "thm"
typedecl "file"
typedecl "thy"
\<comment> \<open> and others in the future : file, http, thy, ... \<close>
\<comment> \<open>and others in the future : file, http, thy, ...\<close>
consts ISA_typ :: "string \<Rightarrow> typ" ("@{typ _}")
consts ISA_term :: "string \<Rightarrow> term" ("@{term _}")
@@ -1525,7 +1525,7 @@ end
(* the following 2 lines set parser and converter for LaTeX generation of meta-attributes.
Currently of *all* commands, no distinction between text* and text command.
This code depends on a MODIFIED Isabelle2017 version resulting from applying the files
under Isabell_DOF/patches.
under src/patches.
*)
(* REMARK PORT 2018 : transmission of meta-args to LaTeX crude and untested. Can be found in
present_token. *)


@@ -25,22 +25,22 @@ definition opt :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrakk>(_)\<rbrakk>")
where "\<lbrakk>A\<rbrakk> \<equiv> A || One"
value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))"
text{* or better equivalently: *}
text\<open>or better equivalently:\<close>
value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*"
section{* Definition of a semantic function: the ``language'' of the regular expression *}
section\<open>Definition of a semantic function: the ``language'' of the regular expression\<close>
text\<open> This is just a reminder - already defined in @{theory Regular_Exp} as @{term lang}.\<close>
text{* In the following, we give a semantics for our regular expressions, which so far have
text\<open>In the following, we give a semantics for our regular expressions, which so far have
just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'',
i.e. we give a direct meaning for regular expressions in some universe of ``denotations''.
This universe of denotations is in our concrete case: *}
This universe of denotations is in our concrete case:\<close>
definition enabled :: "('a,'\<sigma> set)da \<Rightarrow> '\<sigma> set \<Rightarrow> 'a list \<Rightarrow> 'a list"
where "enabled A \<sigma> = filter (\<lambda>x. next A x \<sigma> \<noteq> {}) "
text{* Now the denotational semantics for regular expression can be defined on a post-card: *}
text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close>
fun L :: "'a rexp => 'a lang"
where L_Emp : "L Zero = {}"


@@ -37,20 +37,20 @@ definition opt :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrakk>(_)\<rbrakk>")
where "\<lbrakk>A\<rbrakk> \<equiv> A || One"
value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))"
text{* or better equivalently: *}
text\<open>or better equivalently:\<close>
value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*"
section{* Some Standard and Derived Semantics *}
section\<open>Some Standard and Derived Semantics\<close>
text\<open> This is just a reminder - already defined in @{theory "Regular-Sets.Regular_Exp"}
as @{term lang}.\<close>
text{* In the following, we give a semantics for our regular expressions, which so far have
text\<open>In the following, we give a semantics for our regular expressions, which so far have
just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'',
i.e. we give a direct meaning for regular expressions in some universe of ``denotations''.
This universe of denotations is in our concrete case: *}
This universe of denotations is in our concrete case:\<close>
text{* Now the denotational semantics for regular expression can be defined on a post-card: *}
text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close>
fun L :: "'a rexp => 'a lang"
where L_Emp : "L Zero = {}"

src/ROOT (new file, 16 lines)

@@ -0,0 +1,16 @@
session "Isabelle_DOF" = "Functional-Automata" +
options [document = pdf, document_output = "output"]
sessions
"Regular-Sets"
theories
"DOF/Isa_DOF"
"ontologies/ontologies"
session "Isabelle_DOF-tests" = "Isabelle_DOF" +
options [document = false]
theories
"tests/AssnsLemmaThmEtc"
"tests/Concept_ExampleInvariant"
"tests/Concept_Example"
"tests/InnerSyntaxAntiquotations"
"tests/Attributes"


@@ -1,6 +1,7 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# Copyright (c) 2019 University of Exeter
# 2018-2019 University of Paris-Sud
# 2018-2019 The University of Sheffield
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
@ -39,7 +40,6 @@ function usage()
echo
echo " Options are:"
echo " -h print this help text and exit"
echo " -d enable document preparation"
echo " -n NAME alternative session name (default: DIR base name)"
echo " -o ONTOLOGY (default: $DEFAULT_ONTOLOGY)"
echo " Available ontologies:"
@ -72,7 +72,6 @@ function fail()
# options
DOC=""
NAME=""
DEFAULT_TEMPLATE="scrreprt"
DEFAULT_ONTOLOGY="core"
@ -87,9 +86,6 @@ do
usage
exit 1
;;
d)
DOC="true"
;;
n)
NAME="$OPTARG"
;;
@ -132,28 +128,23 @@ if [ -z "$NAME" ]; then
NAME="$DIR"
fi
if [ "$DOC" = true ]; then
$ISABELLE_TOOL mkroot -d -n "$NAME" "$DIR"
echo " \"preamble.tex\"" >> "$DIR"/ROOT
echo " \"build\"" >> "$DIR"/ROOT
sed -i -e "s/root.tex/isadof.cfg/" "$DIR"/ROOT
sed -i -e "s/HOL/Isabelle_DOF/" "$DIR"/ROOT
rm -f "$DIR"/document/root.tex
$ISABELLE_TOOL mkroot -n "$NAME" "$DIR"
echo " \"preamble.tex\"" >> "$DIR"/ROOT
echo " \"build\"" >> "$DIR"/ROOT
sed -i -e "s/root.tex/isadof.cfg/" "$DIR"/ROOT
sed -i -e "s/HOL/Isabelle_DOF/" "$DIR"/ROOT
rm -f "$DIR"/document/root.tex
# Creating isadof.cfg
echo "Template: $TEMPLATE" > "$DIR"/document/isadof.cfg
cp "$ISABELLE_HOME_USER/DOF/document-template/build" "$DIR"/document/
for o in $ONTOLOGY; do
echo "Ontology: $o" >> "$DIR"/document/isadof.cfg;
done
# Creating isadof.cfg
echo "Template: $TEMPLATE" > "$DIR"/document/isadof.cfg
cp "$ISABELLE_HOME_USER/DOF/document-template/build" "$DIR"/document/
for o in $ONTOLOGY; do
echo "Ontology: $o" >> "$DIR"/document/isadof.cfg;
done
# Creating praemble.tex
TITLE=$(echo "$NAME" | tr _ - | tr -d '\\')
AUTHOR=$(echo "By $USER" | tr _ - | tr -d '\\')
echo "%% This is a placeholder for user-specific configuration and packages." >> "$DIR"/document/preamble.tex
echo "\\title{$TITLE}{}{}{}{}{}{}" >> "$DIR"/document/preamble.tex
echo "\\author{$AUTHOR}{}{}{}{}{}" >> "$DIR"/document/preamble.tex
else
$ISABELLE_TOOL mkroot -n "$NAME"
fi
# Creating preamble.tex
TITLE=$(echo "$NAME" | tr _ - | tr -d '\\')
AUTHOR=$(echo "By $USER" | tr _ - | tr -d '\\')
echo "%% This is a placeholder for user-specific configuration and packages." >> "$DIR"/document/preamble.tex
echo "\\title{$TITLE}{}{}{}{}{}{}" >> "$DIR"/document/preamble.tex
echo "\\author{$AUTHOR}{}{}{}{}{}" >> "$DIR"/document/preamble.tex

View File

@ -1,6 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% 2019 The University of Exeter
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Sud
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
@ -14,7 +14,11 @@
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF using
%% This is the root file for the Isabelle/DOF using the eptcs class.
%% Note that eptcs cannot be distributed as part of Isabelle/DOF; you need
%% to download eptcs.cls from http://eptcs.web.cse.unsw.edu.au/style.shtml
%% and add it manually to the preamble.tex and the ROOT file.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.

View File

@ -1,5 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Sud
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
@ -13,9 +14,14 @@
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF using Springer's llncs.cls.
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
%% This is the root file for the Isabelle/DOF using the lipics class.
%% Note that lipics cannot be distributed as part of Isabelle/DOF; you need
%% to download lipics.cls from
%% https://www.dagstuhl.de/en/publications/lipics/instructions-for-authors/
%% and add it manually to the preamble.tex and the ROOT file.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
\documentclass[a4paper,USenglish,cleveref, autoref]{lipics-v2019}
\bibliographystyle{plainurl}% the mandatory bibstyle

View File

@ -1,5 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Sud
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
@ -13,9 +14,14 @@
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF using Springer's llncs.cls.
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
%% This is the root file for the Isabelle/DOF using the llncs class.
%% Note that llncs cannot be distributed as part of Isabelle/DOF; you need
%% to download llncs.cls from
%% https://www.springer.com/gp/computer-science/lncs/conference-proceedings-guidelines
%% and add it manually to the preamble.tex and the ROOT file.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
\RequirePackage{ifvtex}
\documentclass{llncs}

View File

@ -1,5 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Sud
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
@ -13,10 +14,10 @@
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF.
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
%% This is the root file for the Isabelle/DOF using the scrartcl class.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
\RequirePackage{ifvtex}
\documentclass[fontsize=11pt,DIV=12,paper=a4]{scrartcl}

View File

@ -1,5 +1,6 @@
%% Copyright (C) 2018 The University of Sheffield
%% 2018 The University of Paris-Sud
%% Copyright (c) 2019 University of Exeter
%% 2018-2019 University of Paris-Sud
%% 2018-2019 The University of Sheffield
%%
%% License:
%% This program can be redistributed and/or modified under the terms
@ -13,10 +14,10 @@
%% Warning: Do Not Edit!
%% =====================
%% This is the root file for the Isabelle/DOF.
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
%% This is the root file for the Isabelle/DOF using the scrreprt class.
%%
%% All customization and/or additional packages should be added to the file
%% preamble.tex.
\RequirePackage{ifvtex}
\documentclass[fontsize=10pt,DIV=12,paper=a4,open=right,twoside,abstract=true]{scrreprt}

View File

@ -12,7 +12,7 @@ identifies:
(*<<*)
theory CENELEC_50128
imports "../Isa_COL"
imports "../../DOF/Isa_COL"
begin
(*>>*)
@ -388,7 +388,7 @@ doc_class TC = requirement +
section\<open>Software Assurance related Entities and Concepts\<close>
text{* subcategories are : *}
text\<open>subcategories are:\<close>
text\<open>Table A.13: \<close>
@ -726,4 +726,4 @@ Proof_Context.init_global;
\<close>
end

View File

@ -1,5 +1,5 @@
theory Conceptual
imports "../Isa_DOF" "../Isa_COL"
imports "../../DOF/Isa_DOF" "../../DOF/Isa_COL"
begin
doc_class A =

View File

@ -12,7 +12,7 @@
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
\NeedsTeXFormat{LaTeX2e}\relax
\ProvidesPackage{DOF-mathex}
\ProvidesPackage{DOF-math_exam}
[0000/00/00 Unreleased v0.0.0+%
Document-Type Support Framework for math classes.]

View File

@ -1,9 +1,9 @@
theory mathex_onto
imports "../Isa_COL"
theory math_exam
imports "../../DOF/Isa_COL"
begin
(*<<*)
text{* In our scenario, content has four different types of addressees:
text\<open>In our scenario, content has four different types of addressees:
\<^item> the @{emph \<open>setter\<close>}, i.e. the author of the exam,
\<^item> the @{emph \<open>student\<close>}, i.e. the addressee of the exam,
\<^item> the @{emph \<open>checker\<close>}, i.e. a person that checks the exam for
@ -13,7 +13,7 @@ text{* In our scenario, content has four different types of addressees:
Note that the latter quality assurance mechanism is used in many universities,
where for organizational reasons the execution of an exam takes place in facilities
where the author of the exam is not expected to be physically present.
*}
\<close>
datatype ContentClass =
@ -76,12 +76,12 @@ doc_class Exercise = Exam_item +
concerns :: "ContentClass set" <= "{setter,student,checker,externalExaminer}"
text{* In many institutions, it makes sense to have a rigorous process of validation
text\<open>In many institutions, it makes sense to have a rigorous process of validation
for exam subjects: is the initial question correct? Is a proof in the sense of the
question possible? We model the possibility that the @{term setter} validates a
question by a sample proof validated by Isabelle. In our scenario these sample proofs
are completely @{emph \<open>intern\<close>}, i.e. not exposed to the students but just additional
material for the internal review process of the exam.*}
material for the internal review process of the exam.\<close>
doc_class Validation =
tests :: "term list" <="[]"
@ -98,4 +98,4 @@ doc_class MathExam =
accepts "\<lbrace>Author\<rbrace>\<^sup>+ ~~ Header ~~ \<lbrace>Exercise ~~ Solution\<rbrace>\<^sup>+ "
(*>>*)
end
end

View File

@ -12,7 +12,7 @@ proving environment after all ! So this ontology provides:
theory math_paper
imports "../Isa_DOF"
imports "../../DOF/Isa_DOF"
begin

View File

@ -0,0 +1,12 @@
theory
ontologies
imports
"CENELEC_50128/CENELEC_50128"
"Conceptual/Conceptual"
"math_exam/math_exam"
"math_paper/math_paper"
"scholarly_paper/scholarly_paper"
"small_math/small_math"
"technical_report/technical_report"
begin
end

View File

@ -1,7 +1,7 @@
section{* An example ontology for a scholarly paper*}
section\<open>An example ontology for a scholarly paper\<close>
theory scholarly_paper
imports "../Isa_COL"
imports "../../DOF/Isa_COL"
begin
doc_class title =
@ -45,7 +45,7 @@ doc_class technical = text_section +
definition_list :: "string list" <= "[]"
formal_results :: "thm list"
text{* A very rough formatting style could be modeled as follows:*}
text\<open>A very rough formatting style could be modeled as follows:\<close>
doc_class example = text_section +
@ -185,9 +185,5 @@ fun check_group a = map (check_group_elem (check_level_hd (hd a))) (tl a) ;
*)
\<close>
gen_sty_template
end

View File

@ -1,7 +1,7 @@
section{* An example ontology for a math paper*}
section\<open>An example ontology for a math paper\<close>
theory small_math
imports "../Isa_COL"
imports "../../DOF/Isa_COL"
begin
doc_class title =
@ -156,10 +156,5 @@ setup\<open> let val cidS = ["small_math.introduction","small_math.technical", "
true)
in DOF_core.update_class_invariant "small_math.article" body end\<close>
gen_sty_template
end

View File

@ -1,7 +1,7 @@
section{* An example ontology for a scholarly paper*}
section\<open>An example ontology for a scholarly paper\<close>
theory technical_report
imports "scholarly_paper"
imports "../scholarly_paper/scholarly_paper"
begin
(* for reports paper: invariant: level \<ge> -1 *)

View File

@ -353,19 +353,16 @@ val ignore =
(if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
>> pair (d - 1));
val locale =
Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |--
Parse.!!!
(Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")")));
(*
val meta_args_parser_hook = Synchronized.var "meta args parser hook"
((fn thy => fn s => ("",s)): theory -> string parser);
*)
val meta_args_parser_hook = Unsynchronized.ref
((fn _ => fn s => ("",s)): theory -> string parser);
val meta_args_parser_hook = Unsynchronized.ref ((fn _ => fn s => ("",s)): theory -> string parser);
in
@ -381,7 +378,6 @@ fun present_thy options thy (segments: segment list) =
val ignored = Scan.state --| ignore
>> (fn d => (NONE, (Ignore_Token, ("", d))));
fun markup pred mk flag = Scan.peek (fn d =>
Document_Source.improper
|-- Parse.position (Scan.one (fn tok => Token.is_command tok andalso
@ -476,9 +472,12 @@ fun present_thy options thy (segments: segment list) =
else error "Messed-up outer syntax for presentation"
end;
fun set_meta_args_parser f = let val _ = writeln "Meta-args parser set to new value"
in ( meta_args_parser_hook := f) end
fun set_meta_args_parser f =
let
val _ = writeln "Meta-args parser set to new value"
in
(meta_args_parser_hook := f)
end
end;
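(A sketch of how client code would use the simplified hook, assuming `set_meta_args_parser` is reachable from the enclosing structure; the parser shown mirrors the hook's default value:)
```
(* install a meta-args parser; the hook is an Unsynchronized.ref,
   so the most recent assignment wins *)
val _ = set_meta_args_parser
          ((fn _ => fn toks => ("", toks)) : theory -> string parser);
```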

View File

@ -1,6 +1,7 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# Copyright (c) 2019 University of Exeter
# 2018-2019 University of Paris-Sud
# 2018-2019 The University of Sheffield
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
@ -28,17 +29,17 @@
set -e
if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then
echo ""
echo "Error: Isabelle/DOF not installed"
echo "====="
echo "This is a Isabelle/DOF project. The document preparation requires"
echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
echo "the Isabelle/DOF git repository, i.e.: "
echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
echo "You can install the framework as follows:"
echo " cd Isabelle_DOF/document-generator"
echo " ./install"
echo ""
>&2 echo ""
>&2 echo "Error: Isabelle/DOF not installed"
>&2 echo "====="
>&2 echo "This is a Isabelle/DOF project. The document preparation requires"
>&2 echo "the Isabelle/DOF framework. Please obtain the framework by cloning"
>&2 echo "the Isabelle/DOF git repository, i.e.: "
>&2 echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF"
>&2 echo "You can install the framework as follows:"
>&2 echo " cd Isabelle_DOF/document-generator"
>&2 echo " ./install"
>&2 echo ""
exit 1
fi
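(Design note: prefixing every `echo` with `>&2` keeps the diff line-local. An equivalent alternative, sketched here rather than taken from the commit, redirects the whole block once:)
```bash
{
  echo ""
  echo "Error: Isabelle/DOF not installed"
  echo "====="
  # ... remaining message lines ...
  echo ""
} >&2
exit 1
```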

View File

@ -1,6 +1,7 @@
#!/usr/bin/env bash
# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
# 2018 The University of Paris-Sud. All rights reserved.
# Copyright (c) 2019 University of Exeter
# 2018-2019 University of Paris-Sud
# 2018-2019 The University of Sheffield
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions

View File

@ -1,5 +1,8 @@
theory AssnsLemmaThmEtc
imports "../../ontologies/Conceptual" "../../ontologies/math_paper"
theory
AssnsLemmaThmEtc
imports
"../ontologies/Conceptual/Conceptual"
"../ontologies/math_paper/math_paper"
begin
section\<open>Elementary Creation of Doc-items and Access of their Attributes\<close>
@ -17,14 +20,18 @@ text*[aa::F, property = "[@{term ''True''}]"]
\<open>Our definition of the HOL-Logic has the following properties:\<close>
assert*[aa::F] "True"
(* does not work in batch mode ...
(* sample for accessing a property filled with previous assert's of "aa" *)
ML\<open> ISA_core.property_list_dest @{context} @{docitem_attribute property :: aa};\<close>
assert*[aa::F] " X \<and> Y \<longrightarrow> True" (*example with uni-code *)
ML\<open> ISA_core.property_list_dest @{context} @{docitem_attribute property :: aa};
app writeln (tl (rev it));\<close>
*)
assert*[aa::F] "\<forall>x::bool. X \<and> Y \<longrightarrow> True" (*problem with uni-code *)
ML\<open>
@ -33,12 +40,12 @@ Syntax.read_term_global @{theory} "[@{termrepr ''True @<longrightarrow> True''}]
@{term "[@{term '' True @<longrightarrow> True ''}]"}; (* with isa-check *)
@{term "[@{termrepr '' True @<longrightarrow> True ''}]"}; (* without isa check *)
\<close>
(*
ML\<open>val [_,_,Const _ $ s,_] = (HOLogic.dest_list @{docitem_attribute property :: aa});
val t = HOLogic.dest_string s;
holstring_to_bstring @{context} t
\<close>
*)
lemma "All (\<lambda>x. X \<and> Y \<longrightarrow> True)" oops

View File

@ -1,5 +1,7 @@
theory Attributes
imports "../../ontologies/Conceptual"
theory
Attributes
imports
"../ontologies/Conceptual/Conceptual"
begin
section\<open>Elementary Creation of Doc-items and Access of their Attributes\<close>
@ -12,19 +14,16 @@ print_doc_items
ML\<open>
val {docobj_tab={tab = docitem_tab, ...},docclass_tab, ISA_transformer_tab, monitor_tab,...}
= DOF_core.get_data @{context};
Symtab.dest docitem_tab;
Symtab.dest docclass_tab;
\<close>
ML\<open>
fun fac x = if x = 0 then 1 else x * (fac(x -1));
fac 3;
open Thm;
\<close>
text\<open>A text item containing standard theorem antiquotations and complex meta-information.\<close>
text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\<open> Lorem ipsum ... @{thm refl} \<close>
(* crashes in batch mode ...
text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\<open> Lorem ipsum ... @{thm refl} \<close>
*)
text*[dfgdfg::B]\<open> Lorem ipsum ... @{thm refl} \<close>
text\<open>document class declarations also lead to HOL-type declarations (relevant for ontological links).\<close>
typ "C"
@ -112,7 +111,6 @@ text\<open> @{docitem_attribute omega::y} \<close>
section\<open>Simulation of a Monitor\<close>
open_monitor*[figs1::figure_group,
(* anchor="''fig-demo''", ? ? ? apparently eliminated by Achim *)
caption="''Sample ''"]
figure*[fig_A::figure, spawn_columns=False,

View File

@ -1,10 +1,12 @@
chapter\<open>Setting and modifying attributes of doc-items\<close>
theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
theory
Concept_Example
imports
"../ontologies/Conceptual/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
text\<open>@{theory Draft.Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
text\<open>@{theory "Isabelle_DOF-tests.Conceptual"} provides a monitor @{typ M} enforcing a particular document structure.
Here, we say: From now on, these structural rules are respected wrt. all doc\_classes M is
enabled for.\<close>
open_monitor*[struct::M]
@ -29,7 +31,7 @@ fancy unicode such as: @{term "\<open>\<beta>\<^sub>i''\<close>"} \<close>
text*[f::F] \<open> Lectus accumsan velit ultrices, ... }\<close>
theorem some_proof : "P" sorry
theorem some_proof : "True" by simp
text\<open>This is an example where we add a theorem into a kind of "result-list" of the doc-item f.\<close>
update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"]
@ -52,4 +54,4 @@ print_doc_items
end

View File

@ -1,7 +1,9 @@
chapter\<open>Setting and modifying attributes of doc-items\<close>
theory Concept_ExampleInvariant
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
theory
Concept_ExampleInvariant
imports
"../ontologies/Conceptual/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
section\<open>Example: Standard Class Invariant\<close>
@ -108,4 +110,4 @@ close_monitor*[struct]
end

View File

@ -1,7 +1,9 @@
chapter\<open>Inner Syntax Antiquotations (ISA)'s\<close>
theory InnerSyntaxAntiquotations
imports "../../ontologies/Conceptual"
theory
InnerSyntaxAntiquotations
imports
"../ontologies/Conceptual/Conceptual"
begin
text\<open>Since the syntax chosen for values of doc-class attributes is HOL-syntax --- requiring
@ -14,7 +16,7 @@ They are the key-mechanism to denote
\<^item> Ontological Links, i.e. attributes referring to document classes defined by the ontology
\<^item> Ontological F-Links, i.e. attributes referring to formal entities inside Isabelle (such as thm's)
This file contains a number of examples resulting from the @{theory "Draft.Conceptual"} - ontology;
This file contains a number of examples resulting from the @{theory "Isabelle_DOF-tests.Conceptual"} ontology;
the emphasis of this presentation is to present the expressivity of ODL on a paradigmatical example.
\<close>
@ -26,11 +28,13 @@ val {docobj_tab={tab = x, ...},docclass_tab, ISA_transformer_tab,...} = DOF_core
\<close>
text\<open>Some sample lemma:\<close>
lemma murks : "Example" sorry
lemma murks : "Example=Example" by simp
text\<open>Example for a meta-attribute of ODL-type @{typ "file"} with an appropriate ISA for the
file @{file "./Attributes.thy"}\<close>
text*[xcv::F, u="@{file ''./examples/conceptual/Attributes.thy''}"]\<open>Lorem ipsum ...\<close>
file @{file "InnerSyntaxAntiquotations.thy"}\<close>
(* not working:
text*[xcv::F, u="@{file ''InnerSyntaxAntiquotations.thy''}"]\<open>Lorem ipsum ...\<close>
*)
text*[xcv1::A, x=5]\<open>Lorem ipsum ...\<close>
text*[xcv3::A, x=7]\<open>Lorem ipsum ...\<close>

View File

Before/After: binary image, 12 KiB (size unchanged).

View File

Before/After: binary image, 96 KiB (size unchanged).

View File

@ -1,115 +0,0 @@
theory Example
imports "../../ontologies/CENELEC_50128"
begin
section{* Some show-off's of general antiquotations. *}
(* some show-off of standard anti-quotations: *)
print_attributes
print_antiquotations
text\<open> @{thm refl} of name @{thm [source] refl}
@{thm[mode=Rule] conjI}
@{file "../../Isa_DOF.thy"}
@{value "3+4::int"}
@{const hd}
@{theory List}}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
section{* Example *}
text*[tralala] {* Brexit means Brexit *}
text*[ass1::assumption] {* Brexit means Brexit *}
text*[hyp1::hypothesis] {* P inequal NP *}
text*[ass122::srac] {* The overall sampling frequence of the odometer
subsystem is therefore 14 khz, which includes sampling, computing and
result communication times... *}
text*[t10::test_result] {* This is a meta-test. This could be an ML-command
that governs the external test-execution via, eg., a makefile or specific calls
to a test-environment or test-engine *}
text \<open> @{ec \<open>ass122\<close>}}\<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>} \<close>
text \<open> the @{docref \<open>t10\<close>}
as well as the @{docref \<open>ass122\<close>}\<close>
text \<open> represent a justification of the safety related applicability
condition @{srac \<open>ass122\<close>} aka exported constraint @{ec \<open>ass122\<close>}.
\<close>
text{* And some ontologically inconsistent reference: @{hypothesis \<open>ass1\<close>} as well as *}
-- wrong
text{* ... some more ontologically inconsistent reference: @{assumption \<open>hyp1\<close>} as well as *}
-- wrong
text{* And a further ontologically totally inconsistent reference:
@{test_result \<open>ass122\<close>} as well as ... *}
-- wrong
text{* the ontologically inconsistent reference: @{ec \<open>t10\<close>} *}
-- wrong
section{* Some Tests for Ontology Framework and its CENELEC Instance *}
declare_reference* [lalala::requirement, alpha="main", beta=42]
declare_reference* [lalala::quod] (* shouldn't work: multiple declaration *)
declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1]
paragraph*[sdf]{* just a paragraph *}
paragraph* [sdfk] \<open> just a paragraph - lexical variant \<close>
subsection*[sdf]{* shouldn't work, multiple ref. *}
-- wrong
section*[sedf::requirement, alpja= refl]{* Shouldn't work - misspelled attribute. *}
text\<open>\label{sedf}\<close> (* Hack to make the LaTeX-ing running. Should disappear. *)
-- wrong
section*[seedf::test_case, dfg=34,fgdfg=zf]{* and another example with attribute setting,
but wrong doc_class constraint. *}
-- wrong
section{* Text Antiquotation Infrastructure ... *}
text{* @{docref \<open>lalala\<close>} -- produces warning. *}
text{* @{docref (unchecked) \<open>lalala\<close>} -- produces no warning. *}
text{* @{docref \<open>ass122\<close>} -- global reference to a text-item in another file. *}
text{* @{ec \<open>ass122\<close>} -- global reference to a exported constraint in another file.
Note that the link is actually a srac, which, according to
the ontology, happens to be an "ec". *}
text{* @{test_specification \<open>ass122\<close>} -- wrong: "reference ontologically inconsistent". *}
-- wrong
text{* Here is a reference to @{docref \<open>sedf\<close>} *}
(* shouldn't work: label exists, but definition was finally rejected to to errors. *)
check_doc_global (* shoudn't work : Unresolved forward references: lalala,blablabla *)
-- wrong
section \<open>Miscellaneous\<close>
section(in order){* sdfsdf*} (* undocumented trouvaille when analysing the code *)
end

View File

@ -1,119 +0,0 @@
theory Attributes
imports "../../ontologies/Conceptual"
begin
section\<open>Elementary Creation of DocItems and Access of their Attibutes\<close>
text\<open>Current status:\<close>
print_doc_classes
print_doc_items
(* corresponds to low-level accesses : *)
ML\<open>
val {docobj_tab={tab = docitem_tab, ...},docclass_tab, ISA_transformer_tab, monitor_tab,...}
= DOF_core.get_data @{context};
Symtab.dest docitem_tab;
"==============================================";
Symtab.dest docclass_tab;
\<close>
text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\<open> sdfsdfs sdfsdf sdfsdf @{thm refl} \<close>
typ "C"
typ "D"
ML\<open>val Type("Conceptual.B.B_ext",[Type("Conceptual.C.C_ext",t)]) = @{typ "C"};
val @{typ "D"} = ODL_Command_Parser.cid_2_cidType "Conceptual.D" @{theory};
val @{typ "E"}= ODL_Command_Parser.cid_2_cidType "Conceptual.E" @{theory};
\<close>
text*[dfgdfg2::C, z = "None"]\<open> sdfsdfs sdfsdf sdfsdf @{thm refl} \<close>
text*[omega::E, x = "''def''"]\<open> sdfsdfs sdfsdf sdfsdf @{thm refl} \<close>
text\<open> @{docitem_ref \<open>dfgdfg\<close>} \<close>
term "A.x (undefined\<lparr>A.x := 3\<rparr>)"
term "B.x ((undefined::C)\<lparr>B.y := [''sdf'']\<rparr>)"
term "C.z ((undefined::C)\<lparr>B.y := [''sdf''], z:= Some undefined\<rparr>)"
ML\<open>
val SOME {def_occurrence = "Conceptual.A", long_name = "Conceptual.A.x", typ = t, def_pos}
= DOF_core.get_attribute_info "Conceptual.A" "x" @{theory};
DOF_core.get_attribute_info "Conceptual.B" "x" @{theory};
DOF_core.get_attribute_info "Conceptual.B" "y" @{theory};
DOF_core.get_attribute_info "Conceptual.C" "x" @{theory};
val SOME {def_occurrence = "Conceptual.C", long_name = "Conceptual.B.y", typ = t', def_pos}
= DOF_core.get_attribute_info "Conceptual.C" "y" @{theory};
(* this is the situation where an attribute is defined in C, but due to inheritance
from B, where it is firstly declared which results in a different long_name. *)
DOF_core.get_attribute_info "Conceptual.C" "z" @{theory};
\<close>
ML\<open>
DOF_core.get_value_local "sdf" @{context};
DOF_core.get_value_local "sdfg" @{context};
DOF_core.get_value_local "xxxy" @{context};
DOF_core.get_value_local "dfgdfg" @{context};
DOF_core.get_value_local "omega" @{context};
\<close>
text\<open>A not too trivial test: default y -> [].
At creation : x -> "f", y -> "sdf".
The latter wins at access time.
Then @{term "t"}: creation of a multi inheritance object omega,
triple updates, the last one wins.\<close>
ML\<open>val s = map HOLogic.dest_string (HOLogic.dest_list @{docitem_attribute y::dfgdfg});
val t = HOLogic.dest_string (@{docitem_attribute x::omega}); \<close>
section\<open>Mutation of Attibutes in DocItems\<close>
ML\<open> val Const ("Groups.zero_class.zero", @{typ "int"}) = @{docitem_attribute a2::omega} \<close>
update_instance*[omega::E, a2+="1"]
ML\<open> val Const ("Groups.one_class.one", @{typ "int"})= @{docitem_attribute a2::omega} \<close>
update_instance*[omega::E, a2+="6"]
ML\<open> @{docitem_attribute a2::omega} \<close>
ML\<open> HOLogic.dest_number @{docitem_attribute a2::omega} \<close>
update_instance*[omega::E, x+="''inition''"]
ML\<open> val s = HOLogic.dest_string ( @{docitem_attribute x::omega}) \<close>
update_instance*[omega::E, y+="[''defini'',''tion'']"]
update_instance*[omega::E, y+="[''en'']"]
ML\<open> val s = map HOLogic.dest_string (HOLogic.dest_list @{docitem_attribute y::omega}); \<close>
section\<open>Simulation of a Monitor\<close>
open_monitor*[figs1::figure_group,
(* anchor="''fig-demo''", Achim ...*)
caption="''Sample ''"]
figure*[fig_A::figure, spawn_columns=False,relative_width="90",
src="''figures/A.png''"]
\<open> The A train \ldots \<close>
figure*[fig_B::figure, spawn_columns=False,relative_width="90",
src="''figures/B.png''"]
\<open> The B train \ldots \<close>
close_monitor*[figs1]
text\<open>Resulting trace in figs1: \<close>
ML\<open>@{trace_attribute figs1}\<close>
print_doc_items
check_doc_global
end

View File

@ -1,109 +0,0 @@
chapter\<open>Setting and modifying attributes of doc-items\<close>
theory Concept_ExampleInvariant
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
section\<open>Example: Standard Class Invariant\<close>
text\<open>Status:\<close>
print_doc_classes
print_doc_items
text\<open>Watch out: The current programming interface to document class invariants is pretty low-level:
\<^item> No inheritance principle
\<^item> No high-level notation in HOL
\<^item> Typing on ML level is assumed to be correct.
The implementor of an ontology must know what he does ...
\<close>
text\<open>Setting a sample invariant, which simply produces some side-effect:\<close>
setup\<open>DOF_core.update_class_invariant "Conceptual.A" (fn oid =>
fn {is_monitor = b} =>
fn ctxt =>
(writeln ("sample echo : "^oid); true))\<close>
subsection*[b::A, x = "5"] \<open> Lorem ipsum dolor sit amet, ... \<close>
text\<open>Setting a sample invariant, referring to attribute value "x":\<close>
ML\<open>fun check_A_invariant oid {is_monitor:bool} ctxt =
let val term = AttributeAccess.calc_attr_access ctxt "x" oid @{here}
val (@{typ "int"},x_value) = HOLogic.dest_number term
in if x_value > 5 then error("class A invariant violation") else true end
\<close>
setup\<open>DOF_core.update_class_invariant "Conceptual.A" check_A_invariant\<close>
(* test : should fail : *)
subsection*[c::A, x = "6"] \<open> Lorem ipsum dolor sit amet, ... \<close>
subsection*[d::A, x = "5"] \<open> Lorem ipsum dolor sit amet, ... \<close>
(* test : update should fail : *)
update_instance*[d::A, x += "1"]
section\<open>Example: Monitor Class Invariant\<close>
text\<open>Of particular interest are class invariants attached to monitor classes: since the
latter manage a trace-attribute, a class invariant on them can assure a global form of consistency.
It is possible to express:
\<^item> that attributes of a document element must satisfy particular conditions depending on the
prior document elements --- as long they have been observed in a monitor.
\<^item> non-regular properties on a trace not expressible in a regular expression
(like balanced ness of opening and closing text elements)
\<^item> etc.
\<close>
text\<open>A simple global trace-invariant is expressed in the following: it requires
that instances of class C occur more often as those of class D; note that this is meant
to take sub-classing into account:
\<close>
ML\<open>fun check_M_invariant oid {is_monitor} ctxt =
let val term = AttributeAccess.calc_attr_access ctxt "trace" oid @{here}
fun conv (Const(@{const_name "Pair"},_) $ Const(s,_) $ S) = (s, HOLogic.dest_string S)
val string_pair_list = map conv (HOLogic.dest_list term)
val cid_list = map fst string_pair_list
val ctxt' = Proof_Context.init_global(Context.theory_of ctxt)
fun is_C x = DOF_core.is_subclass ctxt' x "Conceptual.C"
fun is_D x = DOF_core.is_subclass ctxt' x "Conceptual.D"
val n = length (filter is_C cid_list)
val m = length (filter is_D cid_list)
in if m > n then error("class M invariant violation") else true end
\<close>
setup\<open>DOF_core.update_class_invariant "Conceptual.M" check_M_invariant\<close>
open_monitor*[struct::M]
section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
text*[d1::E, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus, ... \<close>
text*[c2::C, x = "''delta''"] \<open> ... in ut tortor eleifend augue pretium consectetuer... \<close>
section*[f::E] \<open> Lectus accumsan velit ultrices, ... }\<close>
(* test : close_monitor should fail : *)
section*[f2::E] \<open> Lectus accumsan velit ultrices, ... }\<close>
ML\<open>val term = AttributeAccess.calc_attr_access (Context.Proof @{context}) "trace" "struct" @{here};
fun conv (Const(@{const_name "Pair"},_) $ Const(s,_) $ S) = (s, HOLogic.dest_string S)
val string_pair_list = map conv (HOLogic.dest_list term)
\<close>
(* trace example *)
text\<open>Setting a sample invariant, referring to attribute value "x":\<close>
close_monitor*[struct]
end

View File

@ -1,161 +0,0 @@
theory Example
imports "../../ontologies/CENELEC_50128"
keywords "Term" :: diag
begin
section\<open> Some show-off's of general antiquotations : for demos. \<close>
(* some show-off of standard anti-quotations: *)
print_attributes
print_antiquotations
text\<open> @{thm refl} of name @{thm [source] refl}
@{thm[mode=Rule] conjI}
@{file "../../Isa_DOF.thy"}
@{value "3+4::int"}
@{const hd}
@{theory List}}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
section\<open> Example \<close>
text*[ass1::assumption] \<open> Brexit means Brexit \<close>
text*[hyp1::hypothesis] \<open> P means not P \<close>
text*[ass122::srac] \<open> The overall sampling frequence of the odometer
subsystem is therefore 14 khz, which includes sampling, computing and
result communication times... \<close>
text*[t10::test_result] \<open> This is a meta-test. This could be an ML-command
that governs the external test-execution via, eg., a makefile or specific calls
to a test-environment or test-engine \<close>
text \<open> As established by @{docref (unchecked) \<open>t10\<close>},
@{docref (define) \<open>t10\<close>}
the @{docref \<open>t10\<close>}
the @{docref \<open>ass122\<close>}
\<close>
text \<open> safety related applicability condition @{srac \<open>ass122\<close>}.
exported constraint @{ec \<open>ass122\<close>}.
\<close>
text\<open>
And some ontologically inconsistent reference:
@{hypothesis \<open>ass1\<close>} as well as
\<close>
-- "very wrong"
text\<open>
And some ontologically inconsistent reference:
@{assumption \<open>hyp1\<close>} as well as
\<close>
-- "very wrong"
text\<open>
And some ontologically inconsistent reference:
@{test_result \<open>ass122\<close>} as well as
\<close>
-- wrong
text\<open>
And some other ontologically inconsistent reference:
@{ec \<open>t10\<close>} as well as
\<close>
-- wrong
section\<open> Some Tests for Ontology Framework and its CENELEC Instance \<close>
declare_reference* [lalala::requirement, alpha="main", beta=42]
declare_reference* [lalala::quod] (* multiple declaration*)
-- wrong
declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1]
paragraph*[sdf]\<open> just a paragraph \<close>
paragraph* [sdfk] \<open> just a paragraph - lexical variant \<close>
subsection*[sdf]\<open> shouldn't work, multiple ref. \<close>
-- wrong
section*[sedf::requirement, long_name = "None::string option"]
\<open> works again. One can hover over the class constraint and jump to its definition. \<close>
section*[seedf::test_case, dfg=34,fgdfg=zf]\<open> and another example with undefined attributes. \<close>
-- wrong
section\<open> Text Antiquotation Infrastructure ... \<close>
text\<open> @{docref \<open>lalala\<close>} -- produces warning. \<close>
text\<open> @{docref (unchecked) \<open>lalala\<close>} -- produces no warning. \<close>
text\<open> @{docref \<open>ass122\<close>} -- global reference to a text-item in another file. \<close>
text\<open> @{ec \<open>ass122\<close>} -- global reference to a exported constraint in another file.
Note that the link is actually a srac, which, according to
the ontology, happens to be an "ec". \<close>
text\<open> @{test_specification \<open>ass122\<close>} -- wrong: "reference ontologically inconsistent". \<close>
text\<open> Here is a reference to @{docref \<open>sedf\<close>} \<close>
(* works currently only in connection with the above label-hack.
Try to hover over the sedf - link and activate it !!! *)
section\<open> A Small Example for Isar-support of a Command Definition --- for demos. \<close>
ML\<open>
local
val opt_modes = Scan.optional (@{keyword "("}
|-- Parse.!!! (Scan.repeat1 Parse.name
--| @{keyword ")"})) [];
in
val _ =
Outer_Syntax.command @{command_keyword Term} "read and print term"
(opt_modes -- Parse.term >> Isar_Cmd.print_term);
end
\<close>
lemma "True" by simp
Term "a + b = b + a"
term "a + b = b + a"
section(in order)\<open> sdfsdf \<close> (* undocumented trouvaille when analysing the code *)
end