Update to Isabelle 2024.
@ -1,59 +0,0 @@
|
||||
chapter AFP
|
||||
|
||||
session "Isabelle_DOF" (AFP) = "Functional-Automata" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
sessions
|
||||
"Regular-Sets"
|
||||
directories
|
||||
"thys"
|
||||
"thys/manual"
|
||||
"ontologies"
|
||||
"ontologies/scholarly_paper"
|
||||
"ontologies/technical_report"
|
||||
theories [document = false]
|
||||
"ontologies/ontologies"
|
||||
"ontologies/technical_report/technical_report"
|
||||
"ontologies/scholarly_paper/scholarly_paper"
|
||||
"thys/RegExpInterface"
|
||||
"thys/Isa_DOF"
|
||||
"thys/Isa_COL"
|
||||
theories [document = true]
|
||||
"thys/manual/M_00_Frontmatter"
|
||||
"thys/manual/M_01_Introduction"
|
||||
"thys/manual/M_02_Background"
|
||||
"thys/manual/M_03_GuidedTour"
|
||||
"thys/manual/M_04_Document_Ontology"
|
||||
"thys/manual/M_05_Proofs_Ontologies"
|
||||
"thys/manual/M_06_RefMan"
|
||||
"thys/manual/M_07_Implementation"
|
||||
"thys/manual/Isabelle_DOF_Manual"
|
||||
document_files
|
||||
"root.bib"
|
||||
"dof_session.tex"
|
||||
"root.mst"
|
||||
"preamble.tex"
|
||||
"lstisadof-manual.sty"
|
||||
"figures/cicm2018-combined.png"
|
||||
"figures/document-hierarchy.pdf"
|
||||
"figures/Dogfood-figures.png"
|
||||
"figures/Dogfood-II-bgnd1.png"
|
||||
"figures/Dogfood-III-bgnd-text_section.png"
|
||||
"figures/Dogfood-IV-jumpInDocCLass.png"
|
||||
"figures/Dogfood-V-attribute.png"
|
||||
"figures/Dogfood-VI-linkappl.png"
|
||||
"figures/isabelle-architecture.pdf"
|
||||
"figures/Isabelle_DOF-logo.pdf"
|
||||
"figures/header_CSP_pdf.png"
|
||||
"figures/header_CSP_source.png"
|
||||
"figures/definition-use-CSP-pdf.png"
|
||||
"figures/definition-use-CSP.png"
|
||||
"figures/MyCommentedIsabelle.png"
|
||||
"figures/doc-mod-generic.pdf"
|
||||
"figures/doc-mod-isar.pdf"
|
||||
"figures/doc-mod-onto-docinst.pdf"
|
||||
"figures/doc-mod-DOF.pdf"
|
||||
"figures/doc-mod-term-aq.pdf"
|
||||
"figures/ThisPaperWithPreviewer.png"
|
||||
export_classpath
|
||||
|
||||
|
||||
@ -1,9 +0,0 @@
|
||||
\input{M_00_Frontmatter.tex}
|
||||
\input{M_01_Introduction.tex}
|
||||
\input{M_02_Background.tex}
|
||||
\input{M_03_GuidedTour.tex}
|
||||
\input{M_04_Document_Ontology.tex}
|
||||
\input{M_05_Proofs_Ontologies.tex}
|
||||
\input{M_06_RefMan.tex}
|
||||
\input{M_07_Implementation.tex}
|
||||
\input{Isabelle_DOF_Manual.tex}
|
||||
|
Before Width: | Height: | Size: 10 KiB |
|
Before Width: | Height: | Size: 10 KiB |
|
Before Width: | Height: | Size: 17 KiB |
|
Before Width: | Height: | Size: 13 KiB |
|
Before Width: | Height: | Size: 35 KiB |
|
Before Width: | Height: | Size: 18 KiB |
|
Before Width: | Height: | Size: 162 KiB |
|
Before Width: | Height: | Size: 541 KiB |
|
Before Width: | Height: | Size: 214 KiB |
|
Before Width: | Height: | Size: 70 KiB |
|
Before Width: | Height: | Size: 196 KiB |
|
Before Width: | Height: | Size: 203 KiB |
|
Before Width: | Height: | Size: 383 KiB |
@ -1,660 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="247.05695mm"
|
||||
height="113.70705mm"
|
||||
viewBox="0 0 247.05695 113.70704"
|
||||
version="1.1"
|
||||
id="svg8"
|
||||
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"
|
||||
sodipodi:docname="document-hierarchy.svg">
|
||||
<defs
|
||||
id="defs2">
|
||||
<marker
|
||||
inkscape:isstock="true"
|
||||
style="overflow:visible"
|
||||
id="marker1659"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Lstart">
|
||||
<path
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
id="path1657"
|
||||
inkscape:connector-curvature="0" />
|
||||
</marker>
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lend"
|
||||
orient="auto"
|
||||
refY="0"
|
||||
refX="0"
|
||||
id="Arrow1Lend"
|
||||
style="overflow:visible"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
id="path1224"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
|
||||
transform="matrix(-0.8,0,0,-0.8,-10,0)"
|
||||
inkscape:connector-curvature="0" />
|
||||
</marker>
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lstart"
|
||||
orient="auto"
|
||||
refY="0"
|
||||
refX="0"
|
||||
id="marker1601-3-5"
|
||||
style="overflow:visible"
|
||||
inkscape:isstock="true"
|
||||
inkscape:collect="always">
|
||||
<path
|
||||
id="path1599-6-3"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
inkscape:connector-curvature="0" />
|
||||
</marker>
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lstart"
|
||||
orient="auto"
|
||||
refY="0"
|
||||
refX="0"
|
||||
id="marker1601-3-5-2"
|
||||
style="overflow:visible"
|
||||
inkscape:isstock="true"
|
||||
inkscape:collect="always">
|
||||
<path
|
||||
id="path1599-6-3-5"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
inkscape:connector-curvature="0" />
|
||||
</marker>
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lstart"
|
||||
orient="auto"
|
||||
refY="0"
|
||||
refX="0"
|
||||
id="marker1601-3-5-2-4"
|
||||
style="overflow:visible"
|
||||
inkscape:isstock="true"
|
||||
inkscape:collect="always">
|
||||
<path
|
||||
id="path1599-6-3-5-3"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
inkscape:connector-curvature="0" />
|
||||
</marker>
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lstart"
|
||||
orient="auto"
|
||||
refY="0"
|
||||
refX="0"
|
||||
id="marker1601-3-5-2-4-8"
|
||||
style="overflow:visible"
|
||||
inkscape:isstock="true"
|
||||
inkscape:collect="always">
|
||||
<path
|
||||
id="path1599-6-3-5-3-6"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1.00000003pt;stroke-opacity:1"
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
inkscape:connector-curvature="0" />
|
||||
</marker>
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="0.90414235"
|
||||
inkscape:cx="569.22671"
|
||||
inkscape:cy="-143.11208"
|
||||
inkscape:document-units="mm"
|
||||
inkscape:current-layer="layer1"
|
||||
showgrid="true"
|
||||
showguides="true"
|
||||
inkscape:guide-bbox="true"
|
||||
inkscape:window-width="1918"
|
||||
inkscape:window-height="1573"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="25"
|
||||
inkscape:window-maximized="0"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0">
|
||||
<inkscape:grid
|
||||
type="xygrid"
|
||||
id="grid835"
|
||||
originx="-161.16314"
|
||||
originy="-192.38397" />
|
||||
<sodipodi:guide
|
||||
position="39.920202,188.61601"
|
||||
orientation="0,1"
|
||||
id="guide963"
|
||||
inkscape:locked="false" />
|
||||
</sodipodi:namedview>
|
||||
<metadata
|
||||
id="metadata5">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(-161.16312,9.0910371)">
|
||||
<flowRoot
|
||||
xml:space="preserve"
|
||||
id="flowRoot965"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:26.66666603px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none"
|
||||
transform="scale(0.26458333)"><flowRegion
|
||||
id="flowRegion967"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold'"><rect
|
||||
id="rect969"
|
||||
width="840"
|
||||
height="720"
|
||||
x="280"
|
||||
y="-257.48032"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold'" /></flowRegion><flowPara
|
||||
id="flowPara971" /></flowRoot> <rect
|
||||
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect981-6-2"
|
||||
width="76.729164"
|
||||
height="55.5625"
|
||||
x="161.39581"
|
||||
y="16.541656" />
|
||||
<rect
|
||||
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="172.0574"
|
||||
y="29.849043" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="203.8177"
|
||||
y="36.617695"
|
||||
id="text833-6-9-9"><tspan
|
||||
sodipodi:role="line"
|
||||
x="203.8177"
|
||||
y="36.617695"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7">context definition</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="164.89302"
|
||||
y="25.267418"
|
||||
id="text961-5-6"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan959-9-1"
|
||||
x="164.89302"
|
||||
y="25.267418"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">A</tspan></text>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.25783753px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5)"
|
||||
d="m 201.08331,72.104157 c 0.87864,20.534874 0,21.382403 0,21.382403 64.88872,-0.04303 42.33334,-0.215736 42.33334,-0.215736"
|
||||
id="path1174-7-5"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="ccc" />
|
||||
<rect
|
||||
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="172.0574"
|
||||
y="19.18749" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="203.8177"
|
||||
y="25.956142"
|
||||
id="text833-6-9-9-6"><tspan
|
||||
sodipodi:role="line"
|
||||
x="203.8177"
|
||||
y="25.956142"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7">header</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="172.05748"
|
||||
y="39.903194" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="203.81781"
|
||||
y="46.671844"
|
||||
id="text833-6-9-9-6-3"><tspan
|
||||
sodipodi:role="line"
|
||||
x="203.81781"
|
||||
y="46.671844"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5">command</tspan></text>
|
||||
<flowRoot
|
||||
xml:space="preserve"
|
||||
id="flowRoot173"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:26.66666603px;line-height:1.25;font-family:Raleway;-inkscape-font-specification:'Raleway Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none"
|
||||
transform="matrix(0.26458333,0,0,0.26458333,169.50707,-38.446423)"><flowRegion
|
||||
id="flowRegion175"><rect
|
||||
id="rect177"
|
||||
width="300"
|
||||
height="190"
|
||||
x="-10.656718"
|
||||
y="327.82898" /></flowRegion><flowPara
|
||||
id="flowPara179"></flowPara></flowRoot> <flowRoot
|
||||
xml:space="preserve"
|
||||
id="flowRoot181"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:26.66666603px;line-height:1.25;font-family:Raleway;-inkscape-font-specification:'Raleway Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none"
|
||||
transform="matrix(0.26458333,0,0,0.26458333,169.50707,-38.446423)"><flowRegion
|
||||
id="flowRegion183"><rect
|
||||
id="rect185"
|
||||
width="290"
|
||||
height="120"
|
||||
x="-310.65671"
|
||||
y="327.82898" /></flowRegion><flowPara
|
||||
id="flowPara187"></flowPara></flowRoot> <rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="172.05757"
|
||||
y="50.486526" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="203.81786"
|
||||
y="57.25518"
|
||||
id="text833-6-9-9-6-3-2"><tspan
|
||||
sodipodi:role="line"
|
||||
x="203.81786"
|
||||
y="57.25518"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-1"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="172.05757"
|
||||
y="61.069847" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="203.81783"
|
||||
y="67.838509"
|
||||
id="text833-6-9-9-6-3-2-2"><tspan
|
||||
sodipodi:role="line"
|
||||
x="203.81783"
|
||||
y="67.838509"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-7">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect981-6-2-0"
|
||||
width="76.729164"
|
||||
height="55.5625"
|
||||
x="246.59166"
|
||||
y="48.82082" />
|
||||
<rect
|
||||
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-9"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="257.25323"
|
||||
y="62.128208" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="289.01349"
|
||||
y="68.896866"
|
||||
id="text833-6-9-9-3"><tspan
|
||||
sodipodi:role="line"
|
||||
x="289.01349"
|
||||
y="68.896866"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-6">context definition</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="250.08887"
|
||||
y="57.546581"
|
||||
id="text961-5-6-0"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan959-9-1-6"
|
||||
x="250.08887"
|
||||
y="57.546581"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">C</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-2"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="257.25323"
|
||||
y="51.466656" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="289.01349"
|
||||
y="58.23531"
|
||||
id="text833-6-9-9-6-6"><tspan
|
||||
sodipodi:role="line"
|
||||
x="289.01349"
|
||||
y="58.23531"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-1">header</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-8"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="257.25333"
|
||||
y="72.182365" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="289.01361"
|
||||
y="78.951012"
|
||||
id="text833-6-9-9-6-3-7"><tspan
|
||||
sodipodi:role="line"
|
||||
x="289.01361"
|
||||
y="78.951012"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-92">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-0"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="257.25342"
|
||||
y="82.765694" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="289.01364"
|
||||
y="89.534348"
|
||||
id="text833-6-9-9-6-3-2-23"><tspan
|
||||
sodipodi:role="line"
|
||||
x="289.01364"
|
||||
y="89.534348"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-75">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-1-9"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="257.25342"
|
||||
y="93.349014" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="289.01361"
|
||||
y="100.11768"
|
||||
id="text833-6-9-9-6-3-2-2-2"><tspan
|
||||
sodipodi:role="line"
|
||||
x="289.01361"
|
||||
y="100.11768"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-7-2">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect981-6-2-0-8"
|
||||
width="76.729164"
|
||||
height="55.5625"
|
||||
x="246.06255"
|
||||
y="-8.858346" />
|
||||
<rect
|
||||
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-9-9"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="256.72409"
|
||||
y="4.4490485" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="288.48425"
|
||||
y="11.217703"
|
||||
id="text833-6-9-9-3-7"><tspan
|
||||
sodipodi:role="line"
|
||||
x="288.48425"
|
||||
y="11.217703"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-6-3">context definition</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="249.55975"
|
||||
y="-0.1325787"
|
||||
id="text961-5-6-0-6"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan959-9-1-6-1"
|
||||
x="249.55975"
|
||||
y="-0.1325787"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">B</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-2-2"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="256.72409"
|
||||
y="-6.2125063" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="288.48425"
|
||||
y="0.55614966"
|
||||
id="text833-6-9-9-6-6-9"><tspan
|
||||
sodipodi:role="line"
|
||||
x="288.48425"
|
||||
y="0.55614966"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-1-3">header</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-8-1"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="256.72418"
|
||||
y="14.503201" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="288.48438"
|
||||
y="21.271849"
|
||||
id="text833-6-9-9-6-3-7-9"><tspan
|
||||
sodipodi:role="line"
|
||||
x="288.48438"
|
||||
y="21.271849"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-92-4">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-0-7"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="256.72427"
|
||||
y="25.086535" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="288.48441"
|
||||
y="31.855186"
|
||||
id="text833-6-9-9-6-3-2-23-8"><tspan
|
||||
sodipodi:role="line"
|
||||
x="288.48441"
|
||||
y="31.855186"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-75-4">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-1-9-5"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="256.72427"
|
||||
y="35.669849" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="288.48438"
|
||||
y="42.438511"
|
||||
id="text833-6-9-9-6-3-2-2-2-0"><tspan
|
||||
sodipodi:role="line"
|
||||
x="288.48438"
|
||||
y="42.438511"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-7-2-3">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#cccccc;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.46538228;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect981-6-2-6"
|
||||
width="76.729164"
|
||||
height="55.5625"
|
||||
x="331.25821"
|
||||
y="16.541651" />
|
||||
<rect
|
||||
style="opacity:1;fill:#f36b21;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-1"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="341.91977"
|
||||
y="29.849037" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="373.67999"
|
||||
y="36.617695"
|
||||
id="text833-6-9-9-0"><tspan
|
||||
sodipodi:role="line"
|
||||
x="373.67999"
|
||||
y="36.617695"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-63">context definition</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="334.75543"
|
||||
y="25.267416"
|
||||
id="text961-5-6-2"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan959-9-1-0"
|
||||
x="334.75543"
|
||||
y="25.267416"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:7.76111126px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Bold';stroke-width:0.26458332">D</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#1eaedb;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-6"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="341.91977"
|
||||
y="19.187481" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="373.67999"
|
||||
y="25.956137"
|
||||
id="text833-6-9-9-6-1"><tspan
|
||||
sodipodi:role="line"
|
||||
x="373.67999"
|
||||
y="25.956137"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-55">header</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-4"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="341.91986"
|
||||
y="39.903194" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="373.68008"
|
||||
y="46.671856"
|
||||
id="text833-6-9-9-6-3-76"><tspan
|
||||
sodipodi:role="line"
|
||||
x="373.68008"
|
||||
y="46.671856"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-5">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-6"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="341.91995"
|
||||
y="50.48653" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="373.68015"
|
||||
y="57.255199"
|
||||
id="text833-6-9-9-6-3-2-9"><tspan
|
||||
sodipodi:role="line"
|
||||
x="373.68015"
|
||||
y="57.255199"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-3">command</tspan></text>
|
||||
<rect
|
||||
style="opacity:1;fill:#2a9c16;fill-opacity:0.81415926;stroke:#000000;stroke-width:0.5243327;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
|
||||
id="rect837-3-0-1-3-5-6-1-7"
|
||||
width="63.421741"
|
||||
height="7.859282"
|
||||
x="341.91995"
|
||||
y="61.069859" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-size:7.05555534px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;fill:#f36b21;fill-opacity:1;stroke:none;stroke-width:0.26458332"
|
||||
x="373.68011"
|
||||
y="67.838516"
|
||||
id="text833-6-9-9-6-3-2-2-4"><tspan
|
||||
sodipodi:role="line"
|
||||
x="373.68011"
|
||||
y="67.838516"
|
||||
style="font-style:normal;font-variant:normal;font-weight:600;font-stretch:normal;font-family:'Open Sans';-inkscape-font-specification:'Open Sans Semi-Bold';fill:#080808;stroke-width:0.26458332"
|
||||
id="tspan860-6-7-7-5-9-7-5">command</tspan></text>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.25783753px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5-2)"
|
||||
d="m 199.496,15.328092 c 0.87864,-20.5348759 0,-21.3824044 0,-21.3824044 64.88872,0.04303 42.33334,0.215736 42.33334,0.215736"
|
||||
id="path1174-7-5-4"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="ccc" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.26011065px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5-2-4)"
|
||||
d="m 323.04732,-5.355514 c 45.95553,0.3995671 47.85221,0 47.85221,0 -0.0962,29.508532 -0.48279,19.251338 -0.48279,19.251338"
|
||||
id="path1174-7-5-4-7"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="ccc" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.26011065px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-start:url(#marker1601-3-5-2-4-8)"
|
||||
d="m 323.57654,93.311737 c 45.95553,-0.399568 47.85221,0 47.85221,0 -0.0962,-29.508534 -0.48279,-19.251339 -0.48279,-19.251339"
|
||||
id="path1174-7-5-4-7-8"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="ccc" />
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 36 KiB |
|
Before Width: | Height: | Size: 57 KiB |
@ -1,327 +0,0 @@
|
||||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018-2021 The University of Paris-Saclay
|
||||
%% 2019-2021 The University of Exeter
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
\usepackage{listings}
|
||||
\usepackage{listingsutf8}
|
||||
\usepackage{tikz}
|
||||
\usepackage[many]{tcolorbox}
|
||||
\tcbuselibrary{listings}
|
||||
\tcbuselibrary{skins}
|
||||
\usepackage{xstring}
|
||||
|
||||
\definecolor{OliveGreen} {cmyk}{0.64,0,0.95,0.40}
|
||||
\definecolor{BrickRed} {cmyk}{0,0.89,0.94,0.28}
|
||||
\definecolor{Blue} {cmyk}{1,1,0,0}
|
||||
\definecolor{CornflowerBlue}{cmyk}{0.65,0.13,0,0}
|
||||
|
||||
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <antiquotations>
|
||||
%% Hack: re-defining tag types for supporting highlighting of antiquotations
|
||||
\gdef\lst@tagtypes{s}
|
||||
\gdef\lst@TagKey#1#2{%
|
||||
\lst@Delim\lst@tagstyle #2\relax
|
||||
{Tag}\lst@tagtypes #1%
|
||||
{\lst@BeginTag\lst@EndTag}%
|
||||
\@@end\@empty{}}
|
||||
\lst@Key{tag}\relax{\lst@TagKey\@empty{#1}}
|
||||
\lst@Key{tagstyle}{}{\def\lst@tagstyle{#1}}
|
||||
\lst@AddToHook{EmptyStyle}{\let\lst@tagstyle\@empty}
|
||||
\gdef\lst@BeginTag{%
|
||||
\lst@DelimOpen
|
||||
\lst@ifextags\else
|
||||
{\let\lst@ifkeywords\iftrue
|
||||
\lst@ifmarkfirstintag \lst@firstintagtrue \fi}}
|
||||
\lst@AddToHookExe{ExcludeDelims}{\let\lst@ifextags\iffalse}
|
||||
\gdef\lst@EndTag{\lst@DelimClose\lst@ifextags\else}
|
||||
\lst@Key{usekeywordsintag}t[t]{\lstKV@SetIf{#1}\lst@ifusekeysintag}
|
||||
\lst@Key{markfirstintag}f[t]{\lstKV@SetIf{#1}\lst@ifmarkfirstintag}
|
||||
\gdef\lst@firstintagtrue{\global\let\lst@iffirstintag\iftrue}
|
||||
\global\let\lst@iffirstintag\iffalse
|
||||
\lst@AddToHook{PostOutput}{\lst@tagresetfirst}
|
||||
\lst@AddToHook{Output}
|
||||
{\gdef\lst@tagresetfirst{\global\let\lst@iffirstintag\iffalse}}
|
||||
\lst@AddToHook{OutputOther}{\gdef\lst@tagresetfirst{}}
|
||||
\lst@AddToHook{Output}
|
||||
{\ifnum\lst@mode=\lst@tagmode
|
||||
\lst@iffirstintag \let\lst@thestyle\lst@gkeywords@sty \fi
|
||||
\lst@ifusekeysintag\else \let\lst@thestyle\lst@gkeywords@sty\fi
|
||||
\fi}
|
||||
\lst@NewMode\lst@tagmode
|
||||
\gdef\lst@Tag@s#1#2\@empty#3#4#5{%
|
||||
\lst@CArg #1\relax\lst@DefDelimB {}{}%
|
||||
{\ifnum\lst@mode=\lst@tagmode \expandafter\@gobblethree \fi}%
|
||||
#3\lst@tagmode{#5}%
|
||||
\lst@CArg #2\relax\lst@DefDelimE {}{}{}#4\lst@tagmode}%
|
||||
\gdef\lst@BeginCDATA#1\@empty{%
|
||||
\lst@TrackNewLines \lst@PrintToken
|
||||
\lst@EnterMode\lst@GPmode{}\let\lst@ifmode\iffalse
|
||||
\lst@mode\lst@tagmode #1\lst@mode\lst@GPmode\relax\lst@modetrue}
|
||||
%
|
||||
\def\beginlstdelim#1#2#3%
|
||||
{%
|
||||
\def\endlstdelim{\texttt{\textbf{\color{black!60}#2}}\egroup}%
|
||||
\ttfamily\textbf{\color{black!60}#1}\bgroup\rmfamily\color{#3}\aftergroup\endlstdelim%
|
||||
}
|
||||
%% </antiquotations>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <isar>
|
||||
\providecolor{isar}{named}{blue}
|
||||
\renewcommand{\isacommand}[1]{\textcolor{OliveGreen!60}{\ttfamily\bfseries #1}}
|
||||
\newcommand{\inlineisarbox}[1]{#1}
|
||||
\NewTColorBox[]{isarbox}{}{
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!isar
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{isar!60!black}
|
||||
,sharp corners
|
||||
%,before skip balanced=0.5\baselineskip plus 2pt % works only with Tex Live 2020 and later
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=isar!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {Isar};}
|
||||
}
|
||||
%% </isar>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <out>
|
||||
\providecolor{out}{named}{green}
|
||||
\newtcblisting{out}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!out
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{out!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=out!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {Document};}
|
||||
,listing options={
|
||||
breakatwhitespace=true
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\rmfamily
|
||||
,mathescape
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </out>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <sml>
|
||||
\lstloadlanguages{ML}
|
||||
\providecolor{sml}{named}{red}
|
||||
\lstdefinestyle{sml}{
|
||||
,escapechar=ë%
|
||||
,basicstyle=\ttfamily%
|
||||
,commentstyle=\itshape%
|
||||
,keywordstyle=\bfseries\color{CornflowerBlue}%
|
||||
,ndkeywordstyle=\color{green}%
|
||||
,language=ML
|
||||
% ,literate={%
|
||||
% {<@>}{@}1%
|
||||
% }
|
||||
,keywordstyle=[6]{\itshape}%
|
||||
,morekeywords=[6]{args_type}%
|
||||
,tag=**[s]{@\{}{\}}%
|
||||
,tagstyle=\color{CornflowerBlue}%
|
||||
,markfirstintag=true%
|
||||
}%
|
||||
\def\inlinesml{\lstinline[style=sml,breaklines=true,breakatwhitespace=true]}
|
||||
\newtcblisting{sml}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!sml
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{sml!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=sml!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {SML};}
|
||||
,listing options={
|
||||
style=sml
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </sml>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <latex>
|
||||
\lstloadlanguages{TeX}
|
||||
\providecolor{ltx}{named}{yellow}
|
||||
\lstdefinestyle{lltx}{language=[AlLaTeX]TeX,
|
||||
,basicstyle=\ttfamily%
|
||||
,showspaces=false%
|
||||
,escapechar=ë
|
||||
,showlines=false%
|
||||
,morekeywords={newisadof}
|
||||
% ,keywordstyle=\bfseries%
|
||||
% Defining 2-keywords
|
||||
,keywordstyle=[1]{\color{BrickRed!60}\bfseries}%
|
||||
% Defining 3-keywords
|
||||
,keywordstyle=[2]{\color{OliveGreen!60}\bfseries}%
|
||||
% Defining 4-keywords
|
||||
,keywordstyle=[3]{\color{black!60}\bfseries}%
|
||||
% Defining 5-keywords
|
||||
,keywordstyle=[4]{\color{Blue!70}\bfseries}%
|
||||
% Defining 6-keywords
|
||||
,keywordstyle=[5]{\itshape}%
|
||||
%
|
||||
}
|
||||
\lstdefinestyle{ltx}{style=lltx,
|
||||
basicstyle=\ttfamily\small}%
|
||||
\def\inlineltx{\lstinline[style=ltx, breaklines=true,columns=fullflexible]}
|
||||
% see
|
||||
% https://tex.stackexchange.com/questions/247643/problem-with-tcblisting-first-listed-latex-command-is-missing
|
||||
\NewTCBListing{ltx}{ !O{} }{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!ltx
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{ltx!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=ltx!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {\LaTeX};}
|
||||
,listing options={
|
||||
style=lltx,
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </latex>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <bash>
|
||||
\providecolor{bash}{named}{black}
|
||||
\lstloadlanguages{bash}
|
||||
\lstdefinestyle{bash}{%
|
||||
language=bash
|
||||
,escapechar=ë
|
||||
,basicstyle=\ttfamily%
|
||||
,showspaces=false%
|
||||
,showlines=false%
|
||||
,columns=flexible%
|
||||
% ,keywordstyle=\bfseries%
|
||||
% Defining 2-keywords
|
||||
,keywordstyle=[1]{\color{BrickRed!60}\bfseries}%
|
||||
% Defining 3-keywords
|
||||
,keywordstyle=[2]{\color{OliveGreen!60}\bfseries}%
|
||||
% Defining 4-keywords
|
||||
,keywordstyle=[3]{\color{black!60}\bfseries}%
|
||||
% Defining 5-keywords
|
||||
,keywordstyle=[4]{\color{Blue!80}\bfseries}%
|
||||
,alsoletter={*,-,:,~,/}
|
||||
,morekeywords=[4]{}%
|
||||
% Defining 6-keywords
|
||||
,keywordstyle=[5]{\itshape}%
|
||||
%
|
||||
}
|
||||
\def\inlinebash{\lstinline[style=bash, breaklines=true,columns=fullflexible]}
|
||||
\newcommand\@isabsolutepath[3]{%
|
||||
\StrLeft{#1}{1}[\firstchar]%
|
||||
\IfStrEq{\firstchar}{/}{#2}{#3}%
|
||||
}
|
||||
|
||||
\newcommand{\@homeprefix}[1]{%
|
||||
\ifthenelse{\equal{#1}{}}{\textasciitilde}{\textasciitilde/}%
|
||||
}
|
||||
|
||||
\newcommand{\prompt}[1]{%
|
||||
\color{Blue!80}\textbf{\texttt{%
|
||||
achim@logicalhacking:{\@isabsolutepath{#1}{#1}{\@homeprefix{#1}#1}}\$}}%
|
||||
}
|
||||
\newtcblisting{bash}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!bash
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{bash!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=bash!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {Bash};}
|
||||
,listing options={
|
||||
style=bash
|
||||
,columns=flexible%
|
||||
,breaklines=true%
|
||||
,prebreak=\mbox{\space\textbackslash}%
|
||||
,basicstyle=\small\ttfamily%
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </bash>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <config>
|
||||
\providecolor{config}{named}{gray}
|
||||
\newtcblisting{config}[2][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!config
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{config!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=config!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {#2};}
|
||||
,listing options={
|
||||
breakatwhitespace=true
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,mathescape
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </config>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
||||
@ -1,124 +0,0 @@
|
||||
%% Copyright (C) University of Exeter, UK
|
||||
%% The University of Sheffield, UK
|
||||
%% The University of Paris-Saclay, France
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% This is a placeholder for user-specific configuration and packages.
|
||||
\usepackage{etex}
|
||||
\ifdef{\reserveinserts}{\reserveinserts{28}}{}
|
||||
\usepackage{dirtree}
|
||||
\renewcommand*\DTstylecomment{\ttfamily\itshape}
|
||||
\usepackage{textcomp}
|
||||
\usepackage{xcolor}
|
||||
\usepackage{lstisadof-manual}
|
||||
\usepackage{xspace}
|
||||
\IfFileExists{hvlogos.sty}{\usepackage{hvlogos}}{\newcommand{\TeXLive}{\TeX Live}\newcommand{\BibTeX}{Bib\TeX}}
|
||||
\usepackage{railsetup}
|
||||
\setcounter{secnumdepth}{2}
|
||||
\usepackage{index}
|
||||
\newcommand{\bindex}[1]{\index{#1|textbf}}
|
||||
%\makeindex
|
||||
%\AtEndDocument{\printindex}
|
||||
|
||||
\newcommand{\dof}{DOF\xspace}
|
||||
\newcommand{\isactrlemph}{*}
|
||||
|
||||
\newcommand{\path}[1]{\texttt{\nolinkurl{#1}}}
|
||||
\title{<TITLE>}
|
||||
\author{<AUTHOR>}
|
||||
|
||||
\pagestyle{headings}
|
||||
|
||||
\uppertitleback{
|
||||
Copyright \copyright{} 2019--2024 University of Exeter, UK\\
|
||||
\phantom{Copyright \copyright{}} 2018--2024 Universit\'e Paris-Saclay, France\\
|
||||
\phantom{Copyright \copyright{}} 2018--2019 The University of Sheffield, UK\\
|
||||
|
||||
\smallskip
|
||||
\begin{small}
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
\begin{itemize}
|
||||
\item Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
\item Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
\end{itemize}
|
||||
\end{small}\begin{small}
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
\end{small}
|
||||
|
||||
\medskip
|
||||
\textbf{SPDX-License-Identifier:} BSD-2-Clause
|
||||
}
|
||||
|
||||
\lowertitleback{%
|
||||
This manual describes \isadof as available in the Archive of Formal Proofs (AFP). The latest development version as well as releases that can be installed as Isabelle component are available at
|
||||
\url{\dofurl}.
|
||||
|
||||
\paragraph*{Contributors.} We would like to thank the following contributors to \isadof
|
||||
(in alphabetical order): Idir Ait-Sadoune and Paolo Crisafulli.
|
||||
|
||||
\paragraph*{Acknowledgments.} This work has been partially supported by IRT SystemX, Paris-Saclay,
|
||||
France, and therefore granted with public funds of the Program ``Investissements d'Avenir.''
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
\publishers{
|
||||
\begin{center}
|
||||
\includegraphics[width=.28\textwidth]{figures/Isabelle_DOF-logo}
|
||||
\end{center}
|
||||
\vspace{3cm}
|
||||
|
||||
\begin{minipage}{\textwidth}
|
||||
\begin{minipage}{6cm}
|
||||
\normalsize
|
||||
Department of Computer Science\\
|
||||
University of Exeter\\
|
||||
Exeter, EX4 4QF\\
|
||||
UK
|
||||
\end{minipage}
|
||||
\hfill
|
||||
\begin{minipage}{8cm}
|
||||
\raggedleft\normalsize
|
||||
Laboratoire des Methodes Formelles (LMF)\\
|
||||
Universit\'e Paris-Saclay\\
|
||||
91405 Orsay Cedex\\
|
||||
France
|
||||
\end{minipage}
|
||||
\end{minipage}
|
||||
}
|
||||
% Index setup
|
||||
\usepackage{index}
|
||||
\makeindex
|
||||
\AtEndDocument{\printindex}
|
||||
|
||||
\newcommand{\DOFindex}[2]{%
|
||||
\marginnote{\normalfont\textbf{#1}: #2}%
|
||||
\expandafter\index\expandafter{\expanded{#2 (#1)}}%
|
||||
}%
|
||||
|
||||
\AtBeginDocument{\isabellestyle{literal}\newcommand{\lstnumberautorefname}{Line}}
|
||||
@ -1,573 +0,0 @@
|
||||
@STRING{pub-springer={Springer} }
|
||||
@STRING{pub-springer:adr="" }
|
||||
@STRING{s-lncs = "LNCS" }
|
||||
|
||||
@Misc{ w3c:ontologies:2015,
|
||||
author = {W3C},
|
||||
title = {Ontologies},
|
||||
organisation = {W3c},
|
||||
url = {https://www.w3.org/standards/semanticweb/ontology},
|
||||
year = 2015
|
||||
}
|
||||
|
||||
@
|
||||
Book{ books/daglib/0032976,
|
||||
added-at = {2014-03-12T00:00:00.000+0100},
|
||||
author = {Euzenat, J{\~A}<7D>r{\~A}<7D>me and Shvaiko, Pavel},
|
||||
biburl = {https://www.bibsonomy.org/bibtex/28d5372a81f181d9d5a761ca12209cf39/dblp},
|
||||
interhash = {fc55a5b84d114e38db0a0303cc1bd7da},
|
||||
intrahash = {8d5372a81f181d9d5a761ca12209cf39},
|
||||
isbn = {978-3-642-38720-3},
|
||||
keywords = {dblp},
|
||||
pages = {I-XVII, 1--511},
|
||||
publisher = {Springer},
|
||||
timestamp = {2015-06-18T09:49:52.000+0200},
|
||||
title = {Ontology Matching, Second Edition.},
|
||||
year = 2013,
|
||||
doi = {10.1007/978-3-642-38721-0}
|
||||
}
|
||||
|
||||
@Misc{ atl,
|
||||
title = {{ATL} -- A model transformation technology},
|
||||
url = {https://www.eclipse.org/atl/},
|
||||
author = {{Eclipse Foundation}},
|
||||
}
|
||||
|
||||
@InProceedings{ BGPP95,
|
||||
author = {Yamine A{\"{\i}}t Ameur and Frederic Besnard and Patrick Girard and Guy Pierra and Jean{-}Claude
|
||||
Potier},
|
||||
title = {Formal Specification and Metaprogramming in the {EXPRESS} Language},
|
||||
booktitle = {The 7th International Conference on Software Engineering and Knowledge Engineering (SEKE)},
|
||||
pages = {181--188},
|
||||
publisher = {Knowledge Systems Institute},
|
||||
year = 1995
|
||||
}
|
||||
|
||||
@Misc{ ibm:doors:2019,
|
||||
author = {IBM},
|
||||
title = {{IBM} Engineering Requirements Management {DOORS} Family},
|
||||
note = {\url{https://www.ibm.com/us-en/marketplace/requirements-management}},
|
||||
year = 2019
|
||||
}
|
||||
|
||||
@Manual{ wenzel:isabelle-isar:2020,
|
||||
title = {The Isabelle/Isar Reference Manual},
|
||||
author = {Makarius Wenzel},
|
||||
year = 2022,
|
||||
note = {Part of the Isabelle distribution.}
|
||||
}
|
||||
|
||||
@InCollection{ brucker.ea:isabelledof:2019,
|
||||
abstract = {DOF is a novel framework for defining ontologies and
|
||||
enforcing them during document development and evolution. A
|
||||
major goal of DOF is the integrated development of formal
|
||||
certification documents (e. g., for Common Criteria or
|
||||
CENELEC 50128) that require consistency across both formal
|
||||
and informal arguments.
|
||||
|
||||
To support a consistent development of formal and informal
|
||||
parts of a document, we provide Isabelle/DOF, an
|
||||
implementation of DOF on top of the formal methods
|
||||
framework Isabelle/HOL. A particular emphasis is put on a
|
||||
deep integration into Isabelle{\^a}s IDE, which allows for
|
||||
smooth ontology development as well as immediate
|
||||
ontological feedback during the editing of a document.
|
||||
|
||||
In this paper, we give an in-depth presentation of the
|
||||
design concepts of DOF's Ontology Definition Language
|
||||
(ODL) and key aspects of the technology of its
|
||||
implementation. Isabelle/DOF is the first ontology language
|
||||
supporting machine-checked links between the formal and
|
||||
informal parts in an LCF-style interactive theorem proving
|
||||
environment. Sufficiently annotated, large documents can
|
||||
easily be developed collabo- ratively, while ensuring their
|
||||
consistency, and the impact of changes (in the formal and
|
||||
the semi-formal content) is tracked automatically.},
|
||||
keywords = {Ontology, Formal Document Development, CERtification, DOF,
|
||||
Isabelle/DOF},
|
||||
location = {Oslo},
|
||||
author = {Achim D. Brucker and Burkhart Wolff},
|
||||
booktitle = {Software Engineering and Formal Methods (SEFM)},
|
||||
language = {USenglish},
|
||||
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelledof-2019},
|
||||
publisher = {Springer-Verlag},
|
||||
address = {Heidelberg},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
number = {11724},
|
||||
isbn = {3-540-25109-X},
|
||||
doi = {10.1007/978-3-030-30446-1_15},
|
||||
editor = {Peter C. {\"O}lveczky and Gwen Sala{\"u}n},
|
||||
pdf = {https://www.brucker.ch/bibliography/download/2019/brucker.ea-isabelledof-2019.pdf},
|
||||
title = {{Isabelle/DOF}: Design and Implementation},
|
||||
classification= {conference},
|
||||
areas = {formal methods, software},
|
||||
categories = {isadof},
|
||||
year = {2019},
|
||||
public = {yes}
|
||||
}
|
||||
|
||||
@InCollection{ brucker.ea:deep-ontologies:2023,
|
||||
author = {Achim D. Brucker and Idir Ait-Sadoune and Nicolas Meric and Burkhart Wolff},
|
||||
booktitle = {9th International Conference on Rigorous State-Based Methods (ABZ 2023)},
|
||||
language = {USenglish},
|
||||
publisher = {Springer-Verlag},
|
||||
address = {Heidelberg},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
number = {14010},
|
||||
title = {{U}sing {D}eep {O}ntologies in {F}ormal {S}oftware {E}ngineering},
|
||||
year = {2023},
|
||||
abstract = {Isabelle/DOF is an ontology framework on top of Isabelle Isabelle/DOF allows for the
|
||||
formal development of ontologies as well as continuous conformity-checking of
|
||||
integrated documents annotated by ontological data. An integrated document may
|
||||
contain text, code, definitions, proofs and user-programmed constructs supporting a
|
||||
wide range of Formal Methods. Isabelle/DOF is designed to leverage traceability in
|
||||
integrated documents by supporting navigation in Isabelle’s IDE as well as the
|
||||
document generation process.
|
||||
In this paper we extend Isabelle/DOF with annotations of terms, a pervasive
|
||||
data-structure underlying Isabelle used to syntactically rep- resent expressions
|
||||
and formulas. Rather than introducing an own pro- gramming language for meta-data,
|
||||
we use Higher-order Logic (HOL) for expressions, data-constraints, ontological
|
||||
invariants, and queries via code-generation and reflection. This allows both for
|
||||
powerful query languages and logical reasoning over ontologies in, for example,
|
||||
ontological mappings. Our application examples cover documents targeting formal
|
||||
certifications such as CENELEC, Common Criteria, etc.}
|
||||
}
|
||||
|
||||
@InCollection{ brucker.ea:isabelle-ontologies:2018,
|
||||
abstract = {While Isabelle is mostly known as part of Isabelle/HOL (an
|
||||
interactive theorem prover), it actually provides a
|
||||
framework for developing a wide spectrum of applications. A
|
||||
particular strength of the Isabelle framework is the
|
||||
combination of text editing, formal verification, and code
|
||||
generation.
|
||||
|
||||
Up to now, Isabelle's document preparation system lacks a
|
||||
mechanism for ensuring the structure of different document
|
||||
types (as, e.g., required in certification processes) in
|
||||
general and, in particular, mechanism for linking informal
|
||||
and formal parts of a document.
|
||||
|
||||
In this paper, we present Isabelle/DOF, a novel Document
|
||||
Ontology Framework on top of Isabelle. Isabelle/DOF allows
|
||||
for conventional typesetting \emph{as well} as formal
|
||||
development. We show how to model document ontologies
|
||||
inside Isabelle/DOF, how to use the resulting
|
||||
meta-information for enforcing a certain document
|
||||
structure, and discuss ontology-specific IDE support.},
|
||||
keywords = {Isabelle/Isar, HOL, Ontologies},
|
||||
location = {Hagenberg, Austria},
|
||||
author = {Achim D. Brucker and Idir Ait-Sadoune and Paolo Crisafulli
|
||||
and Burkhart Wolff},
|
||||
booktitle = {Conference on Intelligent Computer Mathematics (CICM)},
|
||||
language = {USenglish},
|
||||
publisher = {Springer-Verlag},
|
||||
address = {Heidelberg},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
number = {11006},
|
||||
url = {https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018},
|
||||
title = {Using the {Isabelle} Ontology Framework: Linking the
|
||||
Formal with the Informal},
|
||||
classification= {conference},
|
||||
areas = {formal methods, software},
|
||||
categories = {isadof},
|
||||
public = {yes},
|
||||
year = {2018},
|
||||
doi = {10.1007/978-3-319-96812-4_3},
|
||||
pdf = {https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf}
|
||||
}
|
||||
|
||||
@InCollection{ taha.ea:philosophers:2020,
|
||||
keywords = {CSP, Isabelle/HOL, Process-Algebra,Formal Verification, Refinement},
|
||||
author = {Safouan Taha and Burkhart Wolff and Lina Ye},
|
||||
booktitle = {International Conference on Integrated Formal Methods (IFM)},
|
||||
language = {USenglish},
|
||||
publisher = {Springer-Verlag},
|
||||
address = {Heidelberg},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
number = {to appear},
|
||||
title = {Philosophers may dine --- definitively!},
|
||||
classification= {conference},
|
||||
areas = {formal methods, software},
|
||||
public = {yes},
|
||||
year = {2020}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Book{ boulanger:cenelec-50128:2015,
|
||||
author = {Boulanger, Jean-Louis},
|
||||
title = {{CENELEC} 50128 and {IEC} 62279 Standards},
|
||||
publisher = {Wiley-ISTE},
|
||||
year = 2015,
|
||||
address = {Boston}
|
||||
}
|
||||
|
||||
@Booklet{ cc:cc-part3:2006,
|
||||
bibkey = {cc:cc-part3:2006},
|
||||
key = {Common Criteria},
|
||||
institution = {Common Criteria},
|
||||
language = {USenglish},
|
||||
month = sep,
|
||||
year = 2006,
|
||||
public = {yes},
|
||||
title = {Common Criteria for Information Technology Security
|
||||
Evaluation (Version 3.1), {Part} 3: Security assurance
|
||||
components},
|
||||
note = {Available as document
|
||||
\href{http://www.commoncriteriaportal.org/public/files/CCPART3V3.1R1.pdf}
|
||||
{CCMB-2006-09-003}},
|
||||
number = {CCMB-2006-09-003},
|
||||
acknowledgement={brucker, 2007-04-24}
|
||||
}
|
||||
|
||||
@Book{ nipkow.ea:isabelle:2002,
|
||||
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
|
||||
title = {Isabelle/HOL---A Proof Assistant for Higher-Order Logic},
|
||||
publisher = pub-springer,
|
||||
address = pub-springer:adr,
|
||||
series = s-lncs,
|
||||
volume = 2283,
|
||||
doi = {10.1007/3-540-45949-9},
|
||||
abstract = {This book is a self-contained introduction to interactive
|
||||
proof in higher-order logic (\acs{hol}), using the proof
|
||||
assistant Isabelle2002. It is a tutorial for potential
|
||||
users rather than a monograph for researchers. The book has
|
||||
three parts.
|
||||
|
||||
1. Elementary Techniques shows how to model functional
|
||||
programs in higher-order logic. Early examples involve
|
||||
lists and the natural numbers. Most proofs are two steps
|
||||
long, consisting of induction on a chosen variable followed
|
||||
by the auto tactic. But even this elementary part covers
|
||||
such advanced topics as nested and mutual recursion. 2.
|
||||
Logic and Sets presents a collection of lower-level tactics
|
||||
that you can use to apply rules selectively. It also
|
||||
describes Isabelle/\acs{hol}'s treatment of sets, functions
|
||||
and relations and explains how to define sets inductively.
|
||||
One of the examples concerns the theory of model checking,
|
||||
and another is drawn from a classic textbook on formal
|
||||
languages. 3. Advanced Material describes a variety of
|
||||
other topics. Among these are the real numbers, records and
|
||||
overloading. Advanced techniques are described involving
|
||||
induction and recursion. A whole chapter is devoted to an
|
||||
extended example: the verification of a security protocol. },
|
||||
year = 2002,
|
||||
acknowledgement={brucker, 2007-02-19},
|
||||
bibkey = {nipkow.ea:isabelle:2002},
|
||||
tags = {noTAG},
|
||||
clearance = {unclassified},
|
||||
timestap = {2008-05-26}
|
||||
}
|
||||
|
||||
@InProceedings{wenzel:asynchronous:2014,
|
||||
author = {Makarius Wenzel},
|
||||
title = {Asynchronous User Interaction and Tool Integration in
|
||||
{Isabelle}/{PIDE}},
|
||||
booktitle = {ITP},
|
||||
pages = {515--530},
|
||||
year = 2014,
|
||||
crossref = {klein.ea:interactive:2014},
|
||||
doi = {10.1007/978-3-319-08970-6_33},
|
||||
timestamp = {Sun, 21 May 2017 00:18:59 +0200},
|
||||
abstract = { Historically, the LCF tradition of interactive theorem
|
||||
proving was tied to the read-eval-print loop, with
|
||||
sequential and synchronous evaluation of prover commands
|
||||
given on the command-line. This user-interface technology
|
||||
was adequate when R. Milner introduced his LCF proof
|
||||
assistant in the 1970-ies, but it severely limits the
|
||||
potential of current multicore hardware and advanced IDE
|
||||
front-ends.
|
||||
|
||||
Isabelle/PIDE breaks this loop and retrofits the
|
||||
read-eval-print phases into an asynchronous model of
|
||||
document-oriented proof processing. Instead of feeding a
|
||||
sequence of individual commands into the prover process,
|
||||
the primary interface works via edits over a family of
|
||||
document versions. Execution is implicit and managed by the
|
||||
prover on its own account in a timeless and stateless
|
||||
manner. Various aspects of interactive proof checking are
|
||||
scheduled according to requirements determined by the
|
||||
front-end perspective on the proof document, while making
|
||||
adequate use of the CPU resources on multicore hardware on
|
||||
the back-end.
|
||||
|
||||
Recent refinements of Isabelle/PIDE provide an explicit
|
||||
concept of asynchronous print functions over existing proof
|
||||
states. This allows to integrate long-running or
|
||||
potentially non-terminating tools into the document-model.
|
||||
Applications range from traditional proof state output
|
||||
(which may consume substantial time in interactive
|
||||
development) to automated provers and dis-provers that
|
||||
report on existing proof document content (e.g.
|
||||
Sledgehammer, Nitpick, Quickcheck in Isabelle/HOL).
|
||||
Moreover, it is possible to integrate query operations via
|
||||
additional GUI panels with separate input and output (e.g.
|
||||
for Sledgehammer or find-theorems). Thus the Prover IDE
|
||||
provides continuous proof processing, augmented by add-on
|
||||
tools that help the user to continue writing proofs. }
|
||||
}
|
||||
|
||||
@Proceedings{ klein.ea:interactive:2014,
|
||||
editor = {Gerwin Klein and Ruben Gamboa},
|
||||
title = {Interactive Theorem Proving - 5th International
|
||||
Conference, {ITP} 2014, Held as Part of the Vienna Summer
|
||||
of Logic, {VSL} 2014, Vienna, Austria, July 14-17, 2014.
|
||||
Proceedings},
|
||||
series = s-lncs,
|
||||
volume = 8558,
|
||||
publisher = pub-springer,
|
||||
year = 2014,
|
||||
doi = {10.1007/978-3-319-08970-6}
|
||||
}
|
||||
|
||||
@InProceedings{ bezzecchi.ea:making:2018,
|
||||
title = {Making Agile Development Processes fit for V-style
|
||||
Certification Procedures},
|
||||
author = {Bezzecchi, S. and Crisafulli, P. and Pichot, C. and Wolff,
|
||||
B.},
|
||||
booktitle = {{ERTS'18}},
|
||||
abstract = {We present a process for the development of safety and
|
||||
security critical components in transportation systems
|
||||
targeting a high-level certification (CENELEC 50126/50128,
|
||||
DO 178, CC ISO/IEC 15408).
|
||||
|
||||
The process adheres to the objectives of an ``agile
|
||||
development'' in terms of evolutionary flexibility and
|
||||
continuous improvement. Yet, it enforces the overall
|
||||
coherence of the development artifacts (ranging from proofs
|
||||
over tests to code) by a particular environment (CVCE).
|
||||
|
||||
In particular, the validation process is built around a
|
||||
formal development based on the interactive theorem proving
|
||||
system Isabelle/HOL, by linking the business logic of the
|
||||
application to the operating system model, down to code and
|
||||
concrete hardware models thanks to a series of refinement
|
||||
proofs.
|
||||
|
||||
We apply both the process and its support in CVCE to a
|
||||
case-study that comprises a model of an odometric service
|
||||
in a railway-system with its corresponding implementation
|
||||
integrated in seL4 (a secure kernel for which a
|
||||
comprehensive Isabelle development exists). Novel
|
||||
techniques implemented in Isabelle enforce the coherence of
|
||||
semi-formal and formal definitions within to specific
|
||||
certification processes in order to improve their
|
||||
cost-effectiveness. },
|
||||
pdf = {https://www.lri.fr/~wolff/papers/conf/2018erts-agile-fm.pdf},
|
||||
year = 2018,
|
||||
series = {ERTS Conference Proceedings},
|
||||
location = {Toulouse}
|
||||
}
|
||||
|
||||
@InCollection{ wenzel.ea:building:2007,
|
||||
abstract = {We present the generic system framework of
|
||||
Isabelle/Isar underlying recent versions of Isabelle. Among
|
||||
other things, Isar provides an infrastructure for Isabelle
|
||||
plug-ins, comprising extensible state components and
|
||||
extensible syntax that can be bound to tactical ML
|
||||
programs. Thus the Isabelle/Isar architecture may be
|
||||
understood as an extension and refinement of the
|
||||
traditional LCF approach, with explicit infrastructure for
|
||||
building derivative systems. To demonstrate the technical
|
||||
potential of the framework, we apply it to a concrete
|
||||
formal methods tool: the HOL-Z 3.0 environment, which is
|
||||
geared towards the analysis of Z specifications and formal
|
||||
proof of forward-refinements.},
|
||||
author = {Makarius Wenzel and Burkhart Wolff},
|
||||
booktitle = {TPHOLs 2007},
|
||||
editor = {Klaus Schneider and Jens Brandt},
|
||||
language = {USenglish},
|
||||
acknowledgement={none},
|
||||
pages = {352--367},
|
||||
publisher = pub-springer,
|
||||
address = pub-springer:adr,
|
||||
number = 4732,
|
||||
series = s-lncs,
|
||||
title = {Building Formal Method Tools in the {Isabelle}/{Isar}
|
||||
Framework},
|
||||
doi = {10.1007/978-3-540-74591-4_26},
|
||||
year = 2007
|
||||
}
|
||||
|
||||
@Misc{ biendarra.ea:defining:2019,
|
||||
title = {Defining (Co)datatypes and Primitively (Co)recursive
|
||||
Functions in Isabelle/HOL},
|
||||
author = {Julian Biendarra and Jasmin Christian Blanchette and
|
||||
Martin Desharnais and Lorenz Panny and Andrei Popescu and
|
||||
Dmitriy Traytel},
|
||||
note = {\url{https://isabelle.in.tum.de/doc/datatypes.pdf}},
|
||||
year = 2019
|
||||
}
|
||||
|
||||
@Misc{ kraus:defining:2020,
|
||||
title = {Defining Recursive Functions in Isabelle/HOL},
|
||||
author = {Alexander Kraus},
|
||||
note = {\url{https://isabelle.in.tum.de/doc/functions.pdf}},
|
||||
year = 2020
|
||||
}
|
||||
|
||||
@Misc{ nipkow:whats:2020,
|
||||
title = {What's in Main},
|
||||
author = {Tobias Nipkow},
|
||||
note = {\url{https://isabelle.in.tum.de/doc/main.pdf}},
|
||||
year = 2020
|
||||
}
|
||||
|
||||
@InProceedings{ wenzel:system:2014,
|
||||
author = {Makarius Wenzel},
|
||||
title = {System description: Isabelle/{jEdit} in 2014},
|
||||
booktitle = {UITP},
|
||||
pages = {84--94},
|
||||
year = 2014,
|
||||
doi = {10.4204/EPTCS.167.10}
|
||||
}
|
||||
|
||||
@InProceedings{ barras.ea:pervasive:2013,
|
||||
author = {Bruno Barras and Lourdes Del Carmen
|
||||
Gonz{\'{a}}lez{-}Huesca and Hugo Herbelin and Yann
|
||||
R{\'{e}}gis{-}Gianas and Enrico Tassi and Makarius Wenzel
|
||||
and Burkhart Wolff},
|
||||
title = {Pervasive Parallelism in Highly-Trustable Interactive
|
||||
Theorem Proving Systems},
|
||||
booktitle = {MKM},
|
||||
pages = {359--363},
|
||||
year = 2013,
|
||||
doi = {10.1007/978-3-642-39320-4_29}
|
||||
}
|
||||
|
||||
@Article{ faithfull.ea:coqoon:2018,
|
||||
author = {Faithfull, Alexander and Bengtson, Jesper and Tassi,
|
||||
Enrico and Tankink, Carst},
|
||||
title = {Coqoon},
|
||||
journal = {Int. J. Softw. Tools Technol. Transf.},
|
||||
issue_date = {April 2018},
|
||||
volume = 20,
|
||||
number = 2,
|
||||
month = apr,
|
||||
year = 2018,
|
||||
issn = {1433-2779},
|
||||
pages = {125--137},
|
||||
numpages = 13,
|
||||
doi = {10.1007/s10009-017-0457-2},
|
||||
acmid = 3204223,
|
||||
publisher = {Springer-Verlag},
|
||||
address = {Berlin, Heidelberg}
|
||||
}
|
||||
|
||||
@InProceedings{ abrial:steam-boiler:1996,
|
||||
author = {Abrial, Jean-Raymond},
|
||||
title = {Steam-Boiler Control Specification Problem},
|
||||
booktitle = {Formal Methods for Industrial Applications, Specifying and
|
||||
Programming the Steam Boiler Control (the Book Grow out of
|
||||
a Dagstuhl Seminar, June 1995).},
|
||||
year = 1996,
|
||||
isbn = {3-540-61929-1},
|
||||
pages = {500--509},
|
||||
numpages = 10,
|
||||
url = {http://dl.acm.org/citation.cfm?id=647370.723886},
|
||||
acmid = 723886,
|
||||
publisher = {Springer-Verlag},
|
||||
address = {London, UK, UK}
|
||||
}
|
||||
|
||||
@TechReport{ bsi:50128:2014,
|
||||
type = {Standard},
|
||||
key = {BS EN 50128:2011},
|
||||
month = apr,
|
||||
year = 2014,
|
||||
series = {British Standards Publication},
|
||||
title = {BS EN 50128:2011: Railway applications -- Communication,
|
||||
signalling and processing systems -- Software for railway
|
||||
control and protecting systems},
|
||||
institution =	  {British Standards Institute (BSI)},
|
||||
keywords = {CENELEC},
|
||||
abstract = {This European Standard is part of a group of related
|
||||
standards. The others are EN 50126-1:1999 "Railway
|
||||
applications -- The specification and demonstration of
|
||||
Reliability, Availability, Maintainability and Safety
|
||||
(RAMS) -- Part 1: Basic requirements and generic process --
|
||||
and EN 50129:2003 "Railway applications -- Communication,
|
||||
signalling and processing systems -- Safety related
|
||||
electronic systems for signalling". EN 50126-1 addresses
|
||||
system issues on the widest scale, while EN 50129 addresses
|
||||
the approval process for individual systems which can exist
|
||||
within the overall railway control and protection system.
|
||||
This European Standard concentrates on the methods which
|
||||
need to be used in order to provide software which meets
|
||||
the demands for safety integrity which are placed upon it
|
||||
by these wider considerations. This European Standard
|
||||
provides a set of requirements with which the development,
|
||||
deployment and maintenance of any safety-related software
|
||||
intended for railway control and protection applications
|
||||
shall comply. It defines requirements concerning
|
||||
organisational structure, the relationship between
|
||||
organisations and division of responsibility involved in
|
||||
the development, deployment and maintenance activities.}
|
||||
}
|
||||
|
||||
@Article{ kraus.ea:regular-sets-afp:2010,
|
||||
author = {Alexander Krauss and Tobias Nipkow},
|
||||
title = {Regular Sets and Expressions},
|
||||
journal = {Archive of Formal Proofs},
|
||||
month = may,
|
||||
year = 2010,
|
||||
note = {\url{https://isa-afp.org/entries/Regular-Sets.html}, Formal
|
||||
proof development},
|
||||
issn = {2150-914x}
|
||||
}
|
||||
|
||||
@Article{ nipkow.ea:functional-Automata-afp:2004,
|
||||
author = {Tobias Nipkow},
|
||||
title = {Functional Automata},
|
||||
journal = {Archive of Formal Proofs},
|
||||
month = mar,
|
||||
year = 2004,
|
||||
note = {\url{https://isa-afp.org/entries/Functional-Automata.html},
|
||||
Formal proof development},
|
||||
issn = {2150-914x}
|
||||
}
|
||||
|
||||
@Booklet{ kohm:koma-script:2019,
|
||||
author = {Markus Kohm},
|
||||
title = {{KOMA-Script}: a versatile {\LaTeXe{}} bundle},
|
||||
year = 2019
|
||||
}
|
||||
|
||||
@Booklet{ wenzel:system-manual:2020,
|
||||
author = {Makarius Wenzel},
|
||||
title = {The {Isabelle} System Manual},
|
||||
year = 2020,
|
||||
note = {Part of the Isabelle distribution.}
|
||||
}
|
||||
|
||||
@Booklet{ chervet:keycommand:2010,
|
||||
author = {Florent Chervet},
|
||||
title = {The free and open source keycommand package: key-value
|
||||
interface for commands and environments in {\LaTeX}.},
|
||||
year = 2010
|
||||
}
|
||||
|
||||
@Book{ knuth:texbook:1986,
|
||||
author = {Knuth, Donald E.},
|
||||
title = {The TeXbook},
|
||||
year = 1986,
|
||||
isbn = 0201134470,
|
||||
publisher = {Addison-Wesley Professional}
|
||||
}
|
||||
|
||||
@Book{ mittelbach.ea:latex:1999,
|
||||
author = {Mittelbach, Frank and Goossens, Michel and Braams,
|
||||
Johannes and Carlisle, David and Rowley, Chris},
|
||||
title = {The LaTeX Companion},
|
||||
year = 2004,
|
||||
edition = {2nd},
|
||||
publisher = {Addison-Wesley Longman Publishing Co., Inc.},
|
||||
address = {Boston, MA, USA}
|
||||
}
|
||||
|
||||
@Book{ eijkhout:latex-cs:2012,
|
||||
author = {Victor Eijkhout},
|
||||
title = {The Computer Science of TeX and LaTeX},
|
||||
publisher = {Texas Advanced Computing Center},
|
||||
year = 2012
|
||||
}
|
||||
@ -1,5 +0,0 @@
|
||||
heading_prefix "{\\large\\textbf{"
|
||||
heading_suffix "}\\hfil}\\nopagebreak\n"
|
||||
headings_flag 1
|
||||
symhead_positive "Symbols"
|
||||
|
||||
@ -1,10 +0,0 @@
|
||||
title = Isabelle/DOF
|
||||
module = $ISABELLE_HOME_USER/DOF/isabelle_dof.jar
|
||||
no_build = false
|
||||
requirements = \
|
||||
env:ISABELLE_SCALA_JAR
|
||||
sources = \
|
||||
scala/dof.scala \
|
||||
scala/dof_document_build.scala
|
||||
services = \
|
||||
isabelle.dof.DOF_Document_Build$Engine
|
||||
@ -1,75 +0,0 @@
|
||||
%% Copyright (c) 2019-2022 University of Exeter
|
||||
%% 2018-2022 University of Paris-Saclay
|
||||
%% 2018-2019 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the lncs class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\documentclass{llncs}
|
||||
\usepackage{DOF-core}
|
||||
\bibliographystyle{splncs04}
|
||||
\title{No Title Given}
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{3}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\usepackage{subcaption}
|
||||
\usepackage[size=footnotesize]{caption}
|
||||
|
||||
\renewcommand{\topfraction}{0.9} % max fraction of floats at top
|
||||
\renewcommand{\bottomfraction}{0.8} % max fraction of floats at bottom
|
||||
\setcounter{topnumber}{2}
|
||||
\setcounter{bottomnumber}{2}
|
||||
\setcounter{totalnumber}{4} % 2 may work better
|
||||
\setcounter{dbltopnumber}{2} % for 2-column pages
|
||||
\renewcommand{\dbltopfraction}{0.9} % fit big float above 2-col. text
|
||||
\renewcommand{\textfraction}{0.07} % allow minimal text w. figs
|
||||
\renewcommand{\floatpagefraction}{0.7} % require fuller float pages
|
||||
\renewcommand{\dblfloatpagefraction}{0.7} % require fuller float pages
|
||||
|
||||
\begin{document}
|
||||
\selectlanguage{USenglish}%
|
||||
\renewcommand{\bibname}{References}%
|
||||
\renewcommand{\figurename}{Fig.}
|
||||
\renewcommand{\abstractname}{Abstract.}
|
||||
\renewcommand{\subsubsectionautorefname}{Sect.}
|
||||
\renewcommand{\subsectionautorefname}{Sect.}
|
||||
\renewcommand{\sectionautorefname}{Sect.}
|
||||
\renewcommand{\figureautorefname}{Fig.}
|
||||
\newcommand{\lstnumberautorefname}{Line}
|
||||
|
||||
|
||||
|
||||
\maketitle
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\small\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
||||
@ -1,61 +0,0 @@
|
||||
%% Copyright (c) 2019-2022 University of Exeter
|
||||
%% 2018-2022 University of Paris-Saclay
|
||||
%% 2018-2019 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrartcl class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\documentclass[abstract=true,fontsize=11pt,DIV=12,paper=a4]{scrartcl}
|
||||
|
||||
\title{No Title Given}
|
||||
\usepackage{DOF-core}
|
||||
|
||||
\usepackage{textcomp}
|
||||
\bibliographystyle{abbrvnat}
|
||||
\RequirePackage{subcaption}
|
||||
|
||||
|
||||
\usepackage[numbers, sort&compress, sectionbib]{natbib}
|
||||
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{3}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\newenvironment{frontmatter}{}{}
|
||||
\raggedbottom
|
||||
\begin{document}
|
||||
\begin{frontmatter}
|
||||
\maketitle
|
||||
\end{frontmatter}
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
||||
@ -1,85 +0,0 @@
|
||||
%% Copyright (c) 2019-2022 University of Exeter
|
||||
%% 2018-2022 University of Paris-Saclay
|
||||
%% 2018-2019 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrreprt class with
|
||||
%% as "modern" layout that is more common to technical standardisation
|
||||
%% documents (e.g. using sans serif fonts).
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\documentclass[fontsize=11pt,paper=a4,open=right,twoside,abstract=true]{scrreprt}
|
||||
|
||||
\title{No Title Given}
|
||||
|
||||
\usepackage{textcomp}
|
||||
\bibliographystyle{abbrvnat}
|
||||
\usepackage{amsthm}
|
||||
\usepackage{newtxsf}
|
||||
\usepackage{DOF-core}
|
||||
|
||||
\renewcommand\familydefault{\sfdefault}
|
||||
\colorlet{DOFsectioncolor}{blue!60!black}
|
||||
\addtokomafont{chapterentrypagenumber}{\color{DOFsectioncolor}}
|
||||
\addtokomafont{chapterentry}{\color{DOFsectioncolor}}
|
||||
\addtokomafont{title}{\color{DOFsectioncolor}\bfseries}
|
||||
\addtokomafont{chapter}{\color{DOFsectioncolor}\bfseries}
|
||||
\addtokomafont{section}{\color{DOFsectioncolor}}
|
||||
\addtokomafont{subsection}{\color{DOFsectioncolor}}
|
||||
\addtokomafont{subsubsection}{\color{DOFsectioncolor}}
|
||||
\addtokomafont{paragraph}{\color{DOFsectioncolor}}
|
||||
\addtokomafont{subparagraph}{\color{DOFsectioncolor}}
|
||||
|
||||
\RequirePackage{subcaption}
|
||||
\renewcommand{\isastyletext}{\normalsize\normalfont\sffamily}
|
||||
\renewcommand{\isastyletxt}{\normalfont\sffamily}
|
||||
\renewcommand{\isastylecmt}{\normalfont\sffamily}
|
||||
|
||||
\usepackage[numbers, sort&compress, sectionbib]{natbib}
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{2}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\raggedbottom
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\newenvironment{frontmatter}{}{}
|
||||
|
||||
\begin{document}
|
||||
\renewcommand{\chapterautorefname}{Chapter}
|
||||
\renewcommand{\sectionautorefname}{Section}
|
||||
\renewcommand{\subsectionautorefname}{Section}
|
||||
\renewcommand{\subsubsectionautorefname}{Section}
|
||||
\begin{frontmatter}
|
||||
\maketitle
|
||||
\tableofcontents
|
||||
\end{frontmatter}
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
||||
@ -1,63 +0,0 @@
|
||||
%% Copyright (c) 2019-2022 University of Exeter
|
||||
%% 2018-2022 University of Paris-Saclay
|
||||
%% 2018-2019 The University of Sheffield
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% Warning: Do Not Edit!
|
||||
%% =====================
|
||||
%% This is the root file for the Isabelle/DOF using the scrreprt class.
|
||||
%%
|
||||
%% All customization and/or additional packages should be added to the file
|
||||
%% preamble.tex.
|
||||
|
||||
\RequirePackage{ifvtex}
|
||||
\documentclass[fontsize=11pt,paper=a4,open=right,twoside,abstract=true]{scrreprt}
|
||||
\title{No Title Given}
|
||||
|
||||
\usepackage{DOF-core}
|
||||
|
||||
\usepackage{textcomp}
|
||||
\bibliographystyle{abbrvnat}
|
||||
\RequirePackage{subcaption}
|
||||
|
||||
|
||||
\usepackage[numbers, sort&compress, sectionbib]{natbib}
|
||||
|
||||
\usepackage{hyperref}
|
||||
\setcounter{tocdepth}{2}
|
||||
\hypersetup{%
|
||||
bookmarksdepth=3
|
||||
,pdfpagelabels
|
||||
,pageanchor=true
|
||||
,bookmarksnumbered
|
||||
,plainpages=false
|
||||
} % more detailed digital TOC (aka bookmarks)
|
||||
\sloppy
|
||||
\raggedbottom
|
||||
\allowdisplaybreaks[4]
|
||||
|
||||
\begin{document}
|
||||
\renewcommand{\chapterautorefname}{Chapter}
|
||||
\renewcommand{\sectionautorefname}{Section}
|
||||
\renewcommand{\subsectionautorefname}{Section}
|
||||
\renewcommand{\subsubsectionautorefname}{Section}
|
||||
\maketitle
|
||||
\tableofcontents
|
||||
\IfFileExists{dof_session.tex}{\input{dof_session}}{\input{session}}
|
||||
% optional bibliography
|
||||
\IfFileExists{root.bib}{{\bibliography{root}}}{}
|
||||
\end{document}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: t
|
||||
%%% End:
|
||||
@ -1,38 +0,0 @@
|
||||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018 The University of Paris-Saclay
|
||||
%% 2019 The University of Exeter
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-COL}
|
||||
[00/00/0000 Document-Type Support Framework for Isabelle.]
|
||||
|
||||
\RequirePackage{DOF-core}
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: float*
|
||||
\NewEnviron{isamarkupfloat*}[1][]{\isaDof[env={float},#1]{\BODY}}
|
||||
\newisadof{IsaUNDERSCORECOLDOTfloat}%
|
||||
[label=,type=%
|
||||
,IsaUNDERSCORECOLDOTfloatDOTplacement=%
|
||||
,IsaUNDERSCORECOLDOTfloatDOTfloatUNDERSCOREkind=%
|
||||
,IsaUNDERSCORECOLDOTfloatDOTmainUNDERSCOREcaption=%
|
||||
,IsaUNDERSCORECOLDOTfloatDOTspawnUNDERSCOREcolumns=enum False True%
|
||||
][1]{%
|
||||
\begin{figure}
|
||||
#1
|
||||
\caption{\commandkey{IsaUNDERSCORECOLDOTfloatDOTmainUNDERSCOREcaption}}
|
||||
\label{\commandkey{label}}%
|
||||
\end{figure}
|
||||
}
|
||||
% end: float*
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -1,25 +0,0 @@
|
||||
%% Copyright (C) 2021 University of Exeter
|
||||
%% 2021 University of Paris-Saclay
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-amssymb}
|
||||
[00/00/0000 Document-Type Support Framework for Isabelle (amssymb wrapper for lualatex/pdflatex).]
|
||||
|
||||
\usepackage{ifxetex,ifluatex}
|
||||
\ifnum 0\ifxetex 1\fi\ifluatex 1\fi=0 % if pdftex
|
||||
\usepackage{amssymb}
|
||||
\else % if luatex or xetex
|
||||
\usepackage{unicode-math}
|
||||
\usepackage{latexsym}
|
||||
\fi
|
||||
@ -1,177 +0,0 @@
|
||||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018 The University of Paris-Saclay
|
||||
%% 2019 The University of Exeter
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-core}
|
||||
[00/00/0000 Document-Type Support Framework for Isabelle.]
|
||||
|
||||
\usepackage[T1]{fontenc}
|
||||
\usepackage[utf8]{inputenc}
|
||||
\usepackage[english,USenglish]{babel}
|
||||
|
||||
\RequirePackage{keycommand}
|
||||
\RequirePackage{environ}
|
||||
\RequirePackage{graphicx}
|
||||
\RequirePackage{xcolor}
|
||||
\RequirePackage{xspace}
|
||||
\RequirePackage{etoolbox}
|
||||
\RequirePackage{fp}
|
||||
\usepackage{amsmath}
|
||||
\usepackage{DOF-amssymb}
|
||||
|
||||
\usepackage{isabelle}
|
||||
\usepackage{isabellesym}
|
||||
|
||||
\isabellestyle{it}
|
||||
|
||||
|
||||
|
||||
|
||||
\RequirePackage{dof-config}
|
||||
\newcommand{\isabelleversion}{\dof@isabelleversion\xspace}
|
||||
\newcommand{\dofversion}{\dof@version\xspace}
|
||||
\newcommand{\isadofversion}{\dofversion/\isabelleversion}
|
||||
\newcommand{\isadoflatestversion}{\doflatestversion/\isabellelatestversion}
|
||||
\newcommand{\isadofdir}{Isabelle_DOF-\dof@version_\dof@isabelleversion}
|
||||
\newcommand{\isadofdirn}{Isabelle\_DOF-\dof@version\_\dof@isabelleversion}
|
||||
\newcommand{\isadofarchive}{\isadofdir.tar.xz}
|
||||
\newcommand{\isadofarchiven}{\isadofdirn.tar.xz}
|
||||
\newcommand{\isadofarchiveurl}{\dof@artifacturl/\isadofarchive}
|
||||
|
||||
\newcommand{\isadof}{Isabelle/DOF\xspace}
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: newcommand wrapper
|
||||
\newcommand\newisadof[1]{\expandafter\newkeycommand\csname isaDofDOT#1\endcsname}%
|
||||
\newcommand\renewisadof[1]{\expandafter\renewkeycommand\csname isaDofDOT#1\endcsname}%
|
||||
\newcommand\provideisadof[1]{\expandafter\providekeycommand\csname isaDofDOT#1\endcsname}%
|
||||
% end: newcommand wrapper
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: generic dispatcher
|
||||
\newkeycommand+[\|]\isaDof[env={UNKNOWN},label=,type={dummyT},args={}][1]{%
|
||||
\ifcsname isaDofDOT\commandkey{type}\endcsname%
|
||||
\csname isaDofDOT\commandkey{type}\endcsname%
|
||||
[label=\commandkey{label},\commandkey{args}]{#1}%
|
||||
\else%
|
||||
\ifcsname isaDofDOT\commandkey{env}DOT\commandkey{type}\endcsname%
|
||||
\csname isaDofDOT\commandkey{env}DOT\commandkey{type}\endcsname%
|
||||
[label=\commandkey{label},\commandkey{args}]{#1}%
|
||||
\else%
|
||||
\message{Isabelle/DOF: Using default LaTeX representation for concept %
|
||||
"\commandkey{env}.\commandkey{type}".}%
|
||||
\ifcsname isaDofDOT\commandkey{env}\endcsname%
|
||||
\csname isaDofDOT\commandkey{env}\endcsname%
|
||||
[label=\commandkey{label}]{#1}%
|
||||
\else%
|
||||
\errmessage{Isabelle/DOF: No LaTeX representation for concept %
|
||||
"\commandkey{env}.\commandkey{type}" defined and no default %
|
||||
definition for "\commandkey{env}" available either.}%
|
||||
\fi%
|
||||
\fi%
|
||||
\fi%
|
||||
}
|
||||
% end: generic dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: text*-dispatcher
|
||||
\NewEnviron{isamarkuptext*}[1][]{\isaDof[env={text},#1]{\BODY}}
|
||||
% end: text*-dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: chapter*-dispatcher
|
||||
\NewEnviron{isamarkupchapter*}[1][]{\isaDof[env={IsaUNDERSCORECOLDOTchapter},#1]{\BODY}}
|
||||
% end: chapter*-dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: section*-dispatcher
|
||||
\NewEnviron{isamarkupsection*}[1][]{\isaDof[env={IsaUNDERSCORECOLDOTsection},#1]{\BODY}}
|
||||
% end: section*-dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: subsection*-dispatcher
|
||||
\NewEnviron{isamarkupsubsection*}[1][]{\isaDof[env={IsaUNDERSCORECOLDOTsubsection},#1]{\BODY}}
|
||||
% end: subsection*-dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: subsubsection*-dispatcher
|
||||
\NewEnviron{isamarkupsubsubsection*}[1][]{\isaDof[env={IsaUNDERSCORECOLDOTsubsubsection},#1]{\BODY}}
|
||||
% end: subsubsection*-dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: paragraph*-dispatcher
|
||||
\NewEnviron{isamarkupparagraph*}[1][]{\isaDof[env={IsaUNDERSCORECOLDOTparagraph},#1]{\BODY}}
|
||||
% end: paragraph*-dispatcher
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: text default implementation
|
||||
\newisadof{text}[label=,type=][1]{%
|
||||
\begin{isamarkuptext}\label{\commandkey{label}}%
|
||||
#1
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
% end: text default implementation
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: chapter/section default implementations
|
||||
\newisadof{IsaUNDERSCORECOLDOTchapter}[label=,type=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=][1]{%
|
||||
\isamarkupfalse\isamarkupchapter{#1}\label{\commandkey{label}}\isamarkuptrue%
|
||||
}
|
||||
\newisadof{IsaUNDERSCORECOLDOTsection}[label=,type=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=][1]{%
|
||||
\isamarkupfalse\isamarkupsection{#1}\label{\commandkey{label}}\isamarkuptrue%
|
||||
}
|
||||
\newisadof{IsaUNDERSCORECOLDOTsubsection}[label=,type=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=][1]{%
|
||||
\isamarkupfalse\isamarkupsubsection{#1}\label{\commandkey{label}}\isamarkuptrue%
|
||||
}
|
||||
\newisadof{IsaUNDERSCORECOLDOTsubsubsection}[label=,type=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=][1]{%
|
||||
\isamarkupfalse\isamarkupsubsubsection{#1}\label{\commandkey{label}}\isamarkuptrue%
|
||||
}
|
||||
\newisadof{IsaUNDERSCORECOLDOTparagraph}[label=,type=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=,IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=][1]{%
|
||||
\isamarkupfalse\isamarkupparagraph{#1}\label{\commandkey{label}}\isamarkuptrue%
|
||||
}
|
||||
% end: chapter/section default implementations
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: label and ref
|
||||
\newkeycommand\isaDof@label[label=,type=][1]{\label{#1}}
|
||||
\newcommand{\isaDofDOTlabel}{\isaDof@label}
|
||||
\newkeycommand\isaDof@ref[label=,type=][1]{\autoref{#1}}
|
||||
\newcommand{\isaDofDOTref}{\isaDof@ref}
|
||||
\newkeycommand\isaDof@macro[label=,type=][1]{MMM \label{#1}} %% place_holder
|
||||
\newcommand{\isaDofDOTmacroDef}{\iisaDof@macro}
|
||||
\newkeycommand\isaDof@macroExp[label=,type=][1]{MMM \autoref{#1}} %% place_holder
|
||||
\newcommand{\isaDofDOTmacroExp}{\isaDof@macroExp}
|
||||
% end: label and ref
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%\title{No Title Given}
|
||||
\input{ontologies}
|
||||
\IfFileExists{preamble.tex}{\input{preamble.tex}}{}%
|
||||
|
||||
% notation
|
||||
\newcommand{\isactrltermUNDERSCORE}{\isakeywordcontrol{term{\isacharunderscore}}}
|
||||
\newcommand{\isactrlvalueUNDERSCORE}{\isakeywordcontrol{value{\isacharunderscore}}}
|
||||
\newcommand{\isasymdoublequote}{\texttt{\upshape"}}
|
||||
\newcommand{\isasymquote}{\texttt{\upshape'}}
|
||||
@ -1,20 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
theory
|
||||
ontologies
|
||||
imports
|
||||
"scholarly_paper/scholarly_paper"
|
||||
"technical_report/technical_report"
|
||||
begin
|
||||
end
|
||||
@ -1,259 +0,0 @@
|
||||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018 The University of Paris-Saclay
|
||||
%% 2019 The University of Exeter
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-scholarly_paper}
|
||||
[2021/03/22 Unreleased/Isabelle2021%
|
||||
Document-Type Support Framework for Isabelle (LNCS).]
|
||||
|
||||
\RequirePackage{DOF-COL}
|
||||
\RequirePackage{ifthen}
|
||||
|
||||
\RequirePackage{ifthen}
|
||||
\newboolean{DOF@scholarlypaper@force}
|
||||
\DeclareOption{force}{\setboolean{DOF@scholarlypaper@force}{true}}
|
||||
|
||||
\ProcessOptions\relax
|
||||
|
||||
\ifthenelse{\boolean{DOF@scholarlypaper@force}}{%
|
||||
}{%
|
||||
\@ifclassloaded{llncs}%
|
||||
{}%
|
||||
{%
|
||||
\@ifclassloaded{scrartcl}%
|
||||
{%
|
||||
\RequirePackage{amsthm}
|
||||
\newcommand{\institute}[1]{}%
|
||||
\newcommand{\inst}[1]{}%
|
||||
\newcommand{\orcidID}[1]{}%
|
||||
\newcommand{\email}[1]{}%
|
||||
}%
|
||||
{%
|
||||
\@ifclassloaded{lipics-v2021}%
|
||||
{%
|
||||
\RequirePackage{amsthm}
|
||||
\newcommand{\institute}[1]{}%
|
||||
\newcommand{\inst}[1]{}%
|
||||
\newcommand{\orcidID}[1]{}%
|
||||
\newcommand{\email}[1]{}%
|
||||
}%
|
||||
{%
|
||||
{%
|
||||
\@ifclassloaded{svjour3}%
|
||||
{%
|
||||
\newcommand{\inst}[1]{}%
|
||||
}%
|
||||
{%
|
||||
\PackageError{DOF-scholarly_paper}
|
||||
{Scholarly Paper only supports LNCS or scrartcl as document class.}
|
||||
{}\stop%
|
||||
}%
|
||||
}%
|
||||
}%
|
||||
}
|
||||
}
|
||||
}
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: title*
|
||||
\NewEnviron{isamarkuptitle*}[1][]{\isaDof[env={title},#1]{\BODY}}
|
||||
\newisadof{titleDOTscholarlyUNDERSCOREpaperDOTtitle}%
|
||||
[label=,type=%
|
||||
,scholarlyUNDERSCOREpaperDOTtitleDOTshortUNDERSCOREtitle=%
|
||||
][1]{%
|
||||
\immediate\write\@auxout{\noexpand\title{#1}}%
|
||||
}
|
||||
% end: title*
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: subtitle*
|
||||
\NewEnviron{isamarkupsubtitle*}[1][]{\isaDof[env={subtitle},#1]{\BODY}}
|
||||
\newisadof{subtitleDOTscholarlyUNDERSCOREpaperDOTsubtitle}%
|
||||
[label=,type=%
|
||||
,scholarlyUNDERSCOREpaperDOTsubtitleDOTabbrev=%
|
||||
][1]{%
|
||||
\immediate\write\@auxout{\noexpand\subtitle{#1}}%
|
||||
}
|
||||
% end: subtitle*
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: scholarly_paper.author
|
||||
\def\dof@author{}%
|
||||
\def\dof@affiliation{}%
|
||||
|
||||
\newcommand{\DOFauthor}{\author{\dof@author}}
|
||||
\newcommand{\DOFinstitute}{\institute{\dof@affiliation}}
|
||||
|
||||
\AtBeginDocument{%
|
||||
\DOFauthor
|
||||
\DOFinstitute
|
||||
}
|
||||
|
||||
\def\leftadd#1#2{\expandafter\leftaddaux\expandafter{#1}{#2}{#1}}
|
||||
\def\leftaddaux#1#2#3{\gdef#3{#1#2}}
|
||||
|
||||
\newcounter{dof@cnt@author}
|
||||
\newcommand{\addauthor}[1]{%
|
||||
\ifthenelse{\equal{\dof@author}{}}{%
|
||||
\gdef\dof@author{#1}%
|
||||
}{%
|
||||
\leftadd\dof@author{\protect\and #1}%
|
||||
}
|
||||
}
|
||||
|
||||
\newcommand{\addaffiliation}[1]{%
|
||||
\ifthenelse{\equal{\dof@affiliation}{}}{%
|
||||
\gdef\dof@affiliation{#1}%
|
||||
}{%
|
||||
\leftadd\dof@affiliation{\protect\and #1}%
|
||||
}
|
||||
}
|
||||
|
||||
\NewEnviron{isamarkupauthor*}[1][]{\isaDof[env={text},#1]{\BODY}}
|
||||
\provideisadof{textDOTscholarlyUNDERSCOREpaperDOTauthor}%
|
||||
[label=,type=%
|
||||
,scholarlyUNDERSCOREpaperDOTauthorDOTemail=%
|
||||
,scholarlyUNDERSCOREpaperDOTauthorDOTaffiliation=%
|
||||
,scholarlyUNDERSCOREpaperDOTauthorDOTorcid=%
|
||||
,scholarlyUNDERSCOREpaperDOTauthorDOThttpUNDERSCOREsite=%
|
||||
][1]{%
|
||||
\stepcounter{dof@cnt@author}
|
||||
\def\dof@a{\commandkey{scholarlyUNDERSCOREpaperDOTauthorDOTaffiliation}}
|
||||
\ifthenelse{\equal{\commandkey{scholarlyUNDERSCOREpaperDOTauthorDOTorcid}}{}}{%
|
||||
\protected@write\@auxout{}{\string\addauthor{#1\string\inst{\thedof@cnt@author}}}%
|
||||
}{%
|
||||
\protected@write\@auxout{}{\string\addauthor{#1\string\inst{\thedof@cnt@author}\string\orcidID{\commandkey{scholarlyUNDERSCOREpaperDOTauthorDOTorcid}}}}%
|
||||
}
|
||||
\protected@write\@auxout{}{\string\addaffiliation{\dof@a\\\string\email{\commandkey{scholarlyUNDERSCOREpaperDOTauthorDOTemail}}}}%
|
||||
}
|
||||
% end: scholarly_paper.author
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
% begin: scholarly_paper.abstract
|
||||
\providecommand{\keywords}[1]{\mbox{}\\[2ex]\mbox{}\noindent{\textbf{Keywords:}} #1}
|
||||
\NewEnviron{isamarkupabstract*}[1][]{\isaDof[env={text},#1]{\BODY}}
|
||||
\newisadof{textDOTscholarlyUNDERSCOREpaperDOTabstract}%
|
||||
[label=,type=%
|
||||
,scholarlyUNDERSCOREpaperDOTabstractDOTkeywordlist=%
|
||||
][1]{%
|
||||
\begin{isamarkuptext}%
|
||||
\begin{abstract}%
|
||||
#1%
|
||||
\ifthenelse{\equal{\commandkey{scholarlyUNDERSCOREpaperDOTabstractDOTkeywordlist}}{}}{}{%
|
||||
\keywords{\commandkey{scholarlyUNDERSCOREpaperDOTabstractDOTkeywordlist}}%
|
||||
}%
|
||||
\end{abstract}%
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
|
||||
%\RequirePackage{amsthm}
|
||||
%\newtheorem{example}{Example}
|
||||
%\newtheorem{assumption}{Assumption}
|
||||
%\newtheorem{definition}{Definition}
|
||||
%\newtheorem{theorem}{Theorem}
|
||||
\newtheorem{defn}{Definition}
|
||||
\providecommand{\defnautorefname}{Definition}
|
||||
\NewEnviron{isamarkupDefinition*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{axm}{Axiom}
|
||||
\providecommand{\axmautorefname}{Axiom}
|
||||
\NewEnviron{isamarkupAxiom*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{theom}{Theorem}
|
||||
\providecommand{\theomautorefname}{Theorem}
|
||||
\NewEnviron{isamarkupTheorem*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{lemm}{Lemma}
|
||||
\providecommand{\lemmautorefname}{Lemma}
|
||||
\NewEnviron{isamarkupLemma*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{corr}{Corollary}
|
||||
\providecommand{\corrautorefname}{Corollary}
|
||||
\NewEnviron{isamarkupCorollary*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{prpo}{Proposition}
|
||||
\providecommand{\prpoautorefname}{Proposition}
|
||||
\NewEnviron{isamarkupProposition*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{rulE}{Rule}
|
||||
\providecommand{\rulEautorefname}{Rule}
|
||||
\NewEnviron{isamarkupRule*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{assn}{Assertion}
|
||||
\providecommand{\assnautorefname}{Assertion}
|
||||
\NewEnviron{isamarkupAssertion*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{hypt}{Hypothesis}
|
||||
\providecommand{\hyptautorefname}{Hypothesis}
|
||||
\NewEnviron{isamarkupHypothesis*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{assm}{Assumption}
|
||||
\providecommand{\assmautorefname}{Assumption}
|
||||
\NewEnviron{isamarkupAssumption*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{prms}{Premise}
|
||||
\providecommand{\prmsautorefname}{Premise}
|
||||
\NewEnviron{isamarkupPremise*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{cons}{Consequence}
|
||||
\providecommand{\consautorefname}{Consequence}
|
||||
\NewEnviron{isamarkupConsequence*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{concUNDERSCOREstmt}{Conclusion}
|
||||
\providecommand{\concUNDERSCOREstmtautorefname}{Conclusion}
|
||||
\NewEnviron{isamarkupConclusion*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{prfUNDERSCOREstmt}{Proof}
|
||||
\providecommand{\prfUNDERSCOREstmtautorefname}{Proof}
|
||||
\NewEnviron{isamarkupProof*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{explUNDERSCOREstmt}{Example}
|
||||
\providecommand{\explUNDERSCOREstmtautorefname}{Example}
|
||||
\NewEnviron{isamarkupExample*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{rmrk}{Remark}
|
||||
\providecommand{\rmrkautorefname}{Remark}
|
||||
\NewEnviron{isamarkupRemark*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{notn}{Notation}
|
||||
\providecommand{\notnautorefname}{Notation}
|
||||
\NewEnviron{isamarkupNotation*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
\newtheorem{tmgy}{Terminology}
|
||||
\providecommand{\tmgyautorefname}{Terminology}
|
||||
\NewEnviron{isamarkupTerminology*}[1][]{\isaDof[env={text},#1,type={scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}]{\BODY}}
|
||||
|
||||
\newisadof{textDOTscholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontent}%
|
||||
[label=,type=%
|
||||
, scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTshortUNDERSCOREname ={}%
|
||||
, scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc = %
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel =%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable =%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants =%
|
||||
, scholarlyUNDERSCOREpaperDOTtextUNDERSCOREsectionDOTmainUNDERSCOREauthor =%
|
||||
, scholarlyUNDERSCOREpaperDOTtextUNDERSCOREsectionDOTfixmeUNDERSCORElist =%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel =%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTdefinitionUNDERSCORElist =%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTstatus =%
|
||||
]
|
||||
[1]
|
||||
{%
|
||||
\begin{isamarkuptext}%
|
||||
\ifthenelse{\equal{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTshortUNDERSCOREname}} {} }
|
||||
{%
|
||||
\begin{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}\label{\commandkey{label}}
|
||||
#1
|
||||
\end{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}
|
||||
}{%
|
||||
\begin{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}[\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTshortUNDERSCOREname}]\label{\commandkey{label}}
|
||||
#1
|
||||
\end{\commandkey{scholarlyUNDERSCOREpaperDOTmathUNDERSCOREcontentDOTmcc}}
|
||||
}
|
||||
\end{isamarkuptext}%
|
||||
}
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Miscellaneous
|
||||
\usepackage{xspace}
|
||||
\newcommand{\ie}{i.\,e.\xspace}
|
||||
\newcommand{\eg}{e.\,g.\xspace}
|
||||
\newcommand{\etc}{etc}
|
||||
|
||||
|
||||
@ -1,677 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>An example ontology for scientific, MINT-oriented papers.\<close>
|
||||
|
||||
theory scholarly_paper
|
||||
imports "Isabelle_DOF.Isa_COL"
|
||||
keywords "author*" "abstract*" :: document_body
|
||||
and "Proposition*" "Definition*" "Lemma*" "Theorem*" :: document_body
|
||||
and "Premise*" "Corollary*" "Consequence*" "Conclusion*" :: document_body
|
||||
and "Assumption*" "Hypothesis*" "Assertion*" :: document_body
|
||||
and "Proof*" "Example*" :: document_body
|
||||
begin
|
||||
|
||||
define_ontology "DOF-scholarly_paper.sty" "Writing academic publications."
|
||||
|
||||
text\<open>Scholarly Paper provides a number of standard text - elements for scientific papers.
|
||||
They were introduced in the following.\<close>
|
||||
|
||||
section\<open>General Paper Structuring Elements\<close>
|
||||
doc_class title =
|
||||
short_title :: "string option" <= "None"
|
||||
|
||||
doc_class subtitle =
|
||||
abbrev :: "string option" <= "None"
|
||||
|
||||
(* adding a contribution list and checking that it is cited as well in tech as in conclusion. ? *)
|
||||
|
||||
doc_class author =
|
||||
email :: "string" <= "''''"
|
||||
http_site :: "string" <= "''''"
|
||||
orcid :: "string" <= "''''"
|
||||
affiliation :: "string"
|
||||
|
||||
|
||||
doc_class abstract =
|
||||
keywordlist :: "string list" <= "[]"
|
||||
principal_theorems :: "thm list"
|
||||
|
||||
|
||||
ML\<open>
|
||||
val _ =
|
||||
Monitor_Command_Parser.document_command \<^command_keyword>\<open>abstract*\<close> "Textual Definition"
|
||||
{markdown = true, body = true}
|
||||
(Onto_Macros.enriched_document_cmd_exp (SOME "abstract") []) [] I;
|
||||
|
||||
val _ =
|
||||
Monitor_Command_Parser.document_command \<^command_keyword>\<open>author*\<close> "Textual Definition"
|
||||
{markdown = true, body = true}
|
||||
(Onto_Macros.enriched_document_cmd_exp (SOME "author") []) [] I;
|
||||
\<close>
|
||||
|
||||
text\<open>Scholarly Paper is oriented towards the classical domains in science:
|
||||
\<^enum> mathematics
|
||||
\<^enum> informatics
|
||||
\<^enum> natural sciences
|
||||
\<^enum> technology (= engineering)
|
||||
|
||||
which we formalize into:\<close>
|
||||
|
||||
doc_class text_section = text_element +
|
||||
main_author :: "author option" <= None
|
||||
fixme_list :: "string list" <= "[]"
|
||||
level :: "int option" <= "None"
|
||||
(* this attribute enables doc-notation support section* etc.
|
||||
we follow LaTeX terminology on levels
|
||||
part = Some -1
|
||||
chapter = Some 0
|
||||
section = Some 1
|
||||
subsection = Some 2
|
||||
subsubsection = Some 3
|
||||
... *)
|
||||
(* for scholarly paper: invariant level > 0 *)
|
||||
|
||||
|
||||
doc_class "conclusion" = text_section +
|
||||
main_author :: "author option" <= None
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
doc_class related_work = "conclusion" +
|
||||
main_author :: "author option" <= None
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
doc_class bibliography = text_section +
|
||||
style :: "string option" <= "Some ''LNCS''"
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
doc_class annex = "text_section" +
|
||||
main_author :: "author option" <= None
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
find_consts name:"scholarly_paper.*Leeee"
|
||||
|
||||
(*
|
||||
datatype sc_dom = math | info | natsc | eng
|
||||
*)
|
||||
|
||||
|
||||
doc_class introduction = text_section +
|
||||
comment :: string
|
||||
claims :: "thm list"
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
text\<open>Technical text-elements posses a status: they can be either an \<^emph>\<open>informal explanation\<close> /
|
||||
description or a kind of introductory text to definition etc. or a \<^emph>\<open>formal statement\<close> similar
|
||||
to :
|
||||
|
||||
\<^bold>\<open>Definition\<close> 3.1: Security.
|
||||
As Security of the system we define etc...
|
||||
|
||||
A formal statement can, but must not have a reference to true formal Isabelle/Isar definition.
|
||||
\<close>
|
||||
|
||||
doc_class background = text_section +
|
||||
comment :: string
|
||||
claims :: "thm list"
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
|
||||
section\<open>Technical Content and Free-form Semi-formal Formats\<close>
|
||||
|
||||
datatype status = formal | semiformal | description
|
||||
|
||||
text\<open>The class \<^verbatim>\<open>technical\<close> regroups a number of text-elements that contain typical
|
||||
"technical content" in mathematical or engineering papers: definitions, theorems, lemmas, examples. \<close>
|
||||
|
||||
(* OPEN PROBLEM: connection between referentiable and status. This should be explicit
|
||||
and computable. *)
|
||||
|
||||
|
||||
doc_class technical = text_section +
|
||||
definition_list :: "string list" <= "[]"
|
||||
status :: status <= "description"
|
||||
formal_results :: "thm list"
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
type_synonym tc = technical (* technical content *)
|
||||
|
||||
|
||||
text \<open>This a \<open>doc_class\<close> of \<^verbatim>\<open>examples\<close> in the broadest possible sense : they are \emph{not}
|
||||
necessarily considered as technical content, but may occur in an article.
|
||||
Note that there are \<open>doc_class\<close>es of \<^verbatim>\<open>math_example\<close>s and \<^verbatim>\<open>tech_example\<close>s which
|
||||
follow a more specific regime of mathematical or engineering content.
|
||||
\<close>
|
||||
(* An example for the need of multiple inheritance on classes ? *)
|
||||
|
||||
doc_class example = text_section +
|
||||
referentiable :: bool <= True
|
||||
status :: status <= "description"
|
||||
short_name :: string <= "''''"
|
||||
invariant level :: "(level \<sigma>) \<noteq> None \<and> the (level \<sigma>) > 0"
|
||||
|
||||
text\<open>The intended use for the \<open>doc_class\<close>es \<^verbatim>\<open>math_motivation\<close> (or \<^verbatim>\<open>math_mtv\<close> for short),
|
||||
\<^verbatim>\<open>math_explanation\<close> (or \<^verbatim>\<open>math_exp\<close> for short) and
|
||||
\<^verbatim>\<open>math_example\<close> (or \<^verbatim>\<open>math_ex\<close> for short)
|
||||
are \<^emph>\<open>informal\<close> descriptions of semi-formal definitions (by inheritance).
|
||||
Math-Examples can be made referentiable triggering explicit, numbered presentations.\<close>
|
||||
doc_class math_motivation = technical +
|
||||
referentiable :: bool <= False
|
||||
type_synonym math_mtv = math_motivation
|
||||
|
||||
doc_class math_explanation = technical +
|
||||
referentiable :: bool <= False
|
||||
type_synonym math_exp = math_explanation
|
||||
|
||||
|
||||
subsection\<open>Freeform Mathematical Content\<close>
|
||||
|
||||
text\<open>We follow in our enumeration referentiable mathematical content class the AMS style and its
|
||||
provided \<^emph>\<open>theorem environments\<close> (see \<^verbatim>\<open>texdoc amslatex\<close>). We add, however, the concepts
|
||||
\<^verbatim>\<open>axiom\<close>, \<^verbatim>\<open>rule\<close> and \<^verbatim>\<open>assertion\<close> to the list. A particular advantage of \<^verbatim>\<open>texdoc amslatex\<close> is
|
||||
that it is well-established and compatible with many LaTeX - styles.
|
||||
|
||||
The names for thhe following key's are deliberate non-standard abbreviations in order to avoid
|
||||
future name clashes.\<close>
|
||||
|
||||
datatype math_content_class =
|
||||
"defn" \<comment>\<open>definition\<close>
|
||||
| "axm" \<comment>\<open>axiom\<close>
|
||||
| "theom" \<comment>\<open>theorem\<close>
|
||||
| "lemm" \<comment>\<open>lemma\<close>
|
||||
| "corr" \<comment>\<open>corollary\<close>
|
||||
| "prpo" \<comment>\<open>proposition\<close>
|
||||
| "rulE" \<comment>\<open>rule\<close>
|
||||
| "assn" \<comment>\<open>assertion\<close>
|
||||
| "hypt" \<comment>\<open>hypothesis\<close>
|
||||
| "assm" \<comment>\<open>assumption\<close>
|
||||
| "prms" \<comment>\<open>premise\<close>
|
||||
| "cons" \<comment>\<open>consequence\<close>
|
||||
| "conc_stmt" \<comment>\<open>math. conclusion\<close>
|
||||
| "prf_stmt" \<comment>\<open>math. proof\<close>
|
||||
| "expl_stmt" \<comment>\<open>math. example\<close>
|
||||
| "rmrk" \<comment>\<open>remark\<close>
|
||||
| "notn" \<comment>\<open>notation\<close>
|
||||
| "tmgy" \<comment>\<open>terminology\<close>
|
||||
|
||||
text\<open>Instances of the \<open>doc_class\<close> \<^verbatim>\<open>math_content\<close> are by definition @{term "semiformal"}; they may
|
||||
be non-referential, but in this case they will not have a @{term "short_name"}.\<close>
|
||||
|
||||
doc_class math_content = technical +
|
||||
referentiable :: bool <= False
|
||||
short_name :: string <= "''''"
|
||||
status :: status <= "semiformal"
|
||||
mcc :: "math_content_class" <= "theom"
|
||||
invariant s1 :: "\<not>referentiable \<sigma> \<longrightarrow> short_name \<sigma> = ''''"
|
||||
invariant s2 :: "technical.status \<sigma> = semiformal"
|
||||
type_synonym math_tc = math_content
|
||||
|
||||
text\<open>The class \<^typ>\<open>math_content\<close> is perhaps more adequaltely described as "math-alike content".
|
||||
Sub-classes can englobe instances such as:
|
||||
\<^item> terminological definitions such as:
|
||||
\<open>Definition*[assessor::sfc, short_name="''assessor''"]\<open>entity that carries out an assessment\<close>\<close>
|
||||
\<^item> free-form mathematical definitions such as:
|
||||
\<open>Definition*[process_ordering, short_name="''process ordering''"]\<open>
|
||||
We define \<open>P \<sqsubseteq> Q \<equiv> \<psi>\<^sub>\<D> \<and> \<psi>\<^sub>\<R> \<and> \<psi>\<^sub>\<M> \<close>, where \<^vs>\<open>-0.2cm\<close>
|
||||
1) \<^vs>\<open>-0.2cm\<close> \<open>\<psi>\<^sub>\<D> = \<D> P \<supseteq> \<D> Q \<close>
|
||||
2) ...
|
||||
\<close>\<close>
|
||||
\<^item> semi-formal descriptions, which are free-form mathematical definitions on which finally
|
||||
an attribute with a formal Isabelle definition is attached. \<close>
|
||||
|
||||
|
||||
text\<open>Instances of the Free-form Content are Definition, Lemma, Assumption, Hypothesis, etc.
|
||||
The key class definitions are inspired by the AMS style, to which some target LaTeX's compile.\<close>
|
||||
|
||||
text\<open>A proposition (or: "sentence") is a central concept in philosophy of language and related
|
||||
fields, often characterized as the primary bearer of truth or falsity. Propositions are also often
|
||||
characterized as being the kind of thing that declarative sentences denote. \<close>
|
||||
|
||||
doc_class "proposition" = math_content +
|
||||
referentiable :: bool <= True
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "prpo"
|
||||
invariant d :: "mcc \<sigma> = prpo" (* can not be changed anymore. *)
|
||||
|
||||
text\<open>A definition is used to give a precise meaning to a new term, by describing a
|
||||
condition which unambiguously qualifies what a mathematical term is and is not. Definitions and
|
||||
axioms form the basis on which all of modern mathematics is to be constructed.\<close>
|
||||
doc_class "definition" = math_content +
|
||||
referentiable :: bool <= True
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "defn"
|
||||
invariant d :: "mcc \<sigma> = defn" (* can not be changed anymore. *)
|
||||
|
||||
doc_class "axiom" = math_content +
|
||||
referentiable :: bool <= True
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "axm"
|
||||
invariant d :: "mcc \<sigma> = axm" (* can not be changed anymore. *)
|
||||
|
||||
text\<open>A lemma (plural lemmas or lemmata) is a generally minor, proven proposition which is used as
|
||||
a stepping stone to a larger result. For that reason, it is also known as a "helping theorem" or an
|
||||
"auxiliary theorem". In many cases, a lemma derives its importance from the theorem it aims to prove.\<close>
|
||||
doc_class "lemma" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "lemm"
|
||||
invariant d :: "mcc \<sigma> = lemm"
|
||||
|
||||
doc_class "theorem" = math_content +
|
||||
referentiable :: bool <= True
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "theom"
|
||||
invariant d :: "mcc \<sigma> = theom"
|
||||
|
||||
text\<open>A corollary is a theorem of less importance which can be readily deduced from a previous,
|
||||
more notable lemma or theorem. A corollary could, for instance, be a proposition which is incidentally
|
||||
proved while proving another proposition.\<close>
|
||||
doc_class "corollary" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "corr"
|
||||
invariant d :: "mcc \<sigma> = corr"
|
||||
|
||||
|
||||
text\<open>A premise (or premiss) is a proposition — a true or false declarative statement —
|
||||
used in an argument to prove the truth of another proposition called the conclusion.\<close>
|
||||
doc_class "premise" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "prms"
|
||||
invariant d :: "mcc \<sigma> = prms"
|
||||
|
||||
text\<open>A consequence describes the relationship between statements that hold true when one statement
|
||||
logically follows from one or more statements. A valid logical argument is one in which the
|
||||
conclusion is entailed by the premises, because the conclusion is the consequence of the premises.
|
||||
The philosophical analysis of logical consequence involves the questions: In what sense does a
|
||||
conclusion follow from its premises?\<close>
|
||||
doc_class "consequence" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "cons"
|
||||
invariant d :: "mcc \<sigma> = cons"
|
||||
|
||||
doc_class "conclusion_stmt" = math_content + \<comment> \<open>not to confuse with a section element: Conclusion\<close>
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "conc_stmt"
|
||||
invariant d :: "mcc \<sigma> = conc_stmt"
|
||||
|
||||
text\<open>An assertion is a statement that asserts that a certain premise is true.\<close>
|
||||
doc_class "assertion" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "assn"
|
||||
invariant d :: "mcc \<sigma> = assn"
|
||||
|
||||
text\<open>An assumption is an explicit or a tacit proposition about the world or a background belief
|
||||
relating to a proposition.\<close>
|
||||
doc_class "assumption" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "assm"
|
||||
invariant d :: "mcc \<sigma> = assm"
|
||||
|
||||
text\<open> A working hypothesis is a provisionally accepted hypothesis proposed for further research
|
||||
in a process beginning with an educated guess or thought.
|
||||
|
||||
A different meaning of the term hypothesis is used in formal logic, to denote the antecedent of a
|
||||
proposition; thus in the proposition "If \<open>P\<close>, then \<open>Q\<close>", \<open>P\<close> denotes the hypothesis (or antecedent);
|
||||
\<open>Q\<close> can be called a consequent. \<open>P\<close> is the assumption in a (possibly counterfactual) What If question.\<close>
|
||||
doc_class "hypothesis" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "hypt"
|
||||
invariant d :: "mcc \<sigma> = hypt"
|
||||
|
||||
doc_class "math_proof" = math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "prf_stmt"
|
||||
invariant d :: "mcc \<sigma> = prf_stmt"
|
||||
|
||||
doc_class "math_example"= math_content +
|
||||
referentiable :: bool <= "True"
|
||||
level :: "int option" <= "Some 2"
|
||||
mcc :: "math_content_class" <= "expl_stmt"
|
||||
invariant d :: "mcc \<sigma> = expl_stmt"
|
||||
|
||||
|
||||
|
||||
subsection\<open>Support of Command Macros \<^verbatim>\<open>Definition*\<close>, \<^verbatim>\<open>Lemma*\<close>, \<^verbatim>\<open>Theorem*\<close> ... \<close>
|
||||
|
||||
text\<open>These ontological macros allow notations to be defined for the class
|
||||
\<^typ>\<open>math_content\<close> in order to allow for a variety of free-form formats;
|
||||
in order to provide specific sub-classes, default options can be set
|
||||
in order to support more succinct notations and avoid constructs
|
||||
such as :
|
||||
|
||||
\<^theory_text>\<open>Definition*[l::"definition"]\<open>...\<close>\<close>.
|
||||
|
||||
Instead, the more convenient global declaration
|
||||
\<^theory_text>\<open>declare[[Definition_default_class="definition"]]\<close>
|
||||
supports subsequent abbreviations:
|
||||
|
||||
\<^theory_text>\<open>Definition*[l]\<open>...\<close>\<close>.
|
||||
|
||||
Via the default classes, it is also possible to specify the precise concept
|
||||
that are not necessarily in the same inheritance hierarchy: for example:
|
||||
\<^item> mathematical definition vs terminological setting
|
||||
\<^item> mathematical example vs. technical example.
|
||||
\<^item> mathematical proof vs. some structured argument
|
||||
\<^item> mathematical hypothesis vs. philosophical/metaphysical assumption
|
||||
\<^item> ...
|
||||
|
||||
By setting the default classes, it is possible to reuse this syntactic support and give it
|
||||
different interpretations in an underlying ontological class-tree.
|
||||
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
val (Proposition_default_class, Proposition_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Proposition_default_class\<close> (K "");
|
||||
val (Definition_default_class, Definition_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Definition_default_class\<close> (K "");
|
||||
val (Axiom_default_class, Axiom_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Axiom_default_class\<close> (K "");
|
||||
val (Lemma_default_class, Lemma_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Lemma_default_class\<close> (K "");
|
||||
val (Theorem_default_class, Theorem_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Theorem_default_class\<close> (K "");
|
||||
val (Corollary_default_class, Corollary_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Corollary_default_class\<close> (K "");
|
||||
val (Assumption_default_class, Assumption_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Assumption_default_class\<close> (K "");
|
||||
val (Assertion_default_class, Assertion_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Assertion_default_class\<close> (K "");
|
||||
val (Consequence_default_class, Consequence_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Consequence_default_class\<close> (K "");
|
||||
val (Conclusion_default_class, Conclusion_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Conclusion_default_class\<close> (K "");
|
||||
val (Premise_default_class, Premise_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Premise_default_class\<close> (K "");
|
||||
val (Hypothesis_default_class, Hypothesis_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Hypothesis_default_class\<close> (K "");
|
||||
val (Proof_default_class, Proof_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Proof_default_class\<close> (K "");
|
||||
val (Example_default_class, Example_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Example_default_class\<close> (K "");
|
||||
val (Remark_default_class, Remark_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Remark_default_class\<close> (K "");
|
||||
val (Notation_default_class, Notation_default_class_setup)
|
||||
= Attrib.config_string \<^binding>\<open>Notation_default_class\<close> (K "");
|
||||
|
||||
\<close>
|
||||
|
||||
setup\<open> Proposition_default_class_setup
|
||||
#> Definition_default_class_setup
|
||||
#> Axiom_default_class_setup
|
||||
#> Lemma_default_class_setup
|
||||
#> Theorem_default_class_setup
|
||||
#> Corollary_default_class_setup
|
||||
#> Assertion_default_class_setup
|
||||
#> Assumption_default_class_setup
|
||||
#> Premise_default_class_setup
|
||||
#> Hypothesis_default_class_setup
|
||||
#> Consequence_default_class_setup
|
||||
#> Conclusion_default_class_setup
|
||||
#> Proof_default_class_setup
|
||||
#> Remark_default_class_setup
|
||||
#> Example_default_class_setup\<close>
|
||||
|
||||
ML\<open>
|
||||
local open ODL_Meta_Args_Parser in
|
||||
|
||||
local
|
||||
fun doc_cmd kwd txt flag key =
|
||||
Monitor_Command_Parser.document_command kwd txt {markdown = true, body = true}
|
||||
(fn meta_args => fn thy =>
|
||||
let
|
||||
val ddc = Config.get_global thy flag
|
||||
val default = SOME(((ddc = "") ? (K \<^const_name>\<open>math_content\<close>)) ddc)
|
||||
in
|
||||
Onto_Macros.enriched_formal_statement_command default [("mcc",key)] meta_args thy
|
||||
end)
|
||||
[\<^const_name>\<open>mcc\<close>]
|
||||
(Onto_Macros.transform_attr [(\<^const_name>\<open>mcc\<close>,key)])
|
||||
|
||||
in
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Definition*\<close> "Freeform Definition"
|
||||
Definition_default_class \<^const_name>\<open>defn\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Lemma*\<close> "Freeform Lemma Description"
|
||||
Lemma_default_class \<^const_name>\<open>lemm\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Theorem*\<close> "Freeform Theorem"
|
||||
Theorem_default_class \<^const_name>\<open>theom\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Proposition*\<close> "Freeform Proposition"
|
||||
Proposition_default_class \<^const_name>\<open>prpo\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Premise*\<close> "Freeform Premise"
|
||||
Premise_default_class \<^const_name>\<open>prms\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Corollary*\<close> "Freeform Corollary"
|
||||
Corollary_default_class \<^const_name>\<open>corr\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Consequence*\<close> "Freeform Consequence"
|
||||
Consequence_default_class \<^const_name>\<open>cons\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Conclusion*\<close> "Freeform Conclusion"
|
||||
Conclusion_default_class \<^const_name>\<open>conc_stmt\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Assumption*\<close> "Freeform Assumption"
|
||||
Assumption_default_class \<^const_name>\<open>assm\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Hypothesis*\<close> "Freeform Hypothesis"
|
||||
Hypothesis_default_class \<^const_name>\<open>prpo\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Assertion*\<close> "Freeform Assertion"
|
||||
Assertion_default_class \<^const_name>\<open>assn\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Proof*\<close> "Freeform Proof"
|
||||
Proof_default_class \<^const_name>\<open>prf_stmt\<close>;
|
||||
|
||||
val _ = doc_cmd \<^command_keyword>\<open>Example*\<close> "Freeform Example"
|
||||
Example_default_class \<^const_name>\<open>expl_stmt\<close>;
|
||||
end
|
||||
end
|
||||
\<close>
|
||||
|
||||
|
||||
subsection\<open>Formal Mathematical Content\<close>
|
||||
text\<open>While this library is intended to give a lot of space to freeform text elements in
|
||||
order to counterbalance Isabelle's standard view, it should not be forgotten that the real strength
|
||||
of Isabelle is its ability to handle both - and to establish links between both worlds. Therefore:\<close>
|
||||
|
||||
doc_class math_formal = math_content +
|
||||
referentiable :: bool <= False
|
||||
status :: status <= "formal"
|
||||
properties :: "term list"
|
||||
type_synonym math_fc = math_formal
|
||||
|
||||
|
||||
|
||||
subsubsection*[ex_ass::example]\<open>Logical Assertions\<close>
|
||||
|
||||
text\<open>Logical assertions allow for logical statements to be checked in the global context). \<close>
|
||||
|
||||
assert*[ass1::assertion, short_name = "\<open>This is an assertion\<close>"] \<open>(3::int) < 4\<close>
|
||||
|
||||
|
||||
subsection\<open>Content in Engineering/Tech Papers \<close>
|
||||
text\<open>This section is currently experimental and not supported by the documentation
|
||||
generation backend.\<close>
|
||||
|
||||
doc_class engineering_content = technical +
|
||||
short_name :: string <= "''''"
|
||||
status :: status
|
||||
type_synonym eng_content = engineering_content
|
||||
|
||||
|
||||
doc_class "experiment" = engineering_content +
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
doc_class "evaluation" = engineering_content +
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
doc_class "data" = engineering_content +
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
doc_class tech_definition = engineering_content +
|
||||
referentiable :: bool <= True
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
doc_class tech_code = engineering_content +
|
||||
referentiable :: bool <= True
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
doc_class tech_example = engineering_content +
|
||||
referentiable :: bool <= True
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
doc_class eng_example = engineering_content +
|
||||
referentiable :: bool <= True
|
||||
tag :: "string" <= "''''"
|
||||
|
||||
|
||||
subsection\<open>Some Summary\<close>
|
||||
|
||||
print_doc_classes
|
||||
|
||||
print_doc_class_template "definition" (* just a sample *)
|
||||
print_doc_class_template "lemma" (* just a sample *)
|
||||
print_doc_class_template "theorem" (* just a sample *)
|
||||
print_doc_class_template "premise" (* just a sample *)
|
||||
|
||||
|
||||
subsection\<open>Structuring Enforcement in Engineering/Math Papers \<close>
|
||||
(* todo : could be finer *)
|
||||
|
||||
text\<open> Besides subtyping, there is another relation between
|
||||
doc\_classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
|
||||
which is expressed by occurrence in the where clause.
|
||||
While sub-classing refers to data-inheritance of attributes,
|
||||
a monitor captures structural constraints -- the order --
|
||||
in which instances of monitored classes may occur.
|
||||
|
||||
The control of monitors is done by the commands:
|
||||
\<^item> \<^verbatim>\<open> monitor <oid::class_type, <attributes-defs> > \<close>
|
||||
\<^item> \<^verbatim>\<open> close_monitor <oid[::class_type],<attributes-updates>> \<close>
|
||||
|
||||
where the automaton of the monitor class is expected
|
||||
to be in a final state.
|
||||
|
||||
Monitors can be nested.
|
||||
|
||||
Classes neither directly nor indirectly (via inheritance)
|
||||
mentioned in the monitor clause are \<^emph>\<open>independent\<close> from
|
||||
the monitor and may occur freely, \ie{} in arbitrary order. \<close>
|
||||
|
||||
|
||||
text \<open>underlying idea: a monitor class automatically receives a
|
||||
\<^verbatim>\<open>trace\<close> attribute in which a list of observed class-ids is maintained.
|
||||
The \<^verbatim>\<open>trace\<close> is a \<^emph>\<open>`predefined id`\<close> like \<^verbatim>\<open>main\<close> in C. It can be accessed
|
||||
like any other attribute of a class instance, \ie{} a document item.\<close>
|
||||
|
||||
doc_class article =
|
||||
style_id :: string <= "''LNCS''"
|
||||
version :: "(int \<times> int \<times> int)" <= "(0,0,0)"
|
||||
accepts "(title ~~
|
||||
\<lbrakk>subtitle\<rbrakk> ~~
|
||||
\<lbrace>author\<rbrace>\<^sup>+ ~~
|
||||
abstract ~~
|
||||
\<lbrace>introduction\<rbrace>\<^sup>+ ~~
|
||||
\<lbrace>background\<rbrace>\<^sup>* ~~
|
||||
\<lbrace>technical || example || float \<rbrace>\<^sup>+ ~~
|
||||
\<lbrace>conclusion\<rbrace>\<^sup>+ ~~
|
||||
bibliography ~~
|
||||
\<lbrace>annex\<rbrace>\<^sup>* )"
|
||||
|
||||
|
||||
ML\<open>
|
||||
structure Scholarly_paper_trace_invariant =
|
||||
struct
|
||||
local
|
||||
|
||||
fun group _ _ _ [] = []
|
||||
|group f g cidS (a::S) = case find_first (f a) cidS of
|
||||
NONE => [a] :: group f g cidS S
|
||||
| SOME cid => let val (pref,suff) = chop_prefix (g cid) S
|
||||
in (a::pref)::(group f g cidS suff) end;
|
||||
|
||||
fun partition ctxt cidS trace =
|
||||
let fun find_lead (x,_) = DOF_core.is_subclass ctxt x;
|
||||
fun find_cont cid (cid',_) = DOF_core.is_subclass ctxt cid' cid
|
||||
in group find_lead find_cont cidS trace end;
|
||||
|
||||
fun dest_option _ (Const (@{const_name "None"}, _)) = NONE
|
||||
| dest_option f (Const (@{const_name "Some"}, _) $ t) = SOME (f t)
|
||||
|
||||
in
|
||||
|
||||
fun check ctxt cidS mon_id pos_opt =
|
||||
let val trace = AttributeAccess.compute_trace_ML ctxt mon_id pos_opt @{here}
|
||||
val groups = partition (Context.proof_of ctxt) cidS trace
|
||||
fun get_level_raw oid = ISA_core.compute_attr_access ctxt "level" oid NONE @{here};
|
||||
fun get_level oid = dest_option (snd o HOLogic.dest_number) (get_level_raw (oid));
|
||||
fun check_level_hd a = case (get_level (snd a)) of
|
||||
NONE => error("Invariant violation: leading section " ^ snd a ^
|
||||
" must have lowest level")
|
||||
| SOME X => X
|
||||
fun check_group_elem level_hd a = case (get_level (snd a)) of
|
||||
NONE => true
|
||||
| SOME y => if level_hd <= y then true
|
||||
(* or < ? But this is too strong ... *)
|
||||
else error("Invariant violation: "^
|
||||
"subsequent section " ^ snd a ^
|
||||
" must have higher level.");
|
||||
fun check_group [] = true
|
||||
|check_group [_] = true
|
||||
|check_group (a::S) = forall (check_group_elem (check_level_hd a)) (S)
|
||||
in if forall check_group groups then ()
|
||||
else error"Invariant violation: leading section must have lowest level"
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
\<close>
|
||||
|
||||
setup\<open>
|
||||
(fn thy =>
|
||||
let val cidS = ["scholarly_paper.introduction","scholarly_paper.technical",
|
||||
"scholarly_paper.example", "scholarly_paper.conclusion"];
|
||||
fun body moni_oid _ ctxt = (Scholarly_paper_trace_invariant.check ctxt cidS moni_oid NONE; true)
|
||||
val ctxt = Proof_Context.init_global thy
|
||||
val cid = "article"
|
||||
val binding = DOF_core.binding_from_onto_class_pos cid thy
|
||||
val cid_long = DOF_core.get_onto_class_name_global cid thy
|
||||
in DOF_core.add_ml_invariant binding (DOF_core.make_ml_invariant (body, cid_long)) thy end)
|
||||
\<close>
|
||||
|
||||
term\<open>float\<close>
|
||||
section\<open>Miscellaneous\<close>
|
||||
|
||||
subsection\<open>Common Abbreviations\<close>
|
||||
|
||||
define_shortcut* eg \<rightleftharpoons> \<open>\eg\<close> (* Latin: „exempli gratia“ meaning „for example“. *)
|
||||
ie \<rightleftharpoons> \<open>\ie\<close> (* Latin: „id est“ meaning „that is to say“. *)
|
||||
etc \<rightleftharpoons> \<open>\etc\<close> (* Latin : „et cetera“ meaning „et cetera“ *)
|
||||
|
||||
print_doc_classes
|
||||
|
||||
end
|
||||
|
||||
@ -1,53 +0,0 @@
|
||||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018 The University of Paris-Saclay
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{DOF-technical_report}
|
||||
[00/00/0000 Document-Type Support Framework for Isabelle (LNCS).]
|
||||
|
||||
\RequirePackage[force]{DOF-scholarly_paper}
|
||||
\RequirePackage{ifthen}
|
||||
|
||||
\@ifclassloaded{scrreprt}%
|
||||
{%
|
||||
\newcommand{\institute}[1]{}%
|
||||
\newcommand{\inst}[1]{}%
|
||||
\newcommand{\orcidID}[1]{}%
|
||||
\newcommand{\email}[1]{}%
|
||||
}{%
|
||||
{\PackageError{DOF-technical_report}{Technical Report only supports scrreprt as document class.}{}\stop}%
|
||||
}
|
||||
|
||||
% Code-Setups : or RequirePackage ?
|
||||
% \usepackage{xcolor}
|
||||
% \usepackage{lstisadof-manual}
|
||||
|
||||
\newisadof{text.technical_report.SML}%
|
||||
[ label=,type=%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTlevel=%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTreferentiable=%
|
||||
, IsaUNDERSCORECOLDOTtextUNDERSCOREelementDOTvariants=%
|
||||
, scholarlyUNDERSCOREpaperDOTtextUNDERSCOREsectionDOTmainUNDERSCOREauthor=%
|
||||
, scholarlyUNDERSCOREpaperDOTtextUNDERSCOREsectionDOTfixmeUNDERSCORElist=%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTdefinitionUNDERSCORElist=%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTstatus=%
|
||||
, scholarlyUNDERSCOREpaperDOTtechnicalDOTformalUNDERSCOREresults=%
|
||||
, technicalUNDERSCOREreportDOTcodeDOTchecked=%
|
||||
, technicalUNDERSCOREreportDOTcodeDOTcaption=%
|
||||
]
|
||||
[1]
|
||||
{%
|
||||
\begin{sml}%
|
||||
#1
|
||||
\end{sml}%
|
||||
}
|
||||
@ -1,208 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
section\<open>An example ontology for a scholarly paper\<close>
|
||||
|
||||
theory technical_report
|
||||
imports "Isabelle_DOF.scholarly_paper"
|
||||
begin
|
||||
|
||||
define_ontology "DOF-technical_report.sty" "Writing technical reports."
|
||||
|
||||
(* for reports paper: invariant: level \<ge> -1 *)
|
||||
|
||||
section\<open>More Global Text Elements for Reports\<close>
|
||||
|
||||
doc_class table_of_contents =
|
||||
bookmark_depth :: int <= 3
|
||||
depth :: int <= 3
|
||||
|
||||
doc_class front_matter =
|
||||
front_matter_style :: string (* TODO Achim :::: *)
|
||||
|
||||
doc_class index =
|
||||
kind :: "doc_class"
|
||||
level :: "int option"
|
||||
|
||||
section\<open>Code Statement Elements\<close>
|
||||
|
||||
doc_class "code" = technical +
|
||||
checked :: bool <= "False"
|
||||
caption :: "string" <= "''''"
|
||||
|
||||
typ code
|
||||
|
||||
text\<open>The \<^doc_class>\<open>code\<close> is a general stub for free-form and type-checked code-fragments such as:
|
||||
\<^enum> SML code
|
||||
\<^enum> bash code
|
||||
\<^enum> isar code (although this might be an unwanted concurrence
|
||||
to the Isabelle standard cartouche)
|
||||
\<^enum> C code.
|
||||
|
||||
It is intended that this "stub" be refined later, as done in \<^verbatim>\<open>Isabelle_C\<close>, which comes with its
|
||||
own content checking and presentation styles.
|
||||
\<close>
|
||||
|
||||
doc_class "SML" = code +
|
||||
checked :: bool <= "False"
|
||||
|
||||
doc_class "ISAR" = code +
|
||||
checked :: bool <= "False"
|
||||
|
||||
doc_class "LATEX" = code +
|
||||
checked :: bool <= "False"
|
||||
|
||||
print_doc_class_template "SML" (* just a sample *)
|
||||
|
||||
|
||||
doc_class report =
|
||||
style_id :: string <= "''LNCS''"
|
||||
version :: "(int \<times> int \<times> int)" <= "(0,0,0)"
|
||||
accepts "(title ~~
|
||||
\<lbrakk>subtitle\<rbrakk> ~~
|
||||
\<lbrace>author\<rbrace>\<^sup>+ ~~
|
||||
\<lbrakk>front_matter\<rbrakk> ~~
|
||||
abstract ~~
|
||||
\<lbrakk>table_of_contents\<rbrakk> ~~
|
||||
\<lbrace>introduction\<rbrace>\<^sup>+ ~~
|
||||
\<lbrace>background\<rbrace>\<^sup>* ~~
|
||||
\<lbrace>technical || example || float \<rbrace>\<^sup>+ ~~
|
||||
\<lbrace>conclusion\<rbrace>\<^sup>+ ~~
|
||||
bibliography ~~
|
||||
\<lbrakk>index\<rbrakk> ~~ \<lbrace>annex\<rbrace>\<^sup>* )"
|
||||
|
||||
|
||||
section\<open>Experimental\<close>
|
||||
|
||||
(* switch on regexp syntax *)
|
||||
notation Star ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
|
||||
notation Plus (infixr "||" 55)
|
||||
notation Times (infixr "~~" 60)
|
||||
notation Atom ("\<lfloor>_\<rfloor>" 65)
|
||||
|
||||
|
||||
|
||||
text\<open>Not a terribly deep theorem, but an interesting property of consistency between
|
||||
ontologies - so, a lemma that shouldn't break if the involved ontologies were changed.
|
||||
It reads as follows:
|
||||
"The structural language of articles should be included in the structural language of
|
||||
reports, that is to say, reports should just have a richer structure than ordinary papers." \<close>
|
||||
|
||||
theorem articles_sub_reports: \<open>(Lang article_monitor) \<subseteq> Lang report_monitor\<close>
|
||||
unfolding article_monitor_def report_monitor_def
|
||||
apply(rule regexp_seq_mono[OF subset_refl] | rule seq_cancel_opt | rule subset_refl)+
|
||||
done
|
||||
|
||||
text\<open>The proof proceeds by blindly applying the monotonicity rules
|
||||
on the language of regular expressions.\<close>
|
||||
|
||||
text\<open>All Class-Id's --- should be generated.\<close>
|
||||
|
||||
lemmas class_ids =
|
||||
SML_def code_def annex_def title_def figure_def chapter_def article_def theorem_def
|
||||
paragraph_def tech_code_def assumption_def definition_def hypothesis_def
|
||||
eng_example_def text_element_def math_content_def tech_example_def subsubsection_def
|
||||
engineering_content_def data_def float_def axiom_def LATEX_def author_def listing_def
|
||||
abstract_def assertion_def technical_def background_def evaluation_def math_proof_def
|
||||
math_formal_def bibliography_def math_example_def text_section_def conclusion_stmt_def
|
||||
math_explanation_def ISAR_def frame_def lemma_def index_def report_def section_def
|
||||
subtitle_def corollary_def subsection_def conclusion_def experiment_def consequence_def
|
||||
proposition_def introduction_def related_work_def front_matter_def math_motivation_def
|
||||
example_def table_of_contents_def tech_definition_def premise_def
|
||||
|
||||
|
||||
|
||||
|
||||
definition allClasses
|
||||
where \<open>allClasses \<equiv>
|
||||
{SML, code, annex, title,figure,chapter, article, theorem, paragraph,
|
||||
tech_code, assumption, definition, hypothesis, eng_example, text_element,
|
||||
math_content,tech_example, subsubsection,tech_definition,
|
||||
engineering_content,data,float,axiom,LATEX,author,listing, example,abstract,
|
||||
assertion,technical,background,evaluation,math_proof,math_formal,bibliography,
|
||||
math_example,text_section,conclusion_stmt,math_explanation,ISAR,frame,
|
||||
lemma,index,report,section,premise,subtitle,corollary,subsection,conclusion,
|
||||
experiment, consequence,proposition,introduction,related_work,front_matter,
|
||||
math_motivation,table_of_contents}\<close>
|
||||
|
||||
text\<open>A rudimentary fragment of the class hierarchy re-modeled on classid's :\<close>
|
||||
|
||||
|
||||
definition cid_of where \<open>cid_of = inv Regular_Exp.Atom\<close>
|
||||
|
||||
lemma Atom_inverse[simp]:\<open>cid_of (Regular_Exp.Atom a) = a\<close>
|
||||
unfolding cid_of_def by (meson UNIV_I f_inv_into_f image_eqI rexp.inject(1))
|
||||
|
||||
|
||||
|
||||
definition doc_class_rel
|
||||
where \<open>doc_class_rel \<equiv> {(cid_of proposition,cid_of math_content),
|
||||
(cid_of listing,cid_of float),
|
||||
(cid_of figure,cid_of float)} \<close>
|
||||
|
||||
instantiation "doc_class" :: ord
|
||||
begin
|
||||
|
||||
definition
|
||||
less_eq_doc_class: "x \<le> y \<longleftrightarrow> (x,y) \<in> doc_class_rel\<^sup>*"
|
||||
|
||||
definition
|
||||
less_doc_class: "(x::doc_class) < y \<longleftrightarrow> (x \<le> y \<and> \<not> y \<le> x)"
|
||||
|
||||
instance ..
|
||||
|
||||
end
|
||||
|
||||
lemma drc_acyclic : "acyclic doc_class_rel"
|
||||
proof -
|
||||
let ?measure = "(\<lambda>x.3::int)(cid_of float := 0, cid_of math_content := 0,
|
||||
cid_of listing := 1, cid_of figure := 1, cid_of proposition := 1)"
|
||||
show ?thesis
|
||||
unfolding doc_class_rel_def
|
||||
apply(rule acyclicI_order [where f = "?measure"])
|
||||
by(simp only: class_ids)(auto)
|
||||
qed
|
||||
|
||||
|
||||
instantiation "doc_class" :: order
|
||||
begin
|
||||
instance
|
||||
proof
|
||||
fix x::"doc_class"
|
||||
show \<open>x \<le> x\<close>
|
||||
unfolding less_eq_doc_class by simp
|
||||
next
|
||||
fix x y z:: "doc_class"
|
||||
show \<open>x \<le> y \<Longrightarrow> y \<le> z \<Longrightarrow> x \<le> z\<close>
|
||||
unfolding less_eq_doc_class
|
||||
by force
|
||||
next
|
||||
fix x y::"doc_class"
|
||||
have * : "antisym (doc_class_rel\<^sup>*)"
|
||||
by (simp add: acyclic_impl_antisym_rtrancl drc_acyclic)
|
||||
show \<open>x \<le> y \<Longrightarrow> y \<le> x \<Longrightarrow> x = y\<close>
|
||||
apply(insert antisymD[OF *])
|
||||
using less_eq_doc_class by auto
|
||||
next
|
||||
fix x y::"doc_class"
|
||||
show \<open>(x < y) = (x \<le> y \<and> \<not> y \<le> x)\<close>
|
||||
by(simp add: less_doc_class)
|
||||
qed
|
||||
end
|
||||
|
||||
theorem articles_Lsub_reports: \<open>(L\<^sub>s\<^sub>u\<^sub>b article_monitor) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b report_monitor\<close>
|
||||
unfolding article_monitor_def report_monitor_def
|
||||
by (meson order_refl regexp_seq_mono' seq_cancel_opt')
|
||||
|
||||
|
||||
end
|
||||
@ -1,121 +0,0 @@
|
||||
/*
|
||||
* Copyright (c)
|
||||
* 2021-2022 The University of Exeter.
|
||||
* 2021-2022 The University of Paris-Saclay.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in
|
||||
* the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
|
||||
/*** constants and parameters for Isabelle/DOF ***/
|
||||
|
||||
package isabelle.dof
|
||||
|
||||
import isabelle._
|
||||
|
||||
|
||||
object DOF {
|
||||
/** parameters **/
|
||||
|
||||
val isabelle_version = ""
|
||||
val isabelle_url = "https://isabelle.sketis.net/devel/release_snapshot/"
|
||||
|
||||
val afp_version = "afp-devel"
|
||||
|
||||
// Isabelle/DOF version: "Unreleased" for development, semantic version for releases
|
||||
val version = "Unreleased"
|
||||
|
||||
val session = "Isabelle_DOF"
|
||||
val session_ontologies = "Isabelle_DOF-Ontologies"
|
||||
|
||||
val latest_version = "1.3.0"
|
||||
val latest_isabelle = "Isabelle2021-1"
|
||||
val latest_doi = "10.5281/zenodo.6810799"
|
||||
val generic_doi = "10.5281/zenodo.3370482"
|
||||
|
||||
// Isabelle/DOF source repository
|
||||
val url = "https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/"
|
||||
|
||||
// Isabelle/DOF release artifacts
|
||||
val artifact_dir = "releases/Isabelle_DOF/Isabelle_DOF"
|
||||
val artifact_host = "artifacts.logicalhacking.com"
|
||||
val artifact_url: String = "https://" + artifact_host + "/" + artifact_dir
|
||||
|
||||
def options(opts: Options): Options = opts + "document_comment_latex"
|
||||
|
||||
|
||||
|
||||
/** Isabelle tool wrapper **/
|
||||
|
||||
sealed case class Parameter(name: String, value: String) {
|
||||
override def toString: String = name
|
||||
|
||||
def print(value_only: Boolean): String =
|
||||
if (value_only) value else name + "=" + value
|
||||
}
|
||||
|
||||
val parameters: List[Parameter] =
|
||||
List(
|
||||
Parameter("isabelle_version", isabelle_version),
|
||||
Parameter("afp_version", afp_version),
|
||||
Parameter("dof_version", version)
|
||||
).sortBy(_.name)
|
||||
|
||||
def print_parameters(names: List[String],
|
||||
all: Boolean = false,
|
||||
value_only: Boolean = false,
|
||||
progress: Progress = new Progress
|
||||
): Unit = {
|
||||
val bad = names.filter(name => !parameters.exists(_.name == name))
|
||||
if (bad.nonEmpty) error("Unknown parameter(s): " + commas_quote(bad))
|
||||
|
||||
val params = if (all) parameters else parameters.filter(p => names.contains(p.name))
|
||||
for (p <- params) progress.echo(p.print(value_only))
|
||||
}
|
||||
|
||||
val isabelle_tool = Isabelle_Tool("dof_param", "print Isabelle/DOF parameters",
|
||||
Scala_Project.here, args =>
|
||||
{
|
||||
var all = false
|
||||
var value_only = false
|
||||
|
||||
val getopts = Getopts("""
|
||||
Usage: isabelle dof_param [OPTIONS] NAMES
|
||||
|
||||
Options are:
|
||||
-a print all parameters
|
||||
-b print values only (default: NAME=VALUE)
|
||||
|
||||
Print given Isabelle/DOF parameters, with names from the list:
|
||||
""" + commas_quote(parameters.map(_.toString)),
|
||||
"a" -> (_ => all = true),
|
||||
"b" -> (_ => value_only = true))
|
||||
|
||||
val names = getopts(args)
|
||||
if (names.isEmpty && !all) getopts.usage()
|
||||
|
||||
print_parameters(names, all = all, value_only = value_only, progress = new Console_Progress)
|
||||
})
|
||||
}
|
||||
@ -1,128 +0,0 @@
|
||||
/*
|
||||
* Copyright (c)
|
||||
* 2021-2022 The University of Exeter.
|
||||
* 2021-2022 The University of Paris-Saclay.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in
|
||||
* the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
/*** document build engine for Isabelle/DOF ***/
|
||||
|
||||
package isabelle.dof
|
||||
|
||||
import isabelle._
|
||||
|
||||
|
||||
object DOF_Document_Build
|
||||
{
|
||||
class Engine extends Document_Build.Bash_Engine("dof")
|
||||
{
|
||||
def the_document_entry(context: Document_Build.Context, name: String): Export.Entry = {
|
||||
val entries =
|
||||
for {
|
||||
node_name <- context.all_document_theories
|
||||
entry <- context.session_context.get(node_name.theory, name)
|
||||
} yield entry
|
||||
|
||||
entries match {
|
||||
case List(entry) => entry
|
||||
case Nil =>
|
||||
error("Missing export " + quote(name) + " for document theories of session " +
|
||||
quote(context.session))
|
||||
case dups =>
|
||||
error("Multiple exports " + quote(name) + " for theories " +
|
||||
commas_quote(dups.map(_.theory_name).sorted.distinct))
|
||||
}
|
||||
}
|
||||
|
||||
override def prepare_directory(
|
||||
context: Document_Build.Context,
|
||||
dir: Path,
|
||||
doc: Document_Build.Document_Variant,
|
||||
verbose: Boolean): Document_Build.Directory =
|
||||
{
|
||||
val options = DOF.options(context.options)
|
||||
val latex_output = new Latex_Output(options)
|
||||
val directory = context.prepare_directory(dir, doc, latex_output, verbose)
|
||||
|
||||
val isabelle_dof_dir = context.session_context.sessions_structure(DOF.session).dir
|
||||
|
||||
val ltx_ontologies = split_lines((the_document_entry(context, "dof/use_ontology")).text)
|
||||
|
||||
// LaTeX styles from Isabelle/DOF directory
|
||||
(List(Path.explode("latex/styles"), Path.explode("ontologies")) :::(ltx_ontologies.map(name =>
|
||||
context.session_context.sessions_structure((Long_Name.base_name(Long_Name.qualifier(name))).mkString).dir)))
|
||||
.flatMap(dir => File.find_files((isabelle_dof_dir + dir).file, _.getName.endsWith(".sty")))
|
||||
.foreach(sty => Isabelle_System.copy_file(sty, directory.doc_dir.file))
|
||||
|
||||
// ontologies.tex from session exports
|
||||
File.write(directory.doc_dir + Path.explode("ontologies.tex"),
|
||||
ltx_ontologies.map(name => "\\usepackage{DOF-" + Long_Name.base_name(name) + "}\n").mkString)
|
||||
|
||||
// root.tex from session exports
|
||||
File.write(directory.doc_dir + Path.explode("root.tex"),
|
||||
(the_document_entry(context, "dof/use_template")).text)
|
||||
|
||||
// dof-config.sty
|
||||
File.write(directory.doc_dir + Path.explode("dof-config.sty"), """
|
||||
\newcommand{\isabelleurl}{""" + DOF.isabelle_url + """}
|
||||
\newcommand{\dofurl}{""" + DOF.url + """}
|
||||
\newcommand{\dof@isabelleversion}{""" + DOF.isabelle_version + """}
|
||||
\newcommand{\isabellefullversion}{""" + DOF.isabelle_version + """\xspace}
|
||||
\newcommand{\dof@version}{""" + DOF.version + """}
|
||||
\newcommand{\dof@artifacturl}{""" + DOF.artifact_url + """}
|
||||
\newcommand{\doflatestversion}{""" + DOF.latest_version + """}
|
||||
\newcommand{\isadoflatestdoi}{""" + DOF.latest_doi + """}
|
||||
\newcommand{\isadofgenericdoi}{""" + DOF.generic_doi + """}
|
||||
\newcommand{\isabellelatestversion}{""" + DOF.latest_isabelle + """}
|
||||
""")
|
||||
|
||||
|
||||
val texinputs: Path = Path.explode("~~/lib/texinputs")
|
||||
val comment_latex = options.bool("document_comment_latex")
|
||||
if (!comment_latex) {
|
||||
Isabelle_System.copy_file(texinputs + Path.basic("comment.sty"), directory.doc_dir)
|
||||
}
|
||||
|
||||
doc.tags.sty(comment_latex).write(directory.doc_dir)
|
||||
|
||||
|
||||
directory
|
||||
}
|
||||
}
|
||||
|
||||
class Latex_Output(options: Options) extends Latex.Output(options)
|
||||
{
|
||||
override def latex_environment(
|
||||
name: String,
|
||||
body: Latex.Text,
|
||||
optional_argument: String = ""): Latex.Text =
|
||||
{
|
||||
XML.enclose(
|
||||
"\n\\begin{" + name + "}" + optional_argument + "\n",
|
||||
"\n\\end{" + name + "}", body)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,903 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter \<open>The Document Ontology Common Library for the Isabelle Ontology Framework\<close>
|
||||
|
||||
text\<open> Building a fundamental infrastructure for common document elements such as
|
||||
Structuring Text-Elements (the top classes), Figures, (Tables yet todo)
|
||||
|
||||
The COL provides a number of ontological "macros" like "section*" which
|
||||
automatically set a number of class-attributes in particular ways without
|
||||
user-interference.
|
||||
\<close>
|
||||
|
||||
theory Isa_COL
|
||||
imports Isa_DOF
|
||||
keywords "title*" "subtitle*"
|
||||
"chapter*" "section*" "paragraph*"
|
||||
"subsection*" "subsubsection*"
|
||||
"figure*" "listing*" "frame*" :: document_body
|
||||
|
||||
begin
|
||||
|
||||
section\<open>Basic Text and Text-Structuring Elements\<close>
|
||||
|
||||
text\<open> The attribute @{term "level"} in the subsequent enables doc-notation support section* etc.
|
||||
we follow LaTeX terminology on levels
|
||||
\<^enum> part = Some -1
|
||||
\<^enum> chapter = Some 0
|
||||
\<^enum> section = Some 1
|
||||
\<^enum> subsection = Some 2
|
||||
\<^enum> subsubsection = Some 3
|
||||
\<^enum> ...
|
||||
|
||||
for scholarly paper: invariant level > 0. \<close>
|
||||
|
||||
doc_class text_element =
|
||||
level :: "int option" <= "None"
|
||||
referentiable :: bool <= "False"
|
||||
variants :: "String.literal set" <= "{STR ''outline'', STR ''document''}"
|
||||
|
||||
doc_class "chapter" = text_element +
|
||||
level :: "int option" <= "Some 0"
|
||||
doc_class "section" = text_element +
|
||||
level :: "int option" <= "Some 1"
|
||||
doc_class "subsection" = text_element +
|
||||
level :: "int option" <= "Some 2"
|
||||
doc_class "subsubsection" = text_element +
|
||||
level :: "int option" <= "Some 3"
|
||||
doc_class "paragraph" = text_element +
|
||||
level :: "int option" <= "Some 4"
|
||||
|
||||
|
||||
subsection\<open>Ontological Macros\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
structure Onto_Macros =
|
||||
struct
|
||||
local open ODL_Meta_Args_Parser in
|
||||
(* *********************************************************************** *)
|
||||
(* Ontological Macro Command Support *)
|
||||
(* *********************************************************************** *)
|
||||
|
||||
(* {markdown = true} sets the parsing process such that in the text-core markdown elements are
|
||||
accepted. *)
|
||||
|
||||
|
||||
fun enriched_text_element_cmd level =
|
||||
let fun transform doc_attrs = case level of
|
||||
NONE => doc_attrs
|
||||
| SOME(NONE) => (("level",@{here}),"None")::doc_attrs
|
||||
| SOME(SOME x) => (("level",@{here}),"Some("^ Int.toString x ^"::int)")::doc_attrs
|
||||
in Monitor_Command_Parser.gen_enriched_document_cmd {inline=true} I transform end;
|
||||
|
||||
local
|
||||
fun transform_cid _ NONE X = X
|
||||
|transform_cid _ (SOME ncid) NONE = (SOME(ncid,@{here}))
|
||||
|transform_cid thy (SOME cid) (SOME (sub_cid,pos)) =
|
||||
let val cid_long = DOF_core.get_onto_class_name_global' cid thy
|
||||
val sub_cid_long = DOF_core.get_onto_class_name_global' sub_cid thy
|
||||
in if DOF_core.is_subclass_global thy sub_cid_long cid_long
|
||||
then (SOME (sub_cid,pos))
|
||||
else (* BUG : check reveals problem of Definition* misuse. *)
|
||||
error("class "^sub_cid_long^
|
||||
" must be sub-class of "^cid_long)
|
||||
end
|
||||
in
|
||||
|
||||
fun transform_attr S doc_attrs =
|
||||
let
|
||||
fun transform_attr' [] doc_attrs = doc_attrs
|
||||
| transform_attr' (s::S) (doc_attrs) =
|
||||
let val (name', value) = s
|
||||
val doc_attrs' = doc_attrs
|
||||
|> map (fn (name, term) => if name = name'
|
||||
then (name, value)
|
||||
else (name, term))
|
||||
in if doc_attrs' = doc_attrs
|
||||
then transform_attr' S doc_attrs' |> cons (name', value)
|
||||
else transform_attr' S doc_attrs'
|
||||
end
|
||||
in transform_attr' S doc_attrs end
|
||||
|
||||
fun enriched_formal_statement_command ncid (S: (string * string) list) =
|
||||
let fun transform_attr doc_attrs = (map (fn(cat,tag) => ((cat,@{here}),tag)) S) @
|
||||
(("formal_results",@{here}),"([]::thm list)")::doc_attrs
|
||||
in fn margs => fn thy =>
|
||||
Monitor_Command_Parser.gen_enriched_document_cmd {inline=true}
|
||||
(transform_cid thy ncid) transform_attr margs thy
|
||||
end;
|
||||
|
||||
fun enriched_document_cmd_exp ncid (S: (string * string) list) =
|
||||
(* expands ncid into supertype-check. *)
|
||||
let fun transform_attr attrs = (map (fn(cat,tag) => ((cat,@{here}),tag)) S) @ attrs
|
||||
in fn margs => fn thy =>
|
||||
Monitor_Command_Parser.gen_enriched_document_cmd {inline=true} (transform_cid thy ncid)
|
||||
transform_attr margs thy
|
||||
end;
|
||||
end (* local *)
|
||||
|
||||
|
||||
fun heading_command (name, pos) descr level =
|
||||
Monitor_Command_Parser.document_command (name, pos) descr
|
||||
{markdown = false, body = true} (enriched_text_element_cmd level) [] I;
|
||||
|
||||
val _ = heading_command \<^command_keyword>\<open>title*\<close> "section heading" NONE;
|
||||
val _ = heading_command \<^command_keyword>\<open>subtitle*\<close> "section heading" NONE;
|
||||
val _ = heading_command \<^command_keyword>\<open>chapter*\<close> "section heading" (SOME (SOME 0));
|
||||
val _ = heading_command \<^command_keyword>\<open>section*\<close> "section heading" (SOME (SOME 1));
|
||||
val _ = heading_command \<^command_keyword>\<open>subsection*\<close> "subsection heading" (SOME (SOME 2));
|
||||
val _ = heading_command \<^command_keyword>\<open>subsubsection*\<close> "subsubsection heading" (SOME (SOME 3));
|
||||
val _ = heading_command \<^command_keyword>\<open>paragraph*\<close> "paragraph" (SOME (SOME 4));
|
||||
|
||||
|
||||
end
|
||||
end
|
||||
\<close>
|
||||
|
||||
|
||||
section\<open>Layout Trimming Commands (with syntactic checks)\<close>
|
||||
|
||||
ML\<open>
|
||||
local
|
||||
|
||||
val scan_cm = Scan.ahead (Basic_Symbol_Pos.$$$ "c" |-- Basic_Symbol_Pos.$$$ "m" ) ;
|
||||
val scan_pt = Scan.ahead (Basic_Symbol_Pos.$$$ "p" |-- Basic_Symbol_Pos.$$$ "t" ) ;
|
||||
val scan_blank = Scan.repeat ( Basic_Symbol_Pos.$$$ " "
|
||||
|| Basic_Symbol_Pos.$$$ "\t"
|
||||
|| Basic_Symbol_Pos.$$$ "\n");
|
||||
|
||||
in
|
||||
|
||||
val scan_latex_measure = (scan_blank
|
||||
|-- Scan.option (Basic_Symbol_Pos.$$$ "-")
|
||||
|-- Symbol_Pos.scan_nat
|
||||
|-- (Scan.option ((Basic_Symbol_Pos.$$$ ".") |-- Symbol_Pos.scan_nat))
|
||||
|-- scan_blank
|
||||
|-- (scan_cm || scan_pt)
|
||||
|-- scan_blank
|
||||
) ;
|
||||
|
||||
fun check_latex_measure _ src =
|
||||
let val _ = ((Scan.catch scan_latex_measure (Symbol_Pos.explode(Input.source_content src)))
|
||||
handle Fail _ => error ("syntax error in LaTeX measure") )
|
||||
in () end
|
||||
|
||||
val parse_latex_measure = Parse.embedded_input >> (fn src => (check_latex_measure () (* dummy arg *) src;
|
||||
(fst o Input.source_content) src ) )
|
||||
|
||||
end\<close>
|
||||
|
||||
|
||||
|
||||
setup\<open> DOF_lib.define_macro \<^binding>\<open>vs\<close> "\\vspace{" "}" (check_latex_measure) \<close>
|
||||
setup\<open> DOF_lib.define_macro \<^binding>\<open>hs\<close> "\\hspace{" "}" (check_latex_measure) \<close>
|
||||
define_shortcut* hfill \<rightleftharpoons> \<open>\hfill\<close>
|
||||
(*<*)
|
||||
|
||||
text\<open>Tests: \<^vs>\<open>-0.14cm\<close>\<close>
|
||||
|
||||
ML\<open> check_latex_measure @{context} (Input.string "-0.14 cm") \<close>
|
||||
define_macro* vs2 \<rightleftharpoons> \<open>\vspace{\<close> _ \<open>}\<close> (check_latex_measure) (* checkers NYI on Isar-level *)
|
||||
define_macro* hs2 \<rightleftharpoons> \<open>\hspace{\<close> _ \<open>}\<close> (* works fine without checker.*)
|
||||
|
||||
(*>*)
|
||||
|
||||
define_shortcut* clearpage \<rightleftharpoons> \<open>\clearpage{}\<close>
|
||||
hf \<rightleftharpoons> \<open>\hfill\<close>
|
||||
br \<rightleftharpoons> \<open>\break\<close>
|
||||
|
||||
|
||||
section\<open> Library of Standard Figure Ontology \<close>
|
||||
|
||||
datatype placement = here | top | bottom
|
||||
|
||||
(*
|
||||
ML\<open> "side_by_side_figure" |> Name_Space.declared (DOF_core.get_onto_classes \<^context>
|
||||
|> Name_Space.space_of_table)\<close>
|
||||
*)
|
||||
|
||||
datatype float_kind = listing | table | graphics
|
||||
|
||||
doc_class float =
|
||||
placement :: "placement list"
|
||||
kind :: float_kind
|
||||
spawn_columns :: bool <= False
|
||||
main_caption :: string <= "''''"
|
||||
|
||||
doc_class figure = float +
|
||||
kind :: float_kind <= graphics
|
||||
file_src :: string
|
||||
relative_width :: int
|
||||
relative_height :: int
|
||||
invariant fig_kind :: "kind \<sigma> = graphics"
|
||||
|
||||
|
||||
doc_class listing = float +
|
||||
kind :: float_kind
|
||||
invariant fig_kind' :: "kind \<sigma> = float_kind.listing"
|
||||
|
||||
|
||||
(* obsolete
|
||||
doc_class side_by_side_figure = figure +
|
||||
anchor :: "string"
|
||||
caption :: "string"
|
||||
relative_width2 :: "int" (* percent of textwidth *)
|
||||
src2 :: "string"
|
||||
anchor2 :: "string"
|
||||
caption2 :: "string"
|
||||
*)
|
||||
|
||||
|
||||
subsection\<open>Figures\<close>
|
||||
|
||||
(*<*)
|
||||
|
||||
ML\<open>
|
||||
fun setup source =
|
||||
ML_Context.expression (Input.pos_of source)
|
||||
(ML_Lex.read "Theory.setup (" @ ML_Lex.read_source source @ ML_Lex.read ")")
|
||||
|> Context.theory_map;
|
||||
\<close>
|
||||
|
||||
(*>*)
|
||||
|
||||
subsubsection\<open>The Figure Content Antiquotation\<close>
|
||||
text\<open>The intermediate development goal is to separate the ontological, top-level construct
|
||||
\<open>figure*\<close>, which will remain a referentiable, ontological document unit, from the more versatile
|
||||
\<^emph>\<open>import\<close> of a figure. This opens the way for more orthogonality and abstraction from the LaTeX
|
||||
engine.
|
||||
\<close>
|
||||
ML\<open>
|
||||
|
||||
type fig_content = {relative_width : int, (* percent of textwidth, default 100 *)
|
||||
relative_height : int, (* percent, default 100 *)
|
||||
caption : Input.source (* default empty *)}
|
||||
|
||||
val mt_fig_content = {relative_width = 100,
|
||||
relative_height = 100,
|
||||
caption = Input.empty }: fig_content
|
||||
|
||||
fun make_fig_content (relative_width, relative_height, caption) =
|
||||
{relative_width = relative_width, relative_height = relative_height, caption = caption}
|
||||
|
||||
fun upd_fig_content f =
|
||||
fn {relative_width, relative_height, caption} =>
|
||||
make_fig_content (f (relative_width, relative_height, caption))
|
||||
|
||||
fun upd_relative_width f =
|
||||
upd_fig_content (fn (relative_width, relative_height, caption) =>
|
||||
(f relative_width, relative_height, caption))
|
||||
|
||||
fun upd_relative_height f =
|
||||
upd_fig_content (fn (relative_width, relative_height, caption) =>
|
||||
(relative_width, f relative_height, caption))
|
||||
|
||||
fun upd_caption f =
|
||||
upd_fig_content (fn (relative_width, relative_height, caption) =>
|
||||
(relative_width, relative_height, f caption))
|
||||
|
||||
val widthN = "width"
|
||||
val heightN = "height"
|
||||
val captionN = "caption";
|
||||
|
||||
fun fig_content_modes (ctxt, toks) =
|
||||
let val (y, toks') = ((((Scan.optional
|
||||
(Args.parens
|
||||
(Parse.list1
|
||||
( (Args.$$$ widthN |-- Args.$$$ "=" -- Parse.int
|
||||
>> (fn (_, k) => upd_relative_width (K k)))
|
||||
|| (Args.$$$ heightN |-- Args.$$$ "=" -- Parse.int
|
||||
>> (fn (_, k) => upd_relative_height (K k)))
|
||||
|| (Args.$$$ captionN |-- Args.$$$ "=" -- Parse.document_source
|
||||
>> (fn (_, k) => upd_caption (K k)))
|
||||
))) [K mt_fig_content])
|
||||
: (fig_content -> fig_content) list parser)
|
||||
>> (foldl1 (op #>)))
|
||||
: (fig_content -> fig_content) parser)
|
||||
(toks)
|
||||
in (y, (ctxt, toks')) end
|
||||
|
||||
fun get_session_dir ctxt path =
|
||||
Resources.check_session_dir ctxt
|
||||
(SOME (path))
|
||||
(Syntax.read_input ".")
|
||||
handle ERROR s => (if String.isPrefix "Bad session root directory (missing ROOT or ROOTS): " s
|
||||
then get_session_dir ctxt (Path.dir path)
|
||||
else error s)
|
||||
|
||||
fun get_document_dir ctxt =
|
||||
let val thy = Proof_Context.theory_of ctxt
|
||||
val sess_dir = get_session_dir ctxt (Resources.master_directory thy)
|
||||
in Path.append sess_dir (Path.explode "document") end;
|
||||
|
||||
fun generate_caption ctxt caption =
|
||||
let
|
||||
val cap_txt= Document_Output.output_document ctxt {markdown = false} caption
|
||||
fun drop_latex_macro (XML.Elem (("latex_environment", [("name", "isabelle")]),xmlt)) = xmlt
|
||||
|drop_latex_macro X = [X]
|
||||
val drop_latex_macros = List.concat o map drop_latex_macro;
|
||||
in
|
||||
drop_latex_macros cap_txt
|
||||
end
|
||||
|
||||
fun process_args cfg_trans =
|
||||
let val {relative_width,relative_height,caption} = cfg_trans mt_fig_content
|
||||
val _ = if relative_width < 0 orelse relative_height <0
|
||||
then error("negative parameter.")
|
||||
else ()
|
||||
val wdth_val_s = Real.toString((Real.fromInt relative_width)
|
||||
/ (Real.fromInt 100))^"\\textwidth"
|
||||
val ht_s= if relative_height = 100
|
||||
then ""
|
||||
else "height="
|
||||
^ Real.toString((Real.fromInt relative_height)
|
||||
/ (Real.fromInt 100))
|
||||
^ "\\textheight"
|
||||
in (wdth_val_s, ht_s, caption) end
|
||||
|
||||
fun fig_content ctxt (cfg_trans,file:Input.source) =
|
||||
let val (wdth_val_s, ht_s, caption) = process_args cfg_trans
|
||||
val arg_single = enclose "[" "]" (commas ["keepaspectratio","width="^wdth_val_s,ht_s])
|
||||
val arg = enclose "[" "]" (commas ["keepaspectratio","width=\\textwidth",ht_s])
|
||||
val _ = Resources.check_file ctxt (SOME (get_document_dir ctxt)) file
|
||||
(* ToDo: must be declared source of type png or jpeg or pdf, ... *)
|
||||
|
||||
in if Input.string_of(caption) = "" then
|
||||
file
|
||||
|> (Latex.string o Input.string_of)
|
||||
|> Latex.macro ("includegraphics"^arg_single)
|
||||
else
|
||||
file
|
||||
|> (Latex.string o Input.string_of)
|
||||
|> (fn X => (Latex.string ("{"^wdth_val_s^"}"))
|
||||
@ (Latex.macro0 "centering")
|
||||
@ (Latex.macro ("includegraphics"^arg) X)
|
||||
@ (Latex.macro "caption" (generate_caption ctxt caption)))
|
||||
|> (Latex.environment ("subcaptionblock") )
|
||||
(* BUG: newline at the end of subcaptionlbock, making side-by-side a figure-below-figure setup *)
|
||||
end
|
||||
|
||||
fun fig_content_antiquotation name scan =
|
||||
(Document_Output.antiquotation_raw_embedded name
|
||||
(scan : ((fig_content -> fig_content) * Input.source) context_parser)
|
||||
(fig_content : Proof.context -> (fig_content -> fig_content) * Input.source -> Latex.text));
|
||||
|
||||
|
||||
fun figure_content ctxt (cfg_trans,file:Input.source) =
|
||||
let val _ = Resources.check_file ctxt (SOME (get_document_dir ctxt)) file
|
||||
(* ToDo: must be declared source of type png or jpeg or pdf, ... *)
|
||||
val (wdth_val_s, ht_s, caption) = process_args cfg_trans
|
||||
val args = ["keepaspectratio","width=" ^ wdth_val_s, ht_s]
|
||||
|> commas
|
||||
|> enclose "[" "]"
|
||||
in file
|
||||
|> (Latex.string o Input.string_of)
|
||||
|> Latex.macro ("includegraphics" ^ args)
|
||||
|> (fn X => X @ Latex.macro "caption" (generate_caption ctxt caption))
|
||||
|> Latex.environment ("figure")
|
||||
end
|
||||
|
||||
fun figure_antiquotation name scan =
|
||||
(Document_Output.antiquotation_raw_embedded name
|
||||
(scan : ((fig_content -> fig_content) * Input.source) context_parser)
|
||||
(figure_content : Proof.context -> (fig_content -> fig_content) * Input.source -> Latex.text));
|
||||
|
||||
val _ = Theory.setup
|
||||
( fig_content_antiquotation \<^binding>\<open>fig_content\<close>
|
||||
(fig_content_modes -- Scan.lift(Parse.path_input))
|
||||
#> figure_antiquotation \<^binding>\<open>figure_content\<close>
|
||||
(fig_content_modes -- Scan.lift(Parse.path_input)))
|
||||
|
||||
\<close>
|
||||
|
||||
|
||||
ML\<open>
|
||||
|
||||
|
||||
fun convert_meta_args ctxt (X, (((str,_),value) :: R)) =
|
||||
let fun conv_int x = snd(HOLogic.dest_number(Syntax.read_term ctxt x))
|
||||
handle TERM _ => error "Illegal int format."
|
||||
in
|
||||
(case YXML.content_of str of
|
||||
"relative_width" => upd_relative_width (K (conv_int value))
|
||||
o convert_meta_args ctxt (X, R)
|
||||
| "relative_height" => upd_relative_height (K (conv_int value))
|
||||
o convert_meta_args ctxt (X, R )
|
||||
| "file_src" => convert_meta_args ctxt (X, R)
|
||||
| s => error("!undefined attribute:"^s))
|
||||
end
|
||||
|convert_meta_args _ (_,[]) = I
|
||||
|
||||
fun convert_src_from_margs ctxt (X, (((str,_),value)::R)) =
|
||||
(case YXML.content_of str of
|
||||
"file_src" => Input.string (HOLogic.dest_string (Syntax.read_term ctxt value))
|
||||
| _ => convert_src_from_margs ctxt (X,R))
|
||||
|convert_src_from_margs _ (_, []) = error("No file_src provided.")
|
||||
|
||||
fun float_command (name, pos) descr cid =
|
||||
let fun set_default_class NONE = SOME(cid,pos)
|
||||
|set_default_class (SOME X) = SOME X
|
||||
fun create_instance (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) =
|
||||
Value_Command.Docitem_Parser.create_and_check_docitem
|
||||
{is_monitor = false}
|
||||
{is_inline = true}
|
||||
{define = true} binding (set_default_class cid_pos) doc_attrs
|
||||
fun generate_fig_ltx_ctxt ctxt cap_src oid body =
|
||||
Latex.macro0 "centering"
|
||||
@ body
|
||||
@ Latex.macro "caption" (generate_caption ctxt cap_src)
|
||||
@ Latex.macro "label" (DOF_core.get_instance_name_global oid (Proof_Context.theory_of ctxt)
|
||||
|> DOF_core.output_name
|
||||
|> Latex.string)
|
||||
fun parse_and_tex (margs as ((binding,_), _), cap_src) ctxt =
|
||||
let val oid = Binding.name_of binding
|
||||
in
|
||||
(convert_src_from_margs ctxt margs)
|
||||
|> pair (upd_caption (K Input.empty) #> convert_meta_args ctxt margs)
|
||||
|> fig_content ctxt
|
||||
|> generate_fig_ltx_ctxt ctxt cap_src oid
|
||||
|> (Latex.environment ("figure") )
|
||||
end
|
||||
in Monitor_Command_Parser.onto_macro_cmd_command (name, pos) descr create_instance parse_and_tex
|
||||
end
|
||||
|
||||
fun listing_command (name, pos) descr cid =
|
||||
let fun set_default_class NONE = SOME(cid,pos)
|
||||
|set_default_class (SOME X) = SOME X
|
||||
fun create_instance (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) =
|
||||
Value_Command.Docitem_Parser.create_and_check_docitem
|
||||
{is_monitor = false}
|
||||
{is_inline = true}
|
||||
{define = true} binding (set_default_class cid_pos) doc_attrs
|
||||
fun parse_and_tex (margs as ((binding,_), _), _) _ =
|
||||
let val pos = Binding.pos_of binding
|
||||
in
|
||||
ISA_core.err ("Not yet implemented.\n Please use text*[oid::listing]\<open>\<close> instead.") pos
|
||||
end
|
||||
in Monitor_Command_Parser.onto_macro_cmd_command (name, pos) descr create_instance parse_and_tex
|
||||
end
|
||||
|
||||
|
||||
(* *********************************************************************** *)
|
||||
(* Ontological Macro Command Support *)
|
||||
(* *********************************************************************** *)
|
||||
|
||||
val _ = float_command \<^command_keyword>\<open>figure*\<close> "figure" "Isa_COL.figure" ;
|
||||
val _ = listing_command \<^command_keyword>\<open>listing*\<close> "listing" "Isa_COL.listing" ; (* Hack ! *)
|
||||
\<close>
|
||||
|
||||
|
||||
subsection\<open>Tables\<close>
|
||||
(* Under development *)
|
||||
|
||||
text\<open>Tables are (sub) document-elements represented inside the documentation antiquotation
|
||||
language. The used technology is similar to the existing railroad-diagram support
|
||||
(cf. \<^url>\<open>https://isabelle.in.tum.de/doc/isar-ref.pdf\<close>, Sec. 4.5).
|
||||
|
||||
However, tables are not directly based on the idiosyncrasies of Knuth-based language design ---
|
||||
|
||||
However, tables come with a more abstract structure model than conventional typesetting in the
|
||||
LaTeX tradition. It is based of the following principles:
|
||||
\<^item> The core unit of a table is a \<^emph>\<open>cell\<close> having a \<^emph>\<open>configuration\<close>, i.e. a
|
||||
number of attributes specifying its width, height, borderline, etc.
|
||||
A cell may be \<^emph>\<open>elementary\<close>, i.e. containing structured text or \<^emph>\<open>compound\<close>,
|
||||
i.e. containing a sub-table.
|
||||
\<^item> A \<^emph>\<open>table\<close> contains either a list of \<^emph>\<open>rows\<close> or a list of \<^emph>\<open>columns\<close>, which are both
|
||||
lists of cells.
|
||||
\<^item> The tables, rows and columns posses own configurations.
|
||||
\<^item> Concerning the layout, \<^emph>\<open>propagation\<close> laws of configurations control that
|
||||
information flows top-down from tables to rows or columns, from rows/columns to cells,
|
||||
from left to right within rows and from top to bottom in columns; propagation produces
|
||||
the desired presentation effect of tables that cells appear somewhat uniform in it.
|
||||
\<^item> Since rows are lists of cells, configurations are also a list of attributes.
|
||||
Attributes of the same kind may appear repeatedly. If the sub-list of attributes
|
||||
    of the same kind is shorter than the list of cells it is referring to, then
|
||||
the last element in this sub-list is duplicated as many times as necessary. This feature
|
||||
of configuration propagation is called \<^emph>\<open>filling\<close>.
|
||||
  \<^item> Lists of rows and lists of cells consist of the same number of cells.
|
||||
\<^item> Since propagation and filling induce a congruence relation on table trees, a normalisation
|
||||
process is a necessary pre-requisite for the compilation to LaTeX.
|
||||
\<close>
|
||||
|
||||
ML\<open>
|
||||
local
|
||||
|
||||
(* Interleave the LaTeX separator st1 between the cells of a row and terminate
   the row with st2 (e.g. "&" between cells and "\\\\" at the end, for a
   tabular-like environment).  The original version was non-exhaustive and
   raised Match on the empty list; an empty row now simply yields []. *)
fun mk_line _ _ [] = []
   |mk_line _ st2 [a] = [a @ Latex.string st2]
   |mk_line st1 st2 (a::S) = (a @ Latex.string st1) :: mk_line st1 st2 S;
|
||||
|
||||
(* tab attributes for global setup *)
|
||||
|
||||
type cell_config = {cell_placing : string list,
|
||||
cell_height : string list,
|
||||
cell_width : string list,
|
||||
cell_bgnd_color : string list,
|
||||
cell_line_color : string list,
|
||||
cell_line_width : string list}
|
||||
|
||||
val mt_cell_config = {cell_placing = [],
|
||||
cell_height = [],
|
||||
cell_width = [],
|
||||
cell_bgnd_color= [],
|
||||
cell_line_color= [],
|
||||
cell_line_width= [] }: cell_config
|
||||
|
||||
(* Field-wise updaters for cell_config.  Each updater appends its argument to
   exactly one attribute list and leaves the remaining fields unchanged.  A
   shared constructor avoids repeating the record literal six times. *)
local
  fun mk (placing, height, width, bgnd, line_color, line_width) : cell_config =
        {cell_placing = placing, cell_height = height, cell_width = width,
         cell_bgnd_color = bgnd, cell_line_color = line_color,
         cell_line_width = line_width}
in

fun upd_cell_placing key ({cell_placing, cell_height, cell_width, cell_bgnd_color,
                           cell_line_color, cell_line_width} : cell_config) =
      mk (cell_placing @ [key], cell_height, cell_width,
          cell_bgnd_color, cell_line_color, cell_line_width)

fun upd_cell_height num ({cell_placing, cell_height, cell_width, cell_bgnd_color,
                          cell_line_color, cell_line_width} : cell_config) =
      mk (cell_placing, cell_height @ [num], cell_width,
          cell_bgnd_color, cell_line_color, cell_line_width)

fun upd_cell_width num ({cell_placing, cell_height, cell_width, cell_bgnd_color,
                         cell_line_color, cell_line_width} : cell_config) =
      mk (cell_placing, cell_height, cell_width @ [num],
          cell_bgnd_color, cell_line_color, cell_line_width)

fun upd_cell_bgnd_color str ({cell_placing, cell_height, cell_width, cell_bgnd_color,
                              cell_line_color, cell_line_width} : cell_config) =
      mk (cell_placing, cell_height, cell_width,
          cell_bgnd_color @ [str], cell_line_color, cell_line_width)

fun upd_cell_line_color str ({cell_placing, cell_height, cell_width, cell_bgnd_color,
                              cell_line_color, cell_line_width} : cell_config) =
      mk (cell_placing, cell_height, cell_width,
          cell_bgnd_color, cell_line_color @ [str], cell_line_width)

fun upd_cell_line_width num ({cell_placing, cell_height, cell_width, cell_bgnd_color,
                              cell_line_color, cell_line_width} : cell_config) =
      mk (cell_placing, cell_height, cell_width,
          cell_bgnd_color, cell_line_color, cell_line_width @ [num])

end
|
||||
|
||||
(* global default configs *)
|
||||
(* Global default cell configurations, adjustable with the attribute syntax,
   e.g.  declare[[tab_cell_placing = "left"]].  *)
val (tab_cell_placing, tab_cell_placing_setup)
     = Attrib.config_string \<^binding>\<open>tab_cell_placing\<close> (K "center");
val (tab_cell_height, tab_cell_height_setup)
     = Attrib.config_string \<^binding>\<open>tab_cell_height\<close> (K "0.0cm");
val (tab_cell_width, tab_cell_width_setup)
     = Attrib.config_string \<^binding>\<open>tab_cell_width\<close> (K "0.0cm");
(* The two bindings below were registered under the copy-paste names
   "tab_cell_bgnd_height" and "tab_cell_line_height", inconsistent with the
   ML identifiers and the other option names; fixed to match. *)
val (tab_cell_bgnd_color, tab_cell_bgnd_color_setup)
     = Attrib.config_string \<^binding>\<open>tab_cell_bgnd_color\<close> (K "white");
val (tab_cell_line_color, tab_cell_line_color_setup)
     = Attrib.config_string \<^binding>\<open>tab_cell_line_color\<close> (K "black");
val (tab_cell_line_width, tab_cell_line_width_setup)
     = Attrib.config_string \<^binding>\<open>tab_cell_line_width\<close> (K "0.0cm");
|
||||
|
||||
(* Build a cell_config seeded with the current global defaults of ctxt; each
   field is a singleton list, ready for configuration propagation/filling. *)
fun default_cell_config ctxt = {cell_placing    = [Config.get ctxt tab_cell_placing],
                                cell_height     = [Config.get ctxt tab_cell_height],
                                cell_width      = [Config.get ctxt tab_cell_width],
                                cell_bgnd_color = [Config.get ctxt tab_cell_bgnd_color],
                                cell_line_color = [Config.get ctxt tab_cell_line_color],
                                cell_line_width = [Config.get ctxt tab_cell_line_width]}
                               : cell_config
|
||||
|
||||
|
||||
val _ = Theory.setup( tab_cell_placing_setup
|
||||
#> tab_cell_height_setup
|
||||
#> tab_cell_width_setup
|
||||
#> tab_cell_bgnd_color_setup
|
||||
#> tab_cell_line_color_setup
|
||||
#> tab_cell_line_width_setup
|
||||
)
|
||||
|
||||
|
||||
(*syntax for local tab specifier *)
|
||||
val cell_placingN = "cell_placing"
|
||||
val cell_heightN = "cell_height"
|
||||
val cell_widthN = "cell_width"
|
||||
val cell_bgnd_colorN = "cell_bgnd_color"
|
||||
val cell_line_colorN = "cell_line_color"
|
||||
val cell_line_widthN = "cell_line_width"
|
||||
|
||||
val placing_scan = Args.$$$ "left" || Args.$$$ "center" || Args.$$$ "right"
|
||||
|
||||
val color_scan = Args.$$$ "none" || Args.$$$ "red" || Args.$$$ "green"
|
||||
|| Args.$$$ "blue" || Args.$$$ "black"
|
||||
|
||||
|
||||
fun lift scan (st, xs) =
|
||||
let val (y, xs') = scan xs
|
||||
in (y, (st, xs')) end;
|
||||
|
||||
|
||||
(* Context parser for the optional parenthesised attribute list of the table
   antiquotations, e.g.  (cell_placing = center, cell_width = \<open>2cm\<close>).
   Each recognised attribute is parsed into a cell_config updater; the list of
   updaters is composed left-to-right with #>.  When no attribute list is
   present, the default is the context-dependent default_cell_config. *)
fun tabitem_modes (ctxt, toks) =
    let val (y, toks') = ((((Scan.optional
                             (Args.parens
                                (Parse.list1
                                   ( (Args.$$$ cell_placingN |-- Args.$$$ "=" -- placing_scan
                                      >> (fn (_, k) => upd_cell_placing k))
                                  || (Args.$$$ cell_heightN |-- Args.$$$ "=" -- parse_latex_measure
                                      >> (fn (_, k) => upd_cell_height k))
                                  || (Args.$$$ cell_widthN |-- Args.$$$ "=" -- parse_latex_measure
                                      >> (fn (_, k) => upd_cell_width k))
                                  || (Args.$$$ cell_bgnd_colorN |-- Args.$$$ "=" -- color_scan
                                      >> (fn (_, k) => upd_cell_bgnd_color k))
                                  || (Args.$$$ cell_line_colorN |-- Args.$$$ "=" -- color_scan
                                      >> (fn (_, k) => upd_cell_line_color k))
                                  || (Args.$$$ cell_line_widthN |-- Args.$$$ "=" -- parse_latex_measure
                                      >> (fn (_, k) => upd_cell_line_width k))
                                   (* fall-back: no attributes given *)
                                   ))) [K (default_cell_config (Context.the_proof ctxt))])
                           : (cell_config -> cell_config) list parser)
                          (* compose the updaters in textual order *)
                          >> (foldl1 (op #>)))
                         : (cell_config -> cell_config) parser)
                        (toks)
    in (y, (ctxt, toks')) end
|
||||
|
||||
|
||||
(* Abstract table structure (cf. the principles in the text block above this
   ML section): a table is a configured cell group; a group is either a row
   or a column of sub-trees; leaves are elementary cells carrying raw source.
   NOTE(review): the constructors are lower-case (mk_tab, ...) against the
   usual SML convention — kept as-is since they are referenced elsewhere. *)
datatype table_tree = mk_tab of cell_config * cell_group
                    | mk_cell of cell_config * Input.source
and cell_group = mk_row of cell_config * table_tree list
               | mk_column of cell_config * table_tree list
|
||||
|
||||
|
||||
|
||||
val tab_config_parser = tabitem_modes : ((cell_config -> cell_config) ) context_parser
|
||||
val table_parser = tab_config_parser -- Scan.repeat1(Scan.repeat1(Scan.lift Args.cartouche_input))
|
||||
|
||||
(* Build a document antiquotation that typesets a table: the content is a
   list of rows, each a list of cell sources; cells are rendered, joined with
   "&" and each row terminated with "\\\\" (mk_line), then wrapped in a
   \table macro.
   NOTE(review): still under development — cfg is computed but only printed
   via the debug writeln below, the content check is a stub, and the LaTeX
   argument "allerhandquatsch" is an obvious placeholder; all flagged for
   follow-up rather than changed here. *)
fun table_antiquotation name scan =
    Document_Output.antiquotation_raw_embedded name
        scan
        (fn ctxt =>
          (fn (cfg_trans,content:Input.source list list) =>
              let val cfg = cfg_trans mt_cell_config
                  val _ = writeln ("XXX"^ @{make_string} cfg)  (* debug output *)
                  fun check _ = ()  (* ToDo *)
                  val _ = check content
              in  content
                  |> (map(map (Document_Output.output_document ctxt {markdown = false})
                          #> mk_line "&" "\\\\"
                          #> List.concat )
                      #> List.concat)
                  |> XML.enclose "\\table[allerhandquatsch]{" "}"
              end
          )
        );
|
||||
|
||||
(* Antiquotation for a single table cell: parses a cell configuration plus one
   cartouche and typesets the cartouche content.
   NOTE(review): cfg is computed and printed for debugging but not yet applied
   to the layout — under development. *)
fun cell_antiquotation name scan =
    Document_Output.antiquotation_raw_embedded name
        scan
        (fn ctxt =>
          (fn (cfg_trans,content:Input.source) =>
              let val cfg = cfg_trans mt_cell_config
                  val _ = writeln ("XXX"^ @{make_string} cfg)  (* debug output *)
              in  content |> Document_Output.output_document ctxt {markdown = false}
              end
          )
        )

(* Antiquotation for a table row: a configuration plus one cartouche per cell;
   the rendered cells are concatenated.  Same debugging caveat as for cells. *)
fun row_antiquotation name scan =
    Document_Output.antiquotation_raw_embedded name
        scan
        (fn ctxt =>
          (fn (cfg_trans,content:Input.source list) =>
              let val cfg = cfg_trans mt_cell_config
                  val _ = writeln ("XXX"^ @{make_string} cfg)  (* debug output *)
              in  content |> (map (Document_Output.output_document ctxt {markdown = false})
                              #> List.concat)
              end
          )
        )

(* Antiquotation for a table column; identical rendering to rows at this
   stage of development. *)
fun column_antiquotation name scan =
    Document_Output.antiquotation_raw_embedded name
        scan
        (fn ctxt =>
          (fn (cfg_trans,content:Input.source list) =>
              let val cfg = cfg_trans mt_cell_config
                  val _ = writeln ("XXX"^ @{make_string} cfg)  (* debug output *)
              in  content |> (map (Document_Output.output_document ctxt {markdown = false})
                              #> List.concat)
              end
          )
        )
|
||||
|
||||
in
|
||||
|
||||
val _ = Theory.setup
|
||||
( table_antiquotation \<^binding>\<open>table_inline\<close>
|
||||
table_parser
|
||||
#> table_antiquotation \<^binding>\<open>subtab\<close> table_parser
|
||||
#> cell_antiquotation \<^binding>\<open>cell\<close>
|
||||
(tab_config_parser--Scan.lift Args.cartouche_input)
|
||||
#> row_antiquotation \<^binding>\<open>row\<close>
|
||||
(tab_config_parser--Scan.repeat1(Scan.lift Args.cartouche_input))
|
||||
#> column_antiquotation \<^binding>\<open>column\<close>
|
||||
(tab_config_parser--Scan.repeat1(Scan.lift Args.cartouche_input))
|
||||
);
|
||||
|
||||
end
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
(*<*)
|
||||
|
||||
declare[[tab_cell_placing="left",tab_cell_height="18.0cm"]]
|
||||
|
||||
section\<open>Some Rudimentary Tests\<close>
|
||||
|
||||
text\<open> @{fig_content [display] (height = 80, width=80, caption=\<open>this is \<^term>\<open>\<sigma>\<^sub>i+2\<close> \<dots>\<close>)
|
||||
\<open>figures/isabelle-architecture.pdf\<close>}\<close>
|
||||
text\<open> @{table_inline [display] (cell_placing = center,cell_height =\<open>12.0cm\<close>,
|
||||
cell_height =\<open>13pt\<close>, cell_width = \<open>12.0cm\<close>,
|
||||
cell_bgnd_color=black,cell_line_color=red,cell_line_width=\<open>12.0cm\<close>)
|
||||
\<open>\<open>\<^cell>\<open>dfg\<close> \<^col>\<open>dfg\<close> \<^row>\<open>dfg\<close> @{cell (cell_height =\<open>12.0cm\<close>) \<open>abracadabra\<close>}\<close>
|
||||
\<open>\<open>1\<close> \<open>2\<close> \<open>3\<sigma>\<close>\<close>
|
||||
\<close>}
|
||||
\<^cell>\<open>dfg\<close> @{row \<open>is technical\<close> \<open> \<open>\<sigma> * a\<^sub>4\<close> \<close>}\<close>
|
||||
|
||||
(*>*)
|
||||
|
||||
text\<open>beamer support\<close>
|
||||
(* Under development *)
|
||||
|
||||
doc_class frame =
|
||||
options :: string
|
||||
frametitle :: string
|
||||
framesubtitle :: string
|
||||
|
||||
ML\<open>
|
||||
type frame = {options: Input.source
|
||||
, frametitle: Input.source
|
||||
, framesubtitle: Input.source}
|
||||
|
||||
val empty_frame = {options = Input.empty
|
||||
, frametitle = Input.empty
|
||||
, framesubtitle = Input.empty}: frame
|
||||
|
||||
(* Constructor for frame records from a triple of its three fields. *)
fun make_frame (opts, title, subtitle) =
      {options = opts, frametitle = title, framesubtitle = subtitle}

(* Lift a function on the field triple to a function on frame records. *)
fun upd_frame f {options, frametitle, framesubtitle} =
      make_frame (f (options, frametitle, framesubtitle))

(* Field-wise updaters, each applying f to exactly one component. *)
fun upd_options f =
      upd_frame (fn (opts, title, subtitle) => (f opts, title, subtitle))

fun upd_frametitle f =
      upd_frame (fn (opts, title, subtitle) => (opts, f title, subtitle))

fun upd_framesubtitle f =
      upd_frame (fn (opts, title, subtitle) => (opts, title, f subtitle))
|
||||
|
||||
type block = {title: Input.source}
|
||||
|
||||
val empty_block = {title = Input.empty}
|
||||
|
||||
fun make_block title = {title = title}
|
||||
|
||||
fun upd_block f =
|
||||
fn {title} => make_block (f title)
|
||||
|
||||
fun upd_block_title f =
|
||||
upd_block (fn title => f title)
|
||||
|
||||
val unenclose_string = unenclose o unenclose
|
||||
|
||||
(* Re-read a (possibly YXML-marked-up) string as an Input.source: if the
   demarkup'ed text starts with an open-cartouche symbol it is re-parsed as a
   cartouche token, otherwise two layers of enclosing delimiters are stripped
   and the remainder is read as plain input.
   NOTE(review): hd raises an exception on the empty symbol list — assumes
   callers never pass an empty string; confirm. *)
fun read_string s =
  let val s' = DOF_core.markup2string s
      val symbols = s' |> Symbol_Pos.explode0
  in if hd symbols |> fst |> equal Symbol.open_
     then Token.read_cartouche symbols |> Token.input_of
     else unenclose_string s' |> Syntax.read_input
  end
|
||||
|
||||
val block_titleN = "title"
|
||||
|
||||
fun block_modes (ctxt, toks) =
|
||||
let val (y, toks') = ((((Scan.optional
|
||||
(Args.parens
|
||||
(Parse.list1
|
||||
((Args.$$$ block_titleN |-- Args.$$$ "=" -- Parse.document_source
|
||||
>> (fn (_, k) => upd_block_title (K k)))
|
||||
))) [K empty_block])
|
||||
: (block -> block) list parser)
|
||||
>> (foldl1 (op #>)))
|
||||
: (block -> block) parser)
|
||||
(toks)
|
||||
in (y, (ctxt, toks')) end
|
||||
|
||||
fun process_args cfg_trans =
|
||||
let val {title} = cfg_trans empty_block
|
||||
in title end
|
||||
|
||||
fun block ctxt (cfg_trans,src) =
|
||||
let val title = process_args cfg_trans
|
||||
in Latex.string "{"
|
||||
@ (title |> Document_Output.output_document ctxt {markdown = false})
|
||||
@ Latex.string "}"
|
||||
@ (src |> Document_Output.output_document ctxt {markdown = false})
|
||||
|> (Latex.environment "block")
|
||||
end
|
||||
|
||||
fun block_antiquotation name scan =
|
||||
(Document_Output.antiquotation_raw_embedded name
|
||||
(scan : ((block -> block) * Input.source) context_parser)
|
||||
(block: Proof.context -> (block -> block) * Input.source -> Latex.text));
|
||||
|
||||
val _ = block_antiquotation \<^binding>\<open>block\<close> (block_modes -- Scan.lift Parse.document_source)
|
||||
|> Theory.setup
|
||||
|
||||
(* Fold the parsed meta-argument list (attribute/value pairs) of a frame*
   command into a composed frame-record update function; the empty list yields
   the identity.  Unknown attribute names are rejected with an error.  The
   first pair component X is only threaded through the recursion. *)
fun convert_meta_args ctxt (X, (((str,_),value) :: R)) =
      (case YXML.content_of str of
         "frametitle" => upd_frametitle (K(YXML.content_of value |> read_string))
                         o convert_meta_args ctxt (X, R)
       | "framesubtitle" => upd_framesubtitle (K(YXML.content_of value |> read_string))
                            o convert_meta_args ctxt (X, R)
       | "options" => upd_options (K(YXML.content_of value |> read_string))
                      o convert_meta_args ctxt (X, R)
       | s => error("!undefined attribute:"^s))
  | convert_meta_args _ (_,[]) = I
|
||||
|
||||
(* Define an ontological command (e.g. frame*) for beamer-style frames.
   - create_instance registers the document item with the DOF machinery,
     defaulting its class to cid at position pos when no class is given;
   - parse_and_tex converts the meta-arguments into frame attributes via
     convert_meta_args and renders the LaTeX \begin{frame}[options]
     {title}{subtitle} body, wrapped in an isamarkuptext environment.
   Title and subtitle are typeset without markdown, the body with markdown. *)
fun frame_command (name, pos) descr cid =
    let fun set_default_class NONE = SOME(cid,pos)
           |set_default_class (SOME X) = SOME X
        (* register the doc item; monitor-free, inline, defining occurrence *)
        fun create_instance (((binding,cid_pos), doc_attrs) : ODL_Meta_Args_Parser.meta_args_t) =
              Value_Command.Docitem_Parser.create_and_check_docitem
                                  {is_monitor = false}
                                  {is_inline = true}
                                  {define = true} binding (set_default_class cid_pos) doc_attrs
        (* render {frametitle}{framesubtitle} followed by the frame body *)
        fun titles_src ctxt frametitle framesubtitle src =
          Latex.string "{"
          @ Document_Output.output_document ctxt {markdown = false} frametitle
          @ Latex.string "}"
          @ Latex.string "{"
          @ (Document_Output.output_document ctxt {markdown = false} framesubtitle)
          @ Latex.string "}"
          @ Document_Output.output_document ctxt {markdown = true} src
        (* prepend [options] only when the options attribute is non-empty *)
        fun generate_src_ltx_ctxt ctxt src cfg_trans =
          let val {options, frametitle, framesubtitle} = cfg_trans empty_frame
          in
            let val options_str = Input.string_of options
            in if options_str = ""
               then titles_src ctxt frametitle framesubtitle src
               else (options_str
                     |> enclose "[" "]"
                     |> Latex.string)
                    @ titles_src ctxt frametitle framesubtitle src
            end
          end
        fun parse_and_tex (margs, src) ctxt =
          convert_meta_args ctxt margs
          |> generate_src_ltx_ctxt ctxt src
          |> Latex.environment ("frame")
          |> Latex.environment ("isamarkuptext")
    in Monitor_Command_Parser.onto_macro_cmd_command (name, pos) descr create_instance parse_and_tex
    end
|
||||
|
||||
val _ = frame_command \<^command_keyword>\<open>frame*\<close> "frame environment" "Isa_COL.frame" ;
|
||||
\<close>
|
||||
|
||||
end
|
||||
@ -1,326 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
chapter\<open>The High-Level Interface to the Automata-Library\<close>
|
||||
|
||||
theory RegExpInterface
|
||||
imports "Functional-Automata.Execute"
|
||||
keywords
|
||||
"reflect_ML_exports" :: thy_decl
|
||||
|
||||
begin
|
||||
|
||||
|
||||
text\<open> The implementation of the monitoring concept follows the following design decisions:
|
||||
\<^enum> We re-use generated code from the AFP submissions @{theory "Regular-Sets.Regular_Set"} and
|
||||
@{theory "Functional-Automata.Automata"}, converted by the code-generator into executable SML code
|
||||
(ports to future Isabelle versions should just reuse future versions of these)
|
||||
\<^enum> Monitor-Expressions are regular expressions (in some adapted syntax)
|
||||
over Document Class identifiers; they denote the language of all possible document object
|
||||
instances belonging to these classes
|
||||
\<^enum> Instead of expanding the sub-class relation (and building the product automaton of all
|
||||
monitor expressions), we convert the monitor expressions into automata over class-id's
|
||||
executed in parallel, in order to avoid blowup.
|
||||
\<^enum> For efficiency reasons, the class-ids were internally abstracted to integers; the
|
||||
encoding table is called environment \<^verbatim>\<open>env\<close>.
|
||||
\<^enum> For reusability reasons, we did NOT abstract the internal state representation in the
|
||||
deterministic automata construction (lists of lists of bits - sic !) by replacing them
|
||||
by unique keys via a suitable coding-table; rather, we opted for keeping the automatas small
|
||||
(no products, no subclass-expansion).
|
||||
\<close>
|
||||
|
||||
section\<open>Monitor Syntax over RegExp - constructs\<close>
|
||||
|
||||
notation Star ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
|
||||
notation Plus (infixr "||" 55)
|
||||
notation Times (infixr "~~" 60)
|
||||
notation Atom ("\<lfloor>_\<rfloor>" 65)
|
||||
|
||||
definition rep1 :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrace>(_)\<rbrace>\<^sup>+")
|
||||
where "\<lbrace>A\<rbrace>\<^sup>+ \<equiv> A ~~ \<lbrace>A\<rbrace>\<^sup>*"
|
||||
|
||||
definition opt :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrakk>(_)\<rbrakk>")
|
||||
where "\<lbrakk>A\<rbrakk> \<equiv> A || One"
|
||||
|
||||
value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))"
|
||||
text\<open>or better equivalently:\<close>
|
||||
value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*"
|
||||
|
||||
section\<open>Some Standard and Derived Semantics\<close>
|
||||
text\<open> This is just a reminder - already defined in @{theory "Regular-Sets.Regular_Exp"}
|
||||
as @{term lang}.\<close>
|
||||
|
||||
text\<open>In the following, we give a semantics for our regular expressions, which so far have
|
||||
just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'',
|
||||
i.e. we give a direct meaning for regular expressions in some universe of ``denotations''.
|
||||
|
||||
This universe of denotations is in our concrete case:\<close>
|
||||
|
||||
text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close>
|
||||
|
||||
fun Lang :: "'a rexp => 'a lang"
|
||||
where L_Emp : "Lang Zero = {}"
|
||||
|L_One: "Lang One = {[]}"
|
||||
|L_Atom: "Lang (\<lfloor>a\<rfloor>) = {[a]}"
|
||||
|L_Un: "Lang (el || er) = (Lang el) \<union> (Lang er)"
|
||||
|L_Conc: "Lang (el ~~ er) = {xs@ys | xs ys. xs \<in> Lang el \<and> ys \<in> Lang er}"
|
||||
|L_Star: "Lang (Star e) = Regular_Set.star(Lang e)"
|
||||
|
||||
|
||||
text\<open>A more useful definition is the sub-language - definition\<close>
|
||||
(* Sub-language semantics: an atom \<lfloor>a\<rfloor> accepts every singleton word [x] with
   x \<le> a, i.e. all "sub-class" words of a.  The original Atom equation used
   \<forall>x instead of \<exists>x, which denotes the empty set for any type with two or
   more elements and thus made the Atom case vacuous. *)
fun L\<^sub>s\<^sub>u\<^sub>b :: "'a::order rexp => 'a lang"
  where L\<^sub>s\<^sub>u\<^sub>b_Emp:   "L\<^sub>s\<^sub>u\<^sub>b Zero        = {}"
       |L\<^sub>s\<^sub>u\<^sub>b_One:   "L\<^sub>s\<^sub>u\<^sub>b One         = {[]}"
       |L\<^sub>s\<^sub>u\<^sub>b_Atom:  "L\<^sub>s\<^sub>u\<^sub>b (\<lfloor>a\<rfloor>)       = {z . \<exists>x. x \<le> a \<and> z=[x]}"
       |L\<^sub>s\<^sub>u\<^sub>b_Un:    "L\<^sub>s\<^sub>u\<^sub>b (el || er)  = (L\<^sub>s\<^sub>u\<^sub>b el) \<union> (L\<^sub>s\<^sub>u\<^sub>b er)"
       |L\<^sub>s\<^sub>u\<^sub>b_Conc:  "L\<^sub>s\<^sub>u\<^sub>b (el ~~ er)  = {xs@ys | xs ys. xs \<in> L\<^sub>s\<^sub>u\<^sub>b el \<and> ys \<in> L\<^sub>s\<^sub>u\<^sub>b er}"
       |L\<^sub>s\<^sub>u\<^sub>b_Star:  "L\<^sub>s\<^sub>u\<^sub>b (Star e)    = Regular_Set.star(L\<^sub>s\<^sub>u\<^sub>b e)"
|
||||
|
||||
|
||||
definition XX where "XX = (rexp2na example_expression)"
|
||||
definition YY where "YY = na2da(rexp2na example_expression)"
|
||||
(* reminder from execute *)
|
||||
value "NA.accepts (rexp2na example_expression) [0,1,1,0,0,1]"
|
||||
value "DA.accepts (na2da (rexp2na example_expression)) [0,1,1,0,0,1]"
|
||||
|
||||
section\<open>HOL - Adaptions and Export to SML\<close>
|
||||
|
||||
definition enabled :: "('a,'\<sigma> set)da \<Rightarrow> '\<sigma> set \<Rightarrow> 'a list \<Rightarrow> 'a list"
|
||||
where "enabled A \<sigma> = filter (\<lambda>x. next A x \<sigma> \<noteq> {}) "
|
||||
|
||||
|
||||
definition zero where "zero = (0::nat)"
|
||||
definition one where "one = (1::nat)"
|
||||
|
||||
export_code zero one Suc Int.nat nat_of_integer int_of_integer (* for debugging *)
|
||||
example_expression (* for debugging *)
|
||||
|
||||
Zero One Atom Plus Times Star (* regexp abstract syntax *)
|
||||
|
||||
rexp2na na2da enabled (* low-level automata interface *)
|
||||
NA.accepts DA.accepts
|
||||
in SML module_name RegExpChecker
|
||||
|
||||
subsection\<open>Infrastructure for Reflecting exported SML code\<close>
|
||||
ML\<open>
|
||||
(* Toplevel transition for the reflect_ML_exports command: collect the
   generated files matching args, keep only the ".ML" ones, and evaluate each
   one in the current ML context (propagating the resulting ML environment
   into the theory/local theory). *)
fun reflect_local_ML_exports args trans = let
    fun eval_ML_context ctxt = let
        (* only Standard ML exports are evaluated *)
        fun is_sml_file f = String.isSuffix ".ML" (Path.implode (#path f))
        val files = (map (Generated_Files.check_files_in (Context.proof_of ctxt)) args)
        val ml_files = filter is_sml_file (map #1 (maps Generated_Files.get_files_in files))
        val ml_content = map (fn f => Syntax.read_input (Bytes.content (#content f))) ml_files
        (* evaluate the files in order, threading the generic context *)
        fun eval ml_content = fold (fn sml => (ML_Context.exec
                                                 (fn () => ML_Context.eval_source ML_Compiler.flags sml)))
                                   ml_content
      in
        (eval ml_content #> Local_Theory.propagate_ml_env) ctxt
      end
  in
    Toplevel.generic_theory eval_ML_context trans
  end
|
||||
|
||||
|
||||
val files_in_theory =
|
||||
(Parse.underscore >> K [] || Scan.repeat1 Parse.path_binding) --
|
||||
Scan.option (\<^keyword>\<open>(\<close> |-- Parse.!!! (\<^keyword>\<open>in\<close>
|
||||
|-- Parse.theory_name --| \<^keyword>\<open>)\<close>));
|
||||
|
||||
val _ =
|
||||
Outer_Syntax.command \<^command_keyword>\<open>reflect_ML_exports\<close>
|
||||
"evaluate generated Standard ML files"
|
||||
(Parse.and_list1 files_in_theory >> (fn args => reflect_local_ML_exports args));
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
reflect_ML_exports _
|
||||
|
||||
|
||||
|
||||
section\<open>The Abstract Interface For Monitor Expressions\<close>
|
||||
text\<open>Here comes the hic : The reflection of the HOL-Automata module into an SML module
|
||||
with an abstract interface hiding some generation artefacts like the internal states
|
||||
of the deterministic automata ...\<close>
|
||||
|
||||
|
||||
ML\<open>
|
||||
|
||||
structure RegExpInterface : sig
|
||||
type automaton
|
||||
type env
|
||||
type cid
|
||||
val alphabet : term list -> env
|
||||
val ext_alphabet: env -> term list -> env
|
||||
val conv : theory -> term -> env -> int RegExpChecker.rexp (* for debugging *)
|
||||
val rexp_term2da: theory -> env -> term -> automaton
|
||||
val enabled : automaton -> env -> cid list
|
||||
val next : automaton -> env -> cid -> automaton
|
||||
val final : automaton -> bool
|
||||
val accepts : automaton -> env -> cid list -> bool
|
||||
end
|
||||
=
|
||||
struct
|
||||
local open RegExpChecker in
|
||||
|
||||
type state = bool list RegExpChecker.set
|
||||
type env = string list
|
||||
type cid = string
|
||||
|
||||
type automaton = state * ((Int.int -> state -> state) * (state -> bool))
|
||||
|
||||
val add_atom = fold_aterms (fn Const (c as (_, \<^Type>\<open>rexp _\<close>)) => insert (op=) c | _=> I);
|
||||
fun alphabet termS = rev(map fst (fold add_atom termS []));
|
||||
fun ext_alphabet env termS =
|
||||
let val res = rev(map fst (fold add_atom termS [])) @ env;
|
||||
val _ = if has_duplicates (op=) res
|
||||
then error("reject and accept alphabets must be disjoint!")
|
||||
else ()
|
||||
in res end;
|
||||
|
||||
(* Translate a HOL regular-expression term over class-id constants into the
   reflected RegExpChecker.rexp over integers; env maps alphabet positions to
   class-id constant names.  The derived operators opt and rep1 are expanded
   into their Plus/Times definitions.  Any other rexp-typed constant is looked
   up in env and becomes an integer Atom; everything else is rejected. *)
fun conv _ \<^Const_>\<open>Regular_Exp.rexp.Zero _\<close> _ = Zero
   |conv _ \<^Const_>\<open>Regular_Exp.rexp.One _\<close> _ = Onea
   |conv thy \<^Const_>\<open>Regular_Exp.rexp.Times _ for X Y\<close> env = Times(conv thy X env, conv thy Y env)
   |conv thy \<^Const_>\<open>Regular_Exp.rexp.Plus _ for X Y\<close> env = Plus(conv thy X env, conv thy Y env)
   |conv thy \<^Const_>\<open>Regular_Exp.rexp.Star _ for X\<close> env = Star(conv thy X env)
   |conv thy \<^Const_>\<open>RegExpInterface.opt _ for X\<close> env = Plus(conv thy X env, Onea)
   |conv thy \<^Const_>\<open>RegExpInterface.rep1 _ for X\<close> env = Times(conv thy X env, Star(conv thy X env))
   |conv _ (Const (s, \<^Type>\<open>rexp _\<close>)) env =
        let val n = find_index (fn x => x = s) env
            val _ = if n<0 then error"conversion error of regexp." else ()
        in Atom(n) end
   |conv thy S _ = error("conversion error of regexp:" ^ (Syntax.string_of_term_global thy S))
|
||||
|
||||
val eq_int = {equal = curry(op =) : Int.int -> Int.int -> bool};
|
||||
val eq_bool_list = {equal = curry(op =) : bool list -> bool list -> bool};
|
||||
|
||||
fun rexp_term2da thy env term = let val rexp = conv thy term env;
|
||||
val nda = RegExpChecker.rexp2na eq_int rexp;
|
||||
val da = RegExpChecker.na2da eq_bool_list nda;
|
||||
in da end;
|
||||
|
||||
|
||||
(* here comes the main interface of the module:
|
||||
- "enabled" gives the part of the alphabet "env" for which the automatan does not
|
||||
go into a final state
|
||||
- next provides an automata transformation that produces an automaton that
|
||||
recognizes the rest of a word after a *)
|
||||
(* enabled da env: the subset of the alphabet env for which the automaton has
   a transition from its current state; indices returned by the reflected
   checker are mapped back to class identifiers. *)
fun enabled (da as (state,(_,_))) env =
       let val inds = RegExpChecker.enabled da state (0 upto (length env - 1))
       in map (fn i => nth env i) inds end

(* Advance the automaton by one symbol a (which must occur in env); the step
   function and acceptance predicate are carried along unchanged. *)
fun next (current_state, (step,fin)) env a =
       let val index = find_index (fn x => x = a) env
       in if index < 0 then error"undefined id for monitor"
          else (step index current_state,(step,fin))
       end

(* Is the automaton currently in an accepting state? *)
fun final (current_state, (_,fin)) = fin current_state

(* Run the automaton on a whole word of class identifiers; every identifier
   must occur in env, otherwise an error is raised. *)
fun accepts da env word = let fun index a = find_index (fn x => x = a) env
                              val indexL = map index word
                              val _ = if forall (fn x => x >= 0) indexL then ()
                                      else error"undefined id for monitor"
                          in RegExpChecker.accepts da indexL end
|
||||
|
||||
end; (* local *)
|
||||
end (* struct *)
|
||||
\<close>
|
||||
|
||||
lemma regexp_sub : "a \<le> b \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b (\<lfloor>a\<rfloor>) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (\<lfloor>b\<rfloor>)"
|
||||
using dual_order.trans by auto
|
||||
|
||||
lemma regexp_seq_mono:
|
||||
"Lang(a) \<subseteq> Lang (a') \<Longrightarrow> Lang(b) \<subseteq> Lang (b') \<Longrightarrow> Lang(a ~~ b) \<subseteq> Lang(a' ~~ b')" by auto
|
||||
|
||||
lemma regexp_seq_mono':
|
||||
"L\<^sub>s\<^sub>u\<^sub>b(a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (a') \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b(b) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (b') \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b(a ~~ b) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b(a' ~~ b')" by auto
|
||||
|
||||
lemma regexp_alt_mono :"Lang(a) \<subseteq> Lang (a') \<Longrightarrow> Lang(a || b) \<subseteq> Lang(a' || b)" by auto
|
||||
|
||||
lemma regexp_alt_mono' :"L\<^sub>s\<^sub>u\<^sub>b(a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (a') \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b(a || b) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b(a' || b)" by auto
|
||||
|
||||
lemma regexp_alt_commute : "Lang(a || b) = Lang(b || a)" by auto
|
||||
|
||||
lemma regexp_alt_commute' : "L\<^sub>s\<^sub>u\<^sub>b(a || b) = L\<^sub>s\<^sub>u\<^sub>b(b || a)" by auto
|
||||
|
||||
lemma regexp_unit_right : "Lang (a) = Lang (a ~~ One) " by simp
|
||||
|
||||
lemma regexp_unit_right' : "L\<^sub>s\<^sub>u\<^sub>b (a) = L\<^sub>s\<^sub>u\<^sub>b (a ~~ One) " by simp
|
||||
|
||||
lemma regexp_unit_left : "Lang (a) = Lang (One ~~ a) " by simp
|
||||
|
||||
lemma regexp_unit_left' : "L\<^sub>s\<^sub>u\<^sub>b (a) = L\<^sub>s\<^sub>u\<^sub>b (One ~~ a) " by simp
|
||||
|
||||
lemma opt_star_incl :"Lang (opt a) \<subseteq> Lang (Star a)" by (simp add: opt_def subset_iff)
|
||||
|
||||
lemma opt_star_incl':"L\<^sub>s\<^sub>u\<^sub>b (opt a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (Star a)" by (simp add: opt_def subset_iff)
|
||||
|
||||
lemma rep1_star_incl:"Lang (rep1 a) \<subseteq> Lang (Star a)"
|
||||
unfolding rep1_def by(subst L_Star, subst L_Conc)(force)
|
||||
|
||||
lemma rep1_star_incl':"L\<^sub>s\<^sub>u\<^sub>b (rep1 a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (Star a)"
|
||||
unfolding rep1_def by(subst L\<^sub>s\<^sub>u\<^sub>b_Star, subst L\<^sub>s\<^sub>u\<^sub>b_Conc)(force)
|
||||
|
||||
lemma cancel_rep1 : "Lang (a) \<subseteq> Lang (rep1 a)"
|
||||
unfolding rep1_def by auto
|
||||
|
||||
lemma cancel_rep1' : "L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (rep1 a)"
|
||||
unfolding rep1_def by auto
|
||||
|
||||
lemma seq_cancel_opt : "Lang (a) \<subseteq> Lang (c) \<Longrightarrow> Lang (a) \<subseteq> Lang (opt b ~~ c)"
|
||||
by(subst regexp_unit_left, rule regexp_seq_mono)(simp_all add: opt_def)
|
||||
|
||||
lemma seq_cancel_opt' : "L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (c) \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (opt b ~~ c)"
|
||||
by(subst regexp_unit_left', rule regexp_seq_mono')(simp_all add: opt_def)
|
||||
|
||||
lemma seq_cancel_Star : "Lang (a) \<subseteq> Lang (c) \<Longrightarrow> Lang (a) \<subseteq> Lang (Star b ~~ c)"
|
||||
by auto
|
||||
|
||||
lemma seq_cancel_Star' : "L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (c) \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (Star b ~~ c)"
|
||||
by auto
|
||||
|
||||
lemma mono_Star : "Lang (a) \<subseteq> Lang (b) \<Longrightarrow> Lang (Star a) \<subseteq> Lang (Star b)"
|
||||
by(auto)(metis in_star_iff_concat order.trans)
|
||||
|
||||
lemma mono_Star' : "L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (b) \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b (Star a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (Star b)"
|
||||
by(auto)(metis in_star_iff_concat order.trans)
|
||||
|
||||
lemma mono_rep1_star:"Lang (a) \<subseteq> Lang (b) \<Longrightarrow> Lang (rep1 a) \<subseteq> Lang (Star b)"
|
||||
using mono_Star rep1_star_incl by blast
|
||||
|
||||
lemma mono_rep1_star':"L\<^sub>s\<^sub>u\<^sub>b (a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (b) \<Longrightarrow> L\<^sub>s\<^sub>u\<^sub>b (rep1 a) \<subseteq> L\<^sub>s\<^sub>u\<^sub>b (Star b)"
|
||||
using mono_Star' rep1_star_incl' by blast
|
||||
|
||||
|
||||
no_notation Star ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
|
||||
no_notation Plus (infixr "||" 55)
|
||||
no_notation Times (infixr "~~" 60)
|
||||
no_notation Atom ("\<lfloor>_\<rfloor>" 65)
|
||||
no_notation rep1 ("\<lbrace>(_)\<rbrace>\<^sup>+")
|
||||
no_notation opt ("\<lbrakk>(_)\<rbrakk>")
|
||||
|
||||
ML\<open>
|
||||
structure RegExpInterface_Notations =
|
||||
struct
|
||||
val Star = (\<^term>\<open>Regular_Exp.Star\<close>, Mixfix (Syntax.read_input "\<lbrace>(_)\<rbrace>\<^sup>*", [0], 100, Position.no_range))
|
||||
val Plus = (\<^term>\<open>Regular_Exp.Plus\<close>, Infixr (Syntax.read_input "||", 55, Position.no_range))
|
||||
val Times = (\<^term>\<open>Regular_Exp.Times\<close>, Infixr (Syntax.read_input "~~", 60, Position.no_range))
|
||||
val Atom = (\<^term>\<open>Regular_Exp.Atom\<close>, Mixfix (Syntax.read_input "\<lfloor>_\<rfloor>", [], 65, Position.no_range))
|
||||
val opt = (\<^term>\<open>RegExpInterface.opt\<close>, Mixfix (Syntax.read_input "\<lbrakk>(_)\<rbrakk>", [], 1000, Position.no_range))
|
||||
val rep1 = (\<^term>\<open>RegExpInterface.rep1\<close>, Mixfix (Syntax.read_input "\<lbrace>(_)\<rbrace>\<^sup>+", [], 1000, Position.no_range))
|
||||
val notations = [Star, Plus, Times, Atom, rep1, opt]
|
||||
end
|
||||
\<close>
|
||||
|
||||
end
|
||||
|
||||
@ -1,27 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory "Isabelle_DOF_Manual"
|
||||
imports "M_07_Implementation"
|
||||
begin
|
||||
close_monitor*[this]
|
||||
check_doc_global
|
||||
text\<open>Resulting trace in \<^verbatim>\<open>doc_item\<close> ''this'': \<close>
|
||||
ML\<open>@{trace_attribute this}\<close>
|
||||
|
||||
|
||||
end
|
||||
(*>*)
|
||||
|
||||
|
||||
@ -1,152 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2022 The University of Exeter
|
||||
* 2018-2022 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory "M_00_Frontmatter"
|
||||
imports
|
||||
"Isabelle_DOF.technical_report"
|
||||
begin
|
||||
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology "technical_report"
|
||||
|
||||
section\<open>Local Document Setup.\<close>
|
||||
text\<open>Introducing document specific abbreviations and macros:\<close>
|
||||
|
||||
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
|
||||
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
|
||||
|
||||
define_shortcut* TeXLive \<rightleftharpoons> \<open>\TeXLive\<close>
|
||||
BibTeX \<rightleftharpoons> \<open>\BibTeX{}\<close>
|
||||
LaTeX \<rightleftharpoons> \<open>\LaTeX{}\<close>
|
||||
TeX \<rightleftharpoons> \<open>\TeX{}\<close>
|
||||
dofurl \<rightleftharpoons> \<open>\dofurl\<close>
|
||||
pdf \<rightleftharpoons> \<open>PDF\<close>
|
||||
|
||||
text\<open>Note that these setups assume that the associated \<^LaTeX> macros
|
||||
are defined, \<^eg>, in the document prelude. \<close>
|
||||
|
||||
define_macro* index \<rightleftharpoons> \<open>\index{\<close> _ \<open>}\<close>
|
||||
define_macro* bindex \<rightleftharpoons> \<open>\bindex{\<close> _ \<open>}\<close>
|
||||
define_macro* nolinkurl \<rightleftharpoons> \<open>\nolinkurl{\<close> _ \<open>}\<close>
|
||||
define_macro* center \<rightleftharpoons> \<open>\center{\<close> _ \<open>}\<close>
|
||||
define_macro* ltxinline \<rightleftharpoons> \<open>\inlineltx|\<close> _ \<open>|\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun boxed_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
DOF_lib.gen_text_antiquotation name DOF_lib.report_text
|
||||
(fn ctxt => DOF_lib.string_2_text_antiquotation ctxt
|
||||
#> DOF_lib.enclose_env false ctxt "isarbox")
|
||||
|
||||
val neant = K(Latex.text("",\<^here>))
|
||||
|
||||
fun boxed_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
DOF_lib.gen_text_antiquotation name DOF_lib.report_theory_text
|
||||
(fn ctxt => DOF_lib.string_2_theory_text_antiquotation ctxt
|
||||
#> DOF_lib.enclose_env false ctxt "isarbox"
|
||||
(* #> neant *)) (*debugging *)
|
||||
|
||||
fun boxed_sml_text_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "sml")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_pdf_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "out")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_latex_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "ltx")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_bash_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "bash")
|
||||
(* the simplest conversion possible *)
|
||||
\<close>
|
||||
|
||||
setup\<open>boxed_text_antiquotation \<^binding>\<open>boxed_text\<close> #>
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_cartouche\<close> #>
|
||||
boxed_theory_text_antiquotation \<^binding>\<open>boxed_theory_text\<close> #>
|
||||
|
||||
boxed_sml_text_antiquotation \<^binding>\<open>boxed_sml\<close> #>
|
||||
boxed_pdf_antiquotation \<^binding>\<open>boxed_pdf\<close> #>
|
||||
boxed_latex_antiquotation \<^binding>\<open>boxed_latex\<close>#>
|
||||
boxed_bash_antiquotation \<^binding>\<open>boxed_bash\<close>
|
||||
\<close>
|
||||
|
||||
open_monitor*[this::report]
|
||||
|
||||
(*>*)
|
||||
|
||||
title*[title::title] \<open>Isabelle/DOF\<close>
|
||||
subtitle*[subtitle::subtitle]\<open>User and Implementation Manual\<close>
|
||||
author*[ adb,
|
||||
email ="\<open>a.brucker@exeter.ac.uk\<close>",
|
||||
orcid ="\<open>0000-0002-6355-1200\<close>",
|
||||
http_site ="\<open>https://www.brucker.ch/\<close>",
|
||||
affiliation ="\<open>University of Exeter, Exeter, UK\<close>"]\<open>Achim D. Brucker\<close>
|
||||
author*[ nico,
|
||||
email = "\<open>nicolas.meric@lri.fr\<close>",
|
||||
affiliation = "\<open>Université Paris-Saclay, LRI, Paris, France\<close>"]\<open>Nicolas Méric\<close>
|
||||
author*[ bu,
|
||||
email = "\<open>wolff@lri.fr\<close>",
|
||||
affiliation = "\<open>Université Paris-Saclay, LRI, Paris, France\<close>"]\<open>Burkhart Wolff\<close>
|
||||
|
||||
abstract*[abs, keywordlist="[\<open>Ontology\<close>, \<open>Ontological Modeling\<close>, \<open>Document Management\<close>,
|
||||
\<open>Formal Document Development\<close>,\<open>Isabelle/DOF\<close>]"]
|
||||
\<open> \<^isadof> provides an implementation of \<^dof> on top of Isabelle/HOL.
|
||||
\<^dof> itself is a novel framework for \<^emph>\<open>defining\<close> ontologies
|
||||
and \<^emph>\<open>enforcing\<close> them during document development and document
|
||||
evolution. \<^isadof> targets use-cases such as mathematical texts referring
|
||||
to a theory development or technical reports requiring a particular structure.
|
||||
A major application of \<^dof> is the integrated development of
|
||||
formal certification documents (\<^eg>, for Common Criteria or CENELEC
|
||||
50128) that require consistency across both formal and informal
|
||||
arguments.
|
||||
|
||||
\<^isadof> is integrated into Isabelle's IDE, which
|
||||
allows for smooth ontology development as well as immediate
|
||||
ontological feedback during the editing of a document.
|
||||
Its checking facilities leverage the collaborative
|
||||
development of documents required to be consistent with an
|
||||
underlying ontological structure.
|
||||
|
||||
In this user-manual, we give an in-depth presentation of the design
|
||||
concepts of \<^dof>'s Ontology Definition Language (ODL) and describe
|
||||
comprehensively its major commands. Many examples show typical best-practice
|
||||
applications of the system.
|
||||
|
||||
It is a unique feature of \<^isadof> that ontologies may be used to control
|
||||
the link between formal and informal content in documents inan automatic-checked way.
|
||||
These links can connect both text elements and formal
|
||||
modeling elements such as terms, definitions, code and logical formulas,
|
||||
altogether \<^emph>\<open>integrated\<close> into a state-of-the-art interactive theorem prover.
|
||||
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
||||
|
||||
@ -1,157 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory "M_01_Introduction"
|
||||
imports "M_00_Frontmatter"
|
||||
begin
|
||||
(*>*)
|
||||
|
||||
|
||||
chapter*[intro::introduction]\<open> Introduction \<close>
|
||||
text*[introtext::introduction]\<open>
|
||||
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the most pervasive challenge in the
|
||||
digitization of knowledge and its propagation. This challenge incites numerous research efforts
|
||||
summarized under the labels ``semantic web,'' ``data mining,'' or any form of advanced ``semantic''
|
||||
text processing. A key role in structuring this linking plays is \<^emph>\<open>document ontologies\<close> (also called
|
||||
\<^emph>\<open>vocabulary\<close> in the semantic web community~\<^cite>\<open>"w3c:ontologies:2015"\<close>), \<^ie>, a machine-readable
|
||||
form of the structure of documents as well as the document discourse.
|
||||
|
||||
Such ontologies can be used for the scientific discourse within scholarly articles, mathematical
|
||||
libraries, and in the engineering discourse of standardized software certification
|
||||
documents~\<^cite>\<open>"boulanger:cenelec-50128:2015" and "cc:cc-part3:2006"\<close>. All these
|
||||
documents contain formal content and have to follow a given structure. In practice, large groups of developers have to produce a substantial
|
||||
set of documents where consistency is notoriously difficult to maintain. In particular,
|
||||
certifications are centred around the \<^emph>\<open>traceability\<close> of requirements throughout the entire
|
||||
set of documents. While technical solutions for the traceability problem exist (most notably:
|
||||
DOORS~\<^cite>\<open>"ibm:doors:2019"\<close>), they are weak in the treatment of formal entities (such as formulas
|
||||
and their logical contexts).
|
||||
|
||||
Further applications are the domain-specific discourse in juridical texts or medical reports.
|
||||
In general, an ontology is a formal explicit description of \<^emph>\<open>concepts\<close> in a domain of discourse
|
||||
(called \<^emph>\<open>classes\<close>), components (called \<^emph>\<open>attributes\<close>) of the concept, and properties (called
|
||||
\<^emph>\<open>invariants\<close>) on concepts. Logically, classes are represented by a type (the class type) and
|
||||
particular terms representing \<^emph>\<open>instances\<close> of them. Since components are typed, it is therefore
|
||||
possible to express \<^emph>\<open>links\<close> like \<open>m\<close>-to-\<open>n\<close> relations between classes.
|
||||
Another form of link between concepts is the \<^emph>\<open>is-a\<close> relation declaring
|
||||
the instances of a subclass to be instances of the super-class.
|
||||
|
||||
Engineering an ontological language for documents that contain both formal and informal elements
|
||||
as occurring in formal theories is a particular challenge. To address this latter, we present
|
||||
the Document Ontology Framework (\<^dof>) and an implementation of \<^dof> called \<^isadof>.
|
||||
\<^dof> is designed for building scalable and user-friendly tools on top of interactive theorem
|
||||
provers. \<^isadof> is an instance of this novel framework, implemented as an extension of Isabelle/HOL,
|
||||
to \<^emph>\<open>model\<close> typed ontologies and to \<^emph>\<open>enforce\<close> them during document evolution. Based on Isabelle's
|
||||
infrastructures, ontologies may refer to types, terms, proven theorems, code, or established
|
||||
assertions. Based on a novel adaption of the Isabelle IDE (called PIDE, \<^cite>\<open>"wenzel:asynchronous:2014"\<close>),
|
||||
a document is checked to be \<^emph>\<open>conform\<close> to a particular ontology---\<^isadof> is designed to give fast
|
||||
user-feedback \<^emph>\<open>during the capture of content\<close>. This is particularly valuable in the case of document
|
||||
evolution, where the \<^emph>\<open>coherence\<close> between the formal and the informal parts of the content can
|
||||
be mechanically checked.
|
||||
|
||||
To avoid any misunderstanding: \<^isadof> is \<^emph>\<open>not a theory in HOL\<close> on ontologies and operations to
|
||||
track and trace links in texts. It is an \<^emph>\<open>environment to write structured text\<close> which
|
||||
\<^emph>\<open>may contain\<close> Isabelle/HOL definitions and proofs like mathematical articles, tech-reports and
|
||||
scientific papers---as the present one, which is written in \<^isadof> itself. \<^isadof> is a plugin
|
||||
into the Isabelle/Isar framework in the style of~\<^cite>\<open>"wenzel.ea:building:2007"\<close>. However,
|
||||
\<^isadof> will generate from ontologies a theory infrastructure consisting of types, terms, theorems
|
||||
and code that allows both interactive checking and formal reasoning over meta-data
|
||||
related to annotated documents.\<close>
|
||||
|
||||
subsubsection\<open>How to Read This Manual\<close>
|
||||
(*<*)
|
||||
declare_reference*[background::text_section]
|
||||
declare_reference*[isadof_tour::text_section]
|
||||
declare_reference*[isadof_ontologies::text_section]
|
||||
declare_reference*[writing_doc::text_section]
|
||||
declare_reference*[isadof_developers::text_section]
|
||||
(*>*)
|
||||
text\<open>
|
||||
This manual can be read in different ways, depending on what you want to accomplish. We see three
|
||||
different main user groups:
|
||||
\<^enum> \<^emph>\<open>\<^isadof> users\<close>, \<^ie>, users that just want to edit a core document, be it for a paper or a
|
||||
technical report, using a given ontology. These users should focus on
|
||||
@{docitem (unchecked) \<open>isadof_tour\<close>} and, depending on their knowledge of Isabelle/HOL, also on
|
||||
@{docitem (unchecked) \<open>background\<close>}.
|
||||
\<^enum> \<^emph>\<open>Ontology developers\<close>, \<^ie>, users that want to develop new ontologies or modify existing
|
||||
document ontologies. These users should, after having gained acquaintance as a user, focus
|
||||
on @{docitem (unchecked) \<open>isadof_ontologies\<close>}.
|
||||
\<^enum> \<^emph>\<open>\<^isadof> developers\<close>, \<^ie>, users that want to extend or modify \<^isadof>, \<^eg>, by adding new
|
||||
text-elements. These users should read @{docitem (unchecked) \<open>isadof_developers\<close>}.
|
||||
\<close>
|
||||
|
||||
subsubsection\<open>Typographical Conventions\<close>
|
||||
text\<open>
|
||||
We acknowledge that understanding \<^isadof> and its implementation in all details requires
|
||||
separating multiple technological layers or languages. To help the reader with this, we
|
||||
will type-set the different languages in different styles. In particular, we will use
|
||||
\<^item> a light-blue background for input written in Isabelle's Isar language, \<^eg>:
|
||||
@{boxed_theory_text [display]
|
||||
\<open>lemma refl: "x = x"
|
||||
by simp\<close>}
|
||||
\<^item> a green background for examples of generated document fragments (\<^ie>, PDF output):
|
||||
@{boxed_pdf [display] \<open>The axiom refl\<close>}
|
||||
\<^item> a red background for SML-code:
|
||||
@{boxed_sml [display] \<open>fun id x = x\<close>}
|
||||
\<^item> a yellow background for \<^LaTeX>-code:
|
||||
@{boxed_latex [display] \<open>\newcommand{\refl}{$x = x$}\<close>}
|
||||
\<^item> a grey background for shell scripts and interactive shell sessions:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë ls
|
||||
CHANGELOG.md CITATION examples install LICENSE README.md ROOTS src\<close>}
|
||||
\<close>
|
||||
|
||||
subsubsection\<open>How to Cite \<^isadof>\<close>
|
||||
text\<open>
|
||||
If you use or extend \<^isadof> in your publications, please use
|
||||
\<^item> for the \<^isadof> system~\<^cite>\<open>"brucker.ea:isabelle-ontologies:2018"\<close>:
|
||||
\begin{quote}\small
|
||||
A.~D. Brucker, I.~Ait-Sadoune, N. Méric, and B.~Wolff. Using Deep Ontologies in Formal
|
||||
Software Engineering. In \<^emph>\<open>International Conference on Rigorous State-Based Methods (ABZ 2023)\<close>,
|
||||
To appear in Lecture Notes in Computer Science. Springer-Verlag,
|
||||
Heidelberg, 2023. \href{10.1007/978-3-031-33163-3_2} {10.1007/978-3-031-33163-3\_2}.
|
||||
\end{quote}
|
||||
A \<^BibTeX>-entry is available at:
|
||||
\<^url>\<open>https://www.lri.fr/~wolff/bibtex/wolff.html\<close>.
|
||||
\<^item> an older description of the system~\<^cite>\<open>"brucker.ea:isabelle-ontologies:2018"\<close>:
|
||||
\begin{quote}\small
|
||||
A.~D. Brucker, I.~Ait-Sadoune, P.~Crisafulli, and B.~Wolff. Using the {Isabelle} ontology
|
||||
framework: Linking the formal with the informal. In \<^emph>\<open>Conference on Intelligent Computer
|
||||
Mathematics (CICM)\<close>, number 11006 in Lecture Notes in Computer Science. Springer-Verlag,
|
||||
Heidelberg, 2018. \href{https://doi.org/10.1007/978-3-319-96812-4_3}
|
||||
{10.1007/978-3-319-96812-4\_3}.
|
||||
\end{quote}
|
||||
A \<^BibTeX>-entry is available at:
|
||||
\<^url>\<open>https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelle-ontologies-2018\<close>.
|
||||
\<^item> for the implementation of \<^isadof>~\<^cite>\<open>"brucker.ea:isabelledof:2019"\<close>:
|
||||
\begin{quote}\small
|
||||
A.~D. Brucker and B.~Wolff. \<^isadof>: Design and implementation. In P.C.~{\"O}lveczky and
|
||||
G.~Sala{\"u}n, editors, \<^emph>\<open>Software Engineering and Formal Methods (SEFM)\<close>, number 11724 in
|
||||
Lecture Notes in Computer Science. Springer-Verlag, Heidelberg, 2019.
|
||||
\href{https://doi.org/10.1007/978-3-030-30446-1_15}{10.1007/978-3-030-30446-1\_15}.
|
||||
\end{quote}
|
||||
A \<^BibTeX>-entry is available at:
|
||||
\<^url>\<open>https://www.brucker.ch/bibliography/abstract/brucker.ea-isabelledof-2019\<close>.
|
||||
\<^item> for an application of \<^isadof> in the context of certifications:
|
||||
\begin{quote}\small
|
||||
A.~D. Brucker and B.~Wolff.
|
||||
Using Ontologies in Formal Developments Targeting Certification.
|
||||
In W. Ahrendt and S. Tarifa, editors. \<^emph>\<open>Integrated Formal Methods (IFM)\<close>, number 11918.
|
||||
Lecture Notes in Computer Science. Springer-Verlag, Heidelberg, 2019.
|
||||
\<^url>\<open>https://doi.org/10.1007/978-3-030-34968-4_4\<close>.
|
||||
\end{quote}
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
||||
|
||||
@ -1,239 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2023 The University of Exeter
|
||||
* 2018-2023 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory "M_02_Background"
|
||||
imports "M_01_Introduction"
|
||||
begin
|
||||
(*>*)
|
||||
|
||||
chapter*[background::text_section]\<open> Background\<close>
|
||||
section*[bgrnd1::introduction]\<open>The Isabelle System Architecture\<close>
|
||||
|
||||
figure*[architecture::figure,relative_width="95",file_src="''figures/isabelle-architecture.pdf''"]\<open>
|
||||
The system architecture of Isabelle (left-hand side) and the
|
||||
asynchronous communication between the Isabelle system and
|
||||
the IDE (right-hand side). \<close>
|
||||
|
||||
text*[bg::introduction]\<open>
|
||||
While Isabelle is widely perceived as an interactive theorem
|
||||
prover for HOL (Higher-order Logic)~\<^cite>\<open>"nipkow.ea:isabelle:2002"\<close>, we would like to emphasize
|
||||
the view that Isabelle is far more than that: it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This
|
||||
refers to the ``\<^emph>\<open>generic system framework of Isabelle/Isar underlying recent versions of Isabelle.
|
||||
Among other things, Isabelle provides an infrastructure for Isabelle plug-ins, comprising extensible
|
||||
state components and extensible syntax that can be bound to SML programs. Thus, the Isabelle
|
||||
architecture may be understood as an extension and refinement of the traditional `LCF approach',
|
||||
with explicit infrastructure for building derivative systems.\<close>''~\<^cite>\<open>"wenzel.ea:building:2007"\<close>
|
||||
|
||||
The current system framework offers moreover the following features:
|
||||
\<^item> a build management grouping components into to pre-compiled sessions,
|
||||
\<^item> a prover IDE (PIDE) framework~\<^cite>\<open>"wenzel:asynchronous:2014"\<close> with various front-ends,
|
||||
\<^item> documentation-generation,
|
||||
\<^item> code generators for various target languages,
|
||||
\<^item> an extensible front-end language Isabelle/Isar, and,
|
||||
\<^item> last but not least, an LCF style, generic theorem prover kernel as
|
||||
the most prominent and deeply integrated system component.
|
||||
\<close>
|
||||
text\<open>
|
||||
The Isabelle system architecture shown in @{figure \<open>architecture\<close>} comes with many layers,
|
||||
with Standard ML (SML) at the bottom layer as implementation language. The architecture actually
|
||||
foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which resides in the SML structure \<^boxed_sml>\<open>Context\<close>.
|
||||
This structure provides a kind of container called \<^emph>\<open>context\<close> providing an identity, an
|
||||
ancestor-list as well as typed, user-defined state for plugins such as \<^isadof>.
|
||||
On top of the latter, the LCF-Kernel, tactics, automated proof procedures as well as specific
|
||||
support for higher specification constructs were built.\<^footnote>\<open>We use the term \<^emph>\<open>plugin\<close> for a collection
|
||||
of HOL-definitions, SML and Scala code in order to distinguish it from the official Isabelle
|
||||
term \<^emph>\<open>component\<close> which implies a particular format and support by the Isabelle build system.\<close>
|
||||
\<close>
|
||||
|
||||
section*[dof::introduction]\<open>The Document Model Required by \<^dof>\<close>
|
||||
text\<open>
|
||||
In this section, we explain the assumed document model underlying our Document Ontology Framework
|
||||
(\<^dof>) in general. In particular we discuss the concepts
|
||||
\<^emph>\<open>integrated document\<close>\<^bindex>\<open>integrated document\<close>, \<^emph>\<open>sub-document\<close>\<^bindex>\<open>sub-document\<close>,
|
||||
\<^emph>\<open>document-element\<close>\<^bindex>\<open>document-element\<close>, and \<^emph>\<open>semantic macros\<close>\<^bindex>\<open>semantic macros\<close> occurring
|
||||
inside document-elements. This type of document structure is quite common for scripts interactively
|
||||
evaluated in an incremental fashion.
|
||||
Furthermore, we assume a bracketing mechanism that unambiguously allows to separate different
|
||||
syntactic fragments and that can be nested. In the case of Isabelle, these are the guillemot
|
||||
symbols \<open>\<open>...\<close>\<close>, which represent the begin and end of a \<^emph>\<open>cartouche\<close>\<^bindex>\<open>cartouche\<close>.\<close>
|
||||
|
||||
|
||||
(*<*)
|
||||
declare_reference*[docModGenConcr::figure]
|
||||
(*>*)
|
||||
text\<open>
|
||||
The Isabelle Framework is based on a \<^emph>\<open>document-centric view\<close>\<^bindex>\<open>document-centric view\<close> of
|
||||
a document, treating the input in its integrality as set of (user-programmable) \<^emph>\<open>document element\<close>
|
||||
that may mutually depend on and link to each other; A \<^emph>\<open>document\<close> in our sense is what is configured
|
||||
in a set of \<^verbatim>\<open>ROOT\<close>- and \<^verbatim>\<open>ROOTS\<close>-files.
|
||||
|
||||
Isabelle assumes a hierarchical document model\<^index>\<open>document model\<close>, \<^ie>, an \<^emph>\<open>integrated\<close> document
|
||||
consist of a hierarchy of \<^emph>\<open>sub-documents\<close> (files); dependencies are restricted to be
|
||||
acyclic at this level (c.f. @{figure (unchecked) "docModGenConcr"}).
|
||||
Document parts can have different document types in order to capture documentations consisting of
|
||||
documentation, models, proofs, code of various forms and other technical artifacts. We call the
|
||||
main sub-document type, for historical reasons, \<^emph>\<open>theory\<close>-files. A theory file\<^bindex>\<open>theory!file\<close>
|
||||
consists of a \<^emph>\<open>header\<close>\<^bindex>\<open>header\<close>, a \<^emph>\<open>context definition\<close>\<^index>\<open>context\<close>, and a body
|
||||
consisting of a sequence of document elements called
|
||||
\<^emph>\<open>command\<close>s (see @{figure (unchecked) "docModGenConcr"} (left-hand side)). Even
|
||||
the header consists of a sequence of commands used for introductory text elements not depending on
|
||||
any context. The context-definition contains an \<^boxed_theory_text>\<open>import\<close> and a
|
||||
\<^boxed_theory_text>\<open>keyword\<close> section, for example:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
theory Example \<comment>\<open>Name of the 'theory'\<close>
|
||||
imports \<comment>\<open>Declaration of 'theory' dependencies\<close>
|
||||
Main \<comment>\<open>Imports a library called 'Main'\<close>
|
||||
keywords \<comment>\<open>Registration of keywords defined locally\<close>
|
||||
requirement \<comment>\<open>A command for describing requirements\<close> \<close>}
|
||||
where \<^boxed_theory_text>\<open>Example\<close> is the abstract name of the text-file, \<^boxed_theory_text>\<open>Main\<close>
|
||||
refers to an imported theory (recall that the import relation must be acyclic) and
|
||||
\<^boxed_theory_text>\<open>keywords\<close> are used to separate commands from each other.
|
||||
\<close>
|
||||
|
||||
text*[docModGenConcr::float,
|
||||
main_caption="\<open>A Representation of a Document Model.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=45, caption="Schematic Representation.") "figures/doc-mod-generic.pdf"
|
||||
}\<^hfill>@{fig_content (width=45, caption="The Isar Instance.") "figures/doc-mod-isar.pdf"}
|
||||
\<close>
|
||||
|
||||
text\<open>The body of a theory file consists of a sequence of \<^emph>\<open>commands\<close> that must be introduced
|
||||
by a command keyword such as \<^boxed_theory_text>\<open>requirement\<close> above. Command keywords may mark
|
||||
the the begin of a text that is parsed by a command-specific parser; the end of the
|
||||
command-span is defined by the next keyword. Commands were used to define definitions, lemmas,
|
||||
code and text-elements (see @{float "docModGenConcr"} (right-hand side)). \<close>
|
||||
|
||||
text\<open> A simple text-element \<^index>\<open>text-element\<close> may look like this:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open> This is a simple text.\<close>\<close>}
|
||||
\ldots so it is a command \<^theory_text>\<open>text\<close> followed by an argument (here in \<open>\<open> ... \<close>\<close> parenthesis) which
|
||||
contains characters. While \<^theory_text>\<open>text\<close>-elements play a major role in this manual---document
|
||||
generation is the main use-case of \<^dof> in its current stage---it is important to note that there
|
||||
are actually three families of ``ontology aware'' document elements with analogous
|
||||
syntax to standard ones. The difference is a bracket with meta-data of the form:
|
||||
@{theory_text [display,indent=5, margin=70]
|
||||
\<open>
|
||||
text*[label::classid, attr\<^sub>1=E\<^sub>1, ... attr\<^sub>n=E\<^sub>n]\<open> some semi-formal text \<close>
|
||||
ML*[label::classid, attr\<^sub>1=E\<^sub>1, ... attr\<^sub>n=E\<^sub>n]\<open> some SML code \<close>
|
||||
value*[label::classid, attr\<^sub>1=E\<^sub>1, ... attr\<^sub>n=E\<^sub>n]\<open> some annotated \<lambda>-term \<close>
|
||||
\<close>}
|
||||
|
||||
Other instances of document elements belonging to these families are, for example, the free-form
|
||||
\<^theory_text>\<open>Definition*\<close> and \<^theory_text>\<open>Lemma*\<close> as well as their formal counterparts \<^theory_text>\<open>definition*\<close> and \<^theory_text>\<open>lemma*\<close>,
|
||||
which allow in addition to their standard Isabelle functionality the creation and management of
|
||||
ontology-generated meta-data associated to them (cf. -@{text_section (unchecked) "writing_doc"}).
|
||||
|
||||
Depending on the family, we will speak about \<^emph>\<open>(formal) text-contexts\<close>,\<^bindex>\<open>formal text-contexts\<close>
|
||||
\<^emph>\<open>(ML) code-contexts\<close>\<^bindex>\<open>code-contexts\<close> and \<^emph>\<open>term-contexts\<close>\<^bindex>\<open>term-contexts\<close> if we refer
|
||||
to sub-elements inside the \<open>\<open>...\<close>\<close> cartouches of these command families.
|
||||
|
||||
Text- code- or term contexts may contain a special form comment, that may be considered as a
|
||||
"semantic macro" or a machine-checked annotation: the so-called antiquotations\<^bindex>\<open>antiquotation\<close>.
|
||||
Its Its general syntactic format reads as follows:
|
||||
|
||||
@{boxed_theory_text [display]\<open> @{antiquotation_name (args) [more_args] \<open>sub-context\<close> }\<close>}
|
||||
|
||||
The sub-context may be different from the surrounding one; therefore, it is possible
|
||||
to switch from a text-context to a term-context, for example. Therefore, antiquotations allow
|
||||
the nesting of cartouches, albeit not all combinations are actually supported.\<^footnote>\<open>In the
|
||||
literature, this concept has been referred to as \<open>Cascade-Syntax\<close> and was used in the
|
||||
Centaur-system and exists in some limited form in some Emacs implementations these days. \<close>
|
||||
Isabelle comes with a number of built-in antiquotations for text- and code-contexts;
|
||||
a detailed overview can be found in \<^cite>\<open>"wenzel:isabelle-isar:2020"\<close>. \<^dof> reuses this general
|
||||
infrastructure but \<^emph>\<open>generates\<close> its own families of antiquotations from ontologies.\<close>
|
||||
|
||||
text\<open> An example for a text-element \<^index>\<open>text-element\<close> using built-in antiquotations
|
||||
may look like this:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open> According to the \<^emph>\<open>reflexivity\<close> axiom @{thm refl},
|
||||
we obtain in \<Gamma> for @{term "fac 5"} the result @{value "fac 5"}.\<close>\<close>}
|
||||
... so it is a command \<^theory_text>\<open>text\<close> followed by an argument (here in \<open>\<open> ... \<close>\<close> parenthesis) which
|
||||
contains characters and a special notation for semantic macros \<^bindex>\<open>semantic macros\<close>
|
||||
(here \<^theory_text>\<open>@{term "fac 5"}\<close>).
|
||||
|
||||
The above text element is represented in the final document (\<^eg>, a PDF) by:
|
||||
|
||||
@{boxed_pdf [display]
|
||||
\<open>According to the $\emph{reflexivity}$ axiom $\mathrm{x = x}$, we obtain in $\Gamma$
|
||||
for $\operatorname{fac} \text{\textrm{5}}$ the result $\text{\textrm{120}}$.\<close>
|
||||
}\<close>
|
||||
|
||||
|
||||
text\<open>Antiquotations seen as semantic macros are partial functions of type \<open>logical_context \<rightarrow> text\<close>;
|
||||
since they can use the system state, they can perform all sorts of specific checks or evaluations
|
||||
(type-checks, executions of code-elements, references to text-elements or proven theorems such as
|
||||
\<open>refl\<close>, which is the reference to the axiom of reflexivity).
|
||||
|
||||
Therefore, semantic macros can establish \<^emph>\<open>formal content\<close> inside informal content; they can be
|
||||
type-checked before being displayed and can be used for calculations before being
|
||||
typeset. They represent the device for linking formal with the informal content.
|
||||
\<close>
|
||||
|
||||
text*[docModOnto::float,
|
||||
main_caption="\<open>Documents conform to Ontologies.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=47, caption="A Document with Ontological Annotations.") "figures/doc-mod-DOF.pdf"
|
||||
}\<^hfill>@{fig_content (width=47, caption="Ontological References.") "figures/doc-mod-onto-docinst.pdf"}
|
||||
\<close>
|
||||
|
||||
text\<open>Since Isabelle's commands are freely programmable, it is possible to implement \<^dof> as an
|
||||
extension of the system. In particular, the ontology language of \<^dof> provides an ontology
|
||||
definition language ODL\<^bindex>\<open>ODL\<close> that \<^emph>\<open>generates\<close> anti-quotations and the infrastructure to check and evaluate
|
||||
them. This allows for checking an annotated document with respect to a given ontology, which may be
|
||||
specific for a given domain-specific universe of discourse (see @{float "docModOnto"}). ODL will
|
||||
be described in @{text_section (unchecked) "isadof_tour"} in more detail.\<close>
|
||||
|
||||
section*[bgrnd21::introduction]\<open>Implementability of the Document Model in other ITP's\<close>
|
||||
text\<open>
|
||||
Batch-mode checkers for \<^dof> can be implemented in all systems of the LCF-style prover family,
|
||||
\<^ie>, systems with a type-checked \<open>term\<close>, and abstract \<open>thm\<close>-type for theorems
|
||||
(protected by a kernel). This includes, \<^eg>, ProofPower, HOL4, HOL-light, Isabelle, or Coq
|
||||
and its derivatives. \<^dof> is, however, designed for fast interaction in an IDE. If a user wants
|
||||
to benefit from this experience, only Isabelle and Coq have the necessary infrastructure of
|
||||
asynchronous proof-processing and support by a PIDE~\<^cite>\<open>"wenzel:asynchronous:2014" and
|
||||
"wenzel:system:2014" and "barras.ea:pervasive:2013" and "faithfull.ea:coqoon:2018"\<close> which
|
||||
in many features over-accomplishes the required features of \<^dof>.
|
||||
\<close>
|
||||
|
||||
figure*["fig_dof_ide",relative_width="95",file_src="''figures/cicm2018-combined.png''"]\<open>
|
||||
The \<^isadof> IDE (left) and the corresponding PDF (right), showing the first page
|
||||
of~\<^cite>\<open>"brucker.ea:isabelle-ontologies:2018"\<close>.\<close>
|
||||
|
||||
text\<open>
|
||||
We call the present implementation of \<^dof> on the Isabelle platform \<^isadof> .
|
||||
@{figure "fig_dof_ide"} shows a screenshot of an introductory paper on
|
||||
\<^isadof>~\<^cite>\<open>"brucker.ea:isabelle-ontologies:2018"\<close>: the \<^isadof> PIDE can be seen on the left,
|
||||
while the generated presentation in PDF is shown on the right.
|
||||
|
||||
Isabelle provides, beyond the features required for \<^dof>, a lot of additional benefits.
|
||||
Besides UTF8-support for characters used in text-elements, Isabelle already offers a built-in
|
||||
mechanism for user-programmable antiquotations \<^index>\<open>antiquotations\<close> which we use to implement
|
||||
semantic macros \<^index>\<open>semantic macros\<close> in \<^isadof> (We will actually use these two terms
|
||||
as synonyms in the context of \<^isadof>). Moreover, \<^isadof> allows for the asynchronous
|
||||
evaluation and checking of the document content~\<^cite>\<open>"wenzel:asynchronous:2014" and
|
||||
"wenzel:system:2014" and "barras.ea:pervasive:2013"\<close> and is dynamically extensible. Its PIDE
|
||||
provides a \<^emph>\<open>continuous build, continuous check\<close> functionality, syntax highlighting, and
|
||||
auto-completion. It also provides infrastructure for displaying meta-information (\<^eg>, binding
|
||||
and type annotation) as pop-ups, while hovering over sub-expressions. A fine-grained dependency
|
||||
analysis allows the processing of individual parts of theory files asynchronously, allowing
|
||||
Isabelle to interactively process large (hundreds of theory files) documents. Isabelle can group
|
||||
sub-documents into sessions, \<^ie>, sub-graphs of the document-structure that can be ``pre-compiled''
|
||||
and loaded instantaneously, \<^ie>, without re-processing, which is an important means to scale up. \<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
||||
|
||||
@ -1,661 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2022 The University of Exeter
|
||||
* 2018-2022 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory
|
||||
"M_03_GuidedTour"
|
||||
imports
|
||||
"M_02_Background"
|
||||
begin
|
||||
(*>*)
|
||||
|
||||
chapter*[isadof_tour::text_section]\<open>\<^isadof>: A Guided Tour\<close>
|
||||
|
||||
text\<open>
|
||||
In this chapter, we will give an introduction into using \<^isadof> for users that want to create and
|
||||
maintain documents following an existing document ontology\<^bindex>\<open>ontology\<close> in ODL\<^bindex>\<open>ODL\<close>.
|
||||
\<close>
|
||||
|
||||
section*[getting_started::technical]\<open>Getting Started\<close>
|
||||
|
||||
subsection*[installation::technical]\<open>Installation\<close>
|
||||
text\<open>
|
||||
In this section, we will show how to install \<^isadof>. We assume a basic familiarity with a
|
||||
Linux/Unix-like command line (i.e., a shell).
|
||||
We focus on the installation of the latest official release of \<^isadof> as
|
||||
available in the Archive of Formal Proofs (AFP).\<^footnote>\<open>If you want to work with the development version
|
||||
of \<^isadof>, please obtain its source code from the \<^isadof> Git repository
|
||||
(\<^url>\<open>https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF\<close> and follow the instructions
|
||||
in the provided \<^verbatim>\<open>README.MD\<close> file).\<close>
|
||||
\<^isadof> requires Isabelle\<^bindex>\<open>Isabelle\<close> with a recent \<^LaTeX>-distribution
|
||||
(e.g., TeX Live 2022 or later).
|
||||
\<close>
|
||||
|
||||
paragraph\<open>Installing Isabelle and the AFP.\<close>
|
||||
text\<open>
|
||||
Please download and install the latest official Isabelle release from the Isabelle Website
|
||||
(\<^url>\<open>https://isabelle.in.tum.de\<close>). After the successful installation of Isabelle, you should be
|
||||
able to call the \<^boxed_bash>\<open>isabelle\<close> tool on the command line:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë isabelle version\<close>}
|
||||
|
||||
Depending on your operating system and depending if you put Isabelle's \<^boxed_bash>\<open>bin\<close> directory
|
||||
in your \<^boxed_bash>\<open>PATH\<close>, you will need to invoke \<^boxed_bash>\<open>isabelle\<close> using its
|
||||
full qualified path.
|
||||
\<close>
|
||||
|
||||
text\<open>
|
||||
Next, download the AFP from \<^url>\<open>https://www.isa-afp.org/download/\<close> and
|
||||
follow the instructions given at \<^url>\<open>https://www.isa-afp.org/help/\<close> for installing the AFP as an
|
||||
Isabelle component.\<close>
|
||||
|
||||
paragraph\<open>Installing \<^TeXLive>.\<close>
|
||||
text\<open>
|
||||
On a Debian-based Linux system (\<^eg>, Ubuntu), the following command
|
||||
should install all required \<^LaTeX> packages:
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë sudo aptitude install texlive-full\<close>}
|
||||
\<close>
|
||||
|
||||
subsubsection*[isadof::technical]\<open>Installing \<^isadof>\<close>
|
||||
text\<open>
|
||||
By installing the AFP in the previous steps, you already installed \<^isadof>. In fact, \<^isadof>
|
||||
currently consists of three AFP entries:
|
||||
|
||||
\<^item> \<^verbatim>\<open>Isabelle_DOF\<close>: This entry
|
||||
contains the \<^isadof> system itself, including the \<^isadof> manual.
|
||||
\<^item> \<^verbatim>\<open>Isabelle_DOF-Example-I\<close>: This entry contains an example of
|
||||
an academic paper written using the \<^isadof> system oriented towards an
|
||||
introductory paper. The text is based on~\<^cite>\<open>"brucker.ea:isabelle-ontologies:2018"\<close>;
|
||||
in the document, we deliberately refrain from integrating references to formal content in order
|
||||
to demonstrate that \<^isadof> can be used for writing documents with very little direct use of
|
||||
\<^LaTeX>.
|
||||
\<^item> \<^verbatim>\<open>Isabelle_DOF-Example-II\<close>: This entry contains another example of
|
||||
a mathematics-oriented academic paper. It is based on~\<^cite>\<open>"taha.ea:philosophers:2020"\<close>.
|
||||
It represents a typical mathematical text, heavy in definitions with complex mathematical notation
|
||||
and a lot of non-trivial cross-referencing between statements, definitions, and proofs which
|
||||
are ontologically tracked. However, with respect to the possible linking between the underlying formal theory
|
||||
and this mathematical presentation, it follows a pragmatic path without any ``deep'' linking to
|
||||
types, terms and theorems, and therefore does deliberately not exploit \<^isadof> 's full potential.\<close>
|
||||
|
||||
|
||||
section*[writing_doc::technical]\<open>Writing Documents\<close>
|
||||
|
||||
subsection*[document::example]\<open>Document Generation\<close>
|
||||
|
||||
text\<open>\<^isadof> provides an enhanced setup for generating PDF documents. In particular, it does
|
||||
not make use of a file called \<^verbatim>\<open>document/root.tex\<close>. Instead, the use of document templates and
|
||||
ontology representations is done within theory files. To make use of this feature, one needs
|
||||
to add the option \<^verbatim>\<open>document_build = dof\<close> to the \<^verbatim>\<open>ROOT\<close> file.
|
||||
An example \<^verbatim>\<open>ROOT\<close> file looks as follows:
|
||||
\<close>
|
||||
text\<open>
|
||||
\begin{config}{ROOT}
|
||||
session example = Isabelle_DOF +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
theories
|
||||
B*)
|
||||
\end{config}
|
||||
|
||||
The document template and ontology can be selected as follows:
|
||||
@{boxed_theory_text [display]
|
||||
\<open>
|
||||
theory C imports Isabelle_DOF.technical_report Isabelle_DOF.scholarly_paper begin
|
||||
list_templates
|
||||
use_template "scrreprt-modern"
|
||||
list_ontologies
|
||||
use_ontology "technical_report" and "scholarly_paper"
|
||||
end
|
||||
\<close>}
|
||||
|
||||
The commands @{boxed_theory_text
|
||||
\<open>
|
||||
list_templates
|
||||
\<close>} and
|
||||
@{boxed_theory_text
|
||||
\<open>
|
||||
list_ontologies
|
||||
\<close>}
|
||||
can be used for inspecting (and selecting) the available ontologies and templates:
|
||||
@{boxed_theory_text [display]
|
||||
\<open>
|
||||
list_templates
|
||||
list_ontologies
|
||||
\<close>}
|
||||
|
||||
Note that you need to import the theories that define the ontologies that you
|
||||
want to use. Otherwise, they will not be available.
|
||||
\<close>
|
||||
|
||||
paragraph\<open>Warning.\<close>
|
||||
text\<open>
|
||||
Note that the session \<^session>\<open>Isabelle_DOF\<close> needs to be part of the ``main'' session
|
||||
hierarchy. Loading the \<^isadof> theories as part of a session section, e.g.,
|
||||
\<close>
|
||||
text\<open>\<^latex>\<open>
|
||||
\begin{config}{ROOT}
|
||||
session example = HOL +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
session
|
||||
Isabelle_DOF.scholarly_paper
|
||||
theories
|
||||
C
|
||||
\end{config}
|
||||
\<close>\<close>
|
||||
text\<open>
|
||||
will not work. Trying to build a document using such a setup will result in the
|
||||
following error message:
|
||||
|
||||
@{boxed_bash [display]\<open>ë\prompt{}ë
|
||||
isabelle build -D .
|
||||
Running example ...
|
||||
Bad document_build engine "dof"
|
||||
example FAILED\<close>}
|
||||
\<close>
|
||||
|
||||
subsection*[naming::example]\<open>Name-Spaces, Long- and Short-Names\<close>
|
||||
text\<open>\<^isadof> is built upon the name space and lexical conventions of Isabelle. Long-names are
|
||||
composed of a name of the session, the name of the theory, and a sequence of local names referring
|
||||
to, \<^eg>, nested specification constructs that are used to identify types, constant symbols,
|
||||
definitions, \<^etc>. The general format of a long-name is
|
||||
|
||||
\<^boxed_theory_text>\<open> session_name.theory_name.local_name. ... .local_name\<close>
|
||||
|
||||
By lexical conventions, theory-names must be unique inside a session
|
||||
(and session names must be unique too), such that long-names are unique by construction.
|
||||
There are actually different name categories that form a proper name space, \<^eg>, the name space for
|
||||
constant symbols and type symbols are distinguished.
|
||||
Additionally, \<^isadof> objects also come with a proper name space: classes (and monitors), instances,
|
||||
low-level class invariants (SML-defined invariants) all follow the lexical conventions of
|
||||
Isabelle. For instance, a class can be referenced outside its theory using
|
||||
its short-name or its long-name if another class with the same name is defined
|
||||
in the current theory.
|
||||
Isabelle identifies names already with the shortest suffix that is unique in the global
|
||||
context and in the appropriate name category. This also holds for pretty-printing, which can
|
||||
at times be confusing since names stemming from the same specification construct may
|
||||
be printed with different prefixes according to their uniqueness.
|
||||
\<close>
|
||||
|
||||
subsection*[cartouches::example]\<open>Caveat: Lexical Conventions of Cartouches, Strings, Names, ... \<close>
|
||||
text\<open>WARNING: The embedding of strings, terms, names \<^etc>, as parts of commands, anti-quotations,
|
||||
terms, \<^etc>, is unfortunately not always so consistent as one might expect, when it comes
|
||||
to variants that should be lexically equivalent in principle. This can be a nuisance for
|
||||
users, but is again a consequence that we build on existing technology that has been developed
|
||||
over decades.
|
||||
\<close>
|
||||
|
||||
text\<open>At times, this causes idiosyncrasies like the ones cited in the following incomplete list:
|
||||
\<^item> text-antiquotations
|
||||
\<^theory_text>\<open>text\<open>@{thm \<doublequote>srac\<^sub>1_def\<doublequote>}\<close>\<close>
|
||||
while \<^theory_text>\<open>text\<open>@{thm \<open>srac\<^sub>1_def\<close>}\<close>\<close> fails
|
||||
\<^item> commands \<^theory_text>\<open>thm fundamental_theorem_of_calculus\<close> and
|
||||
\<^theory_text>\<open>thm \<doublequote>fundamental_theorem_of_calculus\<doublequote>\<close>
|
||||
or \<^theory_text>\<open>lemma \<doublequote>H\<doublequote>\<close> and \<^theory_text>\<open>lemma \<open>H\<close>\<close> and \<^theory_text>\<open>lemma H\<close>
|
||||
\<^item> string expressions
|
||||
\<^theory_text>\<open>term\<open>\<quote>\<quote>abc\<quote>\<quote> @ \<quote>\<quote>cd\<quote>\<quote>\<close>\<close> and equivalent
|
||||
\<^theory_text>\<open>term \<open>\<open>abc\<close> @ \<open>cd\<close>\<close>\<close>;
|
||||
but \<^theory_text>\<open>term\<open>\<open>A \<longrightarrow> B\<close>\<close>\<close> not equivalent to \<^theory_text>\<open>term\<open>\<quote>\<quote>A \<longrightarrow> B\<quote>\<quote>\<close>\<close>
|
||||
which fails.
|
||||
\<close>
|
||||
|
||||
section*[scholar_onto::example]\<open>Writing Academic Publications in \<^boxed_theory_text>\<open>scholarly_paper\<close>\<close>
|
||||
subsection\<open>Editing Major Examples\<close>
|
||||
text\<open>
|
||||
The ontology \<^verbatim>\<open>scholarly_paper\<close> \<^index>\<open>ontology!scholarly\_paper\<close> is an ontology modeling
|
||||
academic/scientific papers, with a slight bias towards texts in the domain of mathematics and
|
||||
engineering.
|
||||
|
||||
You can inspect/edit the example in Isabelle's IDE, by either
|
||||
\<^item> starting Isabelle/jEdit using your graphical user interface (\<^eg>, by clicking on the
|
||||
Isabelle-Icon provided by the Isabelle installation) and loading the file
|
||||
\<^nolinkurl>\<open>Isabelle_DOF-Example-I/IsaDofApplications.thy\<close>
|
||||
\<close>
|
||||
text\<open> You can build the \<^pdf>-document at the command line by calling:
|
||||
@{boxed_bash [display] \<open>ë\prompt{}ë isabelle build Isabelle_DOF-Example-I\<close>}
|
||||
\<close>
|
||||
|
||||
subsection*[sss::technical]\<open>A Bluffers Guide to the \<^verbatim>\<open>scholarly_paper\<close> Ontology\<close>
|
||||
text\<open> In this section we give a minimal overview of the ontology formalized in
|
||||
\<^theory>\<open>Isabelle_DOF.scholarly_paper\<close>. We start by modeling the usual text-elements of an
|
||||
academic paper: the title and author information, abstract, and text section:
|
||||
@{boxed_theory_text [display]
|
||||
\<open>doc_class title =
|
||||
short_title :: "string option" <= "None"
|
||||
|
||||
doc_class subtitle =
|
||||
abbrev :: "string option" <= "None"
|
||||
|
||||
doc_class author =
|
||||
email :: "string" <= "''''"
|
||||
http_site :: "string" <= "''''"
|
||||
orcid :: "string" <= "''''"
|
||||
affiliation :: "string"
|
||||
|
||||
doc_class abstract =
|
||||
keywordlist :: "string list" <= "[]"
|
||||
principal_theorems :: "thm list"\<close>}
|
||||
\<close>
|
||||
|
||||
text\<open>Note \<^const>\<open>short_title\<close> and \<^const>\<open>abbrev\<close> are optional and have the default \<^const>\<open>None\<close>
|
||||
(no value). Note further, that \<^typ>\<open>abstract\<close>s may have a \<^const>\<open>principal_theorems\<close> list, where
|
||||
the built-in \<^isadof> type \<^typ>\<open>thm list\<close> contains references to formally proven theorems that must
|
||||
exist in the logical context of this document; this is a decisive feature of \<^isadof> that
|
||||
conventional ontological languages lack.\<close>
|
||||
|
||||
text\<open>We continue by the introduction of a main class: the text-element \<^typ>\<open>text_section\<close>
|
||||
(in contrast to \<^typ>\<open>figure\<close> or \<open>table\<close> or similar). Note that
|
||||
the \<^const>\<open>main_author\<close> is typed with the class \<^typ>\<open>author\<close>, a HOL type that is automatically
|
||||
derived from the document class definition \<^typ>\<open>author\<close> shown above. It is used to express which
|
||||
author currently ``owns'' this \<^typ>\<open>text_section\<close>, an information that can give rise to
|
||||
presentational or even access-control features in a suitably adapted front-end.
|
||||
|
||||
@{boxed_theory_text [display] \<open>
|
||||
doc_class text_section = text_element +
|
||||
main_author :: "author option" <= None
|
||||
fixme_list :: "string list" <= "[]"
|
||||
level :: "int option" <= "None"
|
||||
\<close>}
|
||||
|
||||
The \<^const>\<open>Isa_COL.text_element.level\<close>-attribute \<^index>\<open>level\<close> enables doc-notation support for
|
||||
headers, chapters, sections, and subsections; we follow here the \<^LaTeX> terminology on levels to
|
||||
which \<^isadof> is currently targeting. The values are interpreted according to the \<^LaTeX>
|
||||
standard. The correspondence between the levels and the structural entities is summarized
|
||||
as follows:
|
||||
|
||||
\<^item> part \<^index>\<open>part\<close> \<open>Some -1\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> chapter \<^index>\<open>chapter\<close> \<open>Some 0\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> section \<^index>\<open>section\<close> \<open>Some 1\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> subsection \<^index>\<open>subsection\<close> \<open>Some 2\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> subsubsection \<^index>\<open>subsubsection\<close> \<open>Some 3\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
|
||||
Additional means assure that the following invariant is maintained in a document
|
||||
conforming to \<^verbatim>\<open>scholarly_paper\<close>: \<open>level > 0\<close>.
|
||||
\<close>
|
||||
|
||||
text\<open> The rest of the ontology introduces concepts for \<^typ>\<open>introduction\<close>, \<^typ>\<open>conclusion\<close>,
|
||||
\<^typ>\<open>related_work\<close>, \<^typ>\<open>bibliography\<close> etc. More details can be found in \<^verbatim>\<open>scholarly_paper\<close>
|
||||
contained in the theory \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close>. \<close>
|
||||
|
||||
subsection\<open>Writing Academic Publications: A Freeform Mathematics Text \<close>
|
||||
text*[csp_paper_synthesis::technical, main_author = "Some bu"]\<open>We present a typical mathematical
|
||||
paper focusing on its form, not referring in any sense to its content which is out of scope here.
|
||||
As mentioned before, we chose the paper~\<^cite>\<open>"taha.ea:philosophers:2020"\<close> for this purpose,
|
||||
which is written in the so-called free-form style: Formulas are superficially parsed and
|
||||
type-set, but no deeper type-checking and checking with the underlying logical context
|
||||
is undertaken. \<close>
|
||||
|
||||
figure*[fig0::figure,relative_width="85",file_src="''figures/header_CSP_source.png''"]
|
||||
\<open> A mathematics paper as integrated document source ... \<close>
|
||||
|
||||
figure*[fig0B::figure,relative_width="85",file_src="''figures/header_CSP_pdf.png''"]
|
||||
\<open> ... and as corresponding \<^pdf>-output. \<close>
|
||||
|
||||
text\<open>The integrated source of this paper-excerpt is shown in \<^figure>\<open>fig0\<close>, while the
|
||||
document build process converts this to the corresponding \<^pdf>-output shown in \<^figure>\<open>fig0B\<close>.\<close>
|
||||
|
||||
|
||||
text\<open>Recall that the standard syntax for a text-element in \<^isadof> is
|
||||
\<^theory_text>\<open>text*[<id>::<class_id>,<attrs>]\<open> ... text ...\<close>\<close>, but there are a few built-in abbreviations like
|
||||
\<^theory_text>\<open>title*[<id>,<attrs>]\<open> ... text ...\<close>\<close> that provide special command-level syntax for text-elements.
|
||||
The other text-elements provide the authors and the abstract as specified by their
|
||||
\<^emph>\<open>class\_id\<close>\<^index>\<open>class\_id@\<open>class_id\<close>\<close>
|
||||
referring to the \<^theory_text>\<open>doc_class\<close>es of \<^verbatim>\<open>scholarly_paper\<close>;
|
||||
we say that these text elements are \<^emph>\<open>instances\<close>
|
||||
\<^bindex>\<open>instance\<close> of the \<^theory_text>\<open>doc_class\<close>es \<^bindex>\<open>doc\_class\<close> of the underlying ontology. \<close>
|
||||
|
||||
text\<open>The paper proceeds by providing instances for introduction, technical sections,
|
||||
examples, \<^etc>. We would like to concentrate on one --- mathematical paper oriented --- detail in the
|
||||
ontology \<^verbatim>\<open>scholarly_paper\<close>:
|
||||
|
||||
@{boxed_theory_text [display]
|
||||
\<open>doc_class technical = text_section + ...
|
||||
|
||||
type_synonym tc = technical (* technical content *)
|
||||
|
||||
datatype math_content_class = "defn" | "axm" | "thm" | "lem" | "cor" | "prop" | ...
|
||||
|
||||
doc_class math_content = tc + ...
|
||||
|
||||
doc_class "definition" = math_content +
|
||||
mcc :: "math_content_class" <= "defn" ...
|
||||
|
||||
doc_class "theorem" = math_content +
|
||||
mcc :: "math_content_class" <= "thm" ...
|
||||
\<close>}\<close>
|
||||
|
||||
|
||||
text\<open>The class \<^typ>\<open>technical\<close> regroups a number of text-elements that contain typical
|
||||
technical content in mathematical or engineering papers: code, definitions, theorems,
|
||||
lemmas, examples. From this class, the stricter class of @{typ \<open>math_content\<close>} is derived,
|
||||
which is grouped into @{typ "definition"}s and @{typ "theorem"}s (the details of these
|
||||
class definitions are omitted here). Note, however, that class identifiers can be abbreviated by
|
||||
standard \<^theory_text>\<open>type_synonym\<close>s for convenience and enumeration types can be defined by the
|
||||
standard inductive \<^theory_text>\<open>datatype\<close> definition mechanism in Isabelle, since any HOL type is admitted
|
||||
for attribute declarations. Vice-versa, document class definitions imply a corresponding HOL type
|
||||
definition. \<close>
|
||||
|
||||
figure*[fig01::figure,relative_width="95",file_src="''figures/definition-use-CSP.png''"]
|
||||
\<open> A screenshot of the integrated source with definitions ...\<close>
|
||||
text\<open>An example for a sequence of (Isabelle-formula-)texts, their ontological declarations as
|
||||
\<^typ>\<open>definition\<close>s in terms of the \<^verbatim>\<open>scholarly_paper\<close>-ontology and their type-conform referencing
|
||||
later is shown in \<^figure>\<open>fig01\<close> in its presentation as the integrated source.
|
||||
|
||||
Note that the use in the ontology-generated antiquotation \<^theory_text>\<open>@{definition X4}\<close>
|
||||
is type-checked; referencing \<^verbatim>\<open>X4\<close> as \<^theory_text>\<open>theorem\<close> would be a type-error and be reported directly
|
||||
by \<^isadof> in Isabelle/jEdit. Note further that referencing it correctly wrt. the sub-typing
|
||||
hierarchy makes \<^verbatim>\<open>X4\<close> \<^emph>\<open>navigable\<close> in Isabelle/jEdit; a click will cause the IDE to present the
|
||||
defining occurrence of this text-element in the integrated source.
|
||||
|
||||
Note, further, how \<^isadof>-commands like \<^theory_text>\<open>text*\<close> interact with standard Isabelle document
|
||||
antiquotations described in the Isabelle Isar Reference Manual in Chapter 4.2 in great detail.
|
||||
We restrict ourselves here to briefly describing three freeform antiquotations used in this text:
|
||||
|
||||
\<^item> the freeform term antiquotation, also called \<^emph>\<open>cartouche\<close>, written by
|
||||
\<open>@{cartouche [style-parms] \<open>...\<close>}\<close> or just by \<open>\<open>...\<close>\<close> if the list of style parameters
|
||||
is empty,
|
||||
\<^item> the freeform antiquotation for theory fragments written \<open>@{theory_text [style-parms] \<open>...\<close>}\<close>
|
||||
or just \<^verbatim>\<open>\<^theory_text>\<close>\<open>\<open>...\<close>\<close> if the list of style parameters is empty,
|
||||
\<^item> the freeform antiquotations for verbatim, emphasized, bold, or footnote text elements.
|
||||
\<close>
|
||||
|
||||
figure*[fig02::figure,relative_width="95",file_src="''figures/definition-use-CSP-pdf.png''"]
|
||||
\<open> ... and the corresponding \<^pdf>-output.\<close>
|
||||
|
||||
text\<open>
|
||||
\<^isadof> text-elements such as \<^theory_text>\<open>text*\<close> make it possible to have such standard term-antiquotations inside their
|
||||
text, permitting to give the whole text entity a formal, referentiable status with typed
|
||||
meta-information attached to it that may be used for presentation issues, search,
|
||||
or other technical purposes.
|
||||
The corresponding output of this snippet in the integrated source is shown in \<^figure>\<open>fig02\<close>.
|
||||
\<close>
|
||||
|
||||
|
||||
subsection*[scholar_pide::example]\<open>More Freeform Elements, and Resulting Navigation\<close>
|
||||
text\<open> In the following, we present some other text-elements provided by the Common Ontology Library
|
||||
in @{theory "Isabelle_DOF.Isa_COL"}. It provides a document class for figures:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
datatype placement = h | t | b | ht | hb
|
||||
doc_class figure = text_section +
|
||||
relative_width :: "int" (* percent of textwidth *)
|
||||
src :: "string"
|
||||
placement :: placement
|
||||
spawn_columns :: bool <= True
|
||||
\<close>}
|
||||
\<close>
|
||||
figure*[fig_figures::figure,relative_width="85",file_src="''figures/Dogfood-figures.png''"]
|
||||
\<open> Declaring figures in the integrated source.\<close>
|
||||
|
||||
text\<open>
|
||||
The document class \<^typ>\<open>figure\<close> (supported by the \<^isadof> command abbreviation
|
||||
\<^boxed_theory_text>\<open>figure*\<close>) makes it possible to express the pictures and diagrams
|
||||
as shown in \<^figure>\<open>fig_figures\<close>, which presents its own representation in the
|
||||
integrated source as screenshot.\<close>
|
||||
|
||||
text\<open>
|
||||
Finally, we define a \<^emph>\<open>monitor class\<close> \<^index>\<open>monitor class\<close> that enforces a textual ordering
|
||||
in the document core by a regular expression:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class article =
|
||||
style_id :: string <= "''LNCS''"
|
||||
version :: "(int \<times> int \<times> int)" <= "(0,0,0)"
|
||||
accepts "(title ~~ \<lbrakk>subtitle\<rbrakk> ~~ \<lbrace>author\<rbrace>\<^sup>+ ~~ abstract ~~ \<lbrace>introduction\<rbrace>\<^sup>+
|
||||
~~ \<lbrace>background\<rbrace>\<^sup>* ~~ \<lbrace>technical || example \<rbrace>\<^sup>+ ~~ \<lbrace>conclusion\<rbrace>\<^sup>+
|
||||
~~ bibliography ~~ \<lbrace>annex\<rbrace>\<^sup>* )"
|
||||
\<close>}\<close>
|
||||
|
||||
text\<open>
|
||||
In an integrated document source, the body of the content can be parenthesized into:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
open_monitor* [this::article]
|
||||
...
|
||||
close_monitor*[this]
|
||||
\<close>}
|
||||
|
||||
which signals to \<^isadof> begin and end of the part of the integrated source
|
||||
in which the text-elements instances are expected to appear in the textual ordering
|
||||
defined by \<^typ>\<open>article\<close>.
|
||||
\<close>
|
||||
text*[exploring::float,
|
||||
main_caption="\<open>Exploring text elements.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=45, caption="Exploring a reference of a text-element.") "figures/Dogfood-II-bgnd1.png"
|
||||
}\<^hfill>@{fig_content (width=45, caption="Exploring the class of a text element.") "figures/Dogfood-III-bgnd-text_section.png"}
|
||||
\<close>
|
||||
|
||||
text*[hyperlinks::float,
|
||||
main_caption="\<open>Navigation via generated hyperlinks.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=45, caption="Hyperlink to class-definition.") "figures/Dogfood-IV-jumpInDocCLass.png"
|
||||
}\<^hfill>@{fig_content (width=45, caption="Exploring an attribute (hyperlinked to the class).") "figures/Dogfood-V-attribute.png"}
|
||||
\<close>
|
||||
|
||||
text\<open>
|
||||
From these class definitions, \<^isadof> also automatically generated editing
|
||||
support for Isabelle/jEdit. In
|
||||
@{float "exploring"}(left)
|
||||
% \autoref{fig-Dogfood-II-bgnd1}
|
||||
and
|
||||
@{float "exploring"}(right)
|
||||
% \autoref{fig-bgnd-text_section}
|
||||
we show how hovering over links permits to explore its
|
||||
meta-information. Clicking on a document class identifier permits to hyperlink into the
|
||||
corresponding class definition (
|
||||
@{float "hyperlinks"}(left)
|
||||
%\autoref{fig:Dogfood-IV-jumpInDocCLass})
|
||||
; hovering over an attribute-definition (which is qualified in order to disambiguate; cf.
|
||||
@{float "hyperlinks"}(right)
|
||||
%\autoref{fig:Dogfood-V-attribute}
|
||||
) shows its type.
|
||||
\<close>
|
||||
|
||||
figure*[figDogfoodVIlinkappl::figure,relative_width="80",file_src="''figures/Dogfood-VI-linkappl.png''"]
|
||||
\<open>Exploring an ontological reference.\<close>
|
||||
|
||||
text\<open>
|
||||
An ontological reference application in @{figure "figDogfoodVIlinkappl"}: the
|
||||
ontology-dependant antiquotation \<^boxed_theory_text>\<open>@{example \<open>ex1\<close>}\<close> refers to the corresponding
|
||||
text-element \<^boxed_theory_text>\<open>ex1\<close>. Hovering allows for inspection, clicking for jumping to the
|
||||
definition. If the link does not exist or has a non-compatible type, the text is not validated,
|
||||
\<^ie>, Isabelle/jEdit will respond with an error.\<close>
|
||||
|
||||
text\<open>We advise users to experiment with different notation variants.
|
||||
Note, further, that the Isabelle \<^latex>\<open>@\{cite ...\}\<close>-text-anti-quotation makes its checking
|
||||
on the level of generated \<^verbatim>\<open>.aux\<close>-files, which are not necessarily up-to-date. Ignoring the PIDE
|
||||
error-message and compiling it with a consistent bibtex usually makes this behavior disappear.
|
||||
\<close>
|
||||
|
||||
subsection*["using_term_aq"::technical, main_author = "Some @{author ''bu''}"]
|
||||
\<open>Using Term-Antiquotations\<close>
|
||||
|
||||
text\<open>The present version of \<^isadof> is the first version that supports the novel feature of
|
||||
\<^dof>-generated term-antiquotations\<^bindex>\<open>term-antiquotations\<close>, \<^ie>, antiquotations embedded
|
||||
in HOL-\<open>\<lambda>\<close>-terms possessing arguments that were validated in the ontological context.
|
||||
These \<open>\<lambda>\<close>-terms may occur in definitions, lemmas, or in values to define attributes
|
||||
in class instances. They have the format: \<open>@{name arg\<^sub>1 ... arg\<^sub>n\<^sub>-\<^sub>1} arg\<^sub>n\<close>\<close>
|
||||
|
||||
text\<open>Logically, they are defined as an identity in the last argument \<open>arg\<^sub>n\<close>; thus,
|
||||
ontologically checked prior arguments \<open>arg\<^sub>1 ... arg\<^sub>n\<^sub>-\<^sub>1\<close> can be ignored during a proof
|
||||
process; ontologically, they can be used to assure the traceability of, \<^eg>, semi-formal
|
||||
assumptions throughout their way to formalisation and use in lemmas and proofs. \<close>
|
||||
|
||||
figure*[doc_termAq::figure,relative_width="35",file_src="''figures/doc-mod-term-aq.pdf''"]
|
||||
\<open>Term-Antiquotations Referencing to Annotated Elements\<close>
|
||||
text\<open>As shown in @{figure \<open>doc_termAq\<close>}, this feature of \<^isadof> substantially increases
|
||||
the expressibility of links between the formal and the informal in \<^dof> documents.\<close>
|
||||
|
||||
text\<open> In the following, we describe a common scenario linking semi-formal assumptions and
|
||||
formal ones:
|
||||
|
||||
@{boxed_theory_text [display]
|
||||
\<open>
|
||||
declare_reference*[e2::"definition"]
|
||||
|
||||
Assumption*[a1::"assumption", short_name="\<open>safe environment.\<close>"]
|
||||
\<open>The system shall only be used in the temperature range from 0 to 60 degrees Celsius.
|
||||
Formally, this is stated as follows in @{definition (unchecked) \<open>e2\<close>}.\<close>
|
||||
|
||||
definition*[e2, status=formal] safe_env :: "state \<Rightarrow> bool"
|
||||
where "safe_env \<sigma> \<equiv> (temp \<sigma> \<in> {0 .. 60})"
|
||||
|
||||
theorem*[e3::"theorem"] safety_preservation::" @{assumption \<open>a1\<close>} (safe_env \<sigma>) \<Longrightarrow> ... "
|
||||
\<close>}
|
||||
\<close>
|
||||
text\<open>Note that Isabelle proceeds in a strict ``declaration-before-use''-manner, \<^ie> assumes
|
||||
linear visibility on all references. This also holds for the declaration of ontological
|
||||
references. In order to represent cyclic dependencies, it is therefore necessary to
|
||||
start with the forward declaration \<^theory_text>\<open>declare_reference*\<close>. From there on, this reference
|
||||
can be used in text, term, and code-contexts, albeit its definition appears textually later.
|
||||
The corresponding freeform-formulation of this assumption can be explicitly referred in the
|
||||
assumption of a theorem establishing the link. The \<^theory_text>\<open>theorem*\<close>-variant of the common
|
||||
Isabelle/Isar \<^theory_text>\<open>theorem\<close>-command will in contrast to the latter not ignore \<open>\<open>a1\<close>\<close>,
|
||||
\<^ie> parse just as string, but also validate it in the previous context.
|
||||
|
||||
Note that the \<^theory_text>\<open>declare_reference*\<close> command will appear in the \<^LaTeX> generated from this
|
||||
document fragment. In order to avoid this, one has to enclose this command into the
|
||||
document comments : \<open>(*<*) ... (*>*)\<close>.\<close>
|
||||
|
||||
|
||||
section*[tech_onto::example]\<open>Writing Technical Reports in \<^boxed_theory_text>\<open>technical_report\<close>\<close>
|
||||
text\<open>While it is perfectly possible to write documents in the
|
||||
\<^verbatim>\<open>technical_report\<close> ontology in freeform-style (this manual is mostly such an
|
||||
example), we will briefly explain here the tight-checking-style in which
|
||||
most Isabelle reference manuals themselves are written.
|
||||
|
||||
The idea has already been put forward by Isabelle itself; besides the general infrastructure on
|
||||
which this work is also based, current Isabelle versions provide around 20 built-in
|
||||
document and code antiquotations described in the Reference Manual pp.75 ff. in great detail.
|
||||
|
||||
Most of them provide strict-checking, \<^ie> the argument strings were parsed and machine-checked in the
|
||||
underlying logical context, which turns the arguments into \<^emph>\<open>formal content\<close> in the integrated
|
||||
source, in contrast to the free-form antiquotations which basically influence the presentation.
|
||||
|
||||
We still mention a few of these document antiquotations here:
|
||||
\<^item> \<^theory_text>\<open>@{thm \<doublequote>refl\<doublequote>}\<close> or \<^theory_text>\<open>@{thm [display] \<doublequote>refl\<doublequote>}\<close>
|
||||
check that \<^theory_text>\<open>refl\<close> is indeed a reference
|
||||
to a theorem; the additional ``style" argument changes the presentation by printing the
|
||||
formula into the output instead of the reference itself,
|
||||
\<^item> \<^theory_text>\<open>@{lemma \<open>prop\<close> by method} \<close> allows deriving \<open>prop\<close> on the fly, thus guarantee
|
||||
that it is a corollary of the current context,
|
||||
\<^item> \<^theory_text>\<open>@{term \<open>term\<close> }\<close> parses and type-checks \<open>term\<close>,
|
||||
\<^item> \<^theory_text>\<open>@{value \<open>term\<close> }\<close> performs the evaluation of \<open>term\<close>,
|
||||
\<^item> \<^theory_text>\<open>@{ML \<open>ml-term\<close> }\<close> parses and type-checks \<open>ml-term\<close>,
|
||||
\<^item> \<^theory_text>\<open>@{ML_file \<open>ml-file\<close> }\<close> parses the path for \<open>ml-file\<close> and
|
||||
verifies its existence in the (Isabelle-virtual) file-system.
|
||||
\<close>
|
||||
|
||||
text\<open>There are options to display sub-parts of formulas etc., but it is a consequence
|
||||
of tight-checking that the information must be given complete and exactly in the syntax of
|
||||
Isabelle. This may be over-precise and a burden to readers not familiar with Isabelle, which may
|
||||
motivate authors to choose the aforementioned freeform-style.
|
||||
|
||||
Additionally, document antiquotations were added to check and evaluate terms with
|
||||
term antiquotations:
|
||||
\<^item> \<^theory_text>\<open>@{term_ \<open>term\<close> }\<close> parses and type-checks \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>\<^term_> \<open>@{technical \<open>isadof\<close>}\<close>\<close> will parse and check
|
||||
that \<open>isadof\<close> is indeed an instance of the class \<^typ>\<open>technical\<close>,
|
||||
\<^item> \<^theory_text>\<open>@{value_ \<open>term\<close> }\<close> performs the evaluation of \<open>term\<close> with term antiquotations,
|
||||
for instance \<^theory_text>\<open>@{value_ \<open>definition_list @{technical \<open>isadof\<close>}\<close>}\<close>
|
||||
will print the value of the \<^const>\<open>definition_list\<close> attribute of the instance \<open>isadof\<close>.
|
||||
\<^theory_text>\<open>value_\<close> may have an optional argument between square brackets to specify the evaluator but this
|
||||
argument must be specified after a default optional argument already defined
|
||||
by the text antiquotation implementation.
|
||||
So one must use the following syntax if one does not want to specify the first optional argument:
|
||||
\<^theory_text>\<open>@{value_ [] [nbe] \<open>definition_list @{technical \<open>isadof\<close>}\<close>}\<close>. Note the empty brackets.
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["subsec_onto_term_ctxt"::technical]
|
||||
(*>*)
|
||||
|
||||
text\<open>They are text-contexts equivalents to the \<^theory_text>\<open>term*\<close> and \<^theory_text>\<open>value*\<close> commands
|
||||
for term-contexts introduced in @{technical (unchecked) \<open>subsec_onto_term_ctxt\<close>}\<close>
|
||||
|
||||
subsection\<open>A Technical Report with Tight Checking\<close>
|
||||
text\<open>An example of tight checking is a small programming manual to document programming trick
|
||||
discoveries while implementing in Isabelle. While not necessarily meeting the standards of a scientific text, it appears to us that this information
|
||||
is often missing in the Isabelle community.
|
||||
|
||||
So, if this text addresses only a very limited audience and will never be famous for its style,
|
||||
it is nevertheless important to be \<^emph>\<open>exact\<close> in the sense that code-snippets and interface descriptions
|
||||
should be accurate with the most recent version of Isabelle in which this document is generated.
|
||||
So its value is that readers can just reuse some of these snippets and adapt them to their
|
||||
purposes.
|
||||
\<close>
|
||||
|
||||
figure*[strict_em::figure, relative_width="95", file_src="''figures/MyCommentedIsabelle.png''"]
|
||||
\<open>A table with a number of SML functions, together with their type.\<close>
|
||||
|
||||
text\<open>
|
||||
This manual is written according to the \<^verbatim>\<open>technical_report\<close> ontology in
|
||||
\<^theory>\<open>Isabelle_DOF.technical_report\<close>.
|
||||
\<^figure>\<open>strict_em\<close> shows a snippet from this integrated source and gives an idea why
|
||||
its tight-checking allows for keeping track of underlying Isabelle changes:
|
||||
Any reference to an SML operation in some library module is type-checked, and the displayed
|
||||
SML-type really corresponds to the type of the operations in the underlying SML environment.
|
||||
In the \<^pdf> output, these text-fragments were displayed verbatim.
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
section\<open>Some Recommendations: A little Style Guide\<close>
|
||||
text\<open>
|
||||
The document generation of \<^isadof> is based on Isabelle's document generation framework,
|
||||
using \<^LaTeX>{} as the underlying back-end. As Isabelle's document generation framework, it is
|
||||
possible to embed (nearly) arbitrary \<^LaTeX>-commands in text-commands, \<^eg>:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open> This is \emph{emphasized} and this is a
|
||||
citation~\cite{brucker.ea:isabelle-ontologies:2018}\<close>
|
||||
\<close>}
|
||||
|
||||
In general, we advise against this practice and, whenever possible, use the \<^isadof> (respectively
|
||||
Isabelle) provided alternatives:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open> This is *\<open>emphasized\<close> and this is a
|
||||
citation @{cite "brucker.ea:isabelle-ontologies:2018"}.\<close>
|
||||
\<close>}
|
||||
The list of standard Isabelle document antiquotations, as well as their options and styles,
|
||||
can be found in the Isabelle reference manual \<^cite>\<open>"wenzel:isabelle-isar:2020"\<close>,
|
||||
section 4.2.
|
||||
|
||||
In practice, \<^isadof> documents with ambitious layout will contain a certain number of
|
||||
\<^LaTeX>-commands, but this should be restricted to layout improvements that otherwise are (currently)
|
||||
not possible. As far as possible, raw \<^LaTeX> should be restricted to the definition
|
||||
of ontologies and macros (see @{docitem (unchecked) \<open>isadof_ontologies\<close>}). If raw
|
||||
\<^LaTeX> commands can not be avoided, it is recommended to use the Isabelle document comment
|
||||
\<^latex>\<open>\verb+\+\verb+<^latex>+\<close>\<open>\<open>argument\<close>\<close> to isolate these parts
|
||||
(cf. \<^cite>\<open>"wenzel:isabelle-isar:2020"\<close>).
|
||||
|
||||
Restricting the use of \<^LaTeX> has two advantages: first, \<^LaTeX> commands can circumvent the
|
||||
consistency checks of \<^isadof> and, hence, only if no \<^LaTeX> commands are used, \<^isadof> can
|
||||
ensure that a document that does not generate any error messages in Isabelle/jEdit also generated
|
||||
a \<^pdf> document. Second, future version of \<^isadof> might support different targets for the
|
||||
document generation (\<^eg>, HTML) which, naturally, are only available to documents not using
|
||||
too complex native \<^LaTeX>-commands.
|
||||
|
||||
Similarly, (unchecked) forward references should, if possible, be avoided, as they also might
|
||||
create dangling references during the document generation that break the document generation.
|
||||
|
||||
Finally, we recommend using the @{command "check_doc_global"} command at the end of your
|
||||
document to check the global reference structure.
|
||||
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
||||
|
||||
@ -1,227 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2022 University of Exeter
|
||||
* 2018-2022 University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory
|
||||
"M_04_Document_Ontology"
|
||||
imports
|
||||
"M_03_GuidedTour"
|
||||
keywords "class_synonym" :: thy_defn
|
||||
begin
|
||||
|
||||
(*>*)
|
||||
|
||||
|
||||
(*<*)
|
||||
(* Reverse application: \<open>x |> f\<close> evaluates \<open>f x\<close>.
   Declared as a left-associative infix operator at precedence 65, mirroring
   the SML/F#-style pipeline notation. *)
definition combinator1 :: "'a \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'b" (infixl "|>" 65)
|
||||
where "x |> f = f x"
|
||||
|
||||
|
||||
ML\<open>
|
||||
local
|
||||
(* Implements the \<^theory_text>\<open>class_synonym\<close> command (keyword declared in the theory
   header): parses  <type_args> <binding> = <typ> [mixfix]  and registers the
   right-hand side as a type abbreviation via Typedecl.abbrev_cmd. *)
val _ =
|
||||
Outer_Syntax.local_theory \<^command_keyword>\<open>class_synonym\<close> "declare type abbreviation"
|
||||
(Parse.type_args -- Parse.binding --
|
||||
(\<^keyword>\<open>=\<close> |-- Parse.!!! (Parse.typ -- Parse.opt_mixfix'))
|
||||
>> (fn ((args, a), (rhs, mx)) => snd o Typedecl.abbrev_cmd (a, args, mx) rhs));
|
||||
|
||||
in end
|
||||
\<close>
|
||||
|
||||
(*>*)
|
||||
|
||||
|
||||
(*<*)
|
||||
|
||||
doc_class "title" = short_title :: "string option" <= "None"
|
||||
|
||||
|
||||
doc_class elsevier =
|
||||
organization :: string
|
||||
address_line :: string
|
||||
postcode :: int
|
||||
city :: string
|
||||
|
||||
(*doc_class elsevier_affiliation = affiliation +*)
|
||||
|
||||
doc_class acm =
|
||||
position :: string
|
||||
institution :: string
|
||||
department :: int
|
||||
street_address :: string
|
||||
city :: string
|
||||
state :: int
|
||||
country :: string
|
||||
postcode :: int
|
||||
|
||||
(*doc_class acm_affiliation = affiliation +*)
|
||||
|
||||
doc_class lncs =
|
||||
institution :: string
|
||||
|
||||
(*doc_class lncs_affiliation = affiliation +*)
|
||||
|
||||
|
||||
(* Generic author record: a name and an email address (defaulting to the empty
   string).  The invariant ne_name forces the name of every instance \<sigma> to be
   non-empty. *)
doc_class author =
|
||||
name :: string
|
||||
email :: "string" <= "''''"
|
||||
invariant ne_name :: "name \<sigma> \<noteq> ''''"
|
||||
|
||||
doc_class elsevier_author = "author" +
|
||||
affiliations :: "elsevier list"
|
||||
short_author :: string
|
||||
url :: string
|
||||
footnote :: string
|
||||
|
||||
text*[el1:: "elsevier"]\<open>\<close>
|
||||
(*text*[el_aff1:: "affiliation", journal_style = "@{elsevier \<open>el1\<close>}"]\<open>\<close>*)
|
||||
term*\<open>@{elsevier \<open>el1\<close>}\<close>
|
||||
text*[el_auth1:: "elsevier_author", affiliations = "[@{elsevier \<open>el1\<close>}]"]\<open>\<close>
|
||||
|
||||
doc_class acm_author = "author" +
|
||||
affiliations :: "acm list"
|
||||
orcid :: int
|
||||
footnote :: string
|
||||
|
||||
text*[acm1:: "acm"]\<open>\<close>
|
||||
(*text*[acm_aff1:: "acm affiliation", journal_style = "@{acm \<open>acm1\<close>}"]\<open>\<close>*)
|
||||
text*[acm_auth1:: "acm_author", affiliations = "[@{acm \<open>acm1\<close>}]"]\<open>\<close>
|
||||
|
||||
doc_class lncs_author = "author" +
|
||||
affiliations :: "lncs list"
|
||||
orcid :: int
|
||||
short_author :: string
|
||||
footnote :: string
|
||||
|
||||
text*[lncs1:: "lncs"]\<open>\<close>
|
||||
(*text*[lncs_aff1:: "lncs affiliation", journal_style = "@{lncs \<open>lncs1\<close>}"]\<open>\<close>*)
|
||||
text*[lncs_auth1:: "lncs_author", affiliations = "[@{lncs \<open>lncs1\<close>}]"]\<open>\<close>
|
||||
|
||||
|
||||
doc_class "text_element" =
|
||||
authored_by :: "author set" <= "{}"
|
||||
level :: "int option" <= "None"
|
||||
invariant authors_req :: "authored_by \<sigma> \<noteq> {}"
|
||||
and level_req :: "the (level \<sigma>) > 1"
|
||||
|
||||
doc_class "introduction" = "text_element" +
|
||||
authored_by :: "(author) set" <= "UNIV"
|
||||
|
||||
doc_class "technical" = "text_element" +
|
||||
formal_results :: "thm list"
|
||||
|
||||
doc_class "definition" = "technical" +
|
||||
is_formal :: "bool"
|
||||
|
||||
doc_class "theorem" = "technical" +
|
||||
is_formal :: "bool"
|
||||
assumptions :: "term list" <= "[]"
|
||||
statement :: "term option" <= "None"
|
||||
|
||||
doc_class "conclusion" = "text_element" +
|
||||
resumee :: "(definition set \<times> theorem set)"
|
||||
invariant is_form :: "(\<exists>x\<in>(fst (resumee \<sigma>)). definition.is_formal x) \<longrightarrow>
|
||||
(\<exists>y\<in>(snd (resumee \<sigma>)). is_formal y)"
|
||||
|
||||
text*[def::"definition", is_formal = "True"]\<open>\<close>
|
||||
text*[theo::"theorem", is_formal = "False"]\<open>\<close>
|
||||
text*[conc::"conclusion", resumee="({@{definition \<open>def\<close>}}, {@{theorem \<open>theo\<close>}})"]\<open>\<close>
|
||||
|
||||
value*\<open>resumee @{conclusion \<open>conc\<close>} |> fst\<close>
|
||||
value*\<open>resumee @{conclusion \<open>conc\<close>} |> snd\<close>
|
||||
|
||||
doc_class "article" =
|
||||
style_id :: string <= "''LNCS''"
|
||||
accepts "(title ~~ \<lbrace>author\<rbrace>\<^sup>+ ~~ \<lbrace>introduction\<rbrace>\<^sup>+
|
||||
~~ \<lbrace>\<lbrace>definition ~~ example\<rbrace>\<^sup>+ || theorem\<rbrace>\<^sup>+ ~~ \<lbrace>conclusion\<rbrace>\<^sup>+)"
|
||||
|
||||
|
||||
datatype kind = expert_opinion | argument | "proof"
|
||||
|
||||
onto_class result = " technical" +
|
||||
evidence :: kind
|
||||
property :: " theorem list" <= "[]"
|
||||
invariant has_property :: "evidence \<sigma> = proof \<longleftrightarrow> property \<sigma> \<noteq> []"
|
||||
|
||||
(*>*)
|
||||
|
||||
text*[paper_m::float, main_caption="\<open>A Basic Document Ontology: paper$^m$\<close>"]\<open>
|
||||
@{boxed_theory_text [display,indent=5]
|
||||
\<open>doc_class "title" = short_title :: "string option" <= "None"
|
||||
doc_class affiliation =
|
||||
journal_style :: '\<alpha>
|
||||
doc_class author =
|
||||
affiliations :: "'\<alpha> affiliation list"
|
||||
name :: string
|
||||
email :: "string" <= "''''"
|
||||
invariant ne_name :: "name \<sigma> \<noteq> ''''"
|
||||
doc_class "text_element" =
|
||||
authored_by :: "('\<alpha> author) set" <= "{}"
|
||||
level :: "int option" <= "None"
|
||||
invariant authors_req :: "authored_by \<noteq> {}"
|
||||
and level_req :: "the (level) > 1"
|
||||
doc_class "introduction" = text_element +
|
||||
authored_by :: "('\<alpha> author) set" <= "UNIV"
|
||||
doc_class "technical" = text_element +
|
||||
formal_results :: "thm list"
|
||||
doc_class "definition" = technical +
|
||||
is_formal :: "bool"
|
||||
doc_class "theorem" = technical +
|
||||
assumptions :: "term list" <= "[]"
|
||||
statement :: "term option" <= "None"
|
||||
doc_class "conclusion" = text_element +
|
||||
resumee :: "(definition set \<times> theorem set)"
|
||||
invariant (\<forall>x\<in>fst resumee. is_formal x)
|
||||
\<longrightarrow> (\<exists>y\<in>snd resumee. is_formal y)
|
||||
doc_class "article" =
|
||||
style_id :: string <= "''LNCS''"
|
||||
accepts "(title ~~ \<lbrace>author\<rbrace>\<^sup>+ ~~ \<lbrace>introduction\<rbrace>\<^sup>+
|
||||
~~ \<lbrace>\<lbrace>definition ~~ example\<rbrace>\<^sup>+ || theorem\<rbrace>\<^sup>+ ~~ \<lbrace>conclusion\<rbrace>\<^sup>+)"\<close>}
|
||||
\<close>
|
||||
|
||||
|
||||
(*<*)
|
||||
datatype role = PM \<comment> \<open>Program Manager\<close>
|
||||
| RQM \<comment> \<open>Requirements Manager\<close>
|
||||
| DES \<comment> \<open>Designer\<close>
|
||||
| IMP \<comment> \<open>Implementer\<close>
|
||||
| ASR \<comment> \<open>Assessor\<close>
|
||||
| INT \<comment> \<open>Integrator\<close>
|
||||
| TST \<comment> \<open>Tester\<close>
|
||||
| VER \<comment> \<open>Verifier\<close>
|
||||
| VnV \<comment> \<open>Verification and Validation\<close>
|
||||
| VAL \<comment> \<open>Validator\<close>
|
||||
|
||||
abbreviation developer where "developer == DES"
|
||||
abbreviation validator where "validator == VAL"
|
||||
abbreviation verifier where "verifier == VER"
|
||||
|
||||
doc_class requirement = Isa_COL.text_element +
|
||||
long_name :: "string option"
|
||||
is_concerned :: "role set"
|
||||
|
||||
text*[req1::requirement,
|
||||
is_concerned="{developer, validator}"]
|
||||
\<open>The operating system shall provide secure
|
||||
memory separation.\<close>
|
||||
|
||||
text\<open>
|
||||
The recurring issue of the certification
|
||||
is @{requirement \<open>req1\<close>} ...\<close>
|
||||
|
||||
term "\<lparr>long_name = None,is_concerned = {developer,validator}\<rparr>"
|
||||
(*>*)
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
||||
@ -1,454 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2022 University of Exeter
|
||||
* 2018-2022 University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory
|
||||
"M_05_Proofs_Ontologies"
|
||||
imports
|
||||
"M_04_Document_Ontology"
|
||||
begin
|
||||
|
||||
(*>*)
|
||||
|
||||
chapter*[onto_proofs::technical]\<open>Proofs over Ontologies\<close>
|
||||
|
||||
text\<open>It is a distinguishing feature of \<^dof> that it does not directly generate meta-data but rather
|
||||
generates a \<^emph>\<open>theory of meta-data\<close> that can be used in HOL-terms on various levels
|
||||
of the Isabelle-system and its document generation technology. Meta-data theories can be
|
||||
converted into executable code and efficiently used in validations, but also used for theoretic
|
||||
reasoning over given ontologies. While the full potential of this latter possibility still
|
||||
needs to be explored, we present in the following sections two applications:
|
||||
|
||||
\<^enum> Verified ontological morphisms, also called \<^emph>\<open>ontological mappings\<close> in the literature
|
||||
\<^cite>\<open>"books/daglib/0032976" and "atl" and "BGPP95"\<close>, \<^ie> proofs of invariant preservation
|
||||
along translation-functions of all instances of \<^verbatim>\<open>doc_class\<close>-es.
|
||||
\<^enum> Verified refinement relations between the structural descriptions of theory documents,
|
||||
\<^ie> proofs of language inclusions of monitors of global ontology classes.
|
||||
\<close>
|
||||
|
||||
section*["morphisms"::scholarly_paper.text_section] \<open>Proving Properties over Ontologies\<close>
|
||||
|
||||
subsection\<open>Ontology-Morphisms: a Prototypical Example\<close>
|
||||
|
||||
text\<open>We define a small ontology with the following classes:\<close>
|
||||
|
||||
doc_class AA = aa :: nat
|
||||
doc_class BB = bb :: int
|
||||
doc_class CC = cc :: int
|
||||
|
||||
doc_class DD = dd :: int
|
||||
doc_class EE = ee :: int
|
||||
doc_class FF = ff :: int
|
||||
|
||||
onto_morphism (AA, BB) to CC and (DD, EE) to FF
|
||||
where "convert\<^sub>A\<^sub>A\<^sub>\<times>\<^sub>B\<^sub>B\<^sub>\<Rightarrow>\<^sub>C\<^sub>C \<sigma> = \<lparr> CC.tag_attribute = 1::int,
|
||||
CC.cc = int(aa (fst \<sigma>)) + bb (snd \<sigma>)\<rparr>"
|
||||
and "convert\<^sub>D\<^sub>D\<^sub>\<times>\<^sub>E\<^sub>E\<^sub>\<Rightarrow>\<^sub>F\<^sub>F \<sigma> = \<lparr> FF.tag_attribute = 1::int,
|
||||
FF.ff = dd (fst \<sigma>) + ee (snd \<sigma>) \<rparr>"
|
||||
|
||||
text\<open>Note that the \<^term>\<open>convert\<^sub>A\<^sub>A\<^sub>\<times>\<^sub>B\<^sub>B\<^sub>\<Rightarrow>\<^sub>C\<^sub>C\<close>-morphism involves a data-format conversion, and that the
|
||||
resulting transformation of @{doc_class AA}-instances and @{doc_class BB}-instances is surjective
|
||||
but not injective. The \<^term>\<open>CC.tag_attribute\<close> is used to potentially differentiate instances with
|
||||
equal attribute-content and is irrelevant here.\<close>
|
||||
|
||||
(*<*) (* Just a test, irrelevant for the document.*)
|
||||
|
||||
doc_class A_A = aa :: nat
|
||||
doc_class BB' = bb :: int
|
||||
onto_morphism (A_A, BB', CC, DD, EE) to FF
|
||||
where "convert\<^sub>A\<^sub>_\<^sub>A\<^sub>\<times>\<^sub>B\<^sub>B\<^sub>'\<^sub>\<times>\<^sub>C\<^sub>C\<^sub>\<times>\<^sub>D\<^sub>D\<^sub>\<times>\<^sub>E\<^sub>E\<^sub>\<Rightarrow>\<^sub>F\<^sub>F \<sigma> = \<lparr> FF.tag_attribute = 1::int,
|
||||
FF.ff = int(aa (fst \<sigma>)) + bb (fst (snd \<sigma>))\<rparr>"
|
||||
|
||||
(*>*)
|
||||
|
||||
text\<open>This specification construct introduces the following constants and definitions:
|
||||
\<^item> @{term [source] \<open>convert\<^sub>A\<^sub>A\<^sub>_\<^sub>B\<^sub>B\<^sub>_\<^sub>C\<^sub>C :: AA \<times> BB \<Rightarrow> CC\<close>}
|
||||
\<^item> @{term [source] \<open>convert\<^sub>D\<^sub>D\<^sub>_\<^sub>E\<^sub>E\<^sub>_\<^sub>F\<^sub>F :: DD \<times> EE \<Rightarrow> FF\<close>}
|
||||
% @{term [source] \<open>convert\<^sub>A\<^sub>_\<^sub>A\<^sub>\<times>\<^sub>B\<^sub>B\<^sub>'\<^sub>\<times>\<^sub>C\<^sub>C\<^sub>\<times>\<^sub>D\<^sub>D\<^sub>\<times>\<^sub>E\<^sub>E\<^sub>\<Rightarrow>\<^sub>F\<^sub>F :: A_A \<times> BB' \<times> CC \<times> DD \<times> EE \<Rightarrow> FF\<close>}
|
||||
|
||||
and corresponding definitions. \<close>
|
||||
|
||||
subsection\<open>Proving the Preservation of Ontological Mappings : A Document-Ontology Morphism\<close>
|
||||
|
||||
text\<open>\<^dof> as a system is currently particularly geared towards \<^emph>\<open>document\<close>-ontologies, in
|
||||
particular for documentations generated from Isabelle theories. We used it meanwhile for the
|
||||
generation of various conference and journal papers, notably using the
|
||||
\<^theory>\<open>Isabelle_DOF.scholarly_paper\<close> and \<^theory>\<open>Isabelle_DOF.technical_report\<close>-ontologies,
|
||||
targeting a (small) variety of \<^LaTeX> style-files. A particular aspect of these ontologies,
|
||||
especially when targeting journals from publishers such as ACM, Springer or Elsevier, is the
|
||||
description of publication meta-data. Publishers tend to have their own styles on what kind
|
||||
meta-data should be associated with a journal publication; thus, the depth and
|
||||
precise format of affiliations, institution, their relation to authors, and author descriptions
|
||||
(with photos or without, hair left-combed or right-combed, etcpp...) varies.
|
||||
|
||||
In the following, we present an attempt to generalized ontology with several ontology mappings
|
||||
to more specialized ones such as concrete journals and/or the \<^theory>\<open>Isabelle_DOF.scholarly_paper\<close>-
|
||||
ontology which we mostly used for our own publications.
|
||||
\<close>
|
||||
|
||||
|
||||
doc_class elsevier_org =
|
||||
organization :: string
|
||||
address_line :: string
|
||||
postcode :: int
|
||||
city :: string
|
||||
|
||||
doc_class acm_org =
|
||||
position :: string
|
||||
institution :: string
|
||||
department :: int
|
||||
street_address :: string
|
||||
city :: string
|
||||
state :: int
|
||||
country :: string
|
||||
postcode :: int
|
||||
|
||||
doc_class lncs_inst =
|
||||
institution :: string
|
||||
|
||||
doc_class author =
|
||||
name :: string
|
||||
email :: "string" <= "''''"
|
||||
invariant ne_fsnames :: "name \<sigma> \<noteq> ''''"
|
||||
|
||||
doc_class elsevierAuthor = "author" +
|
||||
affiliations :: "elsevier_org list"
|
||||
firstname :: string
|
||||
surname :: string
|
||||
short_author :: string
|
||||
url :: string
|
||||
footnote :: string
|
||||
invariant ne_fsnames :: "firstname \<sigma> \<noteq> '''' \<and> surname \<sigma> \<noteq> ''''"
|
||||
|
||||
(*<*)
|
||||
text*[el1:: "elsevier_org"]\<open>An example elsevier-journal affiliation.\<close>
|
||||
term*\<open>@{elsevier_org \<open>el1\<close>}\<close>
|
||||
text*[el_auth1:: "elsevierAuthor", affiliations = "[@{elsevier_org \<open>el1\<close>}]"]\<open>\<close>
|
||||
(*>*)
|
||||
text\<open>\<close>
|
||||
|
||||
doc_class acmAuthor = "author" +
|
||||
affiliations :: "acm_org list"
|
||||
firstname :: string
|
||||
familyname :: string
|
||||
orcid :: int
|
||||
footnote :: string
|
||||
invariant ne_fsnames :: "firstname \<sigma> \<noteq> '''' \<and> familyname \<sigma> \<noteq> ''''"
|
||||
|
||||
(*<*)
|
||||
text*[acm1:: "acm"]\<open>An example acm-style affiliation\<close>
|
||||
(*>*)
|
||||
text\<open>\<close>
|
||||
|
||||
doc_class lncs_author = "author" +
|
||||
affiliations :: "lncs list"
|
||||
orcid :: int
|
||||
short_author :: string
|
||||
footnote :: string
|
||||
|
||||
(*<*)
|
||||
text*[lncs1:: "lncs"]\<open>An example lncs-style affiliation\<close>
|
||||
text*[lncs_auth1:: "lncs_author", affiliations = "[@{lncs \<open>lncs1\<close>}]"]\<open>Another example lncs-style affiliation\<close>
|
||||
find_theorems elsevier.tag_attribute
|
||||
(*>*)
|
||||
text\<open>\<close>
|
||||
|
||||
(* acm_name f s: concatenates the two strings f and s with a single space in
   between — presumably first name / surname, as the name suggests. *)
definition acm_name where "acm_name f s = f @ '' '' @ s"
|
||||
|
||||
(* concatWith str xs: joins the strings in xs, inserting the separator str
   between adjacent elements.  The empty list yields '''' and a singleton list
   yields its sole element, so no leading or trailing separator is produced. *)
fun concatWith :: "string \<Rightarrow> string list \<Rightarrow> string"
|
||||
where "concatWith str [] = ''''"
|
||||
|"concatWith str [a] = a"
|
||||
|"concatWith str (a#R) = a@str@(concatWith str R)"
|
||||
|
||||
(* Sanity property of concatWith: joining a non-empty list that contains at
   least one non-empty string can never yield the empty string, regardless of
   the separator. *)
lemma concatWith_non_mt : "(S\<noteq>[] \<and> (\<exists> s\<in>set S. s\<noteq>'''')) \<longrightarrow> (concatWith sep S) \<noteq> ''''"
|
||||
proof(induct S)
|
||||
case Nil
|
||||
then show ?case by simp
|
||||
next
|
||||
(* Step case: the witness non-empty string is either the head a or lies in S. *)
case (Cons a S)
|
||||
then show ?case apply(auto)[1]
|
||||
using concatWith.elims apply blast
|
||||
using concatWith.elims apply blast
|
||||
using list.set_cases by force
|
||||
qed
|
||||
|
||||
onto_morphism (acm) to elsevier
|
||||
where "convert\<^sub>a\<^sub>c\<^sub>m\<^sub>\<Rightarrow>\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r \<sigma> =
|
||||
\<lparr>elsevier.tag_attribute = acm.tag_attribute \<sigma>,
|
||||
organization = acm.institution \<sigma>,
|
||||
address_line = concatWith '','' [acm.street_address \<sigma>, acm.city \<sigma>],
|
||||
postcode = acm.postcode \<sigma> ,
|
||||
city = acm.city \<sigma> \<rparr>"
|
||||
|
||||
text\<open>Here is a more basic, but equivalent definition for the other way round:\<close>
|
||||
|
||||
definition elsevier_to_acm_morphism :: "elsevier_org \<Rightarrow> acm_org"
|
||||
("_ \<langle>acm\<rangle>\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r" [1000]999)
|
||||
where "\<sigma> \<langle>acm\<rangle>\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r = \<lparr> acm_org.tag_attribute = 1::int,
|
||||
acm_org.position = ''no position'',
|
||||
acm_org.institution = organization \<sigma>,
|
||||
acm_org.department = 0,
|
||||
acm_org.street_address = address_line \<sigma>,
|
||||
acm_org.city = elsevier_org.city \<sigma>,
|
||||
acm_org.state = 0,
|
||||
acm_org.country = ''no country'',
|
||||
acm_org.postcode = elsevier_org.postcode \<sigma> \<rparr>"
|
||||
|
||||
text\<open>The following onto-morphism links \<^typ>\<open>elsevierAuthor\<close>'s and \<^typ>\<open>acmAuthor\<close>. Note that
|
||||
the conversion implies trivial data-conversions (renaming of attributes in the classes),
|
||||
string-representation conversions, and conversions of second-staged, referenced instances.\<close>
|
||||
|
||||
onto_morphism (elsevierAuthor) to acmAuthor
|
||||
where "convert\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r\<^sub>\<Rightarrow>\<^sub>a\<^sub>c\<^sub>m\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r \<sigma> =
|
||||
\<lparr>author.tag_attribute = undefined,
|
||||
name = concatWith '','' [elsevierAuthor.firstname \<sigma>,elsevierAuthor.surname \<sigma>],
|
||||
email = author.email \<sigma>,
|
||||
acmAuthor.affiliations = (elsevierAuthor.affiliations \<sigma>)
|
||||
|> map (\<lambda>x. x \<langle>acm\<rangle>\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r),
|
||||
firstname = elsevierAuthor.firstname \<sigma>,
|
||||
familyname = elsevierAuthor.surname \<sigma>,
|
||||
orcid = 0, \<comment> \<open>la triche ! ! !\<close>
|
||||
footnote = elsevierAuthor.footnote \<sigma>\<rparr>"
|
||||
|
||||
|
||||
lemma elsevier_inv_preserved :
|
||||
"elsevierAuthor.ne_fsnames_inv \<sigma>
|
||||
\<Longrightarrow> acmAuthor.ne_fsnames_inv (convert\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r\<^sub>\<Rightarrow>\<^sub>a\<^sub>c\<^sub>m\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r \<sigma>)
|
||||
\<and> author.ne_fsnames_inv (convert\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r\<^sub>\<Rightarrow>\<^sub>a\<^sub>c\<^sub>m\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r \<sigma>)"
|
||||
unfolding elsevierAuthor.ne_fsnames_inv_def acmAuthor.ne_fsnames_inv_def
|
||||
convert\<^sub>e\<^sub>l\<^sub>s\<^sub>e\<^sub>v\<^sub>i\<^sub>e\<^sub>r\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r\<^sub>_\<^sub>a\<^sub>c\<^sub>m\<^sub>A\<^sub>u\<^sub>t\<^sub>h\<^sub>o\<^sub>r_def author.ne_fsnames_inv_def
|
||||
by auto
|
||||
|
||||
text\<open>The proof is, in order to quote Tony Hoare, ``as simple as it should be''. Note that it involves
|
||||
the lemmas like @{thm concatWith_non_mt} which in itself require inductions, \<^ie>, which are out of
|
||||
reach of pure ATP proof-techniques. \<close>
|
||||
|
||||
subsection\<open>Proving the Preservation of Ontological Mappings : A Domain-Ontology Morphism\<close>
|
||||
text\<open>The following example is drawn from a domain-specific scenario: For conventional data-models,
|
||||
be it represented by UML-class diagrams or SQL-like "tables" or Excel-sheet like presentations
|
||||
of uniform data, we can conceive an ontology (which is equivalent here to a conventional style-sheet)
|
||||
and annotate textual raw data. This example describes how meta-data can be used to
|
||||
calculate and transform this kind of representations in a type-safe and verified way. \<close>
|
||||
|
||||
(*<*)
|
||||
(* Mapped_PILIB_Ontology example *)
|
||||
(* rethink examples: should we "morph" providence too ? ? ? Why not ? bu *)
|
||||
|
||||
term\<open>fold (+) S 0\<close>
|
||||
|
||||
definition sum
|
||||
where "sum S = (fold (+) S 0)"
|
||||
(*>*)
|
||||
|
||||
text\<open>We model some basic enumerations as inductive data-types: \<close>
|
||||
datatype Hardware_Type =
|
||||
Motherboard | Expansion_Card | Storage_Device | Fixed_Media |
|
||||
Removable_Media | Input_Device | Output_Device
|
||||
|
||||
datatype Software_Type =
|
||||
Operating_system | Text_editor | Web_Navigator | Development_Environment
|
||||
|
||||
text\<open>In the sequel, we model a ''Reference Ontology'', \<^ie> a data structure in which we assume
|
||||
that standards or some de-facto-standard data-base refer to the data in the domain of electronic
|
||||
devices:\<close>
|
||||
|
||||
onto_class Resource =
|
||||
name :: string
|
||||
|
||||
onto_class Electronic = Resource +
|
||||
provider :: string
|
||||
manufacturer :: string
|
||||
|
||||
onto_class Component = Electronic +
|
||||
mass :: int
|
||||
|
||||
onto_class Simulation_Model = Electronic +
|
||||
simulate :: Component
|
||||
composed_of :: "Component list"
|
||||
version :: int
|
||||
|
||||
onto_class Informatic = Resource +
|
||||
description :: string
|
||||
|
||||
onto_class Hardware = Informatic +
|
||||
type :: Hardware_Type
|
||||
mass :: int
|
||||
composed_of :: "Component list"
|
||||
invariant c1 :: "mass \<sigma> = sum(map Component.mass (composed_of \<sigma>))"
|
||||
|
||||
onto_class Software = Informatic +
|
||||
type :: Software_Type
|
||||
version :: int
|
||||
|
||||
text\<open>Finally, we present a \<^emph>\<open>local ontology\<close>, \<^ie> a data structure used in a local store
|
||||
in its data-base of cash-system:\<close>
|
||||
|
||||
onto_class Item =
|
||||
name :: string
|
||||
|
||||
onto_class Product = Item +
|
||||
serial_number :: int
|
||||
provider :: string
|
||||
mass :: int
|
||||
|
||||
onto_class Electronic_Component = Product +
|
||||
serial_number :: int
|
||||
|
||||
onto_class Monitor = Product +
|
||||
composed_of :: "Electronic_Component list"
|
||||
invariant c2 :: "Product.mass \<sigma> = sum(map Product.mass (composed_of \<sigma>))"
|
||||
|
||||
term\<open>Product.mass \<sigma> = sum(map Product.mass (composed_of \<sigma>))\<close>
|
||||
|
||||
onto_class Computer_Software = Item +
|
||||
type :: Software_Type
|
||||
version :: int
|
||||
|
||||
text\<open>These two example ontologies were linked via conversion functions called \<^emph>\<open>morphisms\<close>.
|
||||
The hic is that we can prove for the morphisms connecting these ontologies, that the conversions
|
||||
are guaranteed to preserve the data-invariants, although the data-structures (and, of course,
|
||||
the presentation of them) is very different. Besides, morphisms functions can be ``forgetful''
|
||||
(\<^ie> surjective), ``embedding'' (\<^ie> injective) or even ``one-to-one'' ((\<^ie> bijective).\<close>
|
||||
|
||||
definition Item_to_Resource_morphism :: "Item \<Rightarrow> Resource"
|
||||
("_ \<langle>Resource\<rangle>\<^sub>I\<^sub>t\<^sub>e\<^sub>m" [1000]999)
|
||||
where " \<sigma> \<langle>Resource\<rangle>\<^sub>I\<^sub>t\<^sub>e\<^sub>m =
|
||||
\<lparr> Resource.tag_attribute = 1::int ,
|
||||
Resource.name = name \<sigma> \<rparr>"
|
||||
|
||||
definition Product_to_Resource_morphism :: "Product \<Rightarrow> Resource"
|
||||
("_ \<langle>Resource\<rangle>\<^sub>P\<^sub>r\<^sub>o\<^sub>d\<^sub>u\<^sub>c\<^sub>t" [1000]999)
|
||||
where " \<sigma> \<langle>Resource\<rangle>\<^sub>P\<^sub>r\<^sub>o\<^sub>d\<^sub>u\<^sub>c\<^sub>t =
|
||||
\<lparr> Resource.tag_attribute = 2::int ,
|
||||
Resource.name = name \<sigma> \<rparr>"
|
||||
|
||||
definition Computer_Software_to_Software_morphism :: "Computer_Software \<Rightarrow> Software"
|
||||
("_ \<langle>Software\<rangle>\<^sub>S\<^sub>o\<^sub>f\<^sub>t\<^sub>C\<^sub>m\<^sub>p" [1000]999)
|
||||
where "\<sigma> \<langle>Software\<rangle>\<^sub>S\<^sub>o\<^sub>f\<^sub>t\<^sub>C\<^sub>m\<^sub>p =
|
||||
\<lparr> Resource.tag_attribute = 3::int ,
|
||||
Resource.name = name \<sigma> ,
|
||||
Informatic.description = ''no description'',
|
||||
Software.type = type \<sigma> ,
|
||||
Software.version = version \<sigma> \<rparr>"
|
||||
|
||||
definition Electronic_Component_to_Component_morphism :: "Electronic_Component \<Rightarrow> Component"
|
||||
("_ \<langle>Component\<rangle>\<^sub>E\<^sub>l\<^sub>e\<^sub>c\<^sub>C\<^sub>m\<^sub>p" [1000]999)
|
||||
where "\<sigma> \<langle>Component\<rangle>\<^sub>E\<^sub>l\<^sub>e\<^sub>c\<^sub>C\<^sub>m\<^sub>p =
|
||||
\<lparr> Resource.tag_attribute = 4::int ,
|
||||
Resource.name = name \<sigma> ,
|
||||
Electronic.provider = provider \<sigma> ,
|
||||
Electronic.manufacturer = ''no manufacturer'' ,
|
||||
Component.mass = mass \<sigma> \<rparr>"
|
||||
|
||||
definition Monitor_to_Hardware_morphism :: "Monitor \<Rightarrow> Hardware"
|
||||
("_ \<langle>Hardware\<rangle>\<^sub>C\<^sub>o\<^sub>m\<^sub>p\<^sub>u\<^sub>t\<^sub>e\<^sub>r\<^sub>H\<^sub>a\<^sub>r\<^sub>d\<^sub>w\<^sub>a\<^sub>r\<^sub>e" [1000]999)
|
||||
where "\<sigma> \<langle>Hardware\<rangle>\<^sub>C\<^sub>o\<^sub>m\<^sub>p\<^sub>u\<^sub>t\<^sub>e\<^sub>r\<^sub>H\<^sub>a\<^sub>r\<^sub>d\<^sub>w\<^sub>a\<^sub>r\<^sub>e =
|
||||
\<lparr> Resource.tag_attribute = 5::int ,
|
||||
Resource.name = name \<sigma> ,
|
||||
Informatic.description = ''no description'',
|
||||
Hardware.type = Output_Device,
|
||||
Hardware.mass = mass \<sigma> ,
|
||||
Hardware.composed_of = map Electronic_Component_to_Component_morphism (composed_of \<sigma>)
|
||||
\<rparr>"
|
||||
|
||||
text\<open>On this basis, we can state the following invariant preservation theorems:\<close>
|
||||
|
||||
lemma inv_c2_preserved :
|
||||
"c2_inv \<sigma> \<Longrightarrow> c1_inv (\<sigma> \<langle>Hardware\<rangle>\<^sub>C\<^sub>o\<^sub>m\<^sub>p\<^sub>u\<^sub>t\<^sub>e\<^sub>r\<^sub>H\<^sub>a\<^sub>r\<^sub>d\<^sub>w\<^sub>a\<^sub>r\<^sub>e)"
|
||||
unfolding c1_inv_def c2_inv_def
|
||||
Monitor_to_Hardware_morphism_def Electronic_Component_to_Component_morphism_def
|
||||
by (auto simp: comp_def)
|
||||
|
||||
lemma Monitor_to_Hardware_morphism_total :
|
||||
"Monitor_to_Hardware_morphism ` ({X::Monitor. c2_inv X}) \<subseteq> ({X::Hardware. c1_inv X})"
|
||||
using inv_c2_preserved
|
||||
by auto
|
||||
|
||||
type_synonym local_ontology = "Item * Electronic_Component * Monitor"
|
||||
type_synonym reference_ontology = "Resource * Component * Hardware"
|
||||
|
||||
fun ontology_mapping :: "local_ontology \<Rightarrow> reference_ontology"
|
||||
where "ontology_mapping (x, y, z) = (x\<langle>Resource\<rangle>\<^sub>I\<^sub>t\<^sub>e\<^sub>m, y\<langle>Component\<rangle>\<^sub>E\<^sub>l\<^sub>e\<^sub>c\<^sub>C\<^sub>m\<^sub>p, z\<langle>Hardware\<rangle>\<^sub>C\<^sub>o\<^sub>m\<^sub>p\<^sub>u\<^sub>t\<^sub>e\<^sub>r\<^sub>H\<^sub>a\<^sub>r\<^sub>d\<^sub>w\<^sub>a\<^sub>r\<^sub>e)"
|
||||
|
||||
lemma ontology_mapping_total :
|
||||
"ontology_mapping ` {X. c2_inv (snd (snd X))} \<subseteq> {X. c1_inv (snd (snd X))}"
|
||||
using inv_c2_preserved
|
||||
by auto
|
||||
|
||||
text\<open>Note that in contrast to conventional data-translations, the preservation of a class-invariant
|
||||
is not just established by a validation of the result, it is proven once and for all for all instances
|
||||
of the classes.\<close>
|
||||
|
||||
subsection\<open>Proving Monitor-Refinements\<close>
|
||||
|
||||
(*<*)
|
||||
(* switch on regexp syntax *)
|
||||
notation Star ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
|
||||
notation Plus (infixr "||" 55)
|
||||
notation Times (infixr "~~" 60)
|
||||
notation Atom ("\<lfloor>_\<rfloor>" 65)
|
||||
(*>*)
|
||||
|
||||
|
||||
text\<open>Monitors are regular-expressions that allow for specifying instances of classes to appear in
|
||||
a particular order in a document. They are used to specify some structural aspects of a document.
|
||||
Based on an AFP theory by Tobias Nipkow on Functional Automata
|
||||
(\<^ie> a characterization of regular automata using functional polymorphic descriptions of transition
|
||||
functions avoiding any of the ad-hoc finitizations commonly used in automata theory), which
|
||||
comprises also functions to generate executable deterministic and non-deterministic automata,
|
||||
this theory is compiled to SML-code that was integrated in the \<^dof> system. The necessary
|
||||
adaptions of this integration can be found in the theory \<^theory>\<open>Isabelle_DOF.RegExpInterface\<close>,
|
||||
which also contains the basic definitions and theorems for the concepts used here.
|
||||
|
||||
Recall that the monitor of \<^term>\<open>scholarly_paper.article\<close> is defined by: \<^vs>\<open>0.5cm\<close>
|
||||
|
||||
@{thm [indent=20, margin=70, names_short] scholarly_paper.article_monitor_def}
|
||||
|
||||
\<^vs>\<open>0.5cm\<close> However, it is possible to reason over the language of monitors and prove classical
|
||||
refinement notions such as trace-refinement on the monitor-level, so once-and-for-all for all
|
||||
instances of validated documents conforming to a particular ontology. The primitive recursive
|
||||
operators \<^term>\<open>RegExpInterface.Lang\<close> and \<^term>\<open>RegExpInterface.L\<^sub>s\<^sub>u\<^sub>b\<close> generate the languages of the
|
||||
regular expression language, where \<^term>\<open>L\<^sub>s\<^sub>u\<^sub>b\<close> takes the sub-ordering relation of classes into
|
||||
account.
|
||||
|
||||
The proof of : \<^vs>\<open>0.5cm\<close>
|
||||
|
||||
@{thm [indent=20,margin=70,names_short] articles_sub_reports}
|
||||
|
||||
\<^vs>\<open>0.5cm\<close> can be found in theory \<^theory>\<open>Isabelle_DOF.technical_report\<close>;
|
||||
it is, again, "as simple as it should be" (to cite Tony Hoare).
|
||||
|
||||
The proof of: \<^vs>\<open>0.5cm\<close>
|
||||
|
||||
@{thm [indent=20,margin=70,names_short] articles_Lsub_reports}
|
||||
|
||||
\<^vs>\<open>0.5cm\<close> is slightly more evolved; this is due to the fact that \<^dof> does not generate a proof of
|
||||
the acyclicity of the graph of the class-hierarchy @{term doc_class_rel} automatically. For a given
|
||||
hierarchy, this proof will always succeed (since \<^dof> checks this on the meta-level, of course),
|
||||
which permits to deduce the anti-symmetry of the transitive closure of @{term doc_class_rel}
|
||||
and therefore to establish that the doc-classes can be organized in an order (\<^ie> the
|
||||
type \<^typ>\<open>doc_class\<close> is an instance of the type-class \<^class>\<open>order\<close>). On this basis, the proof
|
||||
of the above language refinement is quasi automatic. This proof is also part of
|
||||
\<^theory>\<open>Isabelle_DOF.technical_report\<close>.
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
|
||||
(* switch off regexp syntax *)
|
||||
no_notation Star ("\<lbrace>(_)\<rbrace>\<^sup>*" [0]100)
|
||||
no_notation Plus (infixr "||" 55)
|
||||
no_notation Times (infixr "~~" 60)
|
||||
no_notation Atom ("\<lfloor>_\<rfloor>" 65)
|
||||
|
||||
end
|
||||
(*>*)
|
||||
@ -1,237 +0,0 @@
|
||||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019-2022 University of Exeter
|
||||
* 2018-2022 University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory "M_07_Implementation"
|
||||
imports "M_06_RefMan"
|
||||
begin
|
||||
(*>*)
|
||||
|
||||
|
||||
chapter*[isadof_developers::text_section]\<open>Extending \<^isadof>\<close>
|
||||
text\<open>
|
||||
In this chapter, we describe the basic implementation aspects of \<^isadof>, which is based on
|
||||
the following design-decisions:
|
||||
\<^item> the entire \<^isadof> is a ``pure add-on,'' \<^ie>, we deliberately resign to the possibility to
|
||||
modify Isabelle itself,
|
||||
\<^item> \<^isadof> has been organized as an AFP entry and a form of an Isabelle component that is
|
||||
compatible with this goal,
|
||||
\<^item> we decided to make the markup-generation by itself to adapt it as well as possible to the
|
||||
needs of tracking the linking in documents,
|
||||
\<^item> \<^isadof> is deeply integrated into the Isabelle's IDE (PIDE) to give immediate feedback during
|
||||
editing and other forms of document evolution.
|
||||
\<close>
|
||||
text\<open>
|
||||
Semantic macros, as required by our document model, are called \<^emph>\<open>document antiquotations\<close>
|
||||
in the Isabelle literature~\<^cite>\<open>"wenzel:isabelle-isar:2020"\<close>. While Isabelle's code-antiquotations
|
||||
are an old concept going back to Lisp and having found via SML and OCaml their ways into modern
|
||||
proof systems, special annotation syntax inside documentation comments have their roots in
|
||||
documentation generators such as Javadoc. Their use, however, as a mechanism to embed
|
||||
machine-checked \<^emph>\<open>formal content\<close> is usually very limited and also lacks
|
||||
IDE support.
|
||||
\<close>
|
||||
|
||||
section\<open>\<^isadof>: A User-Defined Plugin in Isabelle/Isar\<close>
|
||||
text\<open>
|
||||
A plugin in Isabelle starts with defining the local data and registering it in the framework. As
|
||||
mentioned before, contexts are structures with independent cells/compartments having three
|
||||
primitives \<^boxed_sml>\<open>init\<close>, \<^boxed_sml>\<open>extend\<close> and \<^boxed_sml>\<open>merge\<close>. Technically this is done by
|
||||
instantiating a functor \<^boxed_sml>\<open>Theory_Data\<close>, and the following fairly typical code-fragment
|
||||
is drawn from \<^isadof>:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>structure Onto_Classes = Theory_Data
|
||||
(
|
||||
type T = onto_class Name_Space.table;
|
||||
val empty : T = Name_Space.empty_table onto_classN;
|
||||
fun merge data : T = Name_Space.merge_tables data;
|
||||
);\<close>}
|
||||
where the table \<^boxed_sml>\<open>Name_Space.table\<close> manages
|
||||
the environment for class definitions (\<^boxed_sml>\<open>onto_class\<close>), inducing the inheritance relation,
|
||||
using a \<^boxed_sml>\<open>Name_Space\<close> table. Other tables capture, \eg,
|
||||
the class instances, class invariants, inner-syntax antiquotations.
|
||||
Operations follow the MVC-pattern, where
|
||||
Isabelle/Isar provides the controller part. A typical model operation has the type:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>val opn :: <args_type> -> theory -> theory\<close>}
|
||||
representing a transformation on system contexts. For example, the operation of defining a class
|
||||
in the context is presented as follows:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>fun add_onto_class name onto_class thy =
|
||||
thy |> Onto_Classes.map
|
||||
(Name_Space.define (Context.Theory thy) true (name, onto_class) #> #2);
|
||||
\<close>}
|
||||
This code fragment uses operations from the library structure \<^boxed_sml>\<open>Name_Space\<close>
|
||||
that were used to update the appropriate table for document objects in
|
||||
the plugin-local state.
|
||||
A name space manages a collection of long names, together with a mapping
|
||||
between partially qualified external names and fully qualified internal names
|
||||
(in both directions).
|
||||
It can also keep track of the declarations and updates position of objects,
|
||||
and then allows a simple markup-generation.
|
||||
Possible exceptions to the update operation are automatically triggered.
|
||||
|
||||
Finally, the view-aspects were handled by an API for parsing-combinators. The library structure
|
||||
\<^boxed_sml>\<open>Scan\<close> provides the operators:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>op || : ('a -> 'b) * ('a -> 'b) -> 'a -> 'b
|
||||
op -- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e
|
||||
op >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c
|
||||
op option : ('a -> 'b * 'a) -> 'a -> 'b option * 'a
|
||||
op repeat : ('a -> 'b * 'a) -> 'a -> 'b list * 'a \<close>}
|
||||
for alternative, sequence, and piping, as well as combinators for option and repeat. Parsing
|
||||
combinators have the advantage that they can be integrated into standard programs,
|
||||
and they enable the dynamic extension of the grammar. There is a more high-level structure
|
||||
\<^boxed_sml>\<open>Parse\<close> providing specific combinators for the command-language Isar:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>val attribute = Parse.position Parse.name
|
||||
-- Scan.optional(Parse.$$$ "=" |-- Parse.!!! Parse.name)"";
|
||||
val reference = Parse.position Parse.name
|
||||
-- Scan.option (Parse.$$$ "::" |-- Parse.!!!
|
||||
(Parse.position Parse.name));
|
||||
val attributes =(Parse.$$$ "[" |-- (reference
|
||||
-- (Scan.optional(Parse.$$$ ","
|
||||
|--(Parse.enum ","attribute)))[]))--| Parse.$$$ "]"
|
||||
\<close>}
|
||||
|
||||
The ``model'' \<^boxed_sml>\<open>create_and_check_docitem\<close> and ``new''
|
||||
\<^boxed_sml>\<open>ODL_Meta_Args_Parser.attributes\<close> parts were
|
||||
combined via the piping operator and registered in the Isar toplevel:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>val _ =
|
||||
let fun create_and_check_docitem (((oid, pos),cid_pos),doc_attrs)
|
||||
= (Value_Command.Docitem_Parser.create_and_check_docitem
|
||||
{is_monitor = false} {is_inline=true}
|
||||
{define = false} oid pos (cid_pos) (doc_attrs))
|
||||
in Outer_Syntax.command @{command_keyword "declare_reference*"}
|
||||
"declare document reference"
|
||||
(ODL_Meta_Args_Parser.attributes
|
||||
>> (Toplevel.theory o create_and_check_docitem))
|
||||
end;\<close>}
|
||||
|
||||
Altogether, this gives the extension of Isabelle/HOL with Isar syntax and semantics for the
|
||||
new \emph{command}:
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
declare_reference* [lal::requirement, alpha="main", beta=42]
|
||||
\<close>}
|
||||
|
||||
The construction also generates implicitly some markup information; for example, when hovering
|
||||
over the \<^boxed_theory_text>\<open>declare_reference*\<close> command in the IDE, a popup window with the text:
|
||||
``declare document reference'' will appear.
|
||||
\<close>
|
||||
|
||||
section\<open>Programming Antiquotations\<close>
|
||||
text\<open>
|
||||
The definition and registration of text antiquotations and ML-antiquotations is similar in
|
||||
principle: based on a number of combinators, new user-defined antiquotation syntax and semantics
|
||||
can be added to the system that works on the internal plugin-data freely. For example, in
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open>val _ = Theory.setup
|
||||
(docitem_antiquotation @{binding "docitem"} DOF_core.default_cid #>
|
||||
|
||||
ML_Antiquotation.inline @{binding "docitem_value"}
|
||||
ML_antiquotation_docitem_value)\<close>}
|
||||
the text antiquotation \<^boxed_sml>\<open>docitem\<close> is declared and bounded to a parser for the argument
|
||||
syntax and the overall semantics. This code defines a generic antiquotation to be used in text
|
||||
elements such as
|
||||
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open>as defined in @{docitem \<open>d1\<close>} ...\<close>
|
||||
\<close>}
|
||||
|
||||
The subsequent registration \<^boxed_sml>\<open>docitem_value\<close> binds code to a ML-antiquotation usable
|
||||
in an ML context for user-defined extensions; it permits the access to the current ``value''
|
||||
of document element, \<^ie>, a term with the entire update history.
|
||||
|
||||
It is possible to generate antiquotations \emph{dynamically}, as a consequence of a class
|
||||
definition in ODL. The processing of the ODL class \<^typ>\<open>definition\<close> also \emph{generates}
|
||||
a text antiquotation \<^boxed_theory_text>\<open>@{"definition" \<open>d1\<close>}\<close>, which works similar to
|
||||
\<^boxed_theory_text>\<open>@{docitem \<open>d1\<close>}\<close> except for an additional type-check that assures that
|
||||
\<^boxed_theory_text>\<open>d1\<close> is a reference to a definition. These type-checks support the subclass hierarchy.
|
||||
\<close>
|
||||
|
||||
section\<open>Implementing Second-level Type-Checking\<close>
|
||||
|
||||
text\<open>
|
||||
On expressions for attribute values, for which we chose to use HOL syntax to avoid that users
|
||||
need to learn another syntax, we implemented an own pass over type-checked terms. Stored in the
|
||||
late-binding table \<^boxed_sml>\<open>ISA_transformer_tab\<close>, we register for each term-annotation
|
||||
(ISA's), a function of type
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open> theory -> term * typ * Position.T -> term option\<close>}
|
||||
|
||||
Executed in a second pass of term parsing, ISA's may just return \<^boxed_theory_text>\<open>None\<close>. This is
|
||||
adequate for ISA's just performing some checking in the logical context \<^boxed_theory_text>\<open>theory\<close>;
|
||||
ISA's of this kind report errors by exceptions. In contrast, \<^emph>\<open>transforming\<close> ISA's will
|
||||
yield a term; this is adequate, for example, by replacing a string-reference to some term denoted
|
||||
by it. This late-binding table is also used to generate standard inner-syntax-antiquotations from
|
||||
a \<^boxed_theory_text>\<open>doc_class\<close>.
|
||||
\<close>
|
||||
|
||||
section\<open>Programming Class Invariants\<close>
|
||||
text\<open>
|
||||
See \<^technical>\<open>sec_low_level_inv\<close>.
|
||||
\<close>
|
||||
|
||||
section\<open>Implementing Monitors\<close>
|
||||
|
||||
text\<open>
|
||||
Since monitor-clauses have a regular expression syntax, it is natural to implement them as
|
||||
deterministic automata. These are stored in the \<^boxed_sml>\<open>docobj_tab\<close> for monitor-objects
|
||||
in the \<^isadof> component. We implemented the functions:
|
||||
|
||||
@{boxed_sml [display]
|
||||
\<open> val enabled : automaton -> env -> cid list
|
||||
val next : automaton -> env -> cid -> automaton\<close>}
|
||||
where \<^boxed_sml>\<open>env\<close> is basically a map between internal automaton states and class-id's
|
||||
(\<^boxed_sml>\<open>cid\<close>'s). An automaton is said to be \<^emph>\<open>enabled\<close> for a class-id,
|
||||
iff it either occurs in its accept-set or its reject-set (see @{docitem "sec_monitors"}). During
|
||||
top-down document validation, whenever a text-element is encountered, it is checked if a monitor
|
||||
is \emph{enabled} for this class; in this case, the \<^boxed_sml>\<open>next\<close>-operation is executed. The
|
||||
transformed automaton recognizing the suffix is stored in \<^boxed_sml>\<open>docobj_tab\<close> if
|
||||
possible;
|
||||
otherwise, if \<^boxed_sml>\<open>next\<close> fails, an error is reported. The automata implementation
|
||||
is, in large parts, generated from a formalization of functional automata
|
||||
\<^cite>\<open>"nipkow.ea:functional-Automata-afp:2004"\<close>.
|
||||
\<close>
|
||||
|
||||
section\<open>The \<^LaTeX>-Core of \<^isadof>\<close>
|
||||
text\<open>
|
||||
The \<^LaTeX>-implementation of \<^isadof> heavily relies on the
|
||||
``keycommand''~\<^cite>\<open>"chervet:keycommand:2010"\<close> package. In fact, the core \<^isadof> \<^LaTeX>-commands
|
||||
are just wrappers for the corresponding commands from the keycommand package:
|
||||
|
||||
@{boxed_latex [display]
|
||||
\<open>\newcommand\newisadof[1]{%
|
||||
\expandafter\newkeycommand\csname isaDof.#1\endcsname}%
|
||||
\newcommand\renewisadof[1]{%
|
||||
\expandafter\renewkeycommand\csname isaDof.#1\endcsname}%
|
||||
\newcommand\provideisadof[1]{%
|
||||
\expandafter\providekeycommand\csname isaDof.#1\endcsname}%\<close>}
|
||||
|
||||
The \<^LaTeX>-generator of \<^isadof> maps each \<^boxed_theory_text>\<open>doc_item\<close> to an \<^LaTeX>-environment (recall
|
||||
@{docitem "text_elements"}). As generic \<^boxed_theory_text>\<open>doc_item\<close>s are derived from the text element,
|
||||
the environment \inlineltx|isamarkuptext*| builds the core of \<^isadof>'s \<^LaTeX> implementation.
|
||||
|
||||
\<close>
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
||||
22
README.md
@ -7,7 +7,7 @@ Isabelle/DOF allows for both conventional typesetting and formal development.
|
||||
|
||||
Isabelle/DOF has two major prerequisites:
|
||||
|
||||
* **Isabelle (Development Version):** Isabelle/DOF requires [Isabelle](https://isabelle.in.tum.de/)
|
||||
* **Isabelle 2024:** Isabelle/DOF requires [Isabelle](https://isabelle.in.tum.de/)
|
||||
and several entries from the [Archive of Formal Proofs
|
||||
(AFP)](https://www.isa-afp.org/).
|
||||
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
|
||||
@ -16,7 +16,6 @@ Isabelle/DOF has two major prerequisites:
|
||||
|
||||
## Installation
|
||||
|
||||
<!--
|
||||
Isabelle/DOF is available as part of the [Archive of Formal Proofs
|
||||
(AFP)](https://www.isa-afp.org/). This is the most convenient way to install
|
||||
Isabelle/DOF for the latest official release of Isabelle.
|
||||
@ -29,28 +28,21 @@ distribution for your operating system from the [Isabelle
|
||||
website](https://isabelle.in.tum.de/). Furthermore, please install the AFP
|
||||
following the instructions given at <https://www.isa-afp.org/help.html>.
|
||||
|
||||
Isabelle/DOF is currently consisting out of three AFP entries:
|
||||
Isabelle/DOF is provided as one AFP entry:
|
||||
|
||||
* [Isabelle_DOF:](https://www.isa-afp.org/entries/Isabelle_DOF.html) This entry
|
||||
contains the Isabelle/DOF system itself, including the Isabelle/DOF manual.
|
||||
* [Isabelle_DOF-Example-I:](https://www.isa-afp.org/entries/Isabelle_DOF-Example-I.html)
|
||||
This entry contains an example of an academic paper written using the
|
||||
Isabelle/DOF system.
|
||||
* [Isabelle_DOF-Example-II:](https://www.isa-afp.org/entries/Isabelle_DOF-Example-II.html)
|
||||
This entry contains an example of an academic paper written using the
|
||||
Isabelle/DOF system.
|
||||
|
||||
-->
|
||||
|
||||
### Installation of the Development Version (Git Repository)
|
||||
|
||||
The development version of Isabelle/DOF that is available in this Git repository
|
||||
provides, over the AFP version, additional ontologies, document templates, and
|
||||
examples that might not yet "ready for general use". Furthermore, as it is
|
||||
examples that might not yet “ready for general use”. Furthermore, as it is
|
||||
provided as an Isabelle component, it can also provide additional tools that
|
||||
cannot be distributed via the AFP. For more details on installing the
|
||||
development version, please consult the
|
||||
[README_DEVELOPMENT.md](./README_DEVELOPMENT.md) file.
|
||||
cannot be distributed via the AFP. As this repository provides a (potentially)
|
||||
updated version of Isabelle/DOF, it conflicts with a complete installation of
|
||||
the AFP. For more details on installing the development version, please consult
|
||||
the [README_DEVELOPMENT.md](./README_DEVELOPMENT.md) file.
|
||||
|
||||
After installing the prerequisites, change into the directory containing
|
||||
Isabelle/DOF (this should be the directory containing this ``README.md`` file)
|
||||
|
||||
@ -5,9 +5,9 @@
|
||||
Isabelle/DOF has three major prerequisites:
|
||||
|
||||
* **Isabelle:** Isabelle/DOF requires [Isabelle
|
||||
2022](https://isabelle.in.tum.de/website-Isabelle2022/). Please download the
|
||||
Isabelle 2022 distribution for your operating system from the [Isabelle
|
||||
website](https://isabelle.in.tum.de/website-Isabelle2022/).
|
||||
2023](https://isabelle.in.tum.de/website-Isabelle2023/). Please download the
|
||||
Isabelle 2023 distribution for your operating system from the [Isabelle
|
||||
website](https://isabelle.in.tum.de/website-Isabelle2023/).
|
||||
* **AFP:** Isabelle/DOF requires several entries from the [Archive of Formal Proofs
|
||||
(AFP)](https://www.isa-afp.org/).
|
||||
* **LaTeX:** Isabelle/DOF requires a modern LaTeX installation, i.e., at least
|
||||
@ -25,14 +25,12 @@ Both have their own advantages and disadvantages.
|
||||
If you use the AFP with other Isabelle projects, you might want to install the
|
||||
complete AFP. For this, please follow the instructions given at
|
||||
<https://www.isa-afp.org/using.html>.
|
||||
<!--
|
||||
As Isabelle session names need to be
|
||||
unique, you will need to disable the entries ``Isabelle_DOF`` and
|
||||
``Isabelle_DOF-Example-I`` provided as part of the AFP. For this,
|
||||
you will need to edit the file ``$AFP/thys/ROOTS`` (where ``$AFP`` refers to the
|
||||
directory in which you installed the AFP) and delete the two entries
|
||||
``Isabelle_DOF`` and ``Isabelle_DOF-Example-I``.
|
||||
-->
|
||||
|
||||
As Isabelle session names need to be unique, you will need to disable the entry
|
||||
``Isabelle_DOF`` that is provided as part of the AFP. For doing so, you will
|
||||
need to edit the file ``$AFP/thys/ROOTS`` (where ``$AFP`` refers to the
|
||||
directory in which you installed the AFP) and delete the entry
|
||||
``Isabelle_DOF``.
|
||||
|
||||
For the development version of Isabelle, installing the complete AFP
|
||||
by cloning the [afp-devel](https://foss.heptapod.net/isa-afp/afp-devel/)
|
||||
@ -95,7 +93,7 @@ Using the ``-o`` option, different ontology setups can be selected and using the
|
||||
foo@bar:~$ isabelle dof_mkroot -o scholarly_paper -t scrartcl
|
||||
```
|
||||
|
||||
creates a setup using the scholarly_paper ontology and the article class from
|
||||
creates a setup using the ``scholarly_paper`` ontology and the article class from
|
||||
the KOMA-Script bundle.
|
||||
|
||||
The help (option ``-h``) shows a list of all supported ontologies and document
|
||||
|
||||
2
ROOTS
@ -1,4 +1,4 @@
|
||||
Isabelle_DOF
|
||||
upstream_afp
|
||||
Isabelle_DOF-Proofs
|
||||
Isabelle_DOF-Ontologies
|
||||
Isabelle_DOF-Unit-Tests
|
||||
|
||||
1
upstream_afp/ROOTS
Normal file
@ -0,0 +1 @@
|
||||
Isabelle_DOF
|
||||