First Version with patched LaTeX Generator thy_output.ML
HOL-OCL/Isabelle_DOF/Isabelle2018 There was a failure building this commit Details

This commit is contained in:
Burkhart Wolff 2019-04-29 22:24:32 +02:00
parent 4a9e765cd3
commit 40537d4009
18 changed files with 793 additions and 823 deletions

View File

@ -19,7 +19,7 @@ that provide direct support in the PIDE framework. \<close>
theory Isa_DOF (* Isabelle Document Ontology Framework *)
imports Main
RegExpInterface (* Interface to functional regular automata for monitoring *)
Assert
Assert
keywords "+=" ":=" "accepts" "rejects"
@ -45,6 +45,9 @@ theory Isa_DOF (* Isabelle Document Ontology Framework *)
begin
text\<open> @{footnote \<open>sdf\<close>}, @{file "$ISABELLE_HOME/src/Pure/ROOT.ML"}\<close>
section\<open>Primitive Markup Generators\<close>
ML\<open>
@ -84,9 +87,9 @@ fun bstring_to_holstring ctxt x (* (x:bstring) *) : string =
fun chopper p (x:string) =
let fun hss buff [] = rev buff
|hss buff (S as a::R) = if p a then let val (front,rest) = take_prefix p S
|hss buff (S as a::R) = if p a then let val (front,rest) = chop_prefix p S
in hss (String.implode front :: buff) rest end
else let val (front,rest) = take_prefix (not o p) S
else let val (front,rest) = chop_prefix (not o p) S
in hss (String.implode front ::buff) rest end
in hss [] (String.explode x) end;
@ -109,11 +112,12 @@ section\<open> A HomeGrown Document Type Management (the ''Model'') \<close>
ML\<open>
structure DOF_core =
struct
type docclass_struct = {params : (string * sort) list, (*currently not used *)
type docclass_struct = {params : (string * sort) list, (*currently not used *)
name : binding,
thy_name : string, id : serial, (* for pide *)
inherits_from : (typ list * string) option, (* imports *)
attribute_decl : (binding * typ * term option) list, (* class local *)
thy_name : string, id : serial, (* for pide *)
inherits_from : (typ list * string) option, (* imports *)
attribute_decl : (binding*typ*term option)list, (* class local *)
rejectS : term list,
rex : term list } (* monitoring regexps --- product semantics*)
@ -170,7 +174,8 @@ struct
type open_monitor_info = {accepted_cids : string list,
rejected_cids : string list,
automatas : RegExpInterface.automaton list }
automatas : RegExpInterface.automaton list
}
type monitor_tab = open_monitor_info Symtab.table
val initial_monitor_tab:monitor_tab = Symtab.empty
@ -361,9 +366,10 @@ fun check_reject_atom cid_long term =
(n,_)::_ => error("No schematic variables allowed in monitor regexp:" ^ n)
| _ => ()
(* Missing: Checks on constants such as undefined, ... *)
val _ = case term of
(* val _ = case term of
Const(_ ,Type(@{type_name "rexp"},[_])) => ()
| _ => error("current restriction: only atoms allowed here!")
*)
in term end
@ -382,7 +388,7 @@ fun define_doc_class_global (params', binding) parent fields rexp reject_Atoms t
arbitrary type *)
NONE => ()
| SOME(_,cid_parent) =>
if not (is_defined_cid_global cid_parent thy)
if not (is_defined_cid_global cid_parent thy)
then error("document class undefined : " ^ cid_parent)
else ()
val cid_long = name2doc_class_name thy cid
@ -527,7 +533,7 @@ fun update_value_global oid upd thy =
SOME{pos,thy_name,value,id,cid} =>
let val tab' = Symtab.update(oid,SOME{pos=pos,thy_name=thy_name,
value=upd value,id=id, cid=cid})
in map_data_global (upd_docobj_tab(fn{tab,maxano}=>{tab=tab' tab,maxano=maxano})) thy end
in map_data_global (upd_docobj_tab(fn{tab,maxano}=>{tab=tab' tab,maxano=maxano})) thy end
| NONE => error("undefined doc object: "^oid)
@ -619,22 +625,16 @@ fun check_doc_global (strict_checking : bool) ctxt =
end
val _ =
Outer_Syntax.command @{command_keyword print_doc_classes}
"print document classes"
(Parse.opt_bang >> (fn b =>
Toplevel.keep (print_doc_classes b o Toplevel.context_of)));
Outer_Syntax.command \<^command_keyword>\<open>print_doc_classes\<close> "print document classes"
(Parse.opt_bang >> (fn b => Toplevel.keep (print_doc_classes b o Toplevel.context_of)));
val _ =
Outer_Syntax.command @{command_keyword print_doc_items}
"print document items"
(Parse.opt_bang >> (fn b =>
Toplevel.keep (print_doc_items b o Toplevel.context_of)));
Outer_Syntax.command \<^command_keyword>\<open>print_doc_items\<close> "print document items"
(Parse.opt_bang >> (fn b => Toplevel.keep (print_doc_items b o Toplevel.context_of)));
val _ =
Outer_Syntax.command @{command_keyword check_doc_global}
"check global document consistency"
(Parse.opt_bang >> (fn b =>
Toplevel.keep (check_doc_global b o Toplevel.context_of)));
Outer_Syntax.command \<^command_keyword>\<open>check_doc_global\<close> "check global document consistency"
(Parse.opt_bang >> (fn b => Toplevel.keep (check_doc_global b o Toplevel.context_of)));
@ -709,13 +709,11 @@ fun write_ontology_latex_sty_template thy =
val _ =
Outer_Syntax.command @{command_keyword gen_sty_template}
"generate a template LaTeX style file for this ontology"
(Parse.opt_bang >> (fn b =>
Toplevel.keep (write_ontology_latex_sty_template o Toplevel.theory_of)));
Outer_Syntax.command \<^command_keyword>\<open>gen_sty_template\<close> "generate a LaTeX style template"
(Parse.opt_bang>>(fn b => Toplevel.keep(write_ontology_latex_sty_template o Toplevel.theory_of)));
val (strict_monitor_checking, strict_monitor_checking_setup)
= Attrib.config_bool @{binding strict_monitor_checking} (K false);
= Attrib.config_bool \<^binding>\<open>strict_monitor_checking\<close> (K false);
end (* struct *)
@ -735,7 +733,7 @@ typedecl "thm"
typedecl "file"
typedecl "thy"
-- \<open> and others in the future : file, http, thy, ... \<close>
\<comment> \<open> and others in the future : file, http, thy, ... \<close>
consts ISA_typ :: "string \<Rightarrow> typ" ("@{typ _}")
consts ISA_term :: "string \<Rightarrow> term" ("@{term _}")
@ -748,71 +746,8 @@ consts ISA_docitem_attr :: "string \<Rightarrow> string \<Rightarrow> 'a" (
\<comment> \<open>Dynamic setup of inner syntax cartouche\<close>
ML \<open>
(* Author: Frédéric Tuong, Université Paris-Saclay *)
(* Title: HOL/ex/Cartouche_Examples.thy
Author: Makarius
*)
local
fun mk_char f_char (s, _) accu =
fold
(fn c => fn (accu, l) =>
(f_char c accu,
Syntax.const @{const_syntax Cons}
$ (Syntax.const @{const_syntax Char} $ HOLogic.mk_numeral c)
$ l))
(rev (map Char.ord (String.explode s)))
accu;
fun mk_string _ accu [] = (accu, Const (@{const_syntax Nil}, @{typ "char list"}))
| mk_string f_char accu (s :: ss) = mk_char f_char s (mk_string f_char accu ss);
in
fun string_tr f f_char accu content args =
let fun err () = raise TERM ("string_tr", args) in
(case args of
[(c as Const (@{syntax_const "_constrain"}, _)) $ Free (s, _) $ p] =>
(case Term_Position.decode_position p of
SOME (pos, _) => c $ f (mk_string f_char accu (content (s, pos))) $ p
| NONE => err ())
| _ => err ())
end;
end;
\<close>
syntax "_cartouche_string" :: "cartouche_position \<Rightarrow> _" ("_")
ML\<open>
val cartouche_grammar =
[ ("char list", snd)
, ("String.literal", (fn (_, x) => Syntax.const @{const_syntax STR} $ x))]
\<close>
ML\<open>
fun parse_translation_cartouche binding l f_char accu =
let val cartouche_type = Attrib.setup_config_string binding (K (fst (hd l)))
(* if there is no type specified, by default we set the first element
to be the default type of cartouches *) in
fn ctxt =>
string_tr
let val cart_type = Config.get ctxt cartouche_type in
case (List.find (fn (s, _) => s = cart_type)
l) of
NONE => error ("Unregistered return type for the cartouche: \"" ^ cart_type ^ "\"")
| SOME (_, f) => f
end
f_char
accu
(Symbol_Pos.cartouche_content o Symbol_Pos.explode)
end
\<close>
parse_translation \<open>
[( @{syntax_const "_cartouche_string"}
, parse_translation_cartouche @{binding cartouche_type} cartouche_grammar (K I) ())]
\<close>
(* (* PORT TO ISABELLE2018 *)
(* PORT TO ISABELLE2018 *)
ML \<open>
(* Author: Frédéric Tuong, Université Paris-Saclay *)
(* Title: HOL/ex/Cartouche_Examples.thy
@ -875,9 +810,8 @@ fun parse_translation_cartouche binding l f_integer accu =
parse_translation \<open>
[( @{syntax_const "_cartouche_string"}
, parse_translation_cartouche @{binding cartouche_type} Cartouche_Grammar.default (K I) ())]
, parse_translation_cartouche \<^binding>\<open>cartouche_type\<close> Cartouche_Grammar.default (K I) ())]
\<close>
*)
(* tests *)
term "@{typ ''int => int''}"
@ -885,18 +819,18 @@ term "@{term ''Bound 0''}"
term "@{thm ''refl''}"
term "@{docitem ''<doc_ref>''}"
ML\<open> @{term "@{docitem ''<doc_ref>''}"}\<close>
(**)
term "@{typ \<open>int => int\<close>}"
term "@{term \<open>Bound 0\<close>}"
term "@{typ \<open>int \<Rightarrow> int\<close>}"
term "@{term \<open>\<forall>x. P x \<longrightarrow> Q\<close>}"
term "@{thm \<open>refl\<close>}"
term "@{docitem \<open><doc_ref>\<close>}"
ML\<open> @{term "@{docitem \<open><doc_ref>\<close>}"}\<close>
term "@{docitem \<open>doc_ref\<close>}"
ML\<open> @{term "@{docitem \<open>doc_ref\<close>}"}\<close>
(**)
declare [[cartouche_type = "String.literal"]]
term "\<open>Université\<close> :: String.literal"
declare [[cartouche_type = "char list"]]
term "\<open>Université\<close> :: char list"
(**)
subsection\<open> Semantics \<close>
@ -1007,10 +941,78 @@ setup\<open>DOF_core.update_isa_global("docitem" ,ISA_core.ML_isa_check_docitem
section\<open> Syntax for Annotated Documentation Commands (the '' View'' Part I) \<close>
ML\<open>open Thy_Output;
(*Pure_Syn.output_document;*)
Pure_Syn.document_command;
\<close>
text\<open>dfg\<close> (* text maps to Pure_Syn.document_command; *)
(*
================== 2018 ======================================================
(* Exported from Pure_Syn *)
fun output_document state markdown txt =
let
val ctxt = Toplevel.presentation_context state;
val _ =
Context_Position.report ctxt
(Input.pos_of txt) (Markup.language_document (Input.is_delimited txt));
in Thy_Output.output_document ctxt markdown txt end;
fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_document state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state =>
ignore (output_document state markdown txt));
====================== 2017 ===================================================
(* Exported from Thy_Output *)
fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
*)
ML\<open> Pure_Syn.document_command;
structure Pure_Syn_Ext =
struct
(* This interface function has not been exported from Pure_Syn (2018);
it should replace
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string (2017) *)
fun output_document state markdown src =
let
val ctxt = Toplevel.presentation_context state;
val _ = Context_Position.report ctxt
(Input.pos_of src)
(Markup.language_document (Input.is_delimited src));
in Thy_Output.output_document ctxt markdown src end;
(* this thing converts also source to (latex) string ... *)
end;
Pure_Syn_Ext.output_document : Toplevel.state -> {markdown: bool} -> Input.source -> Latex.text list;
\<close>
ML\<open>
structure ODL_Command_Parser =
struct
type meta_args_t = (((string * Position.T) *
(string * Position.T) option)
* ((string * Position.T) * string) list)
@ -1250,6 +1252,7 @@ fun update_instance_command (((oid:string,pos),cid_pos),
end
fun gen_enriched_document_command transform
markdown
(((((oid,pos),cid_pos), doc_attrs) : meta_args_t,
@ -1258,7 +1261,8 @@ fun gen_enriched_document_command transform
: theory -> theory =
let
(* as side-effect, generates markup *)
fun check_text thy = (Thy_Output.output_text(Toplevel.theory_toplevel thy) markdown toks; thy)
fun check_text thy = (Pure_Syn_Ext.output_document(Toplevel.theory_toplevel thy) markdown toks;
thy)
(* generating the level-attribute syntax *)
in
(create_and_check_docitem false oid pos cid_pos (transform doc_attrs) #> check_text)
@ -1525,11 +1529,13 @@ fun meta_args_2_string thy ((((lab, _), cid_opt), attr_list) : ODL_Command_Parse
end
(* the following 2 lines set parser and converter for LaTeX generation of meta-attributes.
Currently of *all* commands, no distinction between text* and text command.
This code depends on a MODIFIED Isabelle2017 version resulting fro; applying the files
This code depends on a MODIFIED Isabelle2017 version resulting from applying the files
under Isabell_DOF/patches.
*)
val _ = Thy_Output.set_meta_args_parser
(fn thy => (Scan.optional (ODL_Command_Parser.attributes >> meta_args_2_string thy) ""))
(fn thy => (Scan.optional ( ODL_Command_Parser.attributes
>> meta_args_2_string thy) ""))
end
@ -1578,8 +1584,61 @@ end\<close>
section\<open> Syntax for Ontological Antiquotations (the '' View'' Part II) \<close>
text\<open> @{theory Main} @{file "Isa_DOF.thy"}\<close>
(* Paradigm theory or paradigm file ?
val basic_entity = Thy_Output.antiquotation_pretty_source;
...
basic_entity \<^binding>\<open>theory\<close> (Scan.lift (Parse.position Args.embedded)) pretty_theory #>
Thy_Output.antiquotation_raw \<^binding>\<open>file\<close>
(Scan.lift (Parse.position Parse.path)) (document_antiq check_file) #>
*)
ML\<open>
(* 2017: used eg by ::: text\<open> @{theory Main}\<close>
antiquotation:
binding -> 'a context_parser ->
({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string)
-> theory -> theory
*)
(* 2018 >>> *)
val basic_entity' = Thy_Output.antiquotation_raw
: binding -> 'a context_parser ->
(Proof.context -> 'a -> Latex.text)
-> theory -> theory;
val basic_entity = Thy_Output.antiquotation_pretty_source
: binding -> 'a context_parser ->
(Proof.context -> 'a -> Pretty.T)
-> theory -> theory;
(* or ? *)
val docref_scan = (Scan.lift (Parse.position Args.embedded))
: (string * Position.T) context_parser;
(*val X = Document_Antiquotation.setup \<^binding>\<open>docref\<close> docref_scan ; *)
\<close>
text\<open> @{theory Main}\<close>
ML\<open>
Latex.string : string -> Latex.text ;
Latex.text: string * Position.T -> Latex.text;
Latex.block: Latex.text list -> Latex.text;
Latex.enclose_body: string -> string -> Latex.text list -> Latex.text list;
Latex.enclose_block: string -> string -> Latex.text list -> Latex.text;
Latex.output_text: Latex.text list -> string;
\<close>
ML\<open>
Pretty.text;
Pretty.str;
Pretty.block_enclose;
\<close>
ML\<open>
structure OntoLinkParser =
struct
@ -1597,14 +1656,16 @@ fun check_and_mark ctxt cid_decl (str:{strict_checking: bool}) pos name =
andalso not(DOF_core.is_subclass ctxt cid cid_decl)
then error("reference ontologically inconsistent")
else ()
in name end
in () end
else if DOF_core.is_declared_oid_global name thy
then (if #strict_checking str
then warning("declared but undefined document reference:"^name)
else (); name)
else ())
else error("undefined document reference:"^name)
end
val _ = check_and_mark : Proof.context -> string -> {strict_checking: bool} ->
Position.T -> Symtab.key -> unit
(* generic syntax for doc_class links. *)
@ -1617,35 +1678,33 @@ val docitem_modes = Scan.optional (Args.parens (Args.$$$ defineN || Args.$$$ unc
else {unchecked = true, define= false}))
{unchecked = false, define= false} (* default *);
val docitem_antiquotation_parser = (Scan.lift (docitem_modes -- Args.text_input))
: ({define: bool, unchecked: bool} * Input.source) context_parser;
fun docitem_antiquotation_generic cid_decl
{context = ctxt, source = src:Token.src, state}
({unchecked = x, define= y}, source:Input.source) =
let fun label_latex flag = enclose (if flag then "\\label{" else "\\autoref{") "}"
val X1 = Thy_Output.output_text state {markdown=false}
: Input.source -> string
val X2 = check_and_mark ctxt
cid_decl
({strict_checking = not x})
(Input.pos_of source)
: string -> string
val X3 = label_latex y
: string -> string
in
(Thy_Output.output_text state {markdown=false} #>
check_and_mark ctxt
cid_decl
({strict_checking = not x})
(Input.pos_of source) #>
label_latex y)
source
end
fun docitem_antiquotation name cid_decl =
Thy_Output.antiquotation name docitem_antiquotation_parser (docitem_antiquotation_generic cid_decl)
fun pretty_docitem_antiquotation_generic cid_decl ctxt ({unchecked = x, define = y}, src ) =
let val _ = check_and_mark ctxt cid_decl
({strict_checking = not x})
(Input.pos_of src) (Input.source_content src)
val _ = writeln ("ZZZ" ^ Input.source_content src ^ "::" ^ cid_decl)
in (if y then Latex.enclose_block "\\label{" "}"
else Latex.enclose_block "\\autoref{" "}")
[Latex.string (Input.source_content src)]
(* Future:
(if y then Latex.enclose_block ("\\labelX[type="^cid_decl^"]{") "}"
else Latex.enclose_block ("\\autorefX[type="^cid_decl^"]{") "}")
[Latex.string (Input.source_content src)]
*)
end
fun docitem_antiquotation bind cid =
Thy_Output.antiquotation_raw bind docitem_antiquotation_parser
(pretty_docitem_antiquotation_generic cid);
fun check_and_mark_term ctxt oid =
let val thy = Context.theory_of ctxt;
@ -1660,8 +1719,7 @@ fun check_and_mark_term ctxt oid =
in value end
else error("undefined document reference:"^oid)
end
fun ML_antiquotation_docitem_value (ctxt, toks) =
(Scan.lift (Args.cartouche_input)
@ -1669,17 +1727,15 @@ fun ML_antiquotation_docitem_value (ctxt, toks) =
((check_and_mark_term ctxt o Input.source_content) inp)))
(ctxt, toks)
(* Setup for general docrefs of the global DOF_core.default_cid - class ("text")*)
val _ = Theory.setup((docitem_antiquotation @{binding docref} DOF_core.default_cid) #>
(* deprecated syntax ^^^^^^*)
(docitem_antiquotation @{binding docitem_ref} DOF_core.default_cid) #>
(* deprecated syntax ^^^^^^^^^^*)
docitem_antiquotation @{binding docitem} DOF_core.default_cid #>
(* Thy_Output.antiquotation @{binding docitem} docitem_antiquotation_parser
(docitem_antiquotation_generic DOF_core.default_cid) #>
*)
ML_Antiquotation.inline @{binding docitem_value} ML_antiquotation_docitem_value)
val _ = Theory.setup
(docitem_antiquotation \<^binding>\<open>docref\<close> DOF_core.default_cid #>
(* deprecated syntax ^^^^^^*)
docitem_antiquotation \<^binding>\<open>docitem_ref\<close> DOF_core.default_cid #>
(* deprecated syntax ^^^^^^^^^^^*)
docitem_antiquotation \<^binding>\<open>docitem\<close> DOF_core.default_cid #>
ML_Antiquotation.inline \<^binding>\<open>docitem_value\<close> ML_antiquotation_docitem_value)
end (* struct *)
\<close>
@ -1754,12 +1810,6 @@ fun trace_attr_2_ML ctxt (oid:string,pos) =
in toML (compute_trace_ML ctxt oid @{here} pos) end
local
(* copied from "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML" *)
fun basic_entities name scan pretty =
Thy_Output.antiquotation name scan (fn {source, context = ctxt, ...} =>
Thy_Output.output ctxt o Thy_Output.maybe_pretty_source pretty ctxt source);
fun basic_entity name scan = basic_entities name (scan >> single);
fun pretty_attr_access_style ctxt (style, ((oid,pos),(attr,pos'))) =
Thy_Output.pretty_term ctxt (style (compute_attr_access (Context.Proof ctxt)
@ -1769,19 +1819,20 @@ fun pretty_trace_style ctxt (style, (oid,pos)) =
"trace" oid pos pos));
in
val _ = Theory.setup
(ML_Antiquotation.inline @{binding docitem_attribute}
(ML_Antiquotation.inline \<^binding>\<open>docitem_attribute\<close>
(fn (ctxt,toks) => (parse_attribute_access >> attr_2_ML ctxt) (ctxt, toks)) #>
ML_Antiquotation.inline @{binding trace_attribute}
ML_Antiquotation.inline \<^binding>\<open>trace_attribute\<close>
(fn (ctxt,toks) => (parse_oid >> trace_attr_2_ML ctxt) (ctxt, toks)) #>
basic_entity @{binding trace_attribute} parse_oid' pretty_trace_style #>
basic_entity @{binding docitem_attribute} parse_attribute_access' pretty_attr_access_style
basic_entity \<^binding>\<open>trace_attribute\<close> parse_oid' pretty_trace_style #>
basic_entity \<^binding>\<open>docitem_attribute\<close> parse_attribute_access' pretty_attr_access_style
)
end
end
\<close>
text\<open> Note that the functions \<^verbatim>\<open>basic_entities\<close> and \<^verbatim>\<open>basic_entity\<close> in @{ML_structure AttributeAccess}
are copied from @{file "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"} \<close>
text\<open> Note that the functions \<^verbatim>\<open>basic_entities\<close> and \<^verbatim>\<open>basic_entity\<close> in
@{ML_structure AttributeAccess} are copied from
@{file "$ISABELLE_HOME/src/Pure/Thy/thy_output.ML"} \<close>
section\<open> Syntax for Ontologies (the '' View'' Part III) \<close>
@ -1874,21 +1925,21 @@ fun add_doc_class_cmd overloaded (raw_params, binding)
val _ =
Outer_Syntax.command @{command_keyword doc_class} "define document class"
Outer_Syntax.command \<^command_keyword>\<open>doc_class\<close> "define document class"
((Parse_Spec.overloaded
-- (Parse.type_args_constrained -- Parse.binding)
-- (@{keyword "="}
|-- Scan.option (Parse.typ --| @{keyword "+"})
-- Scan.repeat1
(Parse.const_binding -- Scan.option (@{keyword "<="} |-- Parse.term)))
-- ( Scan.optional (@{keyword "rejects"} |-- Parse.enum1 "," Parse.term) []
-- Scan.repeat (@{keyword "accepts"} |-- Parse.term)))
-- (Parse.type_args_constrained -- Parse.binding)
-- (\<^keyword>\<open>=\<close>
|-- Scan.option (Parse.typ --| \<^keyword>\<open>+\<close>)
-- Scan.repeat1 (Parse.const_binding -- Scan.option (\<^keyword>\<open><=\<close> |-- Parse.term))
)
-- ( Scan.optional (\<^keyword>\<open>rejects\<close> |-- Parse.enum1 "," Parse.term) []
-- Scan.repeat (\<^keyword>\<open>accepts\<close> |-- Parse.term))
)
>> (fn (((overloaded, x), (y, z)),(rejectS,accept_rex)) =>
Toplevel.theory (add_doc_class_cmd {overloaded = overloaded} x y z rejectS accept_rex)));
end (* struct *)
\<close>
ML\<open>Thy_Output.document_command; Thy_Output.output_text\<close>
end

View File

@ -4,9 +4,10 @@ theory RegExpInterface
imports "Functional-Automata.Execute"
begin
text\<open> The implementation of the monitoring concept follows the following design decisions:
\<^enum> We re-use generated code from the AFP submissions @{theory Regular_Set} and
@{theory Automata}, converted by the code-generator into executable SML code
\<^enum> We re-use generated code from the AFP submissions @{theory "Regular-Sets.Regular_Set"} and
@{theory "Functional-Automata.Automata"}, converted by the code-generator into executable SML code
(ports to future Isabelle versions should just reuse future versions of these)
\<^enum> Monitor-Expressions are regular expressions (in some adapted syntax)
over Document Class identifiers; they denote the language of all possible document object
@ -40,7 +41,8 @@ text{* or better equivalently: *}
value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*"
section{* Some Standard and Derived Semantics *}
text\<open> This is just a reminder - already defined in @{theory Regular_Exp} as @{term lang}.\<close>
text\<open> This is just a reminder - already defined in @{theory "Regular-Sets.Regular_Exp"}
as @{term lang}.\<close>
text{* In the following, we give a semantics for our regular expressions, which so far have
just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'',

View File

@ -8,13 +8,14 @@ begin
section\<open> Some examples of Isabelle's standard antiquotations. \<close>
(* some show-off of standard anti-quotations: *)
text \<open>THIS IS A TEXT\<close>
term "[]"
text\<open> @{thm refl} of name @{thm [source] refl}
@{thm[mode=Rule] conjI}
@{file "../../Isa_DOF.thy"}
@{value "3+4::int"}
@{const hd}
@{theory List}}
@{theory HOL.List}}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
@ -28,12 +29,13 @@ text\<open>An "anonymous" text-item, automatically coerced into the top-class "t
text*[tralala] \<open> Brexit means Brexit \<close>
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "CENELEC_50128"}. \<close>
concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the @{theory "CENELEC_50128"}
"safety-related application condition", a concept defined in the
@{theory "Draft.CENELEC_50128"}
ontology:\<close>
text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>
@ -80,7 +82,7 @@ text\<open> @{docitem \<open>lalala\<close>} -- produces warning. \<close>
text\<open> @{docitem (unchecked) \<open>lalala\<close>} -- produces no warning. \<close>
text\<open> @{docitem \<open>ass122\<close>} -- global reference to a text-item in another file. \<close>
text\<open> @{EC \<open>ass122\<close>} -- global reference to a exported constraint in another file.
Note that the link is actually a srac, which, according to
the ontology, happens to be an "ec". \<close>

View File

@ -1,7 +1,12 @@
theory mini_odo
(*
imports "Isabelle_DOF.CENELEC_50128"
"Isabelle_DOF.scholarly_paper"
*)
imports "../../../ontologies/CENELEC_50128"
"../../../ontologies/scholarly_paper"
begin
section\<open> Some examples of Isabelle's standard antiquotations. \<close>
@ -13,7 +18,7 @@ text\<open> @{thm refl} of name @{thm [source] refl}
@{file "mini_odo.thy"}
@{value "3+4::int"}
@{const hd}
@{theory List}
@{theory HOL.List}
@{term "3"}
@{type bool}
@{term [show_types] "f x = a + x"} \<close>
@ -27,13 +32,13 @@ text\<open>An "anonymous" text-item, automatically coerced into the top-class "t
text*[tralala] \<open> Brexit means Brexit \<close>
text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
concepts defined in the underlying ontology @{theory "CENELEC_50128"}. \<close>
concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \<close>
text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
text*[hyp1::hypothesis] \<open> P not equal NP \<close>
text\<open>A real example fragment from a larger project, declaring a text-element as a
"safety-related application condition", a concept defined in the @{theory "CENELEC_50128"}
ontology:\<close>
"safety-related application condition", a concept defined in the
@{theory "Draft.CENELEC_50128"} ontology:\<close>
text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following: ... \<close>

View File

@ -13,7 +13,8 @@ print_doc_items
section\<open>Definitions, Lemmas, Theorems, Assertions\<close>
text*[aa::F, property = "[@{term ''True''}]"]\<open>Our definition of the HOL-Logic has the following properties:\<close>
text*[aa::F, property = "[@{term ''True''}]"]
\<open>Our definition of the HOL-Logic has the following properties:\<close>
assert*[aa::F] "True"

View File

@ -4,7 +4,7 @@ theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
text\<open>@{theory Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
text\<open>@{theory Draft.Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
Here, we say: From now on, this structural rules are respected wrt. all doc\_classes M is
enabled for.\<close>
open_monitor*[struct::M]
@ -21,7 +21,11 @@ update_instance*[d::D, a1 := X2]
text\<open> ... in ut tortor ... @{docitem \<open>a\<close>} ... @{A \<open>a\<close>}\<close>
text*[c2::C, x = "''delta''"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>
text*[c2::C, x = "\<open>delta\<close>"] \<open> ... in ut tortor eleifend augue pretium consectetuer. \<close>
text\<open>Note that both the notations @{term "''beta''"} and @{term "\<open>beta\<close>"} are possible;
the former is a more ancient format only supporting pure ascii, while the latter also supports
fancy unicode such as: @{term "\<open>\<beta>\<^sub>i''\<close>"} \<close>
text*[f::F] \<open> Lectus accumsan velit ultrices, ... }\<close>
@ -33,8 +37,8 @@ update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"]
text\<open> ..., mauris amet, id elit aliquam aptent id, ... @{docitem \<open>a\<close>} \<close>
text\<open>Here we add and maintain a link that is actually modeled as m-to-n relation ...\<close>
update_instance*[f::F,b:="{(@{docitem ''a''}::A,@{docitem ''c1''}::C),
(@{docitem ''a''}, @{docitem ''c2''})}"]
update_instance*[f::F,b:="{(@{docitem \<open>a\<close>}::A,@{docitem \<open>c1\<close>}::C),
(@{docitem \<open>a\<close>}, @{docitem \<open>c2\<close>})}"]
close_monitor*[struct]

View File

@ -43,6 +43,9 @@ subsection*[d::A, x = "4"] \<open> Lorem ipsum dolor sit amet, ... \<close>
(* test : update should not fail, invariant still valid *)
update_instance*[d::A, x += "1"]
(* test : with the next step it should fail :
update_instance*[d::A, x += "1"]
*)
section\<open>Example: Monitor Class Invariant\<close>

View File

@ -14,7 +14,7 @@ They are the key-mechanism to denote
\<^item> Ontological Links, i.e. attributes refering to document classes defined by the ontology
\<^item> Ontological F-Links, i.e. attributes referring to formal entities inside Isabelle (such as thm's)
This file contains a number of examples resulting from the @{theory "Conceptual"} - ontology;
This file contains a number of examples resulting from the @{theory "Draft.Conceptual"} - ontology;
the emphasis of this presentation is to present the expressivity of ODL on a paradigmatical example.
\<close>

View File

@ -1,5 +1,8 @@
theory Noodles
imports "small_math"
theory Noodles
imports "../../ontologies/small_math"
"../../ontologies/technical_report"
begin
title*[t::title]\<open>On Noodles\<close>
title*[t::title]\<open>On Noodles\<close>
end

View File

@ -8,7 +8,7 @@ open_monitor*[this::article]
title*[t1::title]\<open>On Noodles\<close>
text*[simon::author]\<open>Simon Foster\<close>
text*[a::abstract, keywordlist = "[topology]"]
text*[a::abstract, keywordlist = "[''topology'']"]
\<open>We present the first fundamental results on the goundbreaking theory of noodles...\<close>
section*[intro::introduction]\<open>Introduction\<close>

View File

@ -4,17 +4,17 @@ theory Concept_Example
imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *)
begin
text\<open>@{theory Conceptual} provides a monitor @{typ M} enforcing a particular document structure.
Here, we say: From now on, this structural rules are respected wrt. all doc\_classes M is
enabled for.\<close>
text\<open>@{theory \<open>Draft.Conceptual\<close>} provides a monitor @{typ M} enforcing a particular document
structure. Here, we say: From now on, this structural rules are respected wrt. all
\<^theory_text>\<open>doc_class\<close>es @{typ M} is enabled for.\<close>
open_monitor*[struct::M]
section*[a::A, x = "3"] \<open> Lorem ipsum dolor sit amet, ... \<close>
text*[c1::C, x = "''beta''"] \<open> ... suspendisse non arcu malesuada mollis, nibh morbi, ... \<close>
text*[d::D, a1 = "X3"] \<open> ... phasellus amet id massa nunc, pede suscipit repellendus,
... @{C \<open>c1\<close>} @{thm "refl"}\<close>
... @{C c1} @{thm "refl"}\<close>
update_instance*[d::D, a1 := X2]

View File

@ -1,6 +1,7 @@
(*<*)
theory "00_Frontmatter"
imports "Isabelle_DOF.technical_report"
(* imports "Isabelle_DOF.technical_report" *)
imports "../../../ontologies/technical_report"
begin
open_monitor*[this::report]

View File

@ -1,6 +1,7 @@
(*<*)
theory MyCommentedIsabelle
imports "Isabelle_DOF.technical_report"
(* imports "Isabelle_DOF.technical_report" *)
imports "../../../ontologies/technical_report"
begin
@ -62,7 +63,7 @@ code-base. The preferred programming style is purely functional: \<close>
ML\<open> fun fac x = if x = 0 then 1 else x * fac(x-1) ;
fac 10;
\<close>
-- or
(* or *)
ML\<open> type state = { a : int, b : string}
fun incr_state ({a, b}:state) = {a=a+1, b=b}
\<close>
@ -664,7 +665,7 @@ datatype thy = Thy of
*)
Theory.check: Proof.context -> string * Position.T -> theory;
Theory.check: {long: bool} -> Proof.context -> string * Position.T -> theory;
Theory.local_setup: (Proof.context -> Proof.context) -> unit;
Theory.setup: (theory -> theory) -> unit; (* The thing to extend the table of "command"s with parser - callbacks. *)
@ -803,107 +804,31 @@ Goal.prove_global : theory -> string list -> term list -> term ->
\<close>
section\<open>The Isar Engine\<close>
text\<open>The main structure of the Isar-engine is @{ ML_structure Toplevel} and provides and
internal @{ ML_type state} with the necessary infrastructure ---
i.e. the operations to pack and unpack theories and Proof.contexts --- on it:
\<close>
ML\<open>
Toplevel.theory;
Toplevel.presentation_context_of; (* Toplevel is a kind of table with call-back functions *)
Consts.the_const; (* T is a kind of signature ... *)
Variable.import_terms;
Vartab.update;
fun control_antiquotation name s1 s2 =
Thy_Output.antiquotation name (Scan.lift Args.cartouche_input)
(fn {state, ...} => enclose s1 s2 o Thy_Output.output_text state {markdown = false});
Output.output;
Syntax.read_input ;
Input.source_content;
(*
basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
*)
\<close>
ML\<open>
Config.get @{context} Thy_Output.display;
Config.get @{context} Thy_Output.source;
Config.get @{context} Thy_Output.modes;
Thy_Output.document_command;
(* is:
fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
end;
*)
\<close>
ML\<open> Thy_Output.document_command {markdown = true} \<close>
(* Structures related to LaTeX Generation *)
ML\<open> Latex.output_ascii;
Latex.output_token
(* Hm, generierter output for
subsection*[Shaft_Encoder_characteristics]{ * Shaft Encoder Characteristics * } :
\begin{isamarkuptext}%
\isa{{\isacharbackslash}label{\isacharbraceleft}general{\isacharunderscore}hyps{\isacharbraceright}}%
\end{isamarkuptext}\isamarkuptrue%
\isacommand{subsection{\isacharasterisk}}\isamarkupfalse%
{\isacharbrackleft}Shaft{\isacharunderscore}Encoder{\isacharunderscore}characteristics{\isacharbrackright}{\isacharverbatimopen}\ Shaft\ Encoder\ Characteristics\ {\isacharverbatimclose}%
Generierter output for: text\<open>\label{sec:Shaft-Encoder-characteristics}\<close>
\begin{isamarkuptext}%
\label{sec:Shaft-Encoder-characteristics}%
\end{isamarkuptext}\isamarkuptrue%
*)
\<close>
ML\<open>
Thy_Output.maybe_pretty_source :
(Proof.context -> 'a -> Pretty.T) -> Proof.context -> Token.src -> 'a list -> Pretty.T list;
Thy_Output.output: Proof.context -> Pretty.T list -> string;
(* nuescht besonderes *)
fun document_antiq check_file ctxt (name, pos) =
let
(* val dir = master_directory (Proof_Context.theory_of ctxt); *)
(* val _ = check_path check_file ctxt dir (name, pos); *)
in
space_explode "/" name
|> map Latex.output_ascii
|> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}")
|> enclose "\\isatt{" "}"
end;
Toplevel.theory_toplevel: theory -> Toplevel.state;
Toplevel.toplevel: Toplevel.state;
Toplevel.is_toplevel: Toplevel.state -> bool;
Toplevel.is_theory: Toplevel.state -> bool;
Toplevel.is_proof: Toplevel.state -> bool;
Toplevel.is_skipped_proof: Toplevel.state -> bool;
Toplevel.level: Toplevel.state -> int;
Toplevel.context_of: Toplevel.state -> Proof.context;
Toplevel.generic_theory_of: Toplevel.state -> generic_theory;
Toplevel.theory_of: Toplevel.state -> theory;
Toplevel.proof_of: Toplevel.state -> Proof.state;
Toplevel.presentation_context: Toplevel.state -> Proof.context;
\<close>
ML\<open> Type_Infer_Context.infer_types \<close>
ML\<open> Type_Infer_Context.prepare_positions \<close>
subsection \<open>Transaction Management in the Isar-Engine : The Toplevel \<close>
ML\<open>
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.document_command;
Toplevel.exit: Toplevel.transition -> Toplevel.transition;
Toplevel.keep: (Toplevel.state -> unit) -> Toplevel.transition -> Toplevel.transition;
@ -926,8 +851,6 @@ Toplevel.present_local_theory:
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt)); *)
Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition;
(* Isar Toplevel Steuerung *)
Toplevel.keep : (Toplevel.state -> unit) -> Toplevel.transition -> Toplevel.transition;
@ -959,22 +882,6 @@ Toplevel.theory : (theory -> theory) -> Toplevel.transition -> Toplevel.transiti
fun theory f = theory' (K f); *)
Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
*)
Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string ;
(* this is where antiquotation expansion happens : uses eval_antiquote *)
\<close>
@ -1011,50 +918,48 @@ Toplevel.theory : (theory -> theory) -> Toplevel.transition -> Toplevel.transiti
fun theory f = theory' (K f); *)
Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
*)
Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string ;
(* this is where antiquotation expansion happens : uses eval_antiquote *)
\<close>
subsection\<open> Configuration flags of fixed type in the Isar-engine. \<close>
text\<open>The toplevel also provides an infrastructure for managing configuration options
for system components. Based on a sum-type @{ML_type Config.value }
with the alternatives \<^verbatim>\<open> Bool of bool | Int of int | Real of real | String of string\<close>
and building the parametric configuration types @{ML_type "'a Config.T" } and the
instance \<^verbatim>\<open>type raw = value T\<close>, for all registered configurations the protocol:
\<close>
ML\<open>
Config.get @{context} Thy_Output.quotes;
Config.get @{context} Thy_Output.display;
Config.get: Proof.context -> 'a Config.T -> 'a;
Config.map: 'a Config.T -> ('a -> 'a) -> Proof.context -> Proof.context;
Config.put: 'a Config.T -> 'a -> Proof.context -> Proof.context;
Config.get_global: theory -> 'a Config.T -> 'a;
Config.map_global: 'a Config.T -> ('a -> 'a) -> theory -> theory;
Config.put_global: 'a Config.T -> 'a -> theory -> theory;
\<close>
text\<open>... etc. is defined.\<close>
text\<open>Example registration of an config attribute XS232: \<close>
ML\<open>
val (XS232, XS232_setup)
= Attrib.config_bool \<^binding>\<open>XS232\<close> (K false);
val _ = Theory.setup XS232_setup;
\<close>
(* or: just command:
setup\<open>XS232_setup\<close>
*)
text\<open>Another mechanism are global synchronised variables:\<close>
ML\<open> (* For example *)
val C = Synchronized.var "Pretty.modes" "latEEex";
(* Synchronized: a mechanism to bookkeep global
variables with synchronization mechanism included *)
Synchronized.value C;
(*
fun output ctxt prts =
603 prts
604 |> Config.get ctxt quotes ? map Pretty.quote
605 |> (if Config.get ctxt display then
606 map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output)
607 #> space_implode "\\isasep\\isanewline%\n"
608 #> Latex.environment "isabelle"
609 else
610 map
611 ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
612 #> Output.output)
613 #> space_implode "\\isasep\\isanewline%\n"
614 #> enclose "\\isa{" "}");
*)
\<close>
chapter\<open>Front-End \<close>
@ -1388,11 +1293,16 @@ val Z = let val attribute = Parse.position Parse.name --
Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) "";
in (Scan.optional(Parse.$$$ "," |-- (Parse.enum "," attribute))) end ;
(* this leads to constructions like the following, where a parser for a *)
fn name => (Thy_Output.antiquotation name (Scan.lift (Parse.position Args.cartouche_input)));
Thy_Output.antiquotation_pretty_source \<^binding>\<open>theory\<close> (Scan.lift (Parse.position Args.embedded));
Thy_Output.antiquotation_raw \<^binding>\<open>file\<close> (Scan.lift (Parse.position Parse.path)) ;
fn name => (Thy_Output.antiquotation_pretty_source name (Scan.lift (Parse.position Args.cartouche_input)));
\<close>
section\<open>\<close>
ML\<open>Sign.add_trrules\<close>
section\<open> The PIDE Framework \<close>
subsection\<open> Markup \<close>
@ -1512,19 +1422,35 @@ Output.output "bla_1:";
\<close>
section \<open> Output: LaTeX \<close>
ML\<open>
Thy_Output.verbatim_text;
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.antiquotation:
binding ->
'a context_parser ->
({context: Proof.context, source: Token.src, state: Toplevel.state} -> 'a -> string) ->
theory -> theory;
Thy_Output.output: Proof.context -> Pretty.T list -> string;
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.output : Proof.context -> Pretty.T list -> string;
ML\<open> open Thy_Output;
output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list;
output_source: Proof.context -> string -> Latex.text list;
present_thy: Options.T -> theory -> segment list -> Latex.text list;
pretty_term: Proof.context -> term -> Pretty.T;
pretty_thm: Proof.context -> thm -> Pretty.T;
lines: Latex.text list -> Latex.text list;
items: Latex.text list -> Latex.text list;
isabelle: Proof.context -> Latex.text list -> Latex.text;
isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text;
typewriter: Proof.context -> string -> Latex.text;
verbatim: Proof.context -> string -> Latex.text;
source: Proof.context -> Token.src -> Latex.text;
pretty: Proof.context -> Pretty.T -> Latex.text;
pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text;
pretty_items: Proof.context -> Pretty.T list -> Latex.text;
pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text;
antiquotation_pretty:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory;
antiquotation_pretty_source:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory;
antiquotation_raw:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory;
antiquotation_verbatim:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory;
\<close>
@ -1533,7 +1459,7 @@ ML\<open>
Syntax_Phases.reports_of_scope;
\<close>
(* STOP HERE JUNK ZONE :
(* Pretty.T, pretty-operations. *)
ML\<open>
@ -1732,6 +1658,122 @@ As one can see, check-routines internally generate the markup.
*)
Consts.the_const; (* T is a kind of signature ... *)
Variable.import_terms;
Vartab.update;
fun control_antiquotation name s1 s2 =
Thy_Output.antiquotation name (Scan.lift Args.cartouche_input)
(fn {state, ...} => enclose s1 s2 o Thy_Output.output_text state {markdown = false});
Output.output;
Syntax.read_input ;
Input.source_content;
(*
basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
*)
chapter\<open>LaTeX Document Generation\<close>
text\<open>MORE TO COME\<close>
ML\<open> Thy_Output.document_command {markdown = true} \<close>
(* Structures related to LaTeX Generation *)
ML\<open> Latex.output_ascii;
Latex.output_token
(* Hm, generierter output for
subsection*[Shaft_Encoder_characteristics]{ * Shaft Encoder Characteristics * } :
\begin{isamarkuptext}%
\isa{{\isacharbackslash}label{\isacharbraceleft}general{\isacharunderscore}hyps{\isacharbraceright}}%
\end{isamarkuptext}\isamarkuptrue%
\isacommand{subsection{\isacharasterisk}}\isamarkupfalse%
{\isacharbrackleft}Shaft{\isacharunderscore}Encoder{\isacharunderscore}characteristics{\isacharbrackright}{\isacharverbatimopen}\ Shaft\ Encoder\ Characteristics\ {\isacharverbatimclose}%
Generierter output for: text\<open>\label{sec:Shaft-Encoder-characteristics}\<close>
\begin{isamarkuptext}%
\label{sec:Shaft-Encoder-characteristics}%
\end{isamarkuptext}\isamarkuptrue%
*)
\<close>
ML\<open>
Thy_Output.maybe_pretty_source :
(Proof.context -> 'a -> Pretty.T) -> Proof.context -> Token.src -> 'a list -> Pretty.T list;
Thy_Output.output: Proof.context -> Pretty.T list -> string;
(* nuescht besonderes *)
fun document_antiq check_file ctxt (name, pos) =
let
(* val dir = master_directory (Proof_Context.theory_of ctxt); *)
(* val _ = check_path check_file ctxt dir (name, pos); *)
in
space_explode "/" name
|> map Latex.output_ascii
|> space_implode (Latex.output_ascii "/" ^ "\\discretionary{}{}{}")
|> enclose "\\isatt{" "}"
end;
\<close>
ML\<open>
Thy_Output.output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string;
Thy_Output.document_command;
Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
*)
Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string ;
(* this is where antiquotation expansion happens : uses eval_antiquote *)
Thy_Output.document_command : {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition;
(* fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
*)
Thy_Output.output_text : Toplevel.state -> {markdown: bool} -> Input.source -> string ;
(* this is where antiquotation expansion happens : uses eval_antiquote *)
*)
section\<open>Inner Syntax\<close>
text\<open>MORE TO COME\<close>
ML\<open>Sign.add_trrules\<close>
section*[c::conclusion]\<open>Conclusion\<close>
text\<open>More to come\<close>
section*[bib::bibliography]\<open>Bibliography\<close>

View File

@ -28,8 +28,6 @@ Safety assessment is focused on but not limited to the safety properties of a sy
Definition*[assessor::concept, tag="''assessor''"]
\<open>entity that carries out an assessment\<close>
text\<open>@{docitem \<open>assessment\<close>}\<close>
Definition*[COTS::concept, tag="''commercial off-the-shelf software''"]
\<open>software defined by market-driven need, commercially available and whose fitness for purpose
has been demonstrated by a broad spectrum of commercial users\<close>
@ -73,7 +71,7 @@ from the intended performance or behaviour (cf @{concept \<open>error\<close>})\
Definition*[failure::concept]
\<open>unacceptable difference between required and observed performance\<close>
Definition*[FT::concept, tag="\<open>fault tolerance\<close>"]
Definition*[FT::concept, tag="''fault tolerance''"]
\<open>built-in capability of a system to provide continued correct provision of service as specified,
in the presence of a limited number of hardware or software faults\<close>
@ -262,16 +260,6 @@ datatype phase = SYSDEV_ext (* System Development Phase (external) *)
| SD (* Software Deployment *)
| SM (* Software Maintenance *)
datatype sil = SIL0 | SIL1 | SIL2 | SIL3 | SIL4
type_synonym saftety_integraytion_level = sil
doc_class cenelec_text = text_element +
phase :: "phase"
sil :: sil
is_concerned :: "role set" <= "UNIV"
abbreviation software_requirement :: "phase" where "software_requirement \<equiv> SR"
abbreviation software_architecture :: "phase" where "software_architecture \<equiv> SA"
abbreviation software_design :: "phase" where "software_design \<equiv> SD"
@ -288,6 +276,9 @@ term "SR" (* meta-test *)
section\<open>Objectives, Conformance and Software Integrity Levels\<close>
datatype sil = SIL0 | SIL1 | SIL2 | SIL3 | SIL4
type_synonym saftety_integraytion_level = sil
doc_class objectives =
@ -295,7 +286,7 @@ doc_class objectives =
is_concerned :: "role set"
doc_class requirement = cenelec_text +
doc_class requirement = text_element +
long_name :: "string option"
is_concerned :: "role set"
@ -459,6 +450,11 @@ doc_class judgement =
section\<open> Design and Test Documents \<close>
doc_class cenelec_text = text_element +
phase :: "phase"
is_concerned :: "role set" <= "UNIV"
doc_class SYSREQS = cenelec_text +
phase :: "phase" <= "SYSDEV_ext"
@ -696,36 +692,31 @@ doc_class test_documentation =
section\<open> META : Testing and Validation \<close>
text\<open>Test : @{concept \<open>COTS\<close>}\<close>
ML\<open>
DOF_core.name2doc_class_name @{theory} "requirement";
DOF_core.name2doc_class_name @{theory} "SRAC";
DOF_core.is_defined_cid_global "SRAC" @{theory};
DOF_core.is_defined_cid_global "EC" @{theory};
"XXXXXXXXXXXXXXXXX";
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.EC";
\<close>
ML\<open>
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.EC";
DOF_core.is_subclass @{context} "CENELEC_50128.SRAC" "CENELEC_50128.EC";
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.SRAC";
DOF_core.is_subclass @{context} "CENELEC_50128.EC" "CENELEC_50128.test_requirement";
"XXXXXXXXXXXXXXXXX";
val {docobj_tab={maxano, tab=ref_tab},docclass_tab=class_tab,...} = DOF_core.get_data @{context};
\<close>
ML\<open>val {docobj_tab={maxano, tab=ref_tab},docclass_tab=class_tab,...} = DOF_core.get_data @{context};
Symtab.dest ref_tab;
Symtab.dest class_tab;
\<close>
ML\<open>
"XXXXXXXXXXXXXXXXX";
DOF_core.get_attributes_local "SRAC" @{context};
@{term assumption_kind}
val internal_data_of_SRAC_definition = DOF_core.get_attributes_local "SRAC" @{context}
\<close>
ML\<open>
DOF_core.name2doc_class_name @{theory} "requirement";
Syntax.parse_typ @{context} "requirement";
@ -734,10 +725,5 @@ Syntax.read_typ @{context} "hypothesis" handle _ => dummyT;
Proof_Context.init_global;
\<close>
text\<open>
@{theory_text [display] \<open>definition a\<^sub>E \<equiv> True
lemma XXX : "True = False " by auto\<close>}
\<close>
end

View File

@ -17,10 +17,10 @@ where the author of the exam is not expected to be physically present.
datatype ContentClass =
setter -- \<open>the 'author' of the exam\<close>
| checker -- \<open>the 'proof-reader' of the exam\<close>
| externalExaminer -- \<open>an external 'proof-reader' of the exam\<close>
| student -- \<open>the victim ;-) ... \<close>
setter (* \<open>the 'author' of the exam\<close> *)
| checker (* \<open>the 'proof-reader' of the exam\<close> *)
| externalExaminer (* \<open>an external 'proof-reader' of the exam\<close> *)
| student (* \<open>the victim ;-) ... \<close> *)
doc_class Author =
@ -47,7 +47,7 @@ doc_class Exam_item =
doc_class Header = Exam_item +
examSubject :: "(Subject) list"
date :: string
timeAllowed :: int -- minutes
timeAllowed :: int (* minutes *)
type_synonym SubQuestion = string
@ -58,7 +58,7 @@ doc_class Answer_Formal_Step = Exam_item +
doc_class Answer_YesNo = Exam_item +
step_label :: string
yes_no :: bool -- \<open>for checkboxes\<close>
yes_no :: bool (* \<open>for checkboxes\<close> *)
datatype Question_Type =
formal | informal | mixed

View File

@ -10,7 +10,7 @@ doc_class title =
doc_class subtitle =
abbrev :: "string option" <= "None"
-- \<open>adding a contribution list and checking that it is cited as well in tech as in conclusion. ? \<close>
(* adding a contribution list and checking that it is cited as well in tech as in conclusion. ? *)
doc_class author =
email :: "string" <= "''''"
@ -18,6 +18,7 @@ doc_class author =
orcid :: "string" <= "''''"
affiliation :: "string"
doc_class abstract =
keywordlist :: "string list" <= "[]"
principal_theorems :: "thm list"
@ -111,7 +112,7 @@ local
fun group f g cidS [] = []
|group f g cidS (a::S) = case find_first (f a) cidS of
NONE => [a] :: group f g cidS S
| SOME cid => let val (pref,suff) = take_prefix (g cid) S
| SOME cid => let val (pref,suff) = chop_prefix (g cid) S
in (a::pref)::(group f g cidS suff) end;
fun partition ctxt cidS trace =

View File

@ -111,7 +111,7 @@ local
fun group f g cidS [] = []
|group f g cidS (a::S) = case find_first (f a) cidS of
NONE => [a] :: group f g cidS S
| SOME cid => let val (pref,suff) = take_prefix (g cid) S
| SOME cid => let val (pref,suff) = chop_prefix (g cid) S
in (a::pref)::(group f g cidS suff) end;
fun partition ctxt cidS trace =

View File

@ -1,279 +1,205 @@
(* Title: Pure/Thy/thy_output.ML
Author: Markus Wenzel, TU Muenchen
Author: Makarius
Theory document output with antiquotations.
Theory document output.
*)
signature THY_OUTPUT =
sig
val display: bool Config.T
val quotes: bool Config.T
val margin: int Config.T
val indent: int Config.T
val source: bool Config.T
val break: bool Config.T
val modes: string Config.T
val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context
val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory
val check_command: Proof.context -> xstring * Position.T -> string
val check_option: Proof.context -> xstring * Position.T -> string
val print_antiquotations: bool -> Proof.context -> unit
val antiquotation: binding -> 'a context_parser ->
({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) ->
theory -> theory
val boolean: string -> bool
val integer: string -> int
val eval_antiquote: Toplevel.state -> Antiquote.text_antiquote -> string
val output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string
val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
val check_comments: Proof.context -> Symbol_Pos.T list -> unit
val output_token: Proof.context -> Token.T -> Latex.text list
val output_source: Proof.context -> string -> Latex.text list
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}
val present_thy: Options.T -> theory -> segment list -> Latex.text list
val set_meta_args_parser : (theory -> string parser) -> unit
val pretty_text: Proof.context -> string -> Pretty.T
val pretty_term: Proof.context -> term -> Pretty.T
val pretty_thm: Proof.context -> thm -> Pretty.T
val str_of_source: Token.src -> string
val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context ->
Token.src -> 'a list -> Pretty.T list
val string_of_margin: Proof.context -> Pretty.T -> string
val output: Proof.context -> Pretty.T list -> string
val verbatim_text: Proof.context -> string -> string
val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source ->
Toplevel.transition -> Toplevel.transition
val lines: Latex.text list -> Latex.text list
val items: Latex.text list -> Latex.text list
val isabelle: Proof.context -> Latex.text list -> Latex.text
val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
val typewriter: Proof.context -> string -> Latex.text
val verbatim: Proof.context -> string -> Latex.text
val source: Proof.context -> Token.src -> Latex.text
val pretty: Proof.context -> Pretty.T -> Latex.text
val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text
val pretty_items: Proof.context -> Pretty.T list -> Latex.text
val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text
val antiquotation_pretty:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_pretty_source:
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
val antiquotation_raw:
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
val antiquotation_verbatim:
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
end;
structure Thy_Output: THY_OUTPUT =
struct
(** options **)
(* output document source *)
val display = Attrib.setup_option_bool ("thy_output_display", \<^here>);
val break = Attrib.setup_option_bool ("thy_output_break", \<^here>);
val quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>);
val margin = Attrib.setup_option_int ("thy_output_margin", \<^here>);
val indent = Attrib.setup_option_int ("thy_output_indent", \<^here>);
val source = Attrib.setup_option_bool ("thy_output_source", \<^here>);
val modes = Attrib.setup_option_string ("thy_output_modes", \<^here>);
val output_symbols = single o Latex.symbols_output;
structure Wrappers = Proof_Data
(
type T = ((unit -> string) -> unit -> string) list;
fun init _ = [];
);
fun add_wrapper wrapper = Wrappers.map (cons wrapper);
val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f);
(** maintain global antiquotations **)
structure Antiquotations = Theory_Data
(
type T =
(Token.src -> Toplevel.state -> Proof.context -> string) Name_Space.table *
(string -> Proof.context -> Proof.context) Name_Space.table;
val empty : T =
(Name_Space.empty_table Markup.document_antiquotationN,
Name_Space.empty_table Markup.document_antiquotation_optionN);
val extend = I;
fun merge ((commands1, options1), (commands2, options2)) : T =
(Name_Space.merge_tables (commands1, commands2),
Name_Space.merge_tables (options1, options2));
);
val get_antiquotations = Antiquotations.get o Proof_Context.theory_of;
fun add_command name cmd thy = thy
|> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd));
fun add_option name opt thy = thy
|> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd));
fun check_command ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt));
fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt));
fun command src state ctxt =
let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src
in f src' state ctxt end;
fun option ((xname, pos), s) ctxt =
fun output_comment ctxt (kind, syms) =
(case kind of
Comment.Comment =>
Input.cartouche_content syms
|> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
{markdown = false}
|> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
| Comment.Cancel =>
Symbol_Pos.cartouche_content syms
|> output_symbols
|> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
| Comment.Latex =>
[Latex.symbols (Symbol_Pos.cartouche_content syms)])
and output_comment_document ctxt (comment, syms) =
(case comment of
SOME kind => output_comment ctxt (kind, syms)
| NONE => [Latex.symbols syms])
and output_document_text ctxt syms =
Comment.read_body syms |> maps (output_comment_document ctxt)
and output_document ctxt {markdown} source =
let
val (_, opt) =
Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos);
in opt s ctxt end;
fun print_antiquotations verbose ctxt =
let
val (commands, options) = get_antiquotations ctxt;
val command_names = map #1 (Name_Space.markup_table verbose ctxt commands);
val option_names = map #1 (Name_Space.markup_table verbose ctxt options);
in
[Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names),
Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)]
end |> Pretty.writeln_chunks;
(* Define an antiquotation command: parse the source with [scan] (which may
   update the context), then run [body] with source, toplevel state and the
   updated context plus the parsed argument [x]. *)
fun antiquotation name scan body =
add_command name
(fn src => fn state => fn ctxt =>
let val (x, ctxt') = Token.syntax scan src ctxt
in body {source = src, state = state, context = ctxt'} x end);
(** syntax of antiquotations **)
(* option values *)
(* Parse an antiquotation option value as a boolean; the empty string
   (option given without "=value") counts as true. *)
fun boolean s =
(case s of
"" => true
| "true" => true
| "false" => false
| _ => error ("Bad boolean value: " ^ quote s));
(* Parse an antiquotation option value as an integer; a leading "-"
   negates.  Any trailing non-digit material is an error. *)
fun integer s =
let
fun digits ds =
(case Library.read_int ds of
(value, []) => value
| _ => error ("Bad integer value: " ^ quote s));
in
(case Symbol.explode s of
"-" :: ds => ~ (digits ds)
| ds => digits ds)
end;
(* outer syntax *)
(* Outer syntax of an antiquotation body: an optional bracketed property
   list [a, b=c, ...] followed by a liberal name and its arguments; must
   consume the whole input (Scan.ahead Parse.eof). Yields (props, tokens). *)
local
val property =
Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) "";
val properties =
Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) [];
in
val antiq =
Parse.!!!
(Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof)
>> (fn ((name, props), args) => (props, name :: args));
end;
(* eval antiquote *)
(* Evaluate a single antiquotation (pre-2018 string-based revision):
   first run it invisibly in the preview context (to surface errors and
   PIDE reports), then render for real with the LaTeX print mode added. *)
local
fun eval_antiq state (opts, src) =
let
val preview_ctxt = fold option opts (Toplevel.presentation_context_of state);
val print_ctxt = Context_Position.set_visible false preview_ctxt;
fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) ();
val _ = cmd preview_ctxt;
val print_modes = space_explode "," (Config.get print_ctxt modes) @ [Latex.latexN];
in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end;
in
(* Plain text passes through; control antiquotations (\<^name>\<open>...\<close>) and
   @{...} antiquotations are parsed and dispatched to eval_antiq. *)
fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss
| eval_antiquote state (Antiquote.Control {name, body, ...}) =
eval_antiq state
([], Token.make_src name (if null body then [] else [Token.read_cartouche body]))
| eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) =
let
val keywords =
(case try Toplevel.presentation_context_of state of
SOME ctxt => Thy_Header.get_keywords' ctxt
| NONE =>
error ("Unknown context -- cannot expand document antiquotations" ^
Position.here pos));
in eval_antiq state (Token.read_antiq keywords antiq (body, pos)) end;
end;
(* output text *)
(* NOTE(review): corrupted diff interleave.  The old string-producing
   `output_text state ...` and the new Latex.text-producing
   `output_document ctxt ...` are merged line-by-line (duplicate bindings
   for output_antiquotes, output_line, output_block(s), the skipped-proof
   test, Antiquote.parse vs parse_comments, etc.); `ctxt` is unbound in
   this header.  TODO: pick one revision — this cannot compile as is. *)
fun output_text state {markdown} source =
let
val is_reported =
(case try Toplevel.context_of state of
SOME ctxt => Context_Position.is_visible ctxt
| NONE => true);
val pos = Input.pos_of source;
val syms = Input.source_explode source;
val _ =
if is_reported then
Position.report pos (Markup.language_document (Input.is_delimited source))
else ();
(* old revision: antiquotations render to a flat string *)
val output_antiquotes = map (eval_antiquote state) #> implode;
(* new revision: antiquotations render to Latex.text via the context *)
val output_antiquotes =
maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
fun output_line line =
(if Markdown.line_is_item line then "\\item " else "") ^
(if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
output_antiquotes (Markdown.line_content line);
fun output_blocks blocks = space_implode "\n\n" (map output_block blocks)
and output_block (Markdown.Par lines) = cat_lines (map output_line lines)
fun output_block (Markdown.Par lines) =
Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
| output_block (Markdown.List {kind, body, ...}) =
Latex.environment (Markdown.print_kind kind) (output_blocks body);
Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
in
if Toplevel.is_skipped_proof state then ""
if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
then
let
val ants = Antiquote.parse pos syms;
val ants = Antiquote.parse_comments pos syms;
val reports = Antiquote.antiq_reports ants;
val blocks = Markdown.read_antiquotes ants;
val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else ();
val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
in output_blocks blocks end
else
let
val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms);
val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
val reports = Antiquote.antiq_reports ants;
val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else ();
val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
in output_antiquotes ants end
end;
(* output tokens with formal comments *)
(* LaTeX output of source tokens with embedded formal comments (newer
   revision; appears internally consistent).  Produces Latex.text lists. *)
local
(* Render one parsed antiquotation chunk as LaTeX symbols. *)
val output_symbols_antiq =
(fn Antiquote.Text syms => output_symbols syms
| Antiquote.Control {name = (name, _), body, ...} =>
Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
output_symbols body
| Antiquote.Antiq {body, ...} =>
Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
(* Render a (comment-kind option, symbols) pair; when [antiq] is set the
   plain text is additionally scanned for antiquotation syntax. *)
fun output_comment_symbols ctxt {antiq} (comment, syms) =
(case (comment, antiq) of
(NONE, false) => output_symbols syms
| (NONE, true) =>
Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
|> maps output_symbols_antiq
| (SOME comment, _) => output_comment ctxt (comment, syms));
(* Split a symbol sequence into comment/non-comment body parts, render
   each, and wrap the whole result in the [bg]/[en] delimiters. *)
fun output_body ctxt antiq bg en syms =
Comment.read_body syms
|> maps (output_comment_symbols ctxt {antiq = antiq})
|> Latex.enclose_body bg en;
in
(* Render one outer-syntax token with the standard Isabelle LaTeX macros
   for its kind (command, keyword, string, cartouche, ...). *)
fun output_token ctxt tok =
let
fun output antiq bg en =
output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
in
(case Token.kind_of tok of
Token.Comment NONE => []
| Token.Command => output false "\\isacommand{" "}"
| Token.Keyword =>
if Symbol.is_ascii_identifier (Token.content_of tok)
then output false "\\isakeyword{" "}"
else output false "" ""
| Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
| Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
| Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
| Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
| _ => output false "" "")
end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
(* Render a raw source string (no position information). *)
fun output_source ctxt s =
output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
(* Recursively check nested formal comments, emitting PIDE markup reports
   and forcing evaluation (for errors) of each comment body. *)
fun check_comments ctxt =
Comment.read_body #> List.app (fn (comment, syms) =>
let
val pos = #1 (Symbol_Pos.range syms);
val _ =
comment |> Option.app (fn kind =>
Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind)));
val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
end;
(** present theory source **)
(*NB: arranging white space around command spans is a black art*)
(* Token classification for span layout: "white" = spacing or informal
   comments (ignored for structure), "black" = everything else. *)
val is_white = Token.is_space orf Token.is_informal_comment;
val is_black = not o is_white;
val is_white_comment = Token.is_informal_comment;
val is_black_comment = Token.is_formal_comment;
(* presentation tokens *)
(* Presentation token datatype.
   NOTE(review): corrupted diff interleave — both the old constructors
   (No_Token; Markup_Token/Markup_Env_Token with a meta_args string field)
   and the new ones (Ignore_Token; two-field Markup_* variants) are listed;
   only one set can be live.  TODO: keep exactly one revision. *)
datatype token =
No_Token
Ignore_Token
| Basic_Token of Token.T
| Markup_Token of string * string * Input.source
| Markup_Env_Token of string * string * Input.source
| Markup_Token of string * Input.source
| Markup_Env_Token of string * Input.source
| Raw_Token of Input.source;
(* Lift a Token.T predicate to presentation tokens (false on non-basic). *)
fun basic_token pred (Basic_Token tok) = pred tok
| basic_token _ _ = false;
(* old-revision predicates *)
val improper_token = basic_token Token.is_improper;
val comment_token = basic_token Token.is_comment;
(* new-revision predicates *)
val white_token = basic_token is_white;
val white_comment_token = basic_token is_white_comment;
val blank_token = basic_token Token.is_blank;
val newline_token = basic_token Token.is_newline;
(* output token *)
(* NOTE(review): corrupted diff interleave — the string-producing
   `output_token state` (old) and Latex.text-producing `present_token ctxt`
   (new) are merged, with both case bodies present.  TODO: keep one. *)
fun output_token state tok =
fun present_token ctxt tok =
(case tok of
No_Token => ""
| Basic_Token tok => Latex.output_token tok
| Markup_Token (cmd, meta_args, source) =>
"%\n\\isamarkup" ^ cmd ^ meta_args ^ "{" ^ output_text state {markdown = false} source ^ "%\n}\n"
| Markup_Env_Token (cmd, meta_args, source) =>
Latex.environment ("isamarkup" ^ cmd)
(meta_args ^ output_text state {markdown = true} source)
Ignore_Token => []
| Basic_Token tok => output_token ctxt tok
| Markup_Token (cmd, source) =>
Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
(output_document ctxt {markdown = false} source)
| Markup_Env_Token (cmd, source) =>
[Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
| Raw_Token source =>
"%\n" ^ output_text state {markdown = true} source ^ "\n");
Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
(* command spans *)
(* NOTE(review): the first line below is a raw diff hunk header left by the
   scrape (it mentions the `span` datatype declared above this chunk), and
   the function body interleaves the old take_newline/take_prefix names
   with the new chop_newline/chop_prefix ones.  TODO: keep one revision. *)
@ -285,16 +211,16 @@ datatype span = Span of command * (source * source * source * source) * bool;
(* Partition a command's token list into (prefix, main, suffix, appendix)
   plus a trailing-newline flag, packaged as a Span. *)
fun make_span cmd src =
let
fun take_newline (tok :: toks) =
fun chop_newline (tok :: toks) =
if newline_token (fst tok) then ([tok], toks, true)
else ([], tok :: toks, false)
| take_newline [] = ([], [], false);
| chop_newline [] = ([], [], false);
val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
src
|> take_prefix (improper_token o fst)
||>> take_suffix (improper_token o fst)
||>> take_prefix (comment_token o fst)
||> take_newline;
|> chop_prefix (white_token o fst)
||>> chop_suffix (white_token o fst)
||>> chop_prefix (white_comment_token o fst)
||> chop_newline;
in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
@ -307,42 +233,72 @@ fun err_bad_nesting pos =
(* Tag-edge handling and span presentation.
   NOTE(review): heavily corrupted diff interleave throughout this region —
   duplicate `else` branches of `edge` (Buffer.add vs cons Latex.string),
   the new `make_command_tag` spliced into the middle of the old
   `present_span` header, duplicate `active_tag'`/`edge` bindings, raw diff
   hunk headers (`@ -354,...`, `@ -365,...`, `@ -386,...`) embedded in the
   text, and old/new variants of present_trailer and the scanner vals.
   TODO: reconstruct a single revision; nothing here compiles as-is. *)
fun edge which f (x: string option, y) =
if x = y then I
else (case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt));
else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
val begin_tag = edge #2 Latex.begin_tag;
val end_tag = edge #1 Latex.end_tag;
fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
(* Parse one entry of the document_tags option: "%tag" = default tag,
   "command%tag" = per-command tag. *)
fun read_tag s =
(case space_explode "%" s of
["", b] => (SOME b, NONE)
| [a, b] => (NONE, SOME (a, b))
| _ => error ("Bad document_tags specification: " ^ quote s));
in
fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) =
(* new revision: compute (tag', active_tag') for a command from explicit
   tags, keyword tags and the document_tags system option *)
fun make_command_tag options keywords =
let
val document_tags =
map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
val document_tags_default = map_filter #1 document_tags;
val document_tags_command = map_filter #2 document_tags;
in
fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' =>
let
val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));
val keyword_tags =
if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
else Keyword.command_tags keywords cmd_name;
val command_tags =
the_list (AList.lookup (op =) document_tags_command cmd_name) @
keyword_tags @ document_tags_default;
val active_tag' =
if is_some tag' then tag'
else
(case command_tags of
default_tag :: _ => SOME default_tag
| [] =>
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
then active_tag
else NONE);
in {tag' = tag', active_tag' = active_tag'} end
end;
(* new revision: fold one span into the (tag_stack, active_tag, newline,
   latex, present_cont) presentation state *)
fun present_span thy command_tag span state state'
(tag_stack, active_tag, newline, latex, present_cont) =
let
val ctxt' =
Toplevel.presentation_context state'
handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN;
val present = fold (fn (tok, (flag, 0)) =>
Buffer.add (output_token state' tok)
#> Buffer.add flag
fold cons (present_token ctxt' tok)
#> cons (Latex.string flag)
| _ => I);
val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;
val (tag, tags) = tag_stack;
val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag));
val {tag', active_tag'} =
command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag}
state state';
val edge = (active_tag, active_tag');
val nesting = Toplevel.level state' - Toplevel.level state;
(* old-revision computation of active_tag', superseded above *)
val active_tag' =
if is_some tag' then tag'
else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE
else
(case Keyword.command_tags keywords cmd_name of
default_tag :: _ => SOME default_tag
| [] =>
if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
then active_tag
else NONE);
val edge = (active_tag, active_tag');
val newline' =
if is_none active_tag' then span_newline else newline;
@ -354,8 +310,8 @@ fun present_span keywords span state state' (tag_stack, active_tag, newline, buf
tg :: tgs => (tg, tgs)
| [] => err_bad_nesting (Position.here cmd_pos));
val buffer' =
buffer
val latex' =
latex
|> end_tag edge
|> close_delim (fst present_cont) edge
|> snd present_cont
@ -365,12 +321,12 @@ fun present_span keywords span state state' (tag_stack, active_tag, newline, buf
val present_cont' =
if newline then (present (#3 srcs), present (#4 srcs))
else (I, present (#3 srcs) #> present (#4 srcs));
in (tag_stack', active_tag', newline', buffer', present_cont') end;
in (tag_stack', active_tag', newline', latex', present_cont') end;
(* Flush the final presentation state; fails on unbalanced tag nesting. *)
fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) =
fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
if not (null tags) then err_bad_nesting " at end of theory"
else
buffer
latex
|> end_tag (active_tag, NONE)
|> close_delim (fst present_cont) (active_tag, NONE)
|> snd present_cont;
@ -386,9 +342,9 @@ val markup_true = "\\isamarkuptrue%\n";
val markup_false = "\\isamarkupfalse%\n";
(* scanner combinators over token streams (old/new variants interleaved) *)
val space_proper =
Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper;
Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black;
val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore);
val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore);
val improper = Scan.many is_improper;
val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
(* Theory presentation driver.
   NOTE(review): corrupted diff interleave — the old
   `present_thy thy command_results toks` (with the Isabelle_DOF
   meta_args_parser_hook extension) and the new
   `present_thy options thy segments` are merged, including raw hunk
   headers (`@ -413,...` etc.), duplicate `ignored`/`cmt`/`spans`/`before_cmd`
   bindings and both `(command_results ~~ spans)` pairings.
   TODO: reconstruct one revision, re-applying the DOF meta_args hook. *)
@ -413,29 +369,27 @@ val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theo
in
fun present_thy thy command_results toks =
type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state};
fun present_thy options thy (segments: segment list) =
let
val keywords = Thy_Header.get_keywords thy;
(* tokens *)
val ignored = Scan.state --| ignore
>> (fn d => (NONE, (No_Token, ("", d))));
>> (fn d => (NONE, (Ignore_Token, ("", d))));
(* markup: parse a document command (section, text, ...) together with its
   tags and document source; the old revision also ran the DOF
   meta_args_parser_hook between command name and source *)
fun markup pred mk flag = Scan.peek (fn d =>
improper |--
Parse.position (Scan.one (fn tok => Token.is_command tok andalso
pred keywords (Token.content_of tok))) --
Parse.position (Scan.one (fn tok =>
Token.is_command tok andalso pred keywords (Token.content_of tok))) --
Scan.repeat tag --
(improper |--
(Parse.!!!!
( (!meta_args_parser_hook thy)
-- ( (improper -- locale -- improper)
|-- (Parse.document_source))
--| improper_end)))
>> (fn (((tok, pos'), tags), (meta_args,source)) =>
Parse.!!!! ((improper -- locale -- improper) |-- Parse.document_source --| improper_end)
>> (fn (((tok, pos'), tags), source) =>
let val name = Token.content_of tok
in (SOME (name, pos', tags), (mk (name, meta_args, source), (flag, d))) end));
in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end));
val command = Scan.peek (fn d =>
Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] --
@ -444,11 +398,15 @@ fun present_thy thy command_results toks =
map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
[(SOME (Token.content_of cmd, Token.pos_of cmd, tags),
(Basic_Token cmd, (markup_false, d)))]));
(*
val cmt = Scan.peek (fn d =>
Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
*)
val cmt = Scan.peek (fn d =>
(Parse.$$$ "--" || Parse.$$$ Symbol.comment) |--
Parse.!!!! (improper |-- Parse.document_source) >>
(fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d)))));
(fn source => (NONE, (Markup_Token ("cmt", source), ("", d)))));
val other = Scan.peek (fn d =>
Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
@ -457,7 +415,7 @@ fun present_thy thy command_results toks =
(ignored ||
markup Keyword.is_document_heading Markup_Token markup_true ||
markup Keyword.is_document_body Markup_Env_Token markup_true ||
markup Keyword.is_document_raw (Raw_Token o #3) "") >> single ||
markup Keyword.is_document_raw (Raw_Token o #2) "") >> single ||
command ||
(cmt || other) >> single;
@ -470,13 +428,13 @@ fun present_thy thy command_results toks =
(* spans: group the token stream into command spans *)
val cmd = Scan.one (is_some o fst);
val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
val comments = Scan.many (comment_token o fst o snd);
val white_comments = Scan.many (white_comment_token o fst o snd);
val blank = Scan.one (blank_token o fst o snd);
val newline = Scan.one (newline_token o fst o snd);
val before_cmd =
Scan.option (newline -- comments) --
Scan.option (newline -- comments) --
Scan.option (blank -- comments) -- cmd;
Scan.option (newline -- white_comments) --
Scan.option (newline -- white_comments) --
Scan.option (blank -- white_comments) -- cmd;
val span =
Scan.repeat non_cmd -- cmd --
@ -485,25 +443,34 @@ fun present_thy thy command_results toks =
>> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
val spans = toks
|> take_suffix Token.is_space |> #1
|> Source.of_list
val spans = segments
|> maps (Command_Span.content o #span)
|> drop_suffix Token.is_space
|> Source.of_list
|> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
|> Source.source stopper (Scan.error (Scan.bulk span))
|> Source.exhaust;
val command_results =
segments |> map_filter (fn {command, state, ...} =>
if Toplevel.is_ignored command then NONE else SOME (command, state));
(* present commands *)
val command_tag = make_command_tag options keywords;
fun present_command tr span st st' =
Toplevel.setmp_thread_position tr (present_span keywords span st st');
Toplevel.setmp_thread_position tr (present_span thy command_tag span st st');
fun present _ [] = I
| present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest;
| present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
in
if length command_results = length spans then
((NONE, []), NONE, true, Buffer.empty, (I, I))
|> present Toplevel.toplevel (command_results ~~ spans)
((NONE, []), NONE, true, [], (I, I))
|> present Toplevel.toplevel (spans ~~ command_results)
|> present_trailer
|> rev
else error "Messed-up outer syntax for presentation"
end;
@ -513,174 +480,76 @@ end;
(** setup default output **)
(** standard output operations **)
(* options *)
(* pretty printing *)
(* Register the standard antiquotation options: each maps its string value
   (parsed by boolean/integer above) onto the corresponding Config flag of
   the evaluation context. *)
val _ = Theory.setup
(add_option @{binding show_types} (Config.put show_types o boolean) #>
add_option @{binding show_sorts} (Config.put show_sorts o boolean) #>
add_option @{binding show_structs} (Config.put show_structs o boolean) #>
add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #>
add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #>
add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #>
add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #>
add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #>
add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #>
add_option @{binding display} (Config.put display o boolean) #>
add_option @{binding break} (Config.put break o boolean) #>
add_option @{binding quotes} (Config.put quotes o boolean) #>
add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #>
add_option @{binding margin} (Config.put margin o integer) #>
add_option @{binding indent} (Config.put indent o integer) #>
add_option @{binding source} (Config.put source o boolean) #>
add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer));
(* basic pretty printing *)
(* Pretty-printers used by the entity antiquotations below.
   NOTE(review): `pretty_term` is bound twice in a row (one-line old form,
   two-line new form) — a harmless-looking diff interleave, but only one
   binding should remain. *)
fun perhaps_trim ctxt =
not (Config.get ctxt display) ? Symbol.trim_blanks;
fun pretty_text ctxt =
Pretty.chunks o map Pretty.str o map (perhaps_trim ctxt) o split_lines;
fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
fun pretty_term ctxt t =
Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
(* _style variants first apply a term style ("lhs", "rhs", ...). *)
fun pretty_term_style ctxt (style, t) =
pretty_term ctxt (style t);
fun pretty_thm_style ctxt (style, th) =
pretty_term ctxt (style (Thm.full_prop_of th));
fun pretty_term_typ ctxt (style, t) =
let val t' = style t
in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end;
fun pretty_term_typeof ctxt (style, t) =
Syntax.pretty_typ ctxt (Term.fastype_of (style t));
(* Constant with its most general type scheme, imported into the context. *)
fun pretty_const ctxt c =
let
val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c)
handle TYPE (msg, _, _) => error msg;
val ([t'], _) = Variable.import_terms true [t] ctxt;
in pretty_term ctxt t' end;
(* Print an abbreviation as the equation "abbrev == expansion". *)
fun pretty_abbrev ctxt s =
let
val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s;
fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t);
val (head, args) = Term.strip_comb t;
val (c, T) = Term.dest_Const head handle TERM _ => err ();
val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c
handle TYPE _ => err ();
val t' = Term.betapplys (Envir.expand_atom T (U, u), args);
val eq = Logic.mk_equals (t, t');
val ctxt' = Variable.auto_fixes eq ctxt;
in Proof_Context.pretty_term_abbrev ctxt' eq end;
fun pretty_locale ctxt (name, pos) =
let
val thy = Proof_Context.theory_of ctxt
in (Pretty.str o Locale.extern thy o Locale.check thy) (name, pos) end;
fun pretty_class ctxt =
Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt;
fun pretty_type ctxt s =
let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s
in Pretty.str (Proof_Context.extern_type ctxt name) end;
fun pretty_prf full ctxt = Proof_Syntax.pretty_clean_proof_of ctxt full;
fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name);
(* default output *)
(* Output combinators turning Pretty.T values / source into final LaTeX.
   NOTE(review): corrupted diff interleave — the old string/Buffer-based
   `output` and `verbatim_text` coexist with the new Latex.text-based
   `isabelle`/`typewriter`/`verbatim`/`pretty*` family and the new
   `antiquotation_pretty*` registration helpers.  TODO: keep one family. *)
val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src;
val lines = separate (Latex.string "\\isanewline%\n");
val items = separate (Latex.string "\\isasep\\isanewline%\n");
(* Pretty-print all [xs] (even if discarded) so errors always surface;
   with option "source" set, show the source text instead. *)
fun maybe_pretty_source pretty ctxt src xs =
map (pretty ctxt) xs (*always pretty in order to exhibit errors!*)
|> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I);
fun isabelle ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabelle" body
else Latex.block (Latex.enclose_body "\\isa{" "}" body);
fun string_of_margin ctxt = Pretty.string_of_margin (Config.get ctxt margin);
fun isabelle_typewriter ctxt body =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Latex.environment_block "isabellett" body
else Latex.block (Latex.enclose_body "\\isatt{" "}" body);
(* old revision: render Pretty.T list to a LaTeX string via Buffer/Output *)
fun output ctxt prts =
prts
|> Config.get ctxt quotes ? map Pretty.quote
|> (if Config.get ctxt display then
map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output)
#> space_implode "\\isasep\\isanewline%\n"
#> Latex.environment "isabelle"
else
map
((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of)
#> Output.output)
#> space_implode "\\isasep\\isanewline%\n"
#> enclose "\\isa{" "}");
fun typewriter ctxt s =
isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];
fun verbatim ctxt =
if Config.get ctxt Document_Antiquotation.thy_output_display
then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
fun source ctxt =
Token.args_of_src
#> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt)
#> space_implode " "
#> output_source ctxt
#> isabelle ctxt;
fun pretty ctxt =
Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;
fun pretty_source ctxt src prt =
if Config.get ctxt Document_Antiquotation.thy_output_source
then source ctxt src else pretty ctxt prt;
fun pretty_items ctxt =
map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;
fun pretty_items_source ctxt src prts =
if Config.get ctxt Document_Antiquotation.thy_output_source
then source ctxt src else pretty_items ctxt prts;
(* verbatim text *)
(* antiquotation variants *)
(* old revision: string-based verbatim rendering *)
fun verbatim_text ctxt =
if Config.get ctxt display then
split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #>
Latex.output_ascii #> Latex.environment "isabellett"
else
split_lines #>
map (Latex.output_ascii #> enclose "\\isatt{" "}") #>
space_implode "\\isasep\\isanewline%\n";
(* new revision: convenience registration wrappers *)
fun antiquotation_pretty name scan f =
Document_Antiquotation.setup name scan
(fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));
fun antiquotation_pretty_source name scan f =
Document_Antiquotation.setup name scan
(fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x));
(* antiquotations for basic entities *)
(* NOTE(review): interleaved revisions again — the new `antiquotation_raw`
   wrapper sits next to the old `basic_entities*` setup block. *)
fun antiquotation_raw name scan f =
Document_Antiquotation.setup name scan
(fn {context = ctxt, argument = x, ...} => f ctxt x);
local
(* Generic registration of an entity antiquotation from a parser plus a
   pretty-printer; _style variants additionally take a term style. *)
fun basic_entities name scan pretty =
antiquotation name scan (fn {source, context = ctxt, ...} =>
output ctxt o maybe_pretty_source pretty ctxt source);
fun basic_entities_style name scan pretty =
antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) =>
output ctxt
(maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs));
fun basic_entity name scan = basic_entities name (scan >> single);
in
(* Standard entity antiquotations: @{thm}, @{term}, @{typ}, ... *)
val _ = Theory.setup
(basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #>
basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #>
basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #>
basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #>
basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #>
basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #>
basic_entity @{binding abbrev} (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #>
basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #>
basic_entity @{binding locale} (Scan.lift (Parse.position Args.name)) pretty_locale #>
basic_entity @{binding class} (Scan.lift Args.embedded_inner_syntax) pretty_class #>
basic_entity @{binding type} (Scan.lift Args.embedded) pretty_type #>
basic_entities @{binding prf} Attrib.thms (pretty_prf false) #>
basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #>
basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory);
end;
(** document command **)
(* Old-revision toplevel handler for text/section commands: outside a
   local theory a target is illegal; inside, present via output_text.
   NOTE(review): followed directly by the new-revision
   `antiquotation_verbatim` — another diff interleave. *)
fun document_command markdown (loc, txt) =
Toplevel.keep (fn state =>
(case loc of
NONE => ignore (output_text state markdown txt)
| SOME (_, pos) =>
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
fun antiquotation_verbatim name scan f =
antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt);
end;